Compare commits

4 Commits

2833 changed files with 524744 additions and 876252 deletions

@@ -1,10 +1,11 @@
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.

@@ -1,100 +0,0 @@
---
BasedOnStyle: LLVM
AlignConsecutiveAssignments: true
BraceWrapping:
  AfterFunction: true
  #llvm10-11: AfterControlStatement: false - Never
  BeforeCatch: true
  BeforeElse: true
  #llvm11: BeforeLambdaBody: false
  #llvm11: BeforeWhile: false
BreakBeforeBraces: Stroustrup
BreakAfterJavaFieldAnnotations: true
BreakStringLiterals: true
ColumnLimit: 110 # Update $max_trace_macro_line_len in bin/trace also
IndentWidth: 4
---
Language: Cpp
#llvm11: AlignConsecutiveBitFields: false
AlignConsecutiveDeclarations: true
AlignConsecutiveMacros: true
#llvm10-11: AlignOperands: true - Align
#llvm11: AllowShortEnumsOnASingleLine: true
AllowShortFunctionsOnASingleLine: None
AlwaysBreakAfterReturnType: AllDefinitions
# Can enable the following section when llvm 12.x is out
#AttributeMacros:
# - H5_ATTR_FORMAT
# - H5_ATTR_UNUSED
# - H5_ATTR_DEPRECATED_USED
# - H5_ATTR_NDEBUG_UNUSED
# - H5_ATTR_DEBUG_API_USED
# - H5_ATTR_PARALLEL_UNUSED
# - H5_ATTR_PARALLEL_USED
# - H5_ATTR_NORETURN
# - H5_ATTR_CONST
# - H5_ATTR_PURE
# - H5_ATTR_FALLTHROUGH
ForEachMacros: ['ALL_MEMBERS', 'UNIQUE_MEMBERS']
IncludeCategories:
  - Regex: '^"(llvm|llvm-c|clang|clang-c)/'
    Priority: 3
    SortPriority: 0
  - Regex: '^(<|"(gtest|gmock|isl|json)/)'
    Priority: 4
    SortPriority: 0
  - Regex: '.*'
    Priority: 0
    SortPriority: 0
  - Regex: '^H5*.*'
    Priority: 1
    SortPriority: 0
  - Regex: 'private.*'
    Priority: 2
    SortPriority: 0
IncludeIsMainRegex: '(public)?$'
IndentCaseLabels: true
#llvm11: IndentCaseBlocks: false
IndentGotoLabels: false
#llvm11: IndentExternBlock: AfterExternBlock
#llvm11: InsertTrailingCommas: None
MacroBlockBegin: "^BEGIN_FUNC"
MacroBlockEnd: "^END_FUNC"
ObjCBlockIndentWidth: 4
#llvm11: ObjCBreakBeforeNestedBlockParam: true
ReflowComments: true
SortIncludes: false
StatementMacros:
  - FUNC_ENTER_API
  - FUNC_LEAVE_API
  - FUNC_ENTER_NOAPI_NOINIT_NOERR
  - FUNC_LEAVE_NOAPI
  - H5_BEGIN_TAG
  - HGOTO_DONE_TAG
  - H5_END_TAG
  - HSYS_DONE_ERROR
  - HSYS_GOTO_ERROR
  - HDONE_ERROR
  - HERROR
  - H5_LEAVE
  - H5E_PRINTF
  - H5E_THROW
  - HGOTO_DONE
  - HGOTO_ERROR
  - HMPI_ERROR
  - HMPI_DONE_ERROR
  - HMPI_GOTO_ERROR
  - H5_GCC_DIAG_OFF
  - H5_GCC_DIAG_ON
  - CATCH
#llvm10: TypenameMacros:
#llvm10: - STACK_OF
#llvm10: - LIST
#llvm11: WhitespaceSensitiveMacros:
#llvm11: - STRINGIZE
#llvm11: - PP_STRINGIZE
---
Language: Java
BreakAfterJavaFieldAnnotations: true
JavaImportGroups: ['java', 'hdf', 'hdf.hdf5lib', 'org']
...
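For reference, a config like this is picked up automatically when clang-format runs with `-style=file` from inside the tree. A minimal usage sketch, assuming the version pinned by the workflows below (clang-format 13) is installed as `clang-format-13`:

```sh
# Report violations without touching files; non-zero exit on any diff.
clang-format-13 --dry-run -Werror -style=file src/H5.c

# Rewrite a file in place using the nearest .clang-format.
clang-format-13 -i -style=file src/H5.c
```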

.gitattributes

@@ -192,12 +192,12 @@ java/examples/testfiles/examples.intro.H5_CreateGroup.txt -text
java/examples/testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt -text
java/examples/testfiles/examples.intro.H5_CreateGroupDataset.txt -text
java/examples/testfiles/examples.intro.H5_ReadWrite.txt -text
java/lib/ext/slf4j-nop-1.7.33.jar -text svneol=unset#application/zip
java/lib/ext/slf4j-simple-1.7.33.jar -text svneol=unset#application/zip
java/lib/ext/slf4j-nop-1.7.5.jar -text svneol=unset#application/zip
java/lib/ext/slf4j-simple-1.7.5.jar -text svneol=unset#application/zip
java/lib/hamcrest-core.jar -text svneol=unset#application/java-archive
java/lib/junit.jar -text svneol=unset#application/java-archive
java/lib/simplelogger.properties -text
java/lib/slf4j-api-1.7.33.jar -text svneol=unset#application/zip
java/lib/slf4j-api-1.7.5.jar -text svneol=unset#application/zip
java/src/CMakeLists.txt -text
java/src/Makefile.am -text
java/src/hdf/CMakeLists.txt -text
@@ -216,10 +216,10 @@ java/src/hdf/hdf5lib/callbacks/H5D_iterate_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5D_iterate_t.java -text
java/src/hdf/hdf5lib/callbacks/H5E_walk_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5E_walk_t.java -text
java/src/hdf/hdf5lib/callbacks/H5L_iterate_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5L_iterate_t.java -text
java/src/hdf/hdf5lib/callbacks/H5L_iterate_opdata_t.java -text
java/src/hdf/hdf5lib/callbacks/H5O_iterate_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5O_iterate_t.java -text
java/src/hdf/hdf5lib/callbacks/H5O_iterate_opdata_t.java -text
java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_t.java -text
java/src/hdf/hdf5lib/callbacks/H5P_cls_copy_func_cb.java -text
@@ -267,8 +267,6 @@ java/src/hdf/hdf5lib/structs/H5G_info_t.java -text
java/src/hdf/hdf5lib/structs/H5L_info_t.java -text
java/src/hdf/hdf5lib/structs/H5O_hdr_info_t.java -text
java/src/hdf/hdf5lib/structs/H5O_info_t.java -text
java/src/hdf/hdf5lib/structs/H5O_native_info_t.java -text
java/src/hdf/hdf5lib/structs/H5O_token_t.java -text
java/src/hdf/hdf5lib/structs/H5_ih_info_t.java -text
java/src/hdf/overview.html -text
java/src/jni/CMakeLists.txt -text
@@ -323,7 +321,6 @@ java/test/TestH5Dparams.java -text
java/test/TestH5Dplist.java -text
java/test/TestH5E.java -text
java/test/TestH5Edefault.java -text
java/test/TestH5Eparams.java -text
java/test/TestH5Eregister.java -text
java/test/TestH5F.java -text
java/test/TestH5Fbasic.java -text
@@ -350,7 +347,6 @@ java/test/TestH5Sbasic.java -text
java/test/TestH5T.java -text
java/test/TestH5Tbasic.java -text
java/test/TestH5Tparams.java -text
java/test/TestUnit.java -text
java/test/TestH5Z.java -text
java/test/h5ex_g_iterate.orig -text svneol=unset#application/x-hdf
java/test/junit.sh.in -text
@@ -365,7 +361,6 @@ m4/ax_check_rqrd_class.m4 -text
m4/ax_java_check_class.m4 -text
m4/ax_java_options.m4 -text
m4/ax_jni_include_dir.m4 -text
m4/ax_prog_doxygen.m4 -text
m4/ax_prog_jar.m4 -text
m4/ax_prog_java.m4 -text
m4/ax_prog_java_cc.m4 -text
@@ -393,10 +388,10 @@ test/cork.c -text
test/corrupt_stab_msg.h5 -text
test/deflate.h5 -text
test/dynlib4.c -text
test/family_v16-000000.h5 -text
test/family_v16-000001.h5 -text
test/family_v16-000002.h5 -text
test/family_v16-000003.h5 -text
test/family_v16_00000.h5 -text
test/family_v16_00001.h5 -text
test/family_v16_00002.h5 -text
test/family_v16_00003.h5 -text
test/file_image_core_test.h5 -text
test/filespace_1_6.h5 -text
test/filespace_1_8.h5 -text
@@ -785,7 +780,6 @@ tools/testfiles/tints4dimsStride2.ddl -text
tools/testfiles/tintsattrs.h5 -text svneol=unset#application/x-hdf
tools/testfiles/tlarge_objname.h5 -text
tools/testfiles/tldouble.h5 -text
tools/testfiles/tldouble_scalar.h5 -text
tools/testfiles/tlonglinks.h5 -text
tools/testfiles/tloop.h5 -text
tools/testfiles/tloop2.h5 -text

.github/CODEOWNERS

@@ -1,11 +0,0 @@
# Lines starting with '#' are comments.
# Each line is a file pattern followed by one or more owners.
# These owners will be the default owners for everything in the repo.
* @lrknox @derobins @byrnHDF @fortnern @jhendersonHDF @qkoziol @vchoi-hdfgroup @bmribler @raylu-hdf
# Order is important. The last matching pattern has the most precedence.
# So if a pull request only touches javascript files, only these owners
# will be requested to review.
/fortran/ @brtnfld
/java/ @jhendersonHDF @byrnHDF @derobins

.github/FUNDING.yml

@@ -1,3 +0,0 @@
# These are supported funding model platforms
custom: "https://hdfgroup.org/about-us/donate-to-the-hdf-group/"

@@ -1,25 +0,0 @@
---
name: Bug report
about: Report a problem with HDF5
title: "[BUG]"
labels: bug
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.

**Expected behavior**
A clear and concise description of what you expected to happen.

**Platform (please complete the following information)**
- HDF5 version (if building from a maintenance branch, please include the commit hash)
- OS and version
- Compiler and version
- Build system (e.g. CMake, Autotools) and version
- Any configure options you specified
- MPI library and version (parallel HDF5)

**Additional context**
Add any other context about the problem here.

@@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest an improvement to HDF5
title: "[Feature Request]"
labels: enhancement
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.

@@ -1,8 +0,0 @@
## Describe your changes

## Issue ticket number (GitHub or JIRA)

## Checklist before requesting a review
- [ ] My code conforms to the guidelines in CONTRIBUTING.md
- [ ] I made an entry in release_docs/RELEASE.txt (bug fixes, new features)
- [ ] I added a test (bug fixes, new features)

@@ -1,18 +0,0 @@
name: clang-format Check
on:
  pull_request:
jobs:
  formatting-check:
    name: Formatting Check
    runs-on: ubuntu-latest
    if: "!contains(github.event.head_commit.message, 'skip-ci')"
    steps:
      - uses: actions/checkout@v3
      - name: Run clang-format style check for C and Java programs.
        uses: DoozyX/clang-format-lint-action@v0.13
        with:
          source: '.'
          extensions: 'c,h,cpp,hpp,java'
          clangFormatVersion: 13
          style: file
          exclude: './config ./hl/src/H5LTanalyze.c ./hl/src/H5LTparse.c ./hl/src/H5LTparse.h ./src/H5Epubgen.h ./src/H5Einit.h ./src/H5Eterm.h ./src/H5Edefin.h ./src/H5version.h ./src/H5overflow.h'

@@ -1,25 +0,0 @@
name: clang-format Check
on:
  workflow_dispatch:
  push:
jobs:
  formatting-check:
    name: Formatting Check
    runs-on: ubuntu-latest
    if: "!contains(github.event.head_commit.message, 'skip-ci')"
    steps:
      - uses: actions/checkout@v3
      - name: Run clang-format style check for C and Java programs.
        uses: DoozyX/clang-format-lint-action@v0.13
        with:
          source: '.'
          extensions: 'c,h,cpp,hpp,java'
          clangFormatVersion: 13
          inplace: True
          style: file
          exclude: './config ./hl/src/H5LTanalyze.c ./hl/src/H5LTparse.c ./hl/src/H5LTparse.h ./src/H5Epubgen.h ./src/H5Einit.h ./src/H5Eterm.h ./src/H5Edefin.h ./src/H5version.h ./src/H5overflow.h'
      - uses: EndBug/add-and-commit@v7
        with:
          author_name: github-actions
          author_email: 41898282+github-actions[bot]@users.noreply.github.com
          message: 'Committing clang-format changes'

@@ -1,15 +0,0 @@
# GitHub Action to automate the identification of common misspellings in text files
# https://github.com/codespell-project/codespell
# https://github.com/codespell-project/actions-codespell
name: codespell
on: [push, pull_request]
jobs:
  codespell:
    name: Check for spelling errors
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: codespell-project/actions-codespell@master
        with:
          skip: ./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE,./tools/test/h5repack/testfiles/*.dat
          ignore_words_list: isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,minnum,ake,gord,numer,ro,oce
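The same check can be reproduced locally; a rough equivalent of the action's inputs (lists abridged here, but both flags accept the full comma-separated values from the workflow):

```sh
pip install codespell
codespell . \
  --skip "./bin/trace,./config/sanitizer/LICENSE,./tools/test/h5repack/testfiles/*.dat" \
  --ignore-words-list "isnt,inout,nd,parms,parm"
```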

@@ -1,484 +0,0 @@
name: hdf5 dev CI

# Controls when the action will run. Triggers the workflow on push or pull request
on:
  workflow_dispatch:
  push:
  pull_request:
    branches: [ develop, hdf5_1_14, hdf5_1_12, hdf5_1_10, hdf5_1_8 ]
    paths-ignore:
      - '.github/CODEOWNERS'
      - '.github/FUNDING.yml'
      - 'doc/**'
      - 'release_docs/**'
      - 'ACKNOWLEDGEMENTS'
      - 'COPYING**'
      - '**.md'

# A workflow run is made up of one or more jobs that can run sequentially or
# in parallel. We just have one job, but the matrix items defined below will
# run in parallel.
jobs:
  # A workflow that builds the library and runs all the tests
  build_and_test:
    strategy:
      # The current matrix has three dimensions:
      #
      # * config name
      # * thread-safety on/off
      # * release vs. debug build
      #
      # Most configuration information is added via the 'include' mechanism,
      # which will append the key-value pairs in the configuration where the
      # names match.
      matrix:
        name:
          - "Windows MSVC CMake"
          - "Ubuntu gcc CMake"
          - "Ubuntu gcc Autotools"
          - "Ubuntu gcc Autotools parallel (build only)"
          - "MacOS Clang CMake"
        thread_safety:
          - enabled: true
            text: " TS"
          - enabled: false
            text: ""
        build_mode:
          - text: " REL"
            cmake: "Release"
            autotools: "production"
          - text: " DBG"
            cmake: "Debug"
            autotools: "debug"
        # This is where we list the bulk of the options for each configuration.
        # The key-value pair values are usually appropriate for being CMake or
        # Autotools configure values, so be aware of that.
        include:
          # Windows w/ MSVC + CMake
          #
          # No Fortran, parallel, or VFDs that rely on POSIX things
          - name: "Windows MSVC CMake"
            os: windows-2022
            toolchain: ""
            cpp: ON
            fortran: OFF
            java: ON
            parallel: OFF
            mirror_vfd: OFF
            direct_vfd: OFF
            generator: "-G \"Visual Studio 17 2022\" -A x64"
            run_tests: true
          # Linux (Ubuntu) w/ gcc + CMake
          #
          # We might think about adding Clang, but MacOS already tests that
          # so it's not critical
          - name: "Ubuntu gcc CMake"
            os: ubuntu-latest
            cpp: ON
            fortran: ON
            java: ON
            parallel: OFF
            mirror_vfd: ON
            direct_vfd: ON
            toolchain: "config/toolchain/GCC.cmake"
            generator: "-G Ninja"
            run_tests: true
          # Linux (Ubuntu) w/ gcc + Autotools
          #
          # Keep this identical to the CMake configs. Note the difference in
          # the values.
          - name: "Ubuntu gcc Autotools"
            os: ubuntu-latest
            cpp: enable
            fortran: enable
            java: enable
            parallel: disable
            mirror_vfd: enable
            direct_vfd: enable
            deprec_sym: enable
            default_api: v114
            toolchain: ""
            generator: "autogen"
            flags: ""
            run_tests: true
          # Parallel Linux (Ubuntu) w/ gcc + Autotools
          #
          # The GitHub runners are inadequate for running parallel HDF5 tests,
          # so we catch most issues in daily testing. What we have here is just
          # a compile check to make sure nothing obvious is broken.
          - name: "Ubuntu gcc Autotools parallel (build only)"
            os: ubuntu-latest
            cpp: disable
            fortran: enable
            java: disable
            parallel: enable
            mirror_vfd: disable
            direct_vfd: disable
            deprec_sym: enable
            default_api: v114
            toolchain: ""
            generator: "autogen"
            flags: "CC=mpicc"
            run_tests: false
          # MacOS w/ Clang + CMake
          #
          # We could also build with the Autotools via brew installing them,
          # but that seems unnecessary
          - name: "MacOS Clang CMake"
            os: macos-11
            cpp: ON
            fortran: OFF
            java: ON
            parallel: OFF
            mirror_vfd: ON
            direct_vfd: OFF
            toolchain: "config/toolchain/clang.cmake"
            generator: "-G Ninja"
            run_tests: true
          #
          # SPECIAL AUTOTOOLS BUILDS
          #
          # These do not run tests and are not built into the matrix and instead
          # become NEW configs as their name would clobber one of the matrix
          # names (so make sure the names are UNIQUE).
          #
          - name: "Ubuntu gcc Autotools v1.6 default API (build only)"
            os: ubuntu-latest
            cpp: enable
            fortran: enable
            java: enable
            parallel: disable
            mirror_vfd: enable
            direct_vfd: enable
            deprec_sym: enable
            default_api: v16
            toolchain: ""
            generator: "autogen"
            flags: ""
            run_tests: false
            thread_safety:
              enabled: false
              text: ""
            build_mode:
              text: " DBG"
              cmake: "Debug"
              autotools: "debug"
          - name: "Ubuntu gcc Autotools v1.8 default API (build only)"
            os: ubuntu-latest
            cpp: enable
            fortran: enable
            java: enable
            parallel: disable
            mirror_vfd: enable
            direct_vfd: enable
            deprec_sym: enable
            default_api: v18
            toolchain: ""
            generator: "autogen"
            flags: ""
            run_tests: false
            thread_safety:
              enabled: false
              text: ""
            build_mode:
              text: " DBG"
              cmake: "Debug"
              autotools: "debug"
          - name: "Ubuntu gcc Autotools v1.10 default API (build only)"
            os: ubuntu-latest
            cpp: enable
            fortran: enable
            java: enable
            parallel: disable
            mirror_vfd: enable
            direct_vfd: enable
            deprec_sym: enable
            default_api: v110
            toolchain: ""
            generator: "autogen"
            flags: ""
            run_tests: false
            thread_safety:
              enabled: false
              text: ""
            build_mode:
              text: " DBG"
              cmake: "Debug"
              autotools: "debug"
          - name: "Ubuntu gcc Autotools v1.12 default API (build only)"
            os: ubuntu-latest
            cpp: enable
            fortran: enable
            java: enable
            parallel: disable
            mirror_vfd: enable
            direct_vfd: enable
            deprec_sym: enable
            default_api: v112
            toolchain: ""
            generator: "autogen"
            flags: ""
            run_tests: false
            thread_safety:
              enabled: false
              text: ""
            build_mode:
              text: " DBG"
              cmake: "Debug"
              autotools: "debug"
          - name: "Ubuntu gcc Autotools v1.14 default API (build only)"
            os: ubuntu-latest
            cpp: enable
            fortran: enable
            java: enable
            parallel: disable
            mirror_vfd: enable
            direct_vfd: enable
            deprec_sym: enable
            default_api: v114
            toolchain: ""
            generator: "autogen"
            flags: ""
            run_tests: false
            thread_safety:
              enabled: false
              text: ""
            build_mode:
              text: " DBG"
              cmake: "Debug"
              autotools: "debug"
          - name: "Ubuntu gcc Autotools no deprecated symbols (build only)"
            os: ubuntu-latest
            cpp: enable
            fortran: enable
            java: enable
            parallel: disable
            mirror_vfd: enable
            direct_vfd: enable
            deprec_sym: disable
            default_api: default
            toolchain: ""
            generator: "autogen"
            flags: ""
            run_tests: false
            thread_safety:
              enabled: false
              text: ""
            build_mode:
              text: " DBG"
              cmake: "Debug"
              autotools: "debug"
          - name: "Ubuntu gcc Autotools -Werror (build only)"
            os: ubuntu-latest
            cpp: enable
            fortran: disable
            java: disable
            parallel: disable
            mirror_vfd: disable
            direct_vfd: enable
            deprec_sym: enable
            default_api: v114
            toolchain: ""
            generator: "autogen"
            flags: "CFLAGS=-Werror"
            run_tests: false
            thread_safety:
              enabled: false
              text: ""
            build_mode:
              text: " DBG"
              cmake: "Debug"
              autotools: "debug"
          - name: "Ubuntu gcc Autotools -Werror (build only)"
            os: ubuntu-latest
            cpp: enable
            fortran: disable
            java: disable
            parallel: disable
            mirror_vfd: disable
            direct_vfd: enable
            deprec_sym: enable
            default_api: v114
            toolchain: ""
            generator: "autogen"
            flags: "CFLAGS=-Werror"
            run_tests: false
            thread_safety:
              enabled: false
              text: ""
            build_mode:
              text: " REL"
              cmake: "Release"
              autotools: "production"

    # Sets the job's name from the properties
    name: "${{ matrix.name }}${{ matrix.build_mode.text }}${{ matrix.thread_safety.text }}"

    # Don't run the action if the commit message says to skip CI
    if: "!contains(github.event.head_commit.message, 'skip-ci')"

    # The type of runner that the job will run on
    runs-on: ${{ matrix.os }}

    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      #
      # SETUP
      #
      # Useful for debugging
      - name: Dump matrix context
        run: echo '${{ toJSON(matrix) }}'

      - name: Install CMake Dependencies (Linux)
        run: sudo apt-get install ninja-build
        if: matrix.os == 'ubuntu-latest'

      - name: Install Autotools Dependencies (Linux, serial)
        run: |
          sudo apt update
          sudo apt install automake autoconf libtool libtool-bin
          sudo apt install gcc-11 g++-11 gfortran-11
          echo "CC=gcc-11" >> $GITHUB_ENV
          echo "CXX=g++-11" >> $GITHUB_ENV
          echo "FC=gfortran-11" >> $GITHUB_ENV
        if: (matrix.generator == 'autogen') && (matrix.parallel != 'enable')

      - name: Install Autotools Dependencies (Linux, parallel)
        run: |
          sudo apt update
          sudo apt install automake autoconf libtool libtool-bin
          sudo apt install openmpi-bin openmpi-common mpi-default-dev
          echo "CC=mpicc" >> $GITHUB_ENV
          echo "FC=mpif90" >> $GITHUB_ENV
        if: (matrix.generator == 'autogen') && (matrix.parallel == 'enable')

      - name: Install Dependencies (Windows)
        run: choco install ninja
        if: matrix.os == 'windows-latest'

      - name: Install Dependencies (macOS)
        run: brew install ninja
        if: matrix.os == 'macos-11'

      - name: Set environment for MSVC (Windows)
        run: |
          # Set these environment variables so CMake picks the correct compiler
          echo "CXX=cl.exe" >> $GITHUB_ENV
          echo "CC=cl.exe" >> $GITHUB_ENV
        if: matrix.os == 'windows-latest'

      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - name: Get Sources
        uses: actions/checkout@v3

      #
      # AUTOTOOLS CONFIGURE
      #
      - name: Autotools Configure
        run: |
          sh ./autogen.sh
          mkdir "${{ runner.workspace }}/build"
          cd "${{ runner.workspace }}/build"
          ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --${{ matrix.deprec_sym }}-deprecated-symbols --with-default-api-version=${{ matrix.default_api }} --enable-shared --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd
        shell: bash
        if: (matrix.generator == 'autogen') && (! matrix.thread_safe.enabled)

      - name: Autotools Configure (Thread-Safe)
        run: |
          sh ./autogen.sh
          mkdir "${{ runner.workspace }}/build"
          cd "${{ runner.workspace }}/build"
          ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --enable-shared --enable-threadsafe --disable-hl --${{ matrix.parallel }}-parallel --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd
        shell: bash
        if: (matrix.generator == 'autogen') && (matrix.thread_safe.enabled)

      #
      # CMAKE CONFIGURE
      #
      - name: CMake Configure
        run: |
          mkdir "${{ runner.workspace }}/build"
          cd "${{ runner.workspace }}/build"
          cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE
        shell: bash
        if: (matrix.generator != 'autogen') && (! matrix.thread_safe.enabled)

      - name: CMake Configure (Thread-Safe)
        run: |
          mkdir "${{ runner.workspace }}/build"
          cd "${{ runner.workspace }}/build"
          cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_HL_LIB:BOOL=OFF -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE
        shell: bash
        if: (matrix.generator != 'autogen') && (matrix.thread_safe.enabled)

      #
      # BUILD
      #
      - name: Autotools Build
        run: make
        working-directory: ${{ runner.workspace }}/build
        if: matrix.generator == 'autogen'

      - name: CMake Build
        run: cmake --build . --config ${{ matrix.build_mode.cmake }}
        working-directory: ${{ runner.workspace }}/build
        if: matrix.generator != 'autogen'

      #
      # RUN TESTS
      #
      - name: Autotools Run Tests
        run: make check
        working-directory: ${{ runner.workspace }}/build
        if: (matrix.generator == 'autogen') && (matrix.run_tests)

      - name: CMake Run Tests
        run: ctest --build . -C ${{ matrix.build_mode.cmake }} -V
        working-directory: ${{ runner.workspace }}/build
        # Skip Debug MSVC while we investigate H5L Java test timeouts
        if: (matrix.generator != 'autogen') && (matrix.run_tests) && ! ((matrix.name == 'Windows MSVC CMake') && (matrix.build_mode.cmake == 'Debug'))

      #
      # INSTALL (note that this runs even when we don't run the tests)
      #
      - name: Autotools Install
        run: make install
        working-directory: ${{ runner.workspace }}/build
        if: (matrix.generator == 'autogen')

      - name: Autotools Verify Install
        run: make check-install
        working-directory: ${{ runner.workspace }}/build
        if: (matrix.generator == 'autogen')
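For reference, the matrix placeholders above expand to ordinary configure options. A hedged sketch of roughly what the "Ubuntu gcc Autotools REL" cell runs (directory layout is illustrative; the flags are the matrix values substituted into the configure line):

```sh
# From a checkout in ./hdf5, mirroring the workflow's out-of-source build.
(cd hdf5 && sh ./autogen.sh)
mkdir build && cd build
../hdf5/configure --enable-build-mode=production \
  --enable-deprecated-symbols --with-default-api-version=v114 \
  --enable-shared --disable-parallel \
  --enable-cxx --enable-fortran --enable-java \
  --enable-mirror-vfd --enable-direct-vfd
make && make check
make install && make check-install
```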

@@ -1,10 +1,11 @@
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.

@@ -5,21 +5,17 @@
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
option (USE_LIBAEC "Use AEC library as SZip Filter" OFF)
option (USE_LIBAEC_STATIC "Use static AEC library " OFF)
include (ExternalProject)
include (FetchContent)
#option (HDF5_ALLOW_EXTERNAL_SUPPORT "Allow External Library Building (NO GIT TGZ)" "NO")
set (HDF5_ALLOW_EXTERNAL_SUPPORT "NO" CACHE STRING "Allow External Library Building (NO GIT TGZ)")
set_property (CACHE HDF5_ALLOW_EXTERNAL_SUPPORT PROPERTY STRINGS NO GIT TGZ)
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
option (ZLIB_USE_EXTERNAL "Use External Library Building for HDF5_ZLIB" 1)
option (ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" 1)
option (SZIP_USE_EXTERNAL "Use External Library Building for SZIP" 1)
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT")
set (ZLIB_URL ${ZLIB_GIT_URL} CACHE STRING "Path to zlib git repository")
@@ -31,18 +27,7 @@ if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MAT
set (TGZPATH ${HDF5_SOURCE_DIR})
endif ()
set (ZLIB_URL ${TGZPATH}/${ZLIB_TGZ_NAME})
if (NOT EXISTS "${ZLIB_URL}")
set (HDF5_ENABLE_Z_LIB_SUPPORT OFF CACHE BOOL "" FORCE)
message (VERBOSE "Filter ZLIB file ${ZLIB_URL} not found")
endif ()
set (SZIP_URL ${TGZPATH}/${SZIP_TGZ_NAME})
if (USE_LIBAEC)
set (SZIP_URL ${TGZPATH}/${SZAEC_TGZ_NAME})
endif ()
if (NOT EXISTS "${SZIP_URL}")
set (HDF5_ENABLE_SZIP_SUPPORT OFF CACHE BOOL "" FORCE)
message (VERBOSE "Filter SZIP file ${SZIP_URL} not found")
endif ()
else ()
set (ZLIB_USE_EXTERNAL 0)
set (SZIP_USE_EXTERNAL 0)
@@ -60,7 +45,8 @@ if (HDF5_ENABLE_Z_LIB_SUPPORT)
if (NOT ZLIB_FOUND)
find_package (ZLIB) # Legacy find
if (ZLIB_FOUND)
set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${ZLIB_LIBRARIES})
set (LINK_LIBS ${LINK_LIBS} ${ZLIB_LIBRARIES})
set (LINK_SHARED_LIBS ${LINK_SHARED_LIBS} ${ZLIB_LIBRARIES})
endif ()
endif ()
endif ()
@@ -77,7 +63,7 @@ if (HDF5_ENABLE_Z_LIB_SUPPORT)
set (H5_HAVE_FILTER_DEFLATE 1)
set (H5_HAVE_ZLIB_H 1)
set (H5_HAVE_LIBZ 1)
message (VERBOSE "Filter HDF5_ZLIB is built")
message (STATUS "Filter ZLIB is built")
else ()
message (FATAL_ERROR " ZLib is Required for ZLib support in HDF5")
endif ()
@@ -91,9 +77,12 @@ if (HDF5_ENABLE_Z_LIB_SUPPORT)
if (H5_HAVE_FILTER_DEFLATE)
set (EXTERNAL_FILTERS "${EXTERNAL_FILTERS} DEFLATE")
endif ()
set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${ZLIB_STATIC_LIBRARY})
if (BUILD_SHARED_LIBS)
set (LINK_SHARED_LIBS ${LINK_SHARED_LIBS} ${ZLIB_SHARED_LIBRARY})
endif ()
set (LINK_LIBS ${LINK_LIBS} ${ZLIB_STATIC_LIBRARY})
INCLUDE_DIRECTORIES (${ZLIB_INCLUDE_DIRS})
message (VERBOSE "Filter HDF5_ZLIB is ON")
message (STATUS "Filter ZLIB is ON")
endif ()
#-----------------------------------------------------------------------------
@@ -103,22 +92,12 @@ option (HDF5_ENABLE_SZIP_SUPPORT "Use SZip Filter" OFF)
if (HDF5_ENABLE_SZIP_SUPPORT)
option (HDF5_ENABLE_SZIP_ENCODING "Use SZip Encoding" OFF)
if (NOT SZIP_USE_EXTERNAL)
set(SZIP_FOUND FALSE)
if (USE_LIBAEC)
set(libaec_USE_STATIC_LIBS ${USE_LIBAEC_STATIC})
find_package (libaec 1.0.5 CONFIG)
if (SZIP_FOUND)
set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_LIBRARIES})
endif ()
endif ()
find_package (SZIP NAMES ${SZIP_PACKAGE_NAME}${HDF_PACKAGE_EXT} COMPONENTS static shared)
if (NOT SZIP_FOUND)
find_package (SZIP NAMES ${SZIP_PACKAGE_NAME}${HDF_PACKAGE_EXT} COMPONENTS static shared)
if (NOT SZIP_FOUND)
find_package (SZIP) # Legacy find
if (SZIP_FOUND)
set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_LIBRARIES})
endif ()
find_package (SZIP) # Legacy find
if (SZIP_FOUND)
set (LINK_LIBS ${LINK_LIBS} ${SZIP_LIBRARIES})
set (LINK_SHARED_LIBS ${LINK_SHARED_LIBS} ${SZIP_LIBRARIES})
endif ()
endif ()
endif ()
@@ -134,20 +113,17 @@ if (HDF5_ENABLE_SZIP_SUPPORT)
set (H5_HAVE_FILTER_SZIP 1)
set (H5_HAVE_SZLIB_H 1)
set (H5_HAVE_LIBSZ 1)
message (VERBOSE "Filter SZIP is built")
if (USE_LIBAEC)
message (VERBOSE "... with library AEC")
set (SZIP_PACKAGE_NAME ${LIBAEC_PACKAGE_NAME})
else ()
set (SZIP_PACKAGE_NAME ${SZIP_PACKAGE_NAME})
endif ()
message (STATUS "Filter SZIP is built")
else ()
message (FATAL_ERROR "SZIP is Required for SZIP support in HDF5")
endif ()
endif ()
set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_STATIC_LIBRARY})
if (BUILD_SHARED_LIBS)
set (LINK_SHARED_LIBS ${LINK_SHARED_LIBS} ${SZIP_SHARED_LIBRARY})
endif ()
set (LINK_LIBS ${LINK_LIBS} ${SZIP_STATIC_LIBRARY})
INCLUDE_DIRECTORIES (${SZIP_INCLUDE_DIRS})
message (VERBOSE "Filter SZIP is ON")
message (STATUS "Filter SZIP is ON")
if (H5_HAVE_FILTER_SZIP)
set (EXTERNAL_FILTERS "${EXTERNAL_FILTERS} DECODE")
endif ()
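For context, the knobs this script reads are normally supplied on the configure line. A sketch of enabling both filters from local release tarballs (the tarball directory is illustrative; as shown above, `TGZPATH` defaults to the HDF5 source directory):

```sh
cmake -G Ninja \
  -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=ON \
  -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=ON \
  -DHDF5_ENABLE_SZIP_ENCODING:BOOL=ON \
  -DHDF5_ALLOW_EXTERNAL_SUPPORT:STRING=TGZ \
  -DTGZPATH:PATH=/path/to/tarballs \
  ../hdf5
```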

@@ -5,7 +5,7 @@
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
@@ -23,6 +23,16 @@ if (WIN32)
find_program (WIX_EXECUTABLE candle PATHS "${CPACK_WIX_ROOT}/bin")
endif ()
#-----------------------------------------------------------------------------
# Add file(s) to CMake Install
#-----------------------------------------------------------------------------
if (NOT HDF5_INSTALL_NO_DEVELOPMENT)
install (
FILES ${PROJECT_BINARY_DIR}/H5pubconf.h
DESTINATION ${HDF5_INSTALL_INCLUDE_DIR}
COMPONENT headers
)
endif ()
#-----------------------------------------------------------------------------
# Add Target(s) to CMake Install for import into other projects
@@ -33,7 +43,7 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED)
EXPORT ${HDF5_EXPORTED_TARGETS}
DESTINATION ${HDF5_INSTALL_CMAKE_DIR}
FILE ${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-targets.cmake
NAMESPACE ${HDF_PACKAGE_NAMESPACE}
NAMESPACE ${HDF5_PACKAGE}::
COMPONENT configinstall
)
endif ()
@@ -41,19 +51,21 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED)
#-----------------------------------------------------------------------------
# Export all exported targets to the build tree for use by parent project
#-----------------------------------------------------------------------------
export (
TARGETS ${HDF5_LIBRARIES_TO_EXPORT} ${HDF5_LIB_DEPENDENCIES} ${HDF5_UTILS_TO_EXPORT}
FILE ${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-targets.cmake
NAMESPACE ${HDF_PACKAGE_NAMESPACE}
)
if (NOT HDF5_EXTERNALLY_CONFIGURED)
export (
TARGETS ${HDF5_LIBRARIES_TO_EXPORT} ${HDF5_LIB_DEPENDENCIES} ${HDF5_UTILS_TO_EXPORT}
FILE ${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-targets.cmake
NAMESPACE ${HDF5_PACKAGE}::
)
endif ()
endif ()
#-----------------------------------------------------------------------------
# Set includes needed for build
#-----------------------------------------------------------------------------
set (HDF5_INCLUDES_BUILD_TIME
${HDF5_SRC_INCLUDE_DIRS} ${HDF5_CPP_SRC_DIR} ${HDF5_HL_SRC_DIR}
${HDF5_TOOLS_SRC_DIR} ${HDF5_SRC_BINARY_DIR}
${HDF5_SRC_DIR} ${HDF5_CPP_SRC_DIR} ${HDF5_HL_SRC_DIR}
${HDF5_TOOLS_SRC_DIR} ${HDF5_BINARY_DIR}
)
#-----------------------------------------------------------------------------
@@ -102,15 +114,10 @@ endif ()
# Configure the hdf5-config-version .cmake file for the install directory
#-----------------------------------------------------------------------------
if (NOT HDF5_EXTERNALLY_CONFIGURED)
write_basic_package_version_file (
"${HDF5_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-config-version.cmake"
VERSION ${HDF5_PACKAGE_VERSION}
COMPATIBILITY SameMinorVersion
configure_file (
${HDF_RESOURCES_DIR}/hdf5-config-version.cmake.in
${HDF5_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-config-version.cmake @ONLY
)
#configure_file (
# ${HDF_RESOURCES_DIR}/hdf5-config-version.cmake.in
# ${HDF5_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-config-version.cmake @ONLY
#)
install (
FILES ${HDF5_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-config-version.cmake
DESTINATION ${HDF5_INSTALL_CMAKE_DIR}
@@ -128,16 +135,34 @@ else ()
endif ()
configure_file (
${HDF_RESOURCES_DIR}/libhdf5.settings.cmake.in
${HDF5_SRC_BINARY_DIR}/libhdf5.settings ESCAPE_QUOTES @ONLY
${HDF5_BINARY_DIR}/libhdf5.settings @ONLY
)
install (
FILES ${HDF5_SRC_BINARY_DIR}/libhdf5.settings
FILES ${HDF5_BINARY_DIR}/libhdf5.settings
DESTINATION ${HDF5_INSTALL_LIB_DIR}
COMPONENT libraries
)
#-----------------------------------------------------------------------------
# Configure the HDF5_Examples.cmake file and the examples
# Create pkgconfig files
#-----------------------------------------------------------------------------
#foreach (libs ${LINK_LIBS})
# set (LIBS "${LIBS} -l${libs}")
#endforeach ()
#foreach (libs ${HDF5_LIBRARIES_TO_EXPORT})
# set (HDF5LIBS "${HDF5LIBS} -l${libs}")
#endforeach ()
#configure_file (
# ${HDF_RESOURCES_DIR}/libhdf5.pc.in
# ${HDF5_BINARY_DIR}/CMakeFiles/libhdf5.pc @ONLY
#)
#install (
# FILES ${HDF5_BINARY_DIR}/CMakeFiles/libhdf5.pc
# DESTINATION ${HDF5_INSTALL_LIB_DIR}/pkgconfig
#)
#-----------------------------------------------------------------------------
# Configure the HDF518_Examples.cmake file and the examples
#-----------------------------------------------------------------------------
option (HDF5_PACK_EXAMPLES "Package the HDF5 Library Examples Compressed File" OFF)
if (HDF5_PACK_EXAMPLES)
@@ -182,7 +207,7 @@ if (HDF5_PACK_EXAMPLES)
endif ()
#-----------------------------------------------------------------------------
# Configure the README.md file for the binary package
# Configure the README.txt file for the binary package
#-----------------------------------------------------------------------------
HDF_README_PROPERTIES(HDF5_BUILD_FORTRAN)
@@ -198,13 +223,15 @@ endif ()
#-----------------------------------------------------------------------------
if (NOT HDF5_EXTERNALLY_CONFIGURED)
install (
FILES ${HDF5_SOURCE_DIR}/COPYING
FILES
${HDF5_SOURCE_DIR}/COPYING
DESTINATION ${HDF5_INSTALL_DATA_DIR}
COMPONENT hdfdocuments
)
if (EXISTS "${HDF5_SOURCE_DIR}/release_docs" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/release_docs")
set (release_files
${HDF5_SOURCE_DIR}/release_docs/USING_HDF5_CMake.txt
${HDF5_SOURCE_DIR}/release_docs/COPYING
${HDF5_SOURCE_DIR}/release_docs/RELEASE.txt
)
if (WIN32)
@@ -216,7 +243,6 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED)
if (HDF5_PACK_INSTALL_DOCS)
set (release_files
${release_files}
${HDF5_SOURCE_DIR}/release_docs/INSTALL_Warnings.txt
${HDF5_SOURCE_DIR}/release_docs/INSTALL_CMake.txt
${HDF5_SOURCE_DIR}/release_docs/HISTORY-1_8.txt
${HDF5_SOURCE_DIR}/release_docs/INSTALL
@@ -242,12 +268,25 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED)
endif ()
install (
FILES ${release_files}
DESTINATION ${HDF5_INSTALL_DOC_DIR}
DESTINATION ${HDF5_INSTALL_DATA_DIR}
COMPONENT hdfdocuments
)
endif ()
endif ()
if (CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
if (CMAKE_HOST_UNIX)
set (CMAKE_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}/HDF_Group/${HDF5_PACKAGE_NAME}/${HDF5_PACKAGE_VERSION}"
CACHE PATH "Install path prefix, prepended onto install directories." FORCE)
else ()
GetDefaultWindowsPrefixBase(CMAKE_GENERIC_PROGRAM_FILES)
set (CMAKE_INSTALL_PREFIX
"${CMAKE_GENERIC_PROGRAM_FILES}/HDF_Group/${HDF5_PACKAGE_NAME}/${HDF5_PACKAGE_VERSION}"
CACHE PATH "Install path prefix, prepended onto install directories." FORCE)
set (CMAKE_GENERIC_PROGRAM_FILES)
endif ()
endif ()
#-----------------------------------------------------------------------------
# Set the cpack variables
#-----------------------------------------------------------------------------
@@ -262,9 +301,9 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES)
set (CPACK_PACKAGE_VERSION_MAJOR "${HDF5_PACKAGE_VERSION_MAJOR}")
set (CPACK_PACKAGE_VERSION_MINOR "${HDF5_PACKAGE_VERSION_MINOR}")
set (CPACK_PACKAGE_VERSION_PATCH "")
set (CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/COPYING")
if (EXISTS "${HDF5_SOURCE_DIR}/release_docs")
set (CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_CURRENT_SOURCE_DIR}/release_docs/RELEASE.txt")
set (CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/release_docs/COPYING")
set (CPACK_RESOURCE_FILE_README "${CMAKE_CURRENT_SOURCE_DIR}/release_docs/RELEASE.txt")
endif ()
set (CPACK_PACKAGE_RELOCATABLE TRUE)
@@ -273,7 +312,7 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES)
else ()
set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}/${CPACK_PACKAGE_NAME}/${CPACK_PACKAGE_VERSION}")
endif ()
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_DIR}/hdf.bmp")
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_EXT_DIR}/hdf.bmp")
set (CPACK_GENERATOR "TGZ")
if (WIN32)
@@ -296,10 +335,10 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES)
endif ()
# set the install/uninstall icon used for the installer itself
# There is a bug in NSI that does not handle full unix paths properly.
set (CPACK_NSIS_MUI_ICON "${HDF_RESOURCES_DIR}\\\\hdf.ico")
set (CPACK_NSIS_MUI_UNIICON "${HDF_RESOURCES_DIR}\\\\hdf.ico")
set (CPACK_NSIS_MUI_ICON "${HDF_RESOURCES_EXT_DIR}\\\\hdf.ico")
set (CPACK_NSIS_MUI_UNIICON "${HDF_RESOURCES_EXT_DIR}\\\\hdf.ico")
# set the package header icon for MUI
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_DIR}\\\\hdf.bmp")
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_EXT_DIR}\\\\hdf.bmp")
set (CPACK_NSIS_DISPLAY_NAME "${CPACK_NSIS_PACKAGE_NAME}")
if (OVERRIDE_INSTALL_VERSION)
set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}\\\\${CPACK_PACKAGE_NAME}\\\\${OVERRIDE_INSTALL_VERSION}")
@@ -329,7 +368,7 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES)
set (CPACK_RESOURCE_FILE_LICENSE "${HDF5_BINARY_DIR}/COPYING.txt")
# .. variable:: CPACK_WIX_PRODUCT_ICON
# The Icon shown next to the program name in Add/Remove programs.
set(CPACK_WIX_PRODUCT_ICON "${HDF_RESOURCES_DIR}\\\\hdf.ico")
set(CPACK_WIX_PRODUCT_ICON "${HDF_RESOURCES_EXT_DIR}\\\\hdf.ico")
#
# .. variable:: CPACK_WIX_UI_BANNER
#
@@ -360,14 +399,14 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES)
list (APPEND CPACK_GENERATOR "DragNDrop")
set (CPACK_COMPONENTS_ALL_IN_ONE_PACKAGE ON)
set (CPACK_PACKAGING_INSTALL_PREFIX "/${CPACK_PACKAGE_INSTALL_DIRECTORY}")
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_DIR}/hdf.icns")
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_EXT_DIR}/hdf.icns")
option (HDF5_PACK_MACOSX_FRAMEWORK "Package the HDF5 Library in a Frameworks" OFF)
if (HDF5_PACK_MACOSX_FRAMEWORK AND HDF5_BUILD_FRAMEWORKS)
set (CPACK_BUNDLE_NAME "${HDF5_PACKAGE_STRING}")
set (CPACK_BUNDLE_LOCATION "/") # make sure CMAKE_INSTALL_PREFIX ends in /
set (CMAKE_INSTALL_PREFIX "/${CPACK_BUNDLE_NAME}.framework/Versions/${CPACK_PACKAGE_VERSION}/${CPACK_PACKAGE_NAME}/")
set (CPACK_BUNDLE_ICON "${HDF_RESOURCES_DIR}/hdf.icns")
set (CPACK_BUNDLE_ICON "${HDF_RESOURCES_EXT_DIR}/hdf.icns")
set (CPACK_BUNDLE_PLIST "${HDF5_BINARY_DIR}/CMakeFiles/Info.plist")
set (CPACK_SHORT_VERSION_STRING "${CPACK_PACKAGE_VERSION}")
#-----------------------------------------------------------------------------
@@ -382,7 +421,7 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES)
${HDF5_BINARY_DIR}/CMakeFiles/PkgInfo @ONLY
)
configure_file (
${HDF_RESOURCES_DIR}/version.plist.in
${HDF_RESOURCES_EXT_DIR}/version.plist.in
${HDF5_BINARY_DIR}/CMakeFiles/version.plist @ONLY
)
install (
@@ -395,19 +434,13 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES)
set (CPACK_PACKAGING_INSTALL_PREFIX "/${CPACK_PACKAGE_INSTALL_DIRECTORY}")
set (CPACK_COMPONENTS_ALL_IN_ONE_PACKAGE ON)
list (APPEND CPACK_GENERATOR "DEB")
set (CPACK_DEBIAN_PACKAGE_SECTION "Libraries")
set (CPACK_DEBIAN_PACKAGE_MAINTAINER "${HDF5_PACKAGE_BUGREPORT}")
list (APPEND CPACK_GENERATOR "RPM")
# list (APPEND CPACK_GENERATOR "RPM")
set (CPACK_RPM_PACKAGE_RELEASE "1")
set (CPACK_RPM_PACKAGE_RELEASE_DIST ON)
set (CPACK_RPM_COMPONENT_INSTALL ON)
set (CPACK_RPM_PACKAGE_RELOCATABLE ON)
set (CPACK_RPM_FILE_NAME "RPM-DEFAULT")
set (CPACK_RPM_PACKAGE_NAME "${CPACK_PACKAGE_NAME}")
set (CPACK_RPM_PACKAGE_VERSION "${CPACK_PACKAGE_VERSION}")
set (CPACK_RPM_PACKAGE_VENDOR "${CPACK_PACKAGE_VENDOR}")
set (CPACK_RPM_PACKAGE_LICENSE "BSD-style")
set (CPACK_RPM_PACKAGE_GROUP "Development/Libraries")
set (CPACK_RPM_PACKAGE_URL "${HDF5_PACKAGE_URL}")
@@ -448,11 +481,11 @@ The HDF5 data model, file format, API, library, and tools are open and distribut
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
if (ZLIB_FOUND AND ZLIB_USE_EXTERNAL)
if (WIN32)
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;ALL;/")
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;ALL;/")
else ()
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;libraries;/")
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;headers;/")
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;configinstall;/")
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;libraries;/")
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;headers;/")
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;configinstall;/")
endif ()
endif ()
if (SZIP_FOUND AND SZIP_USE_EXTERNAL)
@@ -464,13 +497,6 @@ The HDF5 data model, file format, API, library, and tools are open and distribut
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${SZIP_INCLUDE_DIR_GEN};SZIP;configinstall;/")
endif ()
endif ()
if (PLUGIN_FOUND AND PLUGIN_USE_EXTERNAL)
if (WIN32)
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${PLUGIN_BINARY_DIR};PLUGIN;ALL;/")
else ()
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${PLUGIN_BINARY_DIR};PLUGIN;libraries;/")
endif ()
endif ()
endif ()
endif ()
@@ -553,13 +579,6 @@ The HDF5 data model, file format, API, library, and tools are open and distribut
)
endif ()
cpack_add_component (utilsapplications
DISPLAY_NAME "HDF5 Utility Applications"
DEPENDS libraries
GROUP Applications
INSTALL_TYPES Full Developer User
)
if (HDF5_BUILD_TOOLS)
cpack_add_component (toolsapplications
DISPLAY_NAME "HDF5 Tools Applications"
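Both sides of this diff register install components (`libraries`, `headers`, `configinstall`, `hdfdocuments`, and the applications groups), which is what makes selective installs and component-aware packaging possible. A hedged sketch (CMake 3.15+ syntax; the `build` directory name is illustrative):

```sh
# Build, then install only the 'libraries' component.
cmake --build build --config Release
cmake --install build --component libraries

# Or drive the packagers configured above (TGZ everywhere; NSIS/WiX on Windows).
cpack --config build/CPackConfig.cmake -G TGZ
```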

File diff suppressed because it is too large.

@@ -1,56 +0,0 @@
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
include (ExternalProject)
#option (HDF5_ALLOW_EXTERNAL_SUPPORT "Allow External Library Building (NO GIT TGZ)" "NO")
set (HDF5_ALLOW_EXTERNAL_SUPPORT "NO" CACHE STRING "Allow External Library Building (NO GIT TGZ)")
set_property (CACHE HDF5_ALLOW_EXTERNAL_SUPPORT PROPERTY STRINGS NO GIT TGZ)
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
option (PLUGIN_USE_EXTERNAL "Use External Library Building for filter PLUGIN" 1)
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT")
set (PLUGIN_URL ${PLUGIN_GIT_URL} CACHE STRING "Path to PLUGIN git repository")
set (PLUGIN_BRANCH ${PLUGIN_GIT_BRANCH})
elseif (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
if (NOT TGZPATH)
set (TGZPATH ${HDF5_SOURCE_DIR})
endif ()
set (PLUGIN_URL ${TGZPATH}/${PLUGIN_TGZ_NAME})
if (NOT EXISTS "${PLUGIN_URL}")
set (HDF5_ENABLE_PLUGIN_SUPPORT OFF CACHE BOOL "" FORCE)
message (STATUS "Filter PLUGIN file ${PLUGIN_URL} not found")
endif ()
else ()
set (PLUGIN_USE_EXTERNAL 0)
endif ()
endif ()
#-----------------------------------------------------------------------------
# Option for PLUGIN support
#-----------------------------------------------------------------------------
option (HDF5_ENABLE_PLUGIN_SUPPORT "Enable PLUGIN Filters" OFF)
if (HDF5_ENABLE_PLUGIN_SUPPORT)
if (NOT PLUGIN_USE_EXTERNAL)
find_package (PLUGIN NAMES ${PLUGIN_PACKAGE_NAME}${HDF_PACKAGE_EXT})
if (NOT PLUGIN_FOUND)
find_package (PLUGIN) # Legacy find
endif ()
endif ()
if (NOT PLUGIN_FOUND)
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
EXTERNAL_PLUGIN_LIBRARY (${HDF5_ALLOW_EXTERNAL_SUPPORT})
message (STATUS "Filter PLUGIN is built")
else ()
message (FATAL_ERROR " PLUGIN is Required for PLUGIN support in HDF5")
endif ()
endif ()
message (STATUS "Filter PLUGIN is ON")
endif ()
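Similarly, a hedged sketch of enabling the plugin build from Git (the URL is a placeholder for whatever `PLUGIN_GIT_URL` resolves to in your configuration):

```sh
cmake -DHDF5_ENABLE_PLUGIN_SUPPORT:BOOL=ON \
  -DHDF5_ALLOW_EXTERNAL_SUPPORT:STRING=GIT \
  -DPLUGIN_GIT_URL:STRING=https://github.com/HDFGroup/hdf5_plugins.git \
  ../hdf5
```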

@@ -1,128 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
help@hdfgroup.org.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

@@ -1,142 +0,0 @@
# How to contribute to HDF5
The HDF Group encourages community members to contribute to the HDF5 project. We accept, and are very grateful for, any contribution, from minor typo and bug fixes to new features. The HDF Group is committed to working with code contributors and to making the contribution process enjoyable and straightforward.

This document describes guiding principles for HDF5 code contributors; it does not attempt to address every possible contribution. If in doubt, please do not hesitate to ask us for guidance.
***Note that no contribution may be accepted unless the donor agrees with the HDF Group software license terms
found in the COPYING file in every branch's top source directory.***
> We will assume that you are familiar with `git` and `GitHub`. If not, you may go through the GitHub tutorial found at
[https://guides.github.com/activities/hello-world/](https://guides.github.com/activities/hello-world/). This tutorial should only take
around 10 minutes.
## Table of Contents
* [Workflow](#workflow)
* [Acceptance criteria for a pull request](#criteria)
* [Release Note](#releasenote)
* [Checklist](#checklist)
# Workflow <A NAME="workflow"></A>
The process for contributing code to HDF5 is as follows:
* Open an issue on [HDF5 GitHub](https://github.com/HDFGroup/hdf5/issues).
> This step is ***required*** unless the change is minor (e.g., typo fix).
* Fork the [HDF5](https://github.com/HDFGroup/hdf5) repository.
* Make the desired changes to the HDF5 software.
* New features should always go to _develop_ branch first and later should be merged to the appropriate maintenance branches.
* Bug fixes should go to all appropriate branches (_develop_ and maintenance).
* Build and test your changes. Detailed instructions on building and testing HDF5 can be found in the `INSTALL*` files in the `release_docs` directory.
* Push your changes to GitHub.
* Issue a pull request and address any code formatting and testing issues reported.
Once a pull request is correctly formatted and passes **ALL** CI tests, it will be reviewed and evaluated by The HDF Group developers and HDF5
community members who can approve pull requests. The HDF Group developers will work with you to ensure that the pull request satisfies the acceptance
criteria described in the next section.
# Acceptance criteria for a pull request <A NAME="criteria"></A>
We appreciate every contribution we receive, but we may not accept them all. Those that we *do* accept satisfy the following criteria:
* **The pull request has a clear purpose** - What does the pull request address? How does it benefit the HDF5 community?
If the pull request does not have a clear purpose and benefits, it will not be accepted.
* **The pull request is documented** - The HDF5 developers must understand not only *what* a change is doing, but *how* it is doing it.
Documenting the code makes it easier for us to understand your patch and maintain the code in the future.
* **The pull request passes HDF5 regression testing** - Any issue fixed or functionality added should be accompanied by the corresponding
tests and pass HDF5 regression testing run by The HDF Group. We do not expect you to perform comprehensive testing across multiple platforms
before we accept the pull request. If the pull request does not pass regression testing after the merge, The HDF Group developers will work
with you on the fixes.
* **The pull request does not compromise the principles behind HDF5** - HDF5 has a 100% commitment to backward compatibility.
* Any file ever created with HDF5 must be readable by any future version of HDF5.
If your patch's purpose is to modify the HDF5 data model or file format,
**please** discuss this with us first. File format changes and features required by those changes can be introduced only in a new major release.
* HDF5 has a commitment to remaining *machine-independent*; data created on one platform/environment/architecture **must** remain readable by HDF5 on any other.
* For binary compatibility, no changes are allowed to public APIs and data structures in the maintenance releases; new APIs can be added.
* **New features are documented** - Any new features should have proper documentation; talk to us if you have any questions.
* **When to Write a Release Note** - Generally, a release note must be written for every change to the code whose effect users might notice: if a user might see a difference in the way the software works, a note should be written. By code we mean the text that will be compiled into one of the company's software products, including configuration changes and changes to tools users might work with to configure and build our software.
* Notes should be added for known problems. Known problems are issues that we know about and have not yet been able to fix.
* Any change made to address a user-reported problem should be described in a release note.
* A release note does not need to be written for changes to the code that users will not see. Here are some examples. If you add a
comment, you do not need to write a release note describing the comment you added. If you rewrite some code to make it read more
clearly and if there is no change in functionality or performance, then you do not need to write a release note. If you change the
process by which user software is made, you may not need to write a release note since the change was not made to the code.
* **Audience** - We have different kinds of users, and a release note may be helpful to application developers but not to system administrators. Readers who may find the RELEASE.txt file helpful include application developers, library developers, and system administrators.
# Release Note <A NAME="releasenote"></A>
* **Entry Syntax**
The release note entry syntax is shown below.
```
- Title/Problem
Problem/Solution
Signature
```
* **Entry Elements** - The elements of the entry - title, problem, solution, and signature - are described in more detail in the table
below. Descriptions of the problem and the solution should be clear without any ambiguities and should be short without losing clarity or specifics.
* **Title** - The title or tag should identify one or more categories that will help readers decide if the entry is something they need to study. Can be combined with the `Problem` element
* **Problem** - Describe the problem and how users might see the problem in a paragraph.
You might also consider the following as you describe the problem:
* Under what specific conditions does this issue arise?
* Under what specific conditions are we sure this issue will not arise?
        * For a performance issue, instead of simply saying something is a performance issue, describe what the performance impact of the issue is.
* **Solution** - Describe the solution in another paragraph.
You might also consider the following as you describe the solution:
* What was done to resolve the issue?
* What is the functional impact?
        * Is there a workaround - a way for users to design their software so as not to encounter the issue? If so, what is the workaround?
* For a performance fix, how has the performance improved? Links to published documentation would be good.
    * **Signature** - Each entry must be signed with the initials of the author, the date in YYYY/MM/DD format, and the JIRA ticket number, all
enclosed in parentheses. The following is an example signature written by developer Xavier Zolo on April 16, 2014 about JIRA ticket
HDFFV-5555: (XYZ - 2014/04/16, HDFFV-5555). JIRA or GitHub numbers should not be used in the description of the problem or the solution;
they are like abbreviations that customers and external users will not be able to interpret.
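For illustration, a complete entry following this syntax might look like the sketch below. The problem and solution described here are invented, and the signature reuses the example from the **Signature** element above.
```
- Fixed a hypothetical memory leak in the dataset read path

  When reading a chunked dataset with a user-defined fill value, the
  library leaked a small internal buffer on every read call, so memory
  usage grew with the number of reads.

  The buffer is now freed before the call returns. There is no other
  functional impact, and no workaround is needed.

  (XYZ - 2014/04/16, HDFFV-5555)
```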
# Checklist <A NAME="checklist"></A>
Please make sure that you check the items applicable to your pull request:
* Code
* [ ] Does the pull request have a corresponding GitHub issue and clear purpose?
* [ ] Does the pull request follow HDF5 best practices (naming conventions, code portability, code structure, etc.)? <<TODO: link to the document>>
  * [ ] If changes were made to the Autotools build, were they added to CMake, and vice versa?
* [ ] Is the pull request applicable to any other branches? If yes, which ones? Please document it in the GitHub issue.
* [ ] Is the new code sufficiently documented for future maintenance?
  * [ ] Does the new feature require a change to an existing API? See the "API Compatibility Macros" document (https://portal.hdfgroup.org/display/HDF5/API+Compatibility+Macros)
* Documentation
* [ ] Was the change described in the release_docs/RELEASE.txt file?
  * [ ] Was MANIFEST updated if new files were added to the source?
  * [ ] Was the new function documented in the corresponding public header file using [Doxygen](https://docs.hdfgroup.org/hdf5/develop/_r_m_t.html)? (See the sketch after this checklist.)
  * [ ] Was new functionality documented for the HDF5 community? (The level of documentation depends on the feature; ask us what would be appropriate.)
* Testing
* [ ] Does the pull request have tests?
* [ ] Does the pull request affect HDF5 library performance?
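Regarding the Doxygen checklist item above: a minimal comment block for a hypothetical public function might look like the sketch below. The function name, parameters, and description are invented for illustration; only the Doxygen commands (`\brief`, `\param`, `\return`, `\details`) reflect the general style used in the public headers.
```
/**
 * \brief Retrieves the size of an example object.
 *
 * \param[in]  obj_id Object identifier
 * \param[out] size   Size of the object in bytes
 *
 * \return Returns a non-negative value if successful; otherwise,
 *         returns a negative value.
 *
 * \details H5Xget_example_size() retrieves the size, in bytes, of the
 *          object identified by obj_id. (This function is hypothetical
 *          and is shown only to illustrate the comment style.)
 */
herr_t H5Xget_example_size(hid_t obj_id, size_t *size);
```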
We want as many contributions as we can get, and we are here to help. Feel free to reach out to us if you have any questions.
Thank you for your contribution!

79
COPYING
View File

@@ -3,10 +3,10 @@ HDF5 (Hierarchical Data Format 5) Software Library and Utilities
-----------------------------------------------------------------------------
HDF5 (Hierarchical Data Format 5) Software Library and Utilities
Copyright 2006 by The HDF Group.
Copyright (c) 2006, The HDF Group.
NCSA HDF5 (Hierarchical Data Format 5) Software Library and Utilities
Copyright 1998-2006 by The Board of Trustees of the University of Illinois.
Copyright (c) 1998-2006, The Board of Trustees of the University of Illinois.
All rights reserved.
@@ -21,27 +21,27 @@ provided that the following conditions are met:
this list of conditions, and the following disclaimer in the documentation
and/or materials provided with the distribution.
3. Neither the name of The HDF Group, the name of the University, nor the
3. In addition, redistributions of modified forms of the source or binary
code must carry prominent notices stating that the original code was
changed and the date of the change.
4. All publications or advertising materials mentioning features or use of
this software are asked, but not required, to acknowledge that it was
developed by The HDF Group and by the National Center for Supercomputing
Applications at the University of Illinois at Urbana-Champaign and
credit the contributors.
5. Neither the name of The HDF Group, the name of the University, nor the
name of any Contributor may be used to endorse or promote products derived
from this software without specific prior written permission from
The HDF Group, the University, or the Contributor, respectively.
DISCLAIMER:
THIS SOFTWARE IS PROVIDED BY THE HDF GROUP AND THE CONTRIBUTORS
"AS IS" WITH NO WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED. IN NO
EVENT SHALL THE HDF GROUP OR THE CONTRIBUTORS BE LIABLE FOR ANY DAMAGES
SUFFERED BY THE USERS ARISING OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You are under no obligation whatsoever to provide any bug fixes, patches, or
upgrades to the features, functionality or performance of the source code
("Enhancements") to anyone; however, if you choose to make your Enhancements
available either publicly, or directly to The HDF Group, without imposing a
separate written license agreement for such Enhancements, then you hereby
grant the following license: a non-exclusive, royalty-free perpetual license
to install, use, modify, prepare derivative works, incorporate into other
computer software, distribute, and sublicense such enhancements or derivative
works thereof, in binary and source code form.
"AS IS" WITH NO WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED. In no
event shall The HDF Group or the Contributors be liable for any damages
suffered by the users arising out of the use of this software, even if
advised of the possibility of such damage.
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@@ -55,9 +55,9 @@ http://support.hdfgroup.org/ftp/HDF5/releases/COPYING_LBNL_HDF5.
-----------------------------------------------------------------------------
Contributors: National Center for Supercomputing Applications (NCSA) at
the University of Illinois, Fortner Software, Unidata Program Center
(netCDF), The Independent JPEG Group (JPEG), Jean-loup Gailly and Mark Adler
(gzip), and Digital Equipment Corporation (DEC).
the University of Illinois, Fortner Software, Unidata Program Center (netCDF),
The Independent JPEG Group (JPEG), Jean-loup Gailly and Mark Adler (gzip),
and Digital Equipment Corporation (DEC).
-----------------------------------------------------------------------------
@@ -67,12 +67,6 @@ under Prime Contract No. DE-AC02-05CH11231.
-----------------------------------------------------------------------------
Portions of HDF5 were developed with support from Lawrence Livermore
National Laboratory and the United States Department of Energy under
Prime Contract No. DE-AC52-07NA27344.
-----------------------------------------------------------------------------
Portions of HDF5 were developed with support from the University of
California, Lawrence Livermore National Laboratory (UC LLNL).
The following statement applies to those portions of the product and must
@@ -86,21 +80,26 @@ and/or accompanying materials:
for the operation of UC LLNL.
DISCLAIMER:
THIS WORK WAS PREPARED AS AN ACCOUNT OF WORK SPONSORED BY AN AGENCY OF
THE UNITED STATES GOVERNMENT. NEITHER THE UNITED STATES GOVERNMENT NOR
THE UNIVERSITY OF CALIFORNIA NOR ANY OF THEIR EMPLOYEES, MAKES ANY
WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY OR RESPONSIBILITY
FOR THE ACCURACY, COMPLETENESS, OR USEFULNESS OF ANY INFORMATION,
APPARATUS, PRODUCT, OR PROCESS DISCLOSED, OR REPRESENTS THAT ITS USE
WOULD NOT INFRINGE PRIVATELY- OWNED RIGHTS. REFERENCE HEREIN TO ANY
SPECIFIC COMMERCIAL PRODUCTS, PROCESS, OR SERVICE BY TRADE NAME,
TRADEMARK, MANUFACTURER, OR OTHERWISE, DOES NOT NECESSARILY CONSTITUTE
OR IMPLY ITS ENDORSEMENT, RECOMMENDATION, OR FAVORING BY THE UNITED
STATES GOVERNMENT OR THE UNIVERSITY OF CALIFORNIA. THE VIEWS AND
OPINIONS OF AUTHORS EXPRESSED HEREIN DO NOT NECESSARILY STATE OR REFLECT
THOSE OF THE UNITED STATES GOVERNMENT OR THE UNIVERSITY OF CALIFORNIA,
AND SHALL NOT BE USED FOR ADVERTISING OR PRODUCT ENDORSEMENT PURPOSES.
This work was prepared as an account of work sponsored by an agency of
the United States Government. Neither the United States Government nor
the University of California nor any of their employees, makes any
warranty, express or implied, or assumes any liability or responsibility
for the accuracy, completeness, or usefulness of any information,
apparatus, product, or process disclosed, or represents that its use
would not infringe privately- owned rights. Reference herein to any
specific commercial products, process, or service by trade name,
trademark, manufacturer, or otherwise, does not necessarily constitute
or imply its endorsement, recommendation, or favoring by the United
States Government or the University of California. The views and
opinions of authors expressed herein do not necessarily state or reflect
those of the United States Government or the University of California,
and shall not be used for advertising or product endorsement purposes.
-----------------------------------------------------------------------------
HDF5 is available with the SZIP compression library but SZIP is not part
of HDF5 and has separate copyright and license terms. See “Szip Compression
in HDF Products” (www.hdfgroup.org/doc_resource/SZIP/) for further details.
-----------------------------------------------------------------------------

View File

@@ -5,48 +5,39 @@
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
## This file should be placed in the root directory of your project.
## Then modify the CMakeLists.txt file in the root directory of your
## project to incorporate the testing dashboard.
## # The following are required to use Dart and the CDash dashboard
## # The following are required to uses Dart and the Cdash dashboard
## ENABLE_TESTING()
## INCLUDE(CTest)
set (CTEST_PROJECT_NAME "HDF5")
set (CTEST_NIGHTLY_START_TIME "18:00:00 CST")
set (CTEST_DROP_METHOD "https")
if (CTEST_DROP_SITE_INIT)
set (CTEST_DROP_SITE "${CTEST_DROP_SITE_INIT}")
set (CTEST_DROP_METHOD "http")
if (CDASH_LOCAL)
set (CTEST_DROP_SITE "cdash-internal.hdfgroup.org")
set (CTEST_DROP_LOCATION "/submit.php?project=HDF5Trunk")
else ()
if (CDASH_LOCAL)
set (CTEST_DROP_SITE "cdash-internal.hdfgroup.org")
else ()
set (CTEST_DROP_SITE "cdash.hdfgroup.org")
endif ()
endif ()
if (CTEST_DROP_LOCATION_INIT)
set (CTEST_DROP_LOCATION "${CTEST_DROP_LOCATION_INIT}")
else ()
if (CDASH_LOCAL)
set (CTEST_DROP_LOCATION "/submit.php?project=HDF5Trunk")
else ()
set (CTEST_DROP_LOCATION "/submit.php?project=HDF5")
endif ()
set (CTEST_DROP_SITE "cdash.hdfgroup.org")
set (CTEST_DROP_LOCATION "/submit.php?project=HDF5+Trunk")
endif ()
set (CTEST_DROP_SITE_CDASH TRUE)
set (UPDATE_TYPE git)
set (VALGRIND_COMMAND "/usr/bin/valgrind")
set (VALGRIND_COMMAND_OPTIONS "-v --tool=memcheck --leak-check=full --track-fds=yes --num-callers=50 --show-reachable=yes --track-origins=yes --malloc-fill=0xff --free-fill=0xfe")
set (CTEST_MEMORYCHECK_COMMAND "/usr/bin/valgrind")
set (CTEST_MEMORYCHECK_COMMAND_OPTIONS "-v --tool=memcheck --leak-check=full --track-fds=yes --num-callers=50 --show-reachable=yes --track-origins=yes --malloc-fill=0xff --free-fill=0xfe")
set (CTEST_TEST_TIMEOUT 1200 CACHE STRING
set (CTEST_TEST_TIMEOUT 600 CACHE STRING
"Maximum time allowed before CTest will kill the test.")
set (DART_TESTING_TIMEOUT 1200 CACHE STRING
set (DART_TESTING_TIMEOUT 600 CACHE STRING
"Maximum time allowed before CTest will kill the test." FORCE)
set (CTEST_SUBMIT_RETRY_DELAY 20 CACHE STRING
SET(CTEST_SUBMIT_RETRY_DELAY 20 CACHE STRING
"How long to wait between timed-out CTest submissions.")

3307
MANIFEST Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,18 +1,19 @@
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
##
## Makefile.am
## Run automake to generate a Makefile.in from this file.
##
#
#
# This makefile mostly just reinvokes make in the various subdirectories
# but does so in the correct order. You can alternatively invoke make from
# each subdirectory manually.
@@ -34,9 +35,6 @@
include $(top_srcdir)/config/commence.am
# include Doxygen rules (requires autoconf-archive >2016-03-20)
@DX_RULES@
# Define subdirectories to build.
## Automake understands that `make distclean' should recurse into
## conditional subdirectories even if `make all' does not.
@@ -47,10 +45,15 @@ include $(top_srcdir)/config/commence.am
# Since we're explicitly listing DIST_SUBDIRS, we also need to list
# directories that are only conditionally built (so that their Makefiles
# are cleaned as well).
# Note that `make clean' will not affect the examples or doc directories.
# Conditionals. These conditionals are defined during configure
# Define each variable to empty if it is not used to placate pmake
if BUILD_PARALLEL_CONDITIONAL
TESTPARALLEL_DIR =testpar
else
TESTPARALLEL_DIR=
endif
if BUILD_CXX_CONDITIONAL
CXX_DIR =c++
else
@@ -71,25 +74,10 @@ if BUILD_HDF5_HL_CONDITIONAL
else
HDF5_HL_DIR=
endif
if BUILD_TESTS_CONDITIONAL
TESTSERIAL_DIR =test
else
TESTSERIAL_DIR=
endif
if BUILD_TESTS_PARALLEL_CONDITIONAL
TESTPARALLEL_DIR =testpar
else
TESTPARALLEL_DIR=
endif
if BUILD_TOOLS_CONDITIONAL
TOOLS_DIR =tools
else
TOOLS_DIR=
endif
SUBDIRS = src $(TESTSERIAL_DIR) $(TESTPARALLEL_DIR) bin $(TOOLS_DIR) utils . \
$(CXX_DIR) $(FORTRAN_DIR) $(JAVA_DIR) $(HDF5_HL_DIR)
DIST_SUBDIRS = src test testpar tools utils . c++ fortran hl examples java
SUBDIRS = src test $(TESTPARALLEL_DIR) tools . $(CXX_DIR) $(FORTRAN_DIR) \
$(JAVA_DIR) $(HDF5_HL_DIR)
DIST_SUBDIRS = src test testpar tools . c++ fortran hl examples java
# Some files generated during configure that should be cleaned
DISTCLEANFILES=config/stamp1 config/stamp2
@@ -135,7 +123,7 @@ mostlyclean-local:
# 'make install' will now install examples, the same as 'make install-all'.
# 'make-install-all' will be redundant but will still work.
install: install-recursive install-examples
uninstall: uninstall-recursive uninstall-examples
# 'make install-all' also installs examples
install-all:
@@ -156,7 +144,7 @@ install-doc:
uninstall-doc:
@echo "docs no longer live in this tree. Use install-examples to install examples."
# `make check-install' or `make installcheck' checks that examples can
# be successfully built
installcheck-local:
if test -n "${DESTDIR}"; then \
@@ -189,25 +177,10 @@ check-all-install:
trace:
@(cd src && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1;
# doxygen support
if BUILD_DOXYGEN_CONDITIONAL
doxygen: doxygen-doc
endif
# Run tests with different Virtual File Drivers.
# Currently, only invoke check-vfd in the test directory.
check-vfd:
for d in src utils test; do \
if test $$d != .; then \
(cd $$d && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1; \
fi; \
done
# Run tests with different passthrough Virtual Object Layer Connectors.
# NOTE: Will only succeed with passthrough VOL connectors that use
# the native VOL connector as the terminal connector.
check-passthrough-vol:
for d in $(SUBDIRS); do \
for d in src test; do \
if test $$d != .; then \
(cd $$d && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1; \
fi; \

View File

@@ -1,12 +1,13 @@
# Top-level distributed Makefile -*- makefile -*-
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.

117
README.md
View File

@@ -1,117 +0,0 @@
HDF5 version 1.15.0 currently under development
![HDF5 Logo](doxygen/img/HDF5.png)
[![develop build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=develop&label=develop)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Adevelop)
[![1.14 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_14&label=1.14)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_14)
[![1.12 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_12&label=1.12)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_12)
[![1.10 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_10&label=1.10)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_10)
[![1.8 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_8&label=1.8)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_8)
[![BSD](https://img.shields.io/badge/License-BSD-blue.svg)](https://github.com/HDFGroup/hdf5/blob/develop/COPYING)
*Please refer to the release_docs/INSTALL file for installation instructions.*
This repository contains a high-performance library's source code and a file format
specification that implement the HDF5® data model. The model has been adopted across
many industries and this implementation has become a de facto data management standard
in science, engineering, and research communities worldwide.
The HDF Group is the developer, maintainer, and steward of HDF5 software. Find more
information about The HDF Group, the HDF5 Community, and other HDF5 software projects,
tools, and services at The HDF Group's website.
https://www.hdfgroup.org/
DOCUMENTATION
-------------
This release is fully functional for the API described in the documentation.
https://portal.hdfgroup.org/display/HDF5/The+HDF5+API
Full Documentation and Programming Resources for this release can be found at
https://portal.hdfgroup.org/display/HDF5
See the RELEASE.txt file in the release_docs/ directory for information specific
to the features and updates included in this release of the library.
Several more files are located within the release_docs/ directory with specific
details for several common platforms and configurations.
INSTALL - Start Here. General instructions for compiling and installing the library
INSTALL_CMAKE - instructions for building with CMake (Kitware.com)
INSTALL_parallel - instructions for building and configuring Parallel HDF5
INSTALL_Windows and INSTALL_Cygwin - MS Windows installations.
HELP AND SUPPORT
----------------
Information regarding Help Desk and Support services is available at
https://portal.hdfgroup.org/display/support/The+HDF+Help+Desk
FORUM and NEWS
--------------
The following public forums are provided for public announcements and discussions
of interest to the general HDF5 Community.
- Homepage of the Forum
https://forum.hdfgroup.org
- News and Announcement
https://forum.hdfgroup.org/c/news-and-announcements-from-the-hdf-group
- HDF5 and HDF4 Topics
https://forum.hdfgroup.org/c/hdf5
These forums are provided as an open and public service for searching and reading.
Posting requires completing a simple registration and allows one to join in the
conversation. Please read the following instructions pertaining to the Forum's
use and configuration
https://forum.hdfgroup.org/t/quickstart-guide-welcome-to-the-new-hdf-forum
RELEASE SCHEDULE
----------------
![HDF5 release schedule](doc/img/release-schedule.png)
HDF5 does not release on a regular schedule. Instead, releases are driven by
new features and bug fixes, though we try to have at least one release of each
maintenance branch per year. Future HDF5 releases indicated on this schedule
are tentative.
**NOTE**: HDF5 1.12 is being retired early due to its incomplete and incompatible VOL
layer.
| Release | New Features |
| ------- | ------------ |
| 1.8.23 | last HDF5 1.8 release |
| 1.10.10 | CVE fixes, performance improvements, H5Dchunk\_iter() |
| 1.12.3 | CVE fixes, performance improvements, H5Dchunk\_iter(), last HDF5 1.12 release |
| 1.14.1 | selection I/O with datatype conversion |
| 2.0.0 | TBD |
| TBD | VFD SWMR |
This list of feature release versions is also tentative, and the specific release
in which a feature is introduced may change.
SNAPSHOTS, PREVIOUS RELEASES AND SOURCE CODE
--------------------------------------------
Periodically development code snapshots are provided at the following URL:
https://gamma.hdfgroup.org/ftp/pub/outgoing/hdf5/snapshots/
Source packages for current and previous releases are located at:
https://portal.hdfgroup.org/display/support/Downloads
Development code is available at our Github location:
https://github.com/HDFGroup/hdf5.git

38
README.txt Normal file
View File

@@ -0,0 +1,38 @@
HDF5 version 1.11.0 currently under development
Please refer to the release_docs/INSTALL file for installation instructions.
------------------------------------------------------------------------------
This release is fully functional for the API described in the documentation.
See the RELEASE.txt file in the release_docs/ directory for information
specific to this release of the library. Several INSTALL* files can also be
found in the release_docs/ directory: INSTALL contains instructions for
compiling and installing the library; INSTALL_parallel contains instructions
for installing the parallel version of the library; similarly-named files
contain instructions for several environments on MS Windows systems.
Documentation for this release can be found at the following URL:
http://www.hdfgroup.org/HDF5/doc/.
The following mailing lists are currently set up for HDF5 Library users:
news - For announcements of HDF5 related developments,
not a discussion list.
hdf-forum - For general discussion of the HDF5 library with
other users.
To subscribe to a list, send mail to "<list>-subscribe@lists.hdfgroup.org".
where <list> is the name of the list. For example, send a request
to subscribe to the 'news' mail list to the following address:
news-subscribe@lists.hdfgroup.org
Messages sent to the list should be addressed to "<list>@lists.hdfgroup.org".
Periodic code snapshots are provided at the following URL:
ftp://ftp.hdfgroup.uiuc.edu/pub/outgoing/hdf5/snapshots
Please read the README.txt file in that directory before working with a
library snapshot.
The HDF5 website is located at http://hdfgroup.org/HDF5/
Bugs should be reported to help@hdfgroup.org.

View File

@@ -5,7 +5,7 @@
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#

View File

@@ -2,12 +2,13 @@ dnl -------------------------------------------------------------------------
dnl -------------------------------------------------------------------------
dnl
dnl Copyright by The HDF Group.
dnl Copyright by the Board of Trustees of the University of Illinois.
dnl All rights reserved.
dnl
dnl This file is part of HDF5. The full HDF5 copyright notice, including
dnl terms governing use, modification, and redistribution, is contained in
dnl the COPYING file, which can be found at the root of the source code
dnl dnl distribution tree, or in https://www.hdfgroup.org/licenses.
dnl dnl distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
dnl dnl If you do not have access to either file, you may request a copy from
dnl dnl help@hdfgroup.org.
dnl
@@ -20,9 +21,9 @@ dnl -------------------------------------------------------------------------
dnl _AC_SYS_LARGEFILE_MACRO_VALUE
dnl
dnl The following macro overrides the autoconf macro of the same name
dnl with this custom definition. This macro performs the same checks as
dnl autoconf's native _AC_SYS_LARGEFILE_MACRO_VALUE, but will also set
dnl AM_CPPFLAGS with the appropriate -D defines so additional configure
dnl sizeof checks do not fail.
dnl
# _AC_SYS_LARGEFILE_MACRO_VALUE(C-MACRO, VALUE,

View File

@@ -1,18 +1,18 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# A script to reconfigure autotools for HDF5, and to recreate other
# generated files specific to HDF5.
# generated files specifc to HDF5.
#
# IMPORTANT OS X NOTE
#
@@ -50,7 +50,17 @@
# Note that aclocal will attempt to include libtool's share/aclocal
# directory.
#
# Aside from -h for help, this script takes one potential option:
# This script takes two potential options:
#
# -p
#
# When this is selected, the autotools versions are set to the paths
# and versions used by The HDF Group to produce the released versions
# of the library.
#
# NOTE: This is probably temporary. Once we update our dev machines
# to have recent versions of the autotools this option will probably
# be removed.
#
# -v
#
@@ -62,6 +72,9 @@ echo "* HDF5 autogen.sh script *"
echo "**************************"
echo
# Default is not production
production=false
# Default is not verbose output
verbose=false
@@ -73,6 +86,10 @@ while getopts "$optspec" optchar; do
echo
echo " -h Print this help message."
echo
echo " -p Used by THG to use hard-codes autotools"
echo " paths on THG machines. Not for non-HDF-Group"
echo " users!"
echo
echo " -v Show more verbose output."
echo
echo " NOTE: Each tool can be set via an environment variable."
@@ -80,6 +97,11 @@ while getopts "$optspec" optchar; do
echo
exit 0
;;
p)
echo "Setting THG production mode..."
echo
production=true
;;
v)
echo "Setting verbosity: high"
echo
@@ -95,52 +117,84 @@ while getopts "$optspec" optchar; do
esac
done
# If paths to autotools are not specified, use whatever the system
# has installed as the default. We use 'command -v <tool>' to
# show exactly what's being used (shellcheck complains that 'which'
# is non-standard and deprecated).
if test -z "${HDF5_AUTOCONF}"; then
HDF5_AUTOCONF="$(command -v autoconf)"
fi
if test -z "${HDF5_AUTOMAKE}"; then
HDF5_AUTOMAKE="$(command -v automake)"
fi
if test -z "${HDF5_AUTOHEADER}"; then
HDF5_AUTOHEADER="$(command -v autoheader)"
fi
if test -z "${HDF5_ACLOCAL}"; then
HDF5_ACLOCAL="$(command -v aclocal)"
fi
if test -z "${HDF5_LIBTOOL}"; then
case "$(uname)" in
Darwin*)
# libtool on OS-X is non-gnu
HDF5_LIBTOOL="$(command -v glibtool)"
;;
*)
HDF5_LIBTOOL="$(command -v libtool)"
;;
esac
fi
if test -z "${HDF5_M4}"; then
HDF5_M4="$(command -v m4)"
fi
if [ "$production" = true ] ; then
# Production mode
#
# Hard-code canonical HDF Group tool locations.
# If paths to tools are not specified, assume they are
# located in /usr/hdf/bin/AUTOTOOLS and set paths accordingly.
if test -z ${HDF5_AUTOCONF}; then
HDF5_AUTOCONF=/usr/hdf/bin/AUTOTOOLS/autoconf
fi
if test -z ${HDF5_AUTOMAKE}; then
HDF5_AUTOMAKE=/usr/hdf/bin/AUTOTOOLS/automake
fi
if test -z ${HDF5_AUTOHEADER}; then
HDF5_AUTOHEADER=/usr/hdf/bin/AUTOTOOLS/autoheader
fi
if test -z ${HDF5_ACLOCAL}; then
HDF5_ACLOCAL=/usr/hdf/bin/AUTOTOOLS/aclocal
fi
if test -z ${HDF5_LIBTOOL}; then
HDF5_LIBTOOL=/usr/hdf/bin/AUTOTOOLS/libtool
fi
if test -z ${HDF5_M4}; then
HDF5_M4=/usr/hdf/bin/AUTOTOOLS/m4
fi
else
# Not in production mode
#
# If paths to autotools are not specified, use whatever the system
# has installed as the default. We use 'which <tool>' to
# show exactly what's being used.
if test -z ${HDF5_AUTOCONF}; then
HDF5_AUTOCONF=$(which autoconf)
fi
if test -z ${HDF5_AUTOMAKE}; then
HDF5_AUTOMAKE=$(which automake)
fi
if test -z ${HDF5_AUTOHEADER}; then
HDF5_AUTOHEADER=$(which autoheader)
fi
if test -z ${HDF5_ACLOCAL}; then
HDF5_ACLOCAL=$(which aclocal)
fi
if test -z ${HDF5_LIBTOOL}; then
case "`uname`" in
Darwin*)
# libtool on OS-X is non-gnu
HDF5_LIBTOOL=$(which glibtool)
;;
*)
HDF5_LIBTOOL=$(which libtool)
;;
esac
fi
if test -z ${HDF5_M4}; then
HDF5_M4=$(which m4)
fi
fi # production
# Make sure that these versions of the autotools are in the path
AUTOCONF_DIR=$(dirname "${HDF5_AUTOCONF}")
LIBTOOL_DIR=$(dirname "${HDF5_LIBTOOL}")
M4_DIR=$(dirname "${HDF5_M4}")
AUTOCONF_DIR=`dirname ${HDF5_AUTOCONF}`
LIBTOOL_DIR=`dirname ${HDF5_LIBTOOL}`
M4_DIR=`dirname ${HDF5_M4}`
PATH=${AUTOCONF_DIR}:${LIBTOOL_DIR}:${M4_DIR}:$PATH
# Make libtoolize match the specified libtool
case "$(uname)" in
case "`uname`" in
Darwin*)
# On OS X, libtoolize could be named glibtoolize or
# libtoolize. Try the former first, then fall back
# to the latter if it's not found.
HDF5_LIBTOOLIZE="${LIBTOOL_DIR}/glibtoolize"
if [ ! -f "$HDF5_LIBTOOLIZE" ] ; then
if [ ! -f $HDF5_LIBTOOLIZE ] ; then
HDF5_LIBTOOLIZE="${LIBTOOL_DIR}/libtoolize"
fi
;;
@@ -195,7 +249,7 @@ echo
# LIBTOOLIZE
libtoolize_cmd="${HDF5_LIBTOOLIZE} --copy --force"
echo "${libtoolize_cmd}"
echo ${libtoolize_cmd}
if [ "$verbose" = true ] ; then
${HDF5_LIBTOOLIZE} --version
fi
@@ -210,7 +264,7 @@ if test -e "${LIBTOOL_DIR}/../share/aclocal" ; then
aclocal_include="-I ${LIBTOOL_DIR}/../share/aclocal"
fi
aclocal_cmd="${HDF5_ACLOCAL} --force -I m4 ${aclocal_include}"
echo "${aclocal_cmd}"
echo ${aclocal_cmd}
if [ "$verbose" = true ] ; then
${HDF5_ACLOCAL} --version
fi
@@ -219,7 +273,7 @@ echo
# AUTOHEADER
autoheader_cmd="${HDF5_AUTOHEADER} --force"
echo "${autoheader_cmd}"
echo ${autoheader_cmd}
if [ "$verbose" = true ] ; then
${HDF5_AUTOHEADER} --version
fi
@@ -228,7 +282,7 @@ echo
# AUTOMAKE
automake_cmd="${HDF5_AUTOMAKE} --copy --add-missing --force-missing"
echo "${automake_cmd}"
echo ${automake_cmd}
if [ "$verbose" = true ] ; then
${HDF5_AUTOMAKE} --version
fi
@@ -236,11 +290,8 @@ ${automake_cmd} || exit 1
echo
# AUTOCONF
# The "obsolete" warnings category flags our Java macros as obsolete.
# Since there is no clear way to upgrade them (Java support in the Autotools
# is not great) and they work well enough for now, we suppress those warnings.
autoconf_cmd="${HDF5_AUTOCONF} --force --warnings=no-obsolete"
echo "${autoconf_cmd}"
autoconf_cmd="${HDF5_AUTOCONF} --force"
echo ${autoconf_cmd}
if [ "$verbose" = true ] ; then
${HDF5_AUTOCONF} --version
fi

13
bin/COPYING Executable file
View File

@@ -0,0 +1,13 @@
Copyright by The HDF Group and
The Board of Trustees of the University of Illinois.
All rights reserved.
The files and subdirectories in this directory are part of HDF5.
The full HDF5 copyright notice, including terms governing use,
modification, and redistribution, is contained in the COPYING file
which can be found at the root of the source code distribution tree
or in https://support.hdfgroup.org/ftp/HDF5/releases. If you do
not have access to either file, you may request a copy from
help@hdfgroup.org.

View File

@@ -1,56 +0,0 @@
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
##
## Makefile.am
## Run automake to generate a Makefile.in from this file.
#
# HDF5 Library Makefile(.in)
#
include $(top_srcdir)/config/commence.am
# Include src directory
AM_CPPFLAGS+=-I$(top_srcdir)/src -I$(top_srcdir)/tools/lib
# These are our main targets
bin_SCRIPTS=h5redeploy
# Tell automake to clean h5redeploy script
CLEANFILES=h5redeploy
# These were generated by configure. Remove them only when distclean.
DISTCLEANFILES=h5cc
# All programs rely on hdf5 library and h5tools library
LDADD=$(LIBH5TOOLS) $(LIBHDF5)
# How to build h5redeploy script
h5redeploy: h5redeploy.in
@cp $(srcdir)/$@.in $@
# h5cc needs custom install and uninstall rules, since it may be
# named h5pcc if hdf5 is being built in parallel mode.
if BUILD_PARALLEL_CONDITIONAL
H5CC_NAME=h5pcc
else
H5CC_NAME=h5cc
endif
$(DESTDIR)$(bindir):
echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \
$(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1;
install-exec-local: $(DESTDIR)$(bindir)
@$(INSTALL) h5cc $(DESTDIR)$(bindir)/$(H5CC_NAME)
uninstall-local:
@$(RM) $(DESTDIR)$(bindir)/$(H5CC_NAME)
include $(top_srcdir)/config/conclude.am

2
bin/README Normal file
View File

@@ -0,0 +1,2 @@
The daily tests run copies of some of the scripts in this directory from another repository, notably snapshot and runtest. The copies in this directory should work, but are not used in daily tests, though they should be tested occasionally.

View File

@@ -1,31 +0,0 @@
# Scripts in `bin` and their purpose
Programs run via `autogen.sh` (or the equivalent in CMake) are indicated.
|Program|Purpose|
|-------|-------|
|`buildhdf5`|Convenience script to build HDF5 using the Autotools|
|`checkapi`|Checks if public API calls are used in internal functions|
|`checkposix`|Checks if C/POSIX calls are prefixed with `HD`|
|`chkcopyright`|Checks if files have appropriate copyright statements|
|`cmakehdf5`|Convenience script to build HDF5 using CMake|
|`debug-ohdr`|Examines debug output from `H5O_open/close` to look for open objects|
|`format_source`|Runs `clang-format` over the source files, applying our rules|
|`genparser`|Creates the flex/bison-based parser files in the high-level library|
|`h5cc.in`|Input file from which h5cc is created|
|`h5redeploy.in`|Input file from which h5redeploy is created|
|`h5vers`|Updates the library version number|
|`make_err`|Generates the H5E header files (called in `autogen.sh`)|
|`make_vers`|Generates H5version.h (called in `autogen.sh`)|
|`make_overflow`|Generates H5overflow.h (called in `autogen.sh`)|
|`output_filter`|Used in the tools test code to strip extraneous output before we diff files|
|`restore.sh`|Removes files generated by `autogen.sh`|
|`runbkprog`|Used by CMake to run test programs in the background|
|`switch_maint_mode`|Switches maintainer mode on/off in `configure.ac`|
|`trace`|Adds `TRACE` macros to HDF5 C library source files (run by `autogen.sh`)|
|`warnhist`|Generates compiler warning statistics for gcc/clang when fed output of make|
## TODO
* chkcopyright is currently semi-broken as it doesn't handle the full variety of copyright headers we need. We're leaving it in place, though, in the hopes that someone will update it in the future.
* Extending warnhist to better understand the output of additional compilers/languages would be nice.

View File

@@ -1,23 +0,0 @@
#!/bin/bash -l
if [ $# -gt 0 ]; then
SUMMARY_FILE=$1
fi
ACCOUNT_ID=@ACCOUNT_ID@
echo "Run parallel test command. Test output will be in build/${SUMMARY_FILE}"
CTEST_CMD=`which ctest`
#SKIPTESTS <<KEYWORD:script inserts list of skips tests here -- don't remove>>
cd @HDF5_BINARY_DIR@
if [[ $SUMMARY_FILE == *"ctestS"* ]]; then
CMD="${CTEST_CMD} -S ctest_serial.cmake"
qsub -t 60 -n 1 -q debug-flat-quad -A ${ACCOUNT_ID} ${CMD} >& ${SUMMARY_FILE}
echo "Done running ctest serial command."
touch ctestS.done
else
CMD="${CTEST_CMD} -S ctest_parallel.cmake"
qsub -t 60 -n 1 -q debug-flat-quad -A ${ACCOUNT_ID} ${CMD} >& ${SUMMARY_FILE}
echo "Done running ctest parallel command."
touch ctestP.done
fi

View File

@@ -1,20 +0,0 @@
#!/bin/tcsh
### LSF syntax
#BSUB -nnodes 1 #number of nodes
#BSUB -W 30 #walltime in minutes
#BSUB -G guests #account
#BSUB -e ctestPerrors.txt #stderr
#BSUB -o ctestPoutput.txt #stdout
#BSUB -J hdf5_ctestP #job
##BSUB -q pbatch #queue to use
#BSUB -q pdebug
##date; hostname
##echo -n 'JobID is '; echo $LSB_JOBID
cd @HDF5_BINARY_DIR@
echo "Run parallel test command. Test output will be in build/ctestP.out"
ctest -S ctest_parallel.cmake >& ctestP.out
echo "Done running ctest parallel command."
touch ctestP.done

View File

@@ -1,15 +0,0 @@
#!/bin/bash
#SBATCH --nodes=1
#SBATCH -t 00:30:00
#SBATCH --mail-type=BEGIN,END,FAIL
##SBATCH --mail-user=<username>@sandia.gov
#SBATCH --export=ALL
#SBATCH --job-name=h5_ctestP
cd @HDF5_BINARY_DIR@
echo "Run parallel test command. Test output will be in build/ctestP.out"
ctest -S ctest_parallel.cmake >& ctestP.out
echo "Done running ctest parallel command."
touch ctestP.done

View File

@@ -1,17 +0,0 @@
#!/bin/tcsh
### LSF syntax
#BSUB -nnodes 1 #number of nodes
#BSUB -W 29 #walltime in minutes
#BSUB -G guests #account
#BSUB -e ctestSerrors.txt #stderr
#BSUB -o ctestSoutput.txt #stdout
#BSUB -J hdf5_ctestS #job
##BSUB -q pbatch #queue to use
#BSUB -q pdebug
cd @HDF5_BINARY_DIR@
echo "Run command. Test output will be in build/ctestS.out"
ctest -S ctest_serial.cmake >& ctestS.out
echo "Done running command."
touch ctestS.done

View File

@@ -1,15 +0,0 @@
#!/bin/bash
#SBATCH --nodes=1
#SBATCH -t 00:30:00
#SBATCH --mail-type=BEGIN,END,FAIL
##SBATCH --mail-user=<username>@sandia.gov
#SBATCH --export=ALL
#SBATCH --job-name=h5_ctestS
cd @HDF5_BINARY_DIR@
echo "Run command. Test output will be in build/ctestS.out"
ctest -S ctest_serial.cmake >& ctestS.out
echo "Done running command."
touch ctestS.done

View File

@@ -1,12 +0,0 @@
if(NOT "$ENV{CI_SITE_NAME}" STREQUAL "")
set(CTEST_SITE "$ENV{CI_SITE_NAME}")
endif()
if(NOT "$ENV{CI_BUILD_NAME}" STREQUAL "")
set(CTEST_BUILD_NAME "$ENV{CI_BUILD_NAME}")
endif()
ctest_start ("$ENV{CI_MODEL}" "@HDF5_SOURCE_DIR@" "@HDF5_BINARY_DIR@" APPEND)
ctest_test (BUILD "@HDF5_BINARY_DIR@" APPEND INCLUDE MPI_TEST_ RETURN_VALUE res)
if (${res} LESS 0 OR ${res} GREATER 0)
file (APPEND ${CTEST_SCRIPT_DIRECTORY}/FailedCTest.txt "Failed Tests: ${res}\n")
endif ()

View File

@@ -1,12 +0,0 @@
if(NOT "$ENV{CI_SITE_NAME}" STREQUAL "")
set(CTEST_SITE "$ENV{CI_SITE_NAME}")
endif()
if(NOT "$ENV{CI_BUILD_NAME}" STREQUAL "")
set(CTEST_BUILD_NAME "$ENV{CI_BUILD_NAME}")
endif()
ctest_start ("$ENV{CI_MODEL}" "@HDF5_SOURCE_DIR@" "@HDF5_BINARY_DIR@" APPEND)
ctest_test (BUILD "@HDF5_BINARY_DIR@" APPEND EXCLUDE MPI_TEST_ PARALLEL_LEVEL 32 RETURN_VALUE res)
if (${res} LESS 0 OR ${res} GREATER 0)
file (APPEND ${CTEST_SCRIPT_DIRECTORY}/FailedCTest.txt "Failed Tests: ${res}\n")
endif ()

View File

@@ -1,20 +0,0 @@
#!/bin/bash
#SBATCH -p knl -C quad
#SBATCH --nodes=1
#SBATCH -t 00:10:00
#SBATCH --mail-type=BEGIN,END,FAIL
#SBATCH --mail-user=<username>@sandia.gov
#SBATCH --export=ALL
#SBATCH --job-name=knl_h5detect
# Inputs: Build directory, output file name, executable file name (username/email if available).
PROGNAME=H5detect
OUTPUT=H5Tinit.c
CMD="@HDF5_BINARY_DIR@/bin/${PROGNAME} @HDF5_GENERATED_SOURCE_DIR@/${OUTPUT}"
echo "Run $CMD"
srun -n 1 $CMD
echo "Done running $CMD"

View File

@@ -1,16 +0,0 @@
#!/bin/bash
#SBATCH -p knl -C quad,cache
#SBATCH --nodes=1
#SBATCH -t 00:30:00
#SBATCH --mail-type=BEGIN,END,FAIL
##SBATCH --mail-user=<username>@sandia.gov
#SBATCH --export=ALL
#SBATCH --job-name=h5_ctestP
cd @HDF5_BINARY_DIR@
echo "Run parallel test command. Test output will be in build/ctestP.out"
ctest -S ctest_parallel.cmake >& ctestP.out
echo "Done running ctest parallel command."
touch ctestP.done

View File

@@ -1,16 +0,0 @@
#!/bin/bash
#SBATCH -p knl -C quad,cache
#SBATCH --nodes=1
#SBATCH -t 00:30:00
#SBATCH --mail-type=BEGIN,END,FAIL
##SBATCH --mail-user=<username>@sandia.gov
#SBATCH --export=ALL
#SBATCH --job-name=h5_ctestS
cd @HDF5_BINARY_DIR@
echo "Run command. Test output will be in build/ctestS.out"
ctest -S ctest_serial.cmake >& ctestS.out
echo "Done running command."
touch ctestS.done

View File

@@ -1,22 +0,0 @@
#!/bin/tcsh
### LSF syntax
#BSUB -n 6 #number of nodes
#BSUB -R "span[ptile=6]"
#BSUB -W 30 #walltime in minutes
#BSUB -G guests #account
#BSUB -e ctestPerrors.txt #stderr
#BSUB -o ctestPoutput.txt #stdout
#BSUB -J hdf5_ctestP #job
##BSUB -q pbatch #queue to use
#BSUB -q pdebug
##date; hostname
##echo -n 'JobID is '; echo $LSB_JOBID
cd @HDF5_BINARY_DIR@
echo "Run parallel test command. Test output will be in build/ctestP.out"
ctest -S ctest_parallel.cmake >& ctestP.out
echo "Done running ctest parallel command."
touch ctestP.done
~

View File

@@ -1,17 +0,0 @@
#!/bin/tcsh
### LSF syntax
#BSUB -n 1 #number of nodes
#BSUB -W 29 #walltime in minutes
#BSUB -G guests #account
#BSUB -e ctestSerrors.txt #stderr
#BSUB -o ctestSoutput.txt #stdout
#BSUB -J hdf5_ctestS #job
##BSUB -q pbatch #queue to use
#BSUB -q pdebug
cd @HDF5_BINARY_DIR@
echo "Run command. Test output will be in build/ctestS.out"
ctest -S ctest_serial.cmake >& ctestS.out
echo "Done running command."
touch ctestS.done

View File

@@ -1,7 +0,0 @@
#!/bin/tcsh
# ray.llnl.gov requires a '<' with bsub for submitting .lsf batch jobs.
# CMake is reluctant to pass the '<', so we put it in this script and use
# the script to submit the bsub command on ray.
bsub < $1

View File

@@ -1,25 +1,45 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Make a release of hdf5.
#
# NOTE:
# This script differs from bin/release in that this has an added
# --revision option to create private releases with the code revision
# hash in the version strings.
# Programmer: Robb Matzke
# Creation date: on or before 1998-01-29.
#
# This script can probably be merged into the original release script in
# the future.
# Modifications
# Robb Matzke, 1999-07-16
# The SunOS 5.6 sed *must* have slashes as delimiters. I changed things like
# `sed s+/CVS++' to `sed 's/\/CVS//'
#
# Albert Cheng, 1999-10-26
# Moved the MANIFEST checking to a separate command file so that
# it can be invoked individually.
#
# Albert Cheng, 2004-08-14
# Added the --private option.
#
# James Laird, 2005-09-07
# Added the md5 method.
#
# Larry Knox, 2016-08-30
# Added the --revision option to create private releases with the
# code revision hash in the version strings. Currently the version
# of this script with the --revision option is named bbrelease. It
# can probably be merged into the original release script in the
# future.
# Commands to get the revision hash have now been converted to git
# to match the source repository change.
# Function definitions
#
@@ -27,27 +47,27 @@
USAGE()
{
cat << EOF
Usage: $0 -d <dir> [-h] [--private] [--revision [--branch BRANCHNAME]] <methods> ...
-d DIR The name of the directory where the release(s) should be
Usage: $0 -d <dir> [--docver BRANCHNAME] [-h] [--nocheck] [--private] <methods> ...
-d DIR The name of the directory where the releas(es) should be
placed.
--branch BRANCHNAME This is to get the correct version of the branch name from the
--docver BRANCHNAME This is added for 1.8 and beyond to get the correct
version of documentation files from the hdf5docs
repository. BRANCHNAME for v1.8 should be hdf5_1_8.
-h print the help page.
--private Make a private release with today's date in version information.
--revision Make a private release with the code revision number in version information.
This allows --branch to be used for the file name.
--branch BRANCHNAME This is to get the correct version of the branch name from the
repository. BRANCHNAME for v1.8 should be hdf5_1_8.
--nocheck Ignore errors in MANIFEST file.
--private Make a private release with today's date in version information.
--revision Make a private release with the code revision number in version information.
This must be run at the top level of the source directory.
The other command-line options are the names of the programs to use
for compressing the resulting tar archive (if none are given then
"tar" is assumed):
tar -- use tar and don't do any compressing.
gzip -- use gzip with "-9" and append ".gz" to the output name.
bzip2 -- use bzip2 with "-9" and append ".bz2" to the output name.
zip -- convert all text files to DOS style and form a zip file for Windows use.
doc -- produce the latest doc tree in addition to the archive.
An md5 checksum is produced for each archive created and stored in the md5 file.
@@ -77,10 +97,15 @@ EOF
# Function name: tar2zip
# Convert the release tarball to a Windows zipball.
#
# Programmer: Albert Cheng
# Creation date: 2014-04-23
#
# Modifications
#
# Steps:
# 1. untar the tarball in a temporary directory;
# 1. untar the tarball in a temporay directory;
# Note: do this in a temporary directory to avoid changing
# the original source directory which may be around.
# the original source directory which maybe around.
# 2. convert all its text files to DOS (LF-CR) style;
# 3. form a zip file which is usable by Windows users.
#
@@ -94,8 +119,8 @@ EOF
tar2zip()
{
if [ $# -ne 3 ]; then
echo "usage: tar2zip <tarfilename> <zipfilename>"
return 1
echo "usage: tar2zip <tarfilename> <zipfilename>"
return 1
fi
ztmpdir=/tmp/tmpdir$$
mkdir -p $ztmpdir
@@ -107,23 +132,23 @@ tar2zip()
(cd $ztmpdir; tar xf -) < $tarfile
# sanity check
if [ ! -d $ztmpdir/$version ]; then
echo "untar did not create $ztmpdir/$version source dir"
# cleanup
rm -rf $ztmpdir
return 1
echo "untar did not create $ztmpdir/$version source dir"
# cleanup
rm -rf $ztmpdir
return 1
fi
# step 2: convert text files
# There maybe a simpler way to do this.
# options used in unix2dos:
# -k   Keep the date stamp
# -q quiet mode
# grep redirect output to /dev/null because -q or -s are not portable.
find $ztmpdir/$version | \
while read inf; do \
if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \
unix2dos -q -k $inf; \
fi\
done
# step 3: make zipball
# -9 maximum compression
# -y Store symbolic links as such in the zip archive
@@ -138,6 +163,14 @@ tar2zip()
# This command must be run at the top level of the hdf5 source directory.
# Verify this requirement.
# Since we are running bbrelease to create an HDF5 source tarfile for buildbot
# testing with source that is not for release, there is not a file named
# "configure" but there will be one named "configure.ac". The "configure"
# file will be created when autogen.sh runs. There probably will always
# be a bin/release file, but just in case it is removed, we can check for
# this script, bbrelease, in the bin directory. The bin/release script should
# continue to check for "configure" because it should be present in release
# source.
if [ ! \( -f configure.ac -a -f bin/bbrelease \) ]; then
echo "$0 must be run at the top level of the hdf5 source directory"
exit 1
@@ -149,22 +182,24 @@ VERS=`perl bin/h5vers`
VERS_OLD=
test "$VERS" || exit 1
verbose=yes
check=yes
release_date=`date +%F`
today=`date +%Y%m%d`
pmode='no'
revmode='no'
tmpdir="../#release_tmp.$$" # tmp work directory
tmpdir="../#release_tmp.$$" # tmp work directory
DOC_URL=https://git@bitbucket.hdfgroup.org/scm/hdffv/hdf5doc.git
CPPLUS_RM_NAME=cpplus_RM
# Restore previous Version information
RESTORE_VERSION()
{
if [ X-${VERS_OLD} != X- ]; then
echo restoring version information back to $VERS_OLD
rm -f config/lt_vers.am
cp $tmpdir/lt_vers.am config/lt_vers.am
bin/h5vers -s $VERS_OLD
VERS_OLD=
fi
}
@@ -174,32 +209,35 @@ while [ -n "$1" ]; do
arg=$1
shift
case "$arg" in
-d)
DEST=$1
shift
;;
-h)
USAGE
exit 0
;;
--private)
pmode=yes
;;
-d)
DEST=$1
shift
;;
--nocheck)
check=no
;;
-h)
USAGE
exit 0
;;
--private)
pmode=yes
;;
--revision)
revmode=yes
;;
--branch)
BRANCHNAME=$1
--docver)
DOCVERSION=$1
shift
;;
-*)
echo "Unknown switch: $arg" 1>&2
USAGE
exit 1
;;
*)
methods="$methods $arg"
;;
esac
done
@@ -208,7 +246,7 @@ if [ "X$methods" = "X" ]; then
methods="tar"
fi
# Create the temporary work directory.
# Create the temporay work directory.
if mkdir $tmpdir; then
echo "temporary work directory for release. "\
"Can be deleted after release completes." > $tmpdir/README
@@ -238,17 +276,14 @@ if [ X$revmode = Xyes ]; then
# Copy old version of config/lt_vers.am, since it's hard to
# "undo" changes to it.
cp config/lt_vers.am $tmpdir
if [ "${BRANCHNAME}" = "" ]; then
BRANCHNAME=`git symbolic-ref -q --short HEAD`
fi
branch=`git branch | grep '*' | awk '{print $NF}'`
revision=`git rev-parse --short HEAD`
# Set version information to m.n.r-r$revision.
# (h5vers does not correctly handle just m.n.r-$today.)
VERS=`echo $VERS | sed -e s/-.*//`-$revision
echo Private release of $VERS
HDF5_VERS=hdf5-$BRANCHNAME-$revision
echo file base of $HDF5_VERS
bin/h5vers -s $VERS
HDF5_VERS=hdf5-$branch-$revision
# use a generic directory name for revision releases
HDF5_IN_VERS=hdfsrc
else
@@ -264,17 +299,30 @@ if [ ! -d $DEST ]; then
exit 1
fi
# Create a symlink to the source so files in the tarball have the prefix
# we want (gnu's --transform isn't portable)
ln -s `pwd` $tmpdir/$HDF5_IN_VERS || exit 1
# Check the validity of the MANIFEST file.
bin/chkmanifest || fail=yes
if [ "X$fail" = "Xyes" ]; then
if [ $check = yes ]; then
exit 1
else
echo "Continuing anyway..."
fi
fi
# Create a manifest that contains only files for distribution.
MANIFEST=$tmpdir/H5_MANIFEST
grep '^\.' MANIFEST | grep -v _DO_NOT_DISTRIBUTE_ >$MANIFEST
# Prepare the source tree for a release.
#ln -s `pwd` $tmpdir/$HDF5_VERS || exit 1
ln -s `pwd` $tmpdir/$HDF5_IN_VERS || exit 1
# Save a backup copy of Makefile if exists.
test -f Makefile && mv Makefile $tmpdir/Makefile.x
cp -p Makefile.dist Makefile
# Update README.md and release_docs/RELEASE.txt with release information in
# Update README.txt and release_docs/RELEASE.txt with release information in
# line 1.
for f in README.md release_docs/RELEASE.txt; do
for f in README.txt release_docs/RELEASE.txt; do
echo "HDF5 version $VERS released on $release_date" >$f.x
sed -e 1d $f >>$f.x
mv $f.x $f
@@ -282,38 +330,64 @@ for f in README.md release_docs/RELEASE.txt; do
chmod 644 $f
done
# trunk is different than branches.
if [ "${DOCVERSION}" ]; then
DOC_URL=https://git@bitbucket.hdfgroup.org/scm/hdffv/hdf5doc.git -b ${DOCVERSION}
fi
# Create the tar file
test "$verbose" && echo " Running tar..." 1>&2
(cd "$tmpdir" && exec tar -ch --exclude-vcs -f "$HDF5_VERS.tar" "./$HDF5_IN_VERS" || exit 1 )
( \
cd $tmpdir; \
tar cf $HDF5_VERS.tar $HDF5_IN_VERS/Makefile \
`sed 's/^\.\//'$HDF5_IN_VERS'\//' $MANIFEST` || exit 1 \
)
# Compress
MD5file=$HDF5_VERS.md5
cp /dev/null $DEST/$MD5file
for comp in $methods; do
case $comp in
tar)
cp -p $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.tar
(cd $DEST; md5sum $HDF5_VERS.tar >> $MD5file)
;;
gzip)
test "$verbose" && echo " Running gzip..." 1>&2
gzip -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.gz
(cd $DEST; md5sum $HDF5_VERS.tar.gz >> $MD5file)
;;
bzip2)
test "$verbose" && echo " Running bzip2..." 1>&2
bzip2 -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.bz2
(cd $DEST; md5sum $HDF5_VERS.tar.bz2 >> $MD5file)
;;
zip)
test "$verbose" && echo " Creating zip ball..." 1>&2
tar2zip $HDF5_IN_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2
(cd $DEST; md5sum $HDF5_VERS.zip >> $MD5file)
;;
doc)
if [ "${DOCVERSION}" = "" ]; then
DOCVERSION=master
fi
test "$verbose" && echo " Creating docs..." 1>&2
# Check out docs from git repo
(cd $tmpdir; git clone $DOC_URL > /dev/null) || exit 1
# Create doxygen C++ RM
(cd c++/src && doxygen cpp_doc_config > /dev/null ) || exit 1
# Replace version of C++ RM with just-created version
rm -rf $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME
mv c++/src/$CPPLUS_RM_NAME $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME
# Compress the docs and move them to the release area
mv $tmpdir/$DOCVERSION $tmpdir/${HDF5_VERS}_docs
(cd $tmpdir && tar cf ${HDF5_VERS}_docs.tar ${HDF5_VERS}_docs)
mv $tmpdir/${HDF5_VERS}_docs.tar $DEST
;;
*)
echo "***Error*** Unknown method $comp"
exit 1
;;
esac
done
@@ -334,6 +408,4 @@ fi
# Remove temporary things
rm -rf $tmpdir
echo "DONE"
exit 0

View File

@@ -1,12 +1,13 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
@@ -177,7 +178,7 @@ LOCATE_SZLIB()
esac
;; # end of case ncsa
unknown)
# Unknown domain. Give a shot at the some standard places.
# Unknow domain. Give a shot at the some standard places.
szlibpaths="/usr/local"
;;
esac # end of case $mydomain

View File

@@ -1,19 +1,18 @@
#!/usr/bin/env perl
#!/usr/bin/perl -w
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
require 5.003;
use warnings;
# Purpose: insures that API functions aren't called internally.
# Usage: checkapi H5*.c
my $filename = "";

View File

@@ -1,263 +1,113 @@
#!/usr/bin/env perl
#!/usr/bin/perl -w
require 5.003;
use warnings;
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Dana Robinson
# Spring 2019
# (Original by Robb Matzke)
# Robb Matzke, matzke@llnl.gov
# 30 Aug 1997
#
# Purpose: Given the names of C source files this script will print the
# file name, line number, and function name of any function that
# doesn't begin with the letter 'h' or 'H' as stipulated by the
# HDF5 programming style guide.
# Purpose: Given the names of C source files this script will print the
# file name, line number, and function name of any function that
# doesn't begin with the letter `h' or `H' as stipulated by the
# HDF5 programming style guide.
#
# Emacs users can run this script as the compile command and
# use 'next-error' (usually bound to M-`) to find each name
# violation.
# Emacs users can run this script as the compile command and
# use `next-error' (usually bound to M-`) to find each name
# violation.
use File::Basename;
# Loop over all files passed to the function
foreach $arg (@ARGV) {
# Get the filename from the path
$filename = fileparse($arg);
# Skip files that don't include H5private.h
# H5system.c has to be inspected by hand since it wraps POSIX calls
#
# H5detect and H5make_libsettings are created before the library exists
# so calls that link to function replacements won't work. We'll ignore
# it here.
#
# If a user specifies one file, process it no matter what so people
# can inspect files we normally skip (like H5system.c).
$ignore = 0;
# Ignored files in src/
if($#ARGV gt 0 and $filename =~ /H5FDmulti|H5FDstdio|H5VLpassthru|H5system|H5detect|H5make_libsettings/) {
$ignore = 1;
}
# Ignored atomic test files in test/
if($#ARGV gt 0 and $filename =~ /atomic_reader|atomic_writer/) {
$ignore = 1;
}
# Ignored filter plugins in test/
if($#ARGV gt 0 and $filename =~ /^filter_plugin\d_/) {
$ignore = 1;
}
# Ignored generators in test/
if($#ARGV gt 0 and $filename =~ /^gen_/) {
$ignore = 1;
if(<>) {
if($ARGV =~ /\//) {
($filename) = ($ARGV =~ /^.*\/([A-Za-z0-9_]*)\.c$/);
} else {
($filename) = ($ARGV =~ /([A-Za-z0-9_]*)\.c$/);
}
if($ignore) {
print "$filename is exempt from using Standard library macro wrappers\n";
next;
}
if($filename =~ /H5FDmulti|H5FDstdio/) {
print "$ARGV is exempt from using Standard library macro wrappers\n";
} else {
while (<>) {
# Open the file
open(my $fh, "<", $arg) or do {
warn "NOTE: Unable to open $arg: $!\n";
next;
};
# Get rid of comments by removing the inside part.
s|/\*.*?\*/||g;
if ($in_comment) {
if (/\*\//) {
s|.*?\*/||;
$in_comment = 0;
} else {
$_="\n";
}
} elsif (m|/\*|) {
s|/\*.*||;
$in_comment = 1;
}
# Loop over all lines in the file to find undecorated functions
while (<$fh>) {
# Get rid of string constants if they begin and end on this line.
s/([\'\"])([^\1]|\\\1)*?\1/$1$1/g;
# Get rid of comments by removing the inside part.
s|/\*.*?\*/||g;
if ($in_comment) {
if (/\*\//) {
s|.*?\*/||;
$in_comment = 0;
} else {
$_="\n";
}
} elsif (m|/\*|) {
s|/\*.*||;
$in_comment = 1;
}
# Get rid of preprocessor directives
s/^\#.*//;
# Get rid of string constants if they begin and end on this line.
s/([\'\"])([^\1]|\\\1)*?\1/$1$1/g;
# Skip callbacks invoked as methods in a struct
next if $_ =~ /\b(\)?->|\.)\(?([a-z_A-Z]\w*)\s*\(/;
# Get rid of preprocessor directives
s/^\#.*//;
# Skip callbacks invoked as methods in a struct
next if $_ =~ /\b(\)?]?->|\.)\(?([a-z_A-Z]\w*)\s*\(/;
# Now find all function calls on this line which don't start with 'H'
while (($name)=/\b([a-z_A-GI-Z]\w*)\s*\(/) {
$_ = $';
# Now find all function calls on this line which don't start with 'H'
while (($name)=/\b([a-z_A-GI-Z]\w*)\s*\(/) {
$_ = $';
# Ignore C statements that look sort of like function
# calls.
next if $name =~ /^(if|for|offsetof|return|sizeof|switch|while|void)$/;
# Ignore C statements that look sort of like function
# calls.
next if $name =~ /^(if|for|offsetof|return|sizeof|switch|while|void)$/;
# Ignore things that get misdetected because of the simplified
# parsing that takes place here.
next if $name =~ /^(int|herr_t|_term_interface|_term_package)$/;
# Ignore things that get misdetected because of the simplified
# parsing that takes place here.
next if $name =~ /^(int|herr_t|_term_interface)$/;
# These are really HDF5 functions/macros even though they don't
# start with `h' or `H'.
next if $name =~ /^FUNC_(ENTER|LEAVE)(_(NO)?API|_PACKAGE|_STATIC)?(_NAMECHECK_ONLY|_NOFS|_NOCLEAR|_NOINIT|_NOPUSH)?(_NOFUNC|_TAG)?$/;
next if $name =~ /^(BEGIN|END)_FUNC$/;
next if $name =~ /^U?INT(8|16|32|64)(ENCODE|DECODE)(_VAR)?$/;
next if $name =~ /^CI_(PRINT_STATS|INC_SRC|INC_DST)$/;
next if $name =~ /^(ABS|ADDR_OVERFLOW|ALL_MEMBERS|BOUND|CONSTR|DETECT_[I|F|M]|DOWN)$/;
next if $name =~ /^(MIN3?|MAX3?|NELMTS|POWER_OF_TWO|REGION_OVERFLOW)$/;
next if $name =~ /^(SIZE_OVERFLOW|UNIQUE_MEMBERS|S_ISDIR)$/;
next if $name =~ /^addr_defined$/;
next if $name =~ /^TERMINATOR$/;
# These are really HDF5 functions/macros even though they don't
# start with `h' or `H'.
next if $name =~ /^FUNC_(ENTER|LEAVE)(_(NO)?API|_PACKAGE|_STATIC)?(_NOFS|_NOCLEAR|_NOINIT)?(_NOFUNC|_TAG)?$/;
next if $name =~ /^(BEGIN|END)_FUNC$/;
next if $name =~ /^U?INT(8|16|32|64)(ENCODE|DECODE)(_VAR)?$/;
next if $name =~ /^CI_(PRINT_STATS|INC_SRC|INC_DST)$/;
next if $name =~ /^(ABS|ADDR_OVERFLOW|ALL_MEMBERS|BOUND|CONSTR|DETECT_[I|F|M]|DOWN)$/;
next if $name =~ /^(MIN3?|MAX3?|NELMTS|POWER_OF_TWO|REGION_OVERFLOW)$/;
next if $name =~ /^(UNIQUE_MEMBERS)$/;
next if $name =~ /^addr_defined$/;
# Ignore callback invocation
next if $name =~ /^(op|cb|OP|iter_op|func)$/;
# These functions/macros are exempt.
next if $name =~ /^(main|[fs]?printf|va_(start|arg|end))$/;
# Ignore main
next if $name =~ /^main$/;
# These are Windows system calls. Ignore them.
next if $name =~ /^(_get_osfhandle|GetFileInformationByHandle|SetFilePointer|GetLastError|SetEndOfFile)$/;
next if $name =~ /^(FindNextFile|FindClose|_tzset|Wgettimeofday|GetSystemTimeAsFileTime|Wgetlogin|GetUserName)$/;
# This often appears in preprocessor lines that span multiple lines
next if $name =~ /^(defined)$/;
# These are MPI function calls. Ignore them.
next if $name =~ /^(MPI_|MPE_)/;
# These are Windows system calls. Ignore them.
next if $name =~ /^(_get_osfhandle|GetFileInformationByHandle|SetFilePointer|GetLastError|SetEndOfFile)$/;
next if $name =~ /^(FindNextFile|FindClose|_tzset|Wgettimeofday|GetSystemTimeAsFileTime|GetUserName)$/;
next if $name =~ /^(DeleteCriticalSection|TlsFree|TlsGetValue|CreateThread)$/;
next if $name =~ /^(ExpandEnvironmentStringsA|LockFileEx|UnlockFileEx)$/;
next if $name =~ /^(DllMain|LocalAlloc|LocalFree)$/;
next if $name =~ /^(FindFirstFileA|FindNextFileA)$/;
next if $name =~ /^(_beginthread|(Initialize|Enter|Leave)CriticalSection|TlsAlloc)$/;
# These are POSIX threads function calls. Ignore them.
next if $name =~ /^pthread_/;
# These are MPI function calls. Ignore them.
next if $name =~ /^(MPI_)/;
# These are Windows threads function calls. Ignore them.
next if $name =~ /^(_beginthread|(Initialize|Enter|Leave)CriticalSection|TlsAlloc)$/;
# These are POSIX threads function calls. Ignore them.
next if $name =~ /^pthread_/;
# These are zlib & szlib function calls. Ignore them.
next if $name =~ /^(inflate|SZ_)/;
next if $name =~ /^compress2$/;
# These are zlib & szlib function calls. Ignore them.
next if $name =~ /^(inflate|SZ_)/;
next if $name =~ /^compress2$/;
print "$ARGV:$.: $name\n";
}
# This is an H5Dfill function. Ignore it in this file.
if($filename =~ /H5Dfill/) {
next if $name =~ /^(alloc_func)$/;
}
# These are H5Zscaleoffset functions. Ignore them in this file.
if($filename =~ /H5Zscaleoffset/) {
next if $name =~ /^(pow_fun|round_fun|abs_fun|lround_fun|llround_fun)$/;
}
# This is a macro parameter in H5Rint.c. Ignore it in this file.
if($filename =~ /H5Rint/) {
next if $name =~ /^(func)$/;
}
# Internal calls in the HDFS VFD (H5FDhdfs.c). Ignore it in this file.
if($filename =~ /H5FDhdfs/) {
next if $name =~ /^(hdfs)/;
}
# Macros, etc. from the mirror VFD (H5FDmirror.c). Ignore in this file.
if($filename =~ /H5FDmirror/) {
next if $name =~ /^(LOG)/;
next if $name =~ /^(BSWAP_64|is_host_little_endian)$/;
}
# These are things in H5FDs3comms.c and H5FDros3.c. Ignore them in these files.
if($filename =~ /H5FDs3comms|H5FDros3/) {
next if $name =~ /^(curl_|curlwritecallback|gmnow)/;
next if $name =~ /^(ros3_|ROS3_|S3COMMS_)/;
next if $name =~ /^(EVP_sha256|SHA256|ISO8601NOW)$/;
}
# TESTING (not comprehensive - just noise reduction)
# Test macros and functions (testhdf5.h)
next if $name =~ /^(AddTest|TestErrPrintf|TestSummary|TestCleanup|TestShutdown)$/;
next if $name =~ /^(CHECK|CHECK_PTR|CHECK_PTR_NULL|CHECK_PTR_EQ|CHECK_I)$/;
next if $name =~ /^(VERIFY|VERIFY_STR|VERIFY_TYPE|MESSAGE|ERROR)$/;
# Test macros and functions (h5test.h)
next if $name =~ /^(TESTING|PASSED|SKIPPED|PUTS_ERROR|FAIL_PUTS_ERROR|FAIL_STACK_ERROR|TEST_ERROR|AT)$/;
next if $name =~ /^(GetTestExpress)$/;
# Ignore functions that start with test_ or check_
next if $name =~ /^test_/;
next if $name =~ /^check_/;
# Ignore functions that start with h5_
next if $name =~ /^h5_/;
# Ignore process completed status
next if $name =~ /(WIFEXITED|WEXITSTATUS|WIFSIGNALED|WTERMSIG|WCOREDUMP|WIFSTOPPED|WSTOPSIG)/;
# Ignore usage functions
next if $name =~ /^usage$/;
# Ignore callbacks
next if $name =~ /(_cb\d?)$/;
# Specific tests (not even remotely comprehensive)
# accum test code
if($filename =~ /accum/) {
next if $name =~ /^(accum_)/;
}
# cache test code
if($filename =~ /cache/) {
next if $name =~ /(_entry|_entries|_cache|_check|_dependency|_status|_op)$/;
next if $name =~ /^(verify_|smoke_check_|row_major_|col_major_)/;
next if $name =~ /^(resize_configs_are_equal|CACHE_ERROR)$/
}
# Splitter VFD test code. Ignore in vfd.c.
if($filename =~ /vfd/) {
next if $name =~ /^(SPLITTER_|splitter_)/;
next if $name =~ /(_splitter_)/;
next if $name =~ /^(file_exists)$/;
}
# S3 VFD test code. Ignore in ros3.c and s3comms.c.
# HDFS VFD test code. Ignore in hdfs.c.
if($filename =~ /ros3|s3comms|hdfs/) {
next if $name =~ /^(JSVERIFY|JSFAILED_|JSERR_|jserr_|FAIL_)/;
next if $name =~ /^(curl_)/;
next if $name =~ /^(S3COMMS_FORMAT_CREDENTIAL|ISO8601NOW|gmnow)$/;
}
# VDS test code. Ignore in vds.c.
if($filename =~ /vds/) {
next if $name =~ /^(vds_)/;
}
print "$filename:$.: $name\n";
} continue {
close ARGV if eof; # reset line number
}
}
# Close the file
close($fh);
}
if($#ARGV gt 0) {
print "\n";
print "NOTE:\n";
print "If any files were skipped due to being exempt, you can inspect them manually\n";
print "by using this script on them one at a time, which will always process the file.\n";
}

bin/chkconfigure Executable file
View File

@@ -0,0 +1,82 @@
#!/bin/sh
##
## Copyright by the Board of Trustees of the University of Illinois.
## All rights reserved.
##
## This file is part of HDF5. The full HDF5 copyright notice, including
## terms governing use, modification, and redistribution, is contained in
## the COPYING file, which can be found at the root of the source code
## distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
## If you do not have access to either file, you may request a copy from
## help@hdfgroup.org.
##
# Check that all the configure files are properly generated.
#
# Programmer: Albert Cheng
# Created Date: 2004/12/07
#
# Configure: should be generated by autoconf version 2.69.
# autoconf: should be of version 2.69.
# variable initialization
nerrors=0
AUTOCONFVERSION=2.69
AUTOCONFVERSIONLEAD='Generated by GNU Autoconf'
CONFIGUREFILES="configure"
# Function definitions
#
# PRINTMSG
# Print a one line message left justified in a field of 70 characters
# without newline. More output for this line later.
#
PRINTMSG() {
SPACES=" "
echo "$* $SPACES" | cut -c1-70 | tr -d '\012'
}
# print result passed.
PASSED() {
echo " PASSED"
}
# print result failed.
FAILED() {
echo "*FAILED*"
}
# Main body
# Check configure files
# The autoconf version should be among the first 5 lines.
echo "Check autoconf version. Should be version $AUTOCONFVERSION"
for xf in $CONFIGUREFILES; do
PRINTMSG $xf
if [ ! -f $xf ]; then
FAILED
echo File not found
nerrors=`expr $nerrors + 1`
continue
fi
autoconf_version=`head -5 $xf | grep "$AUTOCONFVERSIONLEAD"`
echo $autoconf_version | grep "$AUTOCONFVERSIONLEAD $AUTOCONFVERSION" > /dev/null 2>&1
if [ $? -eq 0 ]; then
PASSED
else
FAILED
echo "Expected: $AUTOCONFVERSIONLEAD $AUTOCONFVERSION"
echo "Got: $autoconf_version"
nerrors=`expr $nerrors + 1`
fi
done
# Summary
echo $0 found $nerrors errors
if [ $nerrors != 0 ]; then
exit 1
fi
exit 0
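An illustrative passing run, following the PRINTMSG/PASSED logic above (column width approximate; the invocation path is an assumption):

    $ bin/chkconfigure
    Check autoconf version. Should be version 2.69
    configure                                                          PASSED
    bin/chkconfigure found 0 errors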

View File

@@ -1,14 +1,16 @@
#! /bin/sh
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Check Copyright notice.
# Check that all the files have the proper copyright notice.
@@ -37,8 +39,8 @@ NFIXEDFILES=0 # Number of files fixed.
NFIXFAILEDFILES=0 # Number of files fix failed.
NUMBEGINLINES=60 # Copyright notice should be located within
# this number of lines at the beginning of the file.
THGCOPYRIGHTSTR="Copyright by The HDF Group."
UICOPYRIGHTSTR="Copyright by the Board of Trustees of the University of Illinois"
THGCOPYRIGHTSTR="Copyright by The HDF Group."
PASSEDLOG=/tmp/h5chkright_passed.$$
SKIPPEDLOG=/tmp/h5chkright_skipped.$$
@@ -109,92 +111,113 @@ BUILDCOPYRIGHT()
# C and C++ source Copyright notice
cat > ${C_COPYRIGHT} << \EOF
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
* distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* the files COPYING and Copyright.html. COPYING can be found at the root *
* of the source code distribution tree; Copyright.html can be found at the *
* root level of an installed copy of the electronic HDF5 document set and *
* is linked from the top-level documents page. It can also be found at *
* http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
* access to either file, you may request a copy from help@hdfgroup.org. *
EOF
# Fortran9X source Copyright notice
cat > ${FTN_COPYRIGHT} << \EOF
! Copyright by The HDF Group. *
! Copyright by the Board of Trustees of the University of Illinois. *
! All rights reserved. *
! *
! This file is part of HDF5. The full HDF5 copyright notice, including *
! terms governing use, modification, and redistribution, is contained in *
! the COPYING file, which can be found at the root of the source code *
! distribution tree, or in https://www.hdfgroup.org/licenses. *
! If you do not have access to either file, you may request a copy from *
! help@hdfgroup.org. *
! the files COPYING and Copyright.html. COPYING can be found at the root *
! of the source code distribution tree; Copyright.html can be found at the *
! root level of an installed copy of the electronic HDF5 document set and *
! is linked from the top-level documents page. It can also be found at *
! http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
! access to either file, you may request a copy from help@hdfgroup.org. *
EOF
# HTML file Copyright notice
cat > ${HTM_COPYRIGHT} << \EOF
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
* distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* the files COPYING and Copyright.html. COPYING can be found at the root *
* of the source code distribution tree; Copyright.html can be found at the *
* root level of an installed copy of the electronic HDF5 document set and *
* is linked from the top-level documents page. It can also be found at *
* http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
* access to either file, you may request a copy from help@hdfgroup.org. *
EOF
# Shell style Copyright notice
cat > ${SH_COPYRIGHT} << \EOF
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
# the files COPYING and Copyright.html. COPYING can be found at the root
# of the source code distribution tree; Copyright.html can be found at the
# root level of an installed copy of the electronic HDF5 document set and
# is linked from the top-level documents page. It can also be found at
# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
# access to either file, you may request a copy from help@hdfgroup.org.
EOF
# Shell style Copyright notice (2nd type)
cat > ${SH_COPYRIGHT2} << \EOF
## Copyright by The HDF Group.
## Copyright by the Board of Trustees of the University of Illinois.
## All rights reserved.
##
## This file is part of HDF5. The full HDF5 copyright notice, including
## terms governing use, modification, and redistribution, is contained in
## the COPYING file, which can be found at the root of the source code
## distribution tree, or in https://www.hdfgroup.org/licenses.
## If you do not have access to either file, you may request a copy from
## help@hdfgroup.org.
## the files COPYING and Copyright.html. COPYING can be found at the root
## of the source code distribution tree; Copyright.html can be found at the
## root level of an installed copy of the electronic HDF5 document set and
## is linked from the top-level documents page. It can also be found at
## http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
## access to either file, you may request a copy from help@hdfgroup.org.
EOF
# Windows Batch file Copyright notice
cat > ${WINBAT_COPYRIGHT} << \EOF
@REM Copyright by The HDF Group.
@REM Copyright by the Board of Trustees of the University of Illinois.
@REM All rights reserved.
@REM
@REM This file is part of HDF5. The full HDF5 copyright notice, including
@REM terms governing use, modification, and redistribution, is contained in
@REM the COPYING file, which can be found at the root of the source code
@REM distribution tree, or in https://www.hdfgroup.org/licenses.
@REM If you do not have access to either file, you may request a copy from
@REM help@hdfgroup.org.
@REM the files COPYING and Copyright.html. COPYING can be found at the root
@REM of the source code distribution tree; Copyright.html can be found at the
@REM root level of an installed copy of the electronic HDF5 document set and
@REM is linked from the top-level documents page. It can also be found at
@REM http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
@REM access to either file, you may request a copy from help@hdfgroup.org.
EOF
# configure.ac file Copyright notice
cat > ${CONFIGURE_AC_COPYRIGHT} << \EOF
dnl Copyright by The HDF Group.
dnl Copyright by the Board of Trustees of the University of Illinois.
dnl All rights reserved.
dnl
dnl This file is part of HDF5. The full HDF5 copyright notice, including
dnl terms governing use, modification, and redistribution, is contained in
dnl the COPYING file, which can be found at the root of the source code
dnl distribution tree, or in https://www.hdfgroup.org/licenses.
dnl If you do not have access to either file, you may request a copy from
dnl help@hdfgroup.org.
dnl the files COPYING and Copyright.html. COPYING can be found at the root
dnl of the source code distribution tree; Copyright.html can be found at the
dnl root level of an installed copy of the electronic HDF5 document set and
dnl is linked from the top-level documents page. It can also be found at
dnl http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
dnl access to either file, you may request a copy from help@hdfgroup.org.
EOF
}
@@ -341,7 +364,7 @@ FindLineInFile()
# $1 file which contains the expected copyright notice.
# $2 file in which to look for the copyright notice.
# Copyright notice must be found within the beginning $NUMBEGINLINES of lines.
# Hunt for the particular string $THGCOPYRIGHTSTR which signifies the beginning
# Hunt for the particular string $UICOPYRIGHTSTR which signifies the beginning
# of the copyright notice.
#
MATCH_COPYRIGHT()
@@ -356,7 +379,7 @@ MATCH_COPYRIGHT()
nlines=`wc -l ${COPYRIGHTFILE} | cut -f1 -d' '`
# Find a line that contains the copyright string and its line number in
# the file.
begin=`FindLineInFile "${THGCOPYRIGHTSTR}" $f`
begin=`FindLineInFile "${UICOPYRIGHTSTR}" $f`
if [ "$begin" -le 0 ] ; then
# Not found, generate an empty dummy file
cp /dev/null ${EXTRACTEDFILE}
@@ -381,7 +404,7 @@ MATCH_COPYRIGHT()
# $1 file which contains the expected copyright notice.
# $2 file in which to look for the copyright notice.
# Copyright notice must be found within the beginning $NUMBEGINLINES of lines.
# Hunt for the particular string $THGCOPYRIGHTSTR which signifies the beginning
# Hunt for the particular string $UICOPYRIGHTSTR which signifies the beginning
# of the copyright notice.
#
FIX_COPYRIGHT()
@@ -404,12 +427,7 @@ FIX_COPYRIGHT()
# the file.
insertbegin=`FindLineInFile "${THGCOPYRIGHTSTR}" $f`
if [ $insertbegin -gt 0 ]; then
insertUIbegin=`FindLineInFile "${UICOPYRIGHTSTR}" $f`
if [ $insertUIbegin -gt 0 ]; then
insertend=`expr $insertbegin + $nlines + 1`
else
insertend=`expr $insertbegin + $nlines`
fi
insertend=`expr $insertbegin + $nlines` # no need to subtract 1. See below.
else
insertbegin=`FindLineInFile "${UICOPYRIGHTSTR}" $f`
if [ $insertbegin -gt 0 ]; then

bin/chkmanifest Executable file
View File

@@ -0,0 +1,154 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Check that all the files in MANIFEST exist and (if this is a
# GIT checkout) that all the GIT-managed files appear in the
# MANIFEST.
#
verbose=yes
MANIFEST=/tmp/HD_MANIFEST.$$
AUTOGEN=./autogen.sh
AUTOGEN_LOG=/tmp/autogen.log.$$
# Main
test "$verbose" && echo " Checking MANIFEST..." 1>&2
# clean up $MANIFEST file when exits
trap "rm -f $MANIFEST" 0
# Only split lines on newline, not whitespace
set -f
IFS='
'
# First make sure we are in a directory that contains a MANIFEST file
# and then do the checking from there. Try the following, in order:
# the current directory, the parent directory, and the directory in
# which this command resides.
if [ -f MANIFEST ]; then
continue
elif [ -f ../MANIFEST ]; then
cd ..
else
commanddir=`dirname $0`
if [ -d "$commanddir" -a -f $commanddir/MANIFEST ]; then
cd $commanddir
continue
else
echo MANIFEST file not found. Abort.
exit 1
fi
fi
# Run autogen if the generated files (e.g., configure) are not present
if [ ! -f configure ]; then
echo " running $AUTOGEN"
$AUTOGEN > $AUTOGEN_LOG 2>&1
if [ $? -ne 0 ]; then
echo $AUTOGEN encountered error. Abort.
echo output from $AUTOGEN:
cat $AUTOGEN_LOG
exit 1
fi
rm $AUTOGEN_LOG
fi
# Check for duplicate entries. This can be done at any time, but it may as
# well be sooner so that if something else fails the presence of duplicates
# will already be known.
errcode=0
DUPLICATES=`perl -ne 's/#.*//; next if /^\s*$/; if ($uniq{$_}++) { print $_; }' MANIFEST`
if [ "$DUPLICATES" ]; then
cat 1>&2 <<EOF
These entries appear more than once in the MANIFEST:
$DUPLICATES
Please remove the duplicate lines and try again.
EOF
errcode=1
fi
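The Perl one-liner above strips comments and blank lines, then prints any line it has already seen; a self-contained demonstration:

    # Prints "./a" once, because it occurs twice in the input:
    printf './a\n./b\n./a\n' | \
        perl -ne 's/#.*//; next if /^\s*$/; print $_ if $uniq{$_}++;'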
# Copy the manifest file to get a list of file names.
grep '^\.' MANIFEST | expand | cut -f1 -d' ' >$MANIFEST
for file in `cat $MANIFEST`; do
if [ ! -f $file ]; then
echo "- $file"
fail=yes
fi
done
# Get the list of files under version control and check that they are
# present.
#
# First get a list of all the pending files with git status and
# check those.
git_stat=`git status -s`
for file in $git_stat; do
# Newly added files are not listed by git ls-files, which
# we check later.
# The line listing new files starts with 'A'.
letter=`echo $file | head -c 1`
if [ "$letter" = "A" ]; then
# Convert the git status columns to './' so it matches
# the manifest file name.
#
# There is a space between the status columns and file name, hence
# the '3'.
path=`echo $file | sed 's/^.\{3\}/\.\//g'`
# Ignore directories
if [ ! -d $path ]; then
if (grep ^$path$ $MANIFEST >/dev/null); then
:
else
echo "- $path"
fail=yes
fi
fi
fi
done
# Next check git ls-files, which gets a list of all files that are
# checked in.
git_ls=`git ls-files`
for file in $git_ls; do
path="./${file}"
# Ignore directories
if [ ! -d $path ]; then
if (grep ^$path$ $MANIFEST >/dev/null); then
:
else
echo "+ $path"
fail=yes
fi
fi
done
# Finish up
if [ "X$fail" = "Xyes" ]; then
cat 1>&2 <<EOF
The MANIFEST is out of date. Files marked with a minus sign (-) no
longer exist; files marked with a plus sign (+) are GIT-managed but do
not appear in the MANIFEST. Please remedy the situation and try again.
EOF
exit 1
fi
if [ $errcode -ne 0 ]; then
exit 1
fi
test "$verbose" && echo " The MANIFEST is up to date." 1>&2
exit 0
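For reference, a hypothetical failing run interleaves the two markers described in the message above (file names invented for illustration):

    - ./release_docs/OLD_NOTES.txt     (in MANIFEST, missing from the tree)
    + ./src/H5newmod.c                 (git-managed, absent from MANIFEST)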

View File

@@ -46,9 +46,8 @@ cacheinit=$srcdir/config/cmake/cacheinit.cmake
build_cpp_lib=-DHDF5_BUILD_CPP_LIB:BOOL=OFF # C++ interface default off
build_fortran=-DHDF5_BUILD_FORTRAN:BOOL=OFF # Fortran interface default off
build_hl_lib=-DHDF5_BUILD_HL_LIB:BOOL=ON # High Level interface default on
build_threadsafe=-DHDF5_ENABLE_THREADSAFE:BOOL=OFF # Threadsafe feature default off
build_threadsafe=-DHDF5_ENABLE_THREADSAFE:BOOL=OFF # Threadsafe feature default off
build_testing=-DBUILD_TESTING:BOOL=ON # Build tests default on
build_test_shell=-DTEST_SHELL_SCRIPTS:BOOL=ON # Run shell script tests default on
build_tools=-DHDF5_BUILD_TOOLS:BOOL=ON # Build tools default on
with_zlib=-DHDF5_ENABLE_Z_LIB_SUPPORT=ON # enable zlib filter default on
with_szlib=-DHDF5_ENABLE_SZIP_SUPPORT=OFF # enables szip filter default off
@@ -199,7 +198,7 @@ DUMP_LOGFILE()
# Show a start time stamp
TIMESTAMP
# Initialize njobs if $MAKE is defined
if [ -n "$MAKE" ]; then
# assume all arguments are for --jobs
njobs=`echo $MAKE | cut -s -d' ' -f2-`
@@ -257,12 +256,6 @@ while [ $# -gt 0 ]; do
--disable-testing)
build_testing=-DBUILD_TESTING:BOOL=OFF
;;
--enable-shell-testing)
build_test_shell=-DTEST_SHELL_SCRIPTS:BOOL=ON
;;
--disable-shell-testing)
build_test_shell=-DTEST_SHELL_SCRIPTS:BOOL=OFF
;;
--with-zlib)
with_zlib=-DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=ON
;;
@@ -349,7 +342,6 @@ STEP "Configure..." \
$build_threadsafe \
$shared_lib \
$build_testing \
$build_test_shell \
$build_tools \
$with_zlib \
$with_szlib \
@@ -365,7 +357,7 @@ STEP "Test the library and tools..." "ctest . -C Release $njobs" $testlog
# 7. Create an install image with this command:
STEP "Create an install image..." "cpack -C Release CPackConfig.cmake" $packlog
# The implementation of installation is incomplete (only works for Linux).
# Screen it out for now till it is completed.
if false; then
# 8. Install with this command:

View File

@@ -1,12 +1,13 @@
#!/usr/bin/env perl
#!/usr/bin/perl
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#

bin/dependencies Executable file
View File

@@ -0,0 +1,49 @@
#!/usr/bin/perl -w
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
my $depend_file;
my $new_depend_file;
my $srcdir;
my $top_srcdir;
my $top_builddir;
while ($_ = shift @ARGV) {
if (/^--top_srcdir=([^ \t\n]*)/) {
$top_srcdir = $1;
$top_srcdir =~ s/\+/\\\+/g;
$top_srcdir =~ s/\./\\\./g;
} elsif (/^--top_builddir=([^ \t\n]*)/) {
$top_builddir = $1;
$top_builddir =~ s/\+/\\\+/g;
$top_builddir =~ s/\./\\\./g;
} else {
$depend_file = $_;
$new_depend_file = "$_.new";
last;
}
}
open(DEPEND, "<$depend_file") || die "cannot open file $depend_file: $!\n";
open(NEW, ">$new_depend_file") || die "cannot open file $new_depend_file: $!\n";
while (<DEPEND>) {
s/\.o(\b)/\.lo$1/g;
s/ $top_srcdir/ \$\(top_srcdir\)/g;
s/ $top_builddir/ \$\(top_builddir\)/g;
print NEW $_;
}
close(DEPEND);
close(NEW);
`mv $new_depend_file $depend_file`;
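An invocation sketch straight from the option parsing above (paths are placeholders); the script rewrites the named dependency file in place, turning .o targets into libtool .lo and substituting $(top_srcdir)/$(top_builddir) for the absolute paths:

    perl bin/dependencies --top_srcdir=/home/user/hdf5 \
        --top_builddir=/home/user/hdf5/build src/.depend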

bin/deploy Executable file
View File

@@ -0,0 +1,58 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Deploy the HDF5 binary.
#
# Programmer: Albert Cheng
# Created Date: 2004/12/15
#
# Modifications
# Function definitions
#
# Print Usage page
USAGE()
{
cat << EOF
Usage: $0 <dir>
Install the binary to directory <dir>
Examples:
$ bin/deploy /usr/local/hdf5
....
EOF
}
# Variables
if [ $# != 1 ]; then
USAGE
exit 1
fi
installdir=$1
# create installdir if it does not exist yet.
if [ -d $installdir ] || mkdir $installdir ; then
${MAKE:-gmake} install prefix=$installdir && \
( cd $installdir/bin; ./h5redeploy -force)
exit $?
else
echo $installdir is not a valid directory
USAGE
exit 1
fi

bin/distdep Executable file
View File

@@ -0,0 +1,24 @@
#!/usr/bin/perl -p
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Usage: $0 [<].depend
# Takes dependency info and generates on stdout dependencies suitable for
# distribution by removing all the system include files from the list and
# removing all but the base name of other include files (since the Makefiles
# contain the logic for searching).
($h,$_)=/\s*\\$/?($h.$`,""):("",$h.$_);
s|( +/\S*)*( *)|$2?" \\\n ":""|eg;
#s|(([-\w\.]+)/)+([-\w\.]+)|\3|g;
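Roughly, the active substitution drops runs of absolute (system) include paths and reflows what remains onto continuation lines; relative entries survive. An illustrative before/after on one dependency line:

    # before:  H5A.lo: /usr/include/stdio.h ../src/H5private.h
    # after:   H5A.lo: \
    #            ../src/H5private.h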

bin/errors Executable file
View File

@@ -0,0 +1,138 @@
#!/usr/local/bin/perl -w
require 5.003;
use Text::Tabs;
# NOTE: THE FORMAT OF HRETURN_ERROR AND HGOTO_ERROR MACROS HAS
# CHANGED. THIS SCRIPT NO LONGER WORKS! --rpm
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Robb Matzke, matzke@llnl.gov
# 30 Aug 1997
#
# Purpose: This script will read standard input which should be a
# function prologue followed by a C function and will emit
# on standard output the same source code with the function
# prologue containing documentation for the various errors
# that occur in the function.
#
# Errors are raised by calling HGOTO_ERROR() or
# HRETURN_ERROR(). The reason for the error message is a
# comment which appears immediately after the error macro
# call and is contained entirely on one line:
#
# HRETURN_ERROR (...); /*entry not found*/
#
# If such a comment doesn't exist, then the previous comment
# is used, subject to the constraint that raising an error
# clears the previous comment.
#
# /* Entry not found */
# HGOTO_ERROR (...);
#
# Emacs users can use this script interactively with the
# c-mark-function and shell-command-on-region functions which
# are normally bound to M-C-h and M-|.
# Split STDIN into the prolog and the function body. Preserve leading
# white space.
$_ = join "", <STDIN>;
my ($head, $prolog, $body) = (/^(\s*)(\/\*(.*?)\*\/)?(.*)/s)[0,2,3];
$prolog = "" unless $prolog;
# Find each error and the comment that goes with it.
for ($_=$body,$comment=""; /\/\*|H(RETURN|GOTO)_ERROR/s;) {
$_ = $&.$';
if (/^H(RETURN|GOTO)_ERROR\s*\(\s*H5E_(\w+)\s*,\s*H5E_(\w+)\s*,/s) {
($major, $minor, $_) = ($2, $3, $');
$comment=$1 if /^.*?\)\s*;\s*\/\*\s*(.*?)\s*\*\//;
$comment =~ s/^\s*\*+\s*/ /mg; # leading asterisks.
$comment =~ s/^\s+//s; # leading white space.
$comment =~ s/\s+$//s; # trailing white space.
$comment =~ s/(\w)$/$1./s; # punctuation.
$comment ||= "***NO COMMENT***";
$errors{"$major\000$minor\000\u$comment"} = 1;
$comment = "";
} else {
($comment) = /^\/\*\s*(.*?)\s*\*\//s;
$_ = $';
}
}
# Format an error so it isn't too wide.
sub fmt_error ($) {
local ($_) = @_;
my ($prefix,$space,$err) = /^((.*?)([A-Z_0-9]+\s+[A-Z_0-9]+\s+))/;
$_ = $';
tr/\n / /s;
my $w = 70 - length expand $prefix;
s/(.{$w}\S+)\s+(\S)/$1."\n".$space.' 'x(length $err).$2/eg;
return $prefix . $_."\n";
}
# Sort the errors by major, then minor, then comment. Duplicate
# triplets have already been removed.
sub by_triplet {
my ($a_maj, $a_min, $a_com) = split /\000/, $a;
my ($b_maj, $b_min, $b_com) = split /\000/, $b;
$a_maj cmp $b_maj || $a_min cmp $b_min || $a_com cmp $b_com;
}
@errors = map {sprintf "%-9s %-13s %s\n", split /\000/}
sort by_triplet keys %errors;
# Add the list of errors to the prologue depending on the type of
# prolog.
if (($front, $back) = $prolog=~/^(.*?Errors:\s*?(?=\n)).*?\n\s*\*\s*\n(.*)/s) {
#| * Errors: |#
#| * __list_of_error_messages__ (zero or more lines) |#
#| * |#
print $head, "/*", $front, "\n";
map {print fmt_error " *\t\t".$_} @errors;
print " *\n", $back, "*/", $body;
} elsif (($front,$back) = $prolog =~
/(.*?\n\s*ERRORS:?\s*?(?=\n)).*?\n\s*\n(.*)/s) {
#| ERRORS |#
#| __list_of_error_messages__ (zero or more lines) |#
#| |#
print $head, "/*", $front, "\n";
map {print fmt_error " ".$_} @errors;
print "\n", $back, "*/", $body;
} elsif ($prolog eq "") {
# No prolog present.
print $head;
print " \n/*", "-"x73, "\n * Function:\t\n *\n * Purpose:\t\n *\n";
print " * Errors:\n";
map {print fmt_error " *\t\t".$_} @errors;
print " *\n * Return:\tSuccess:\t\n *\n *\t\tFailure:\t\n *\n";
print " * Programmer:\t\n *\n * Modifications:\n *\n *", '-'x73, "\n";
print " */\n", $body;
} else {
# Prolog format not recognized.
print $head, "/*", $prolog, "*/\n\n";
print "/*\n * Errors returned by this function...\n";
map {print fmt_error " *\t".$_} @errors;
print " */\n", $body;
}
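Given the warning at the top that the macro format has changed and the script no longer works, any invocation is historical. It expected a single prologue-plus-function on standard input (file names here are hypothetical):

    bin/errors < one_function.c > one_function.annotated.c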

View File

@@ -1,26 +0,0 @@
#!/bin/bash
#
# Recursively format all C & C++ sources and header files, except those in the
# 'config' directory and generated files, such as H5LTanalyze.c, etc.
#
# Note that any files or directories that are excluded here should also be
# added to the 'exclude' list in .github/workflows/clang-format-check.yml
#
# (Remember to update both bin/format_source and bin/format_source_patch)
find . \( -type d -path ./config -prune -and -not -path ./config \) \
-or \( \( \! \( \
-name H5LTanalyze.c \
-or -name H5LTparse.c \
-or -name H5LTparse.h \
-or -name H5Epubgen.h \
-or -name H5Einit.h \
-or -name H5Eterm.h \
-or -name H5Edefin.h \
-or -name H5version.h \
-or -name H5overflow.h \
\) \) \
-and \( -iname *.h -or -iname *.c -or -iname *.cpp -or -iname *.hpp -or -iname *.java \) \) \
| xargs clang-format -style=file -i -fallback-style=none
exit 0

bin/gcov_script Executable file
View File

@@ -0,0 +1,51 @@
#! /bin/sh
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
# A script to generate coverage files for HDF5 using gcov.
# Configures, builds, and runs tests in-place; the output files will be placed
# in a directory called gcov_logs.
# Must be invoked from the root hdf5 directory.
# This script has been tested on kagiso.
CFLAGS="$CFLAGS -ftest-coverage -fprofile-arcs"
export CFLAGS
LDFLAGS="$LDFLAGS -lgcov"
export LDFLAGS
CC=gcc
export CC
./configure
make
make check
mkdir gcov_logs
cd src
for j in *.h *.c
do
ln -s ../$j .libs/$j
done
cd .libs
for j in *.gcda
do
gcov -b $j >> gcov.log 2>&1
done
for j in *.gcov
do
mv $j ../../gcov_logs
done
mv gcov.log ../../gcov_logs
for j in *.c *.h
do
rm $j
done
cd ../..
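Once the script finishes, the per-file reports collect in gcov_logs; a quick way to skim the summaries it gathered (GNU grep assumed):

    # Each gcov run logs a "File '...'" line followed by its coverage figure
    grep -A1 "^File" gcov_logs/gcov.log | head -n 20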

View File

@@ -1,12 +1,12 @@
#! /bin/bash
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
@@ -21,13 +21,13 @@
#
# There is NO dependency in either the autotools or CMake to regenerate
# the parser code. If you modify H5LTanalyze.l or H5LTparse.y, you
# will need to run this script manually on a system with a suitable
# lexer and parser generator.
#
# IMPORTANT OS X NOTE
#
# If you are using OS X, you will probably not have flex or bison
# installed. In addition, even if you do have bison installed, the bison
# version you have installed may also have a bug that makes it unable to
# process our input files.
#
@@ -200,34 +200,28 @@ if [ "$verbose" = true ] ; then
fi
${HDF5_FLEX} --nounistd -PH5LTyy -o ${path_to_hl_src}/H5LTanalyze.c ${path_to_hl_src}/H5LTanalyze.l
# fix H5LTparse.c and H5LTparse.h to declare H5LTyyparse return type as an
# hid_t instead of int. Currently the generated function H5LTyyparse is
# fix H5LTparse.c to declare H5LTyyparse return type as an hid_t
# instead of int. Currently the generated function H5LTyyparse is
# generated with a return value of type int, which is a mapping to the
# flex yyparse function. The return value in the HL library should be
# an hid_t.
# I propose to not use flex to generate this function, but for now I am
# an hid_t.
# I propose to not use flex to generate this function, but for now I am
# adding a perl command to find and replace this function declaration in
# H5LTparse.c.
perl -0777 -pi -e 's/int yyparse/hid_t yyparse/igs' ${path_to_hl_src}/H5LTparse.c
perl -0777 -pi -e 's/int\nyyparse/hid_t\nyyparse/igs' ${path_to_hl_src}/H5LTparse.c
perl -0777 -pi -e 's/int H5LTyyparse/hid_t H5LTyyparse/igs' ${path_to_hl_src}/H5LTparse.c
perl -0777 -pi -e 's/int yyparse/hid_t yyparse/igs' ${path_to_hl_src}/H5LTparse.h
perl -0777 -pi -e 's/int\nyyparse/hid_t\nyyparse/igs' ${path_to_hl_src}/H5LTparse.h
perl -0777 -pi -e 's/int H5LTyyparse/hid_t H5LTyyparse/igs' ${path_to_hl_src}/H5LTparse.h
# Add code that disables warnings in the flex/bison-generated code.
#
# Note that the GCC pragmas did not exist until gcc 4.2. Earlier versions
# will simply ignore them, but we want to avoid those warnings.
#
# Note also that although clang defines __GNUC__, it doesn't support every
# warning that GCC does.
for f in ${path_to_hl_src}/H5LTparse.c ${path_to_hl_src}/H5LTanalyze.c
do
echo '#if defined (__GNUC__) ' >> tmp.out
echo '#if ((__GNUC__ * 100) + __GNUC_MINOR__) >= 402 ' >> tmp.out
echo '#if __GNUC__ >= 4 && __GNUC_MINOR__ >=2 ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wconversion" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wimplicit-function-declaration" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wlarger-than=" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wmissing-prototypes" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wnested-externs" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wold-style-definition" ' >> tmp.out
@@ -236,20 +230,11 @@ do
echo '#pragma GCC diagnostic ignored "-Wsign-conversion" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wstrict-overflow" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wstrict-prototypes" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wimplicit-fallthrough" ' >> tmp.out
echo '#if !defined (__clang__) ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wlarger-than=" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wsuggest-attribute=const" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wsuggest-attribute=pure" ' >> tmp.out
echo '#endif ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wswitch-default" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wunused-function" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wunused-macros" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wunused-parameter" ' >> tmp.out
echo '#endif ' >> tmp.out
echo '#if ((__GNUC__ * 100) + __GNUC_MINOR__) >= 600 ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wnull-dereference" ' >> tmp.out
echo '#endif ' >> tmp.out
echo '#elif defined __SUNPRO_CC ' >> tmp.out
echo '#pragma disable_warn ' >> tmp.out
echo '#elif defined _MSC_VER ' >> tmp.out

View File

@@ -7,16 +7,17 @@ require 5.003;
use strict;
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Robb Matzke
# Robb Matzke <matzke@llnl.gov>
# 17 July 1998
### Purpose
@@ -65,7 +66,7 @@ use strict;
# ./H5public.h or ./src/H5public.h.
#
# If the version number is changed (either `-s' or `-i' was used on
# the command line) then the version line of the README.md and RELEASE.txt files
# the command line) then the first line of the README.txt and RELEASE.txt files
# one directory above the H5public.h file is also modified so it looks
# something like: This is hdf5-1.2.3-pre1 currently under development.
# The AC_INIT macro in configure.ac will also change in this case to be
@@ -155,10 +156,10 @@ while ($_ = shift) {
}
die "mutually exclusive options given\n" if $set && $inc;
# Determine file to use as H5public.h, README.md,
# Determine file to use as H5public.h, README.txt,
# release_docs/RELEASE.txt, configure.ac, windows/src/H5pubconf.h
# config/lt_vers.am and config/cmake/scripts/HDF5config.cmake.
# The README.md, release_docs/RELEASE.txt, configure.ac,
# The README.txt, release_docs/RELEASE.txt, configure.ac,
# windows/src/H5pubconf.h, config/lt_vers.am and
# config/cmake/scripts/HDF5config.cmake
# files are always in the directory above H5public.h
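A hedged usage sketch for the version script above (the -s/-i flags follow the interface the script documents elsewhere; treat exact spellings as assumptions):

    # Set an explicit version with an annotation suffix:
    bin/h5vers -s 1.13.2-pre1
    # Or bump just the release number:
    bin/h5vers -i release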
@@ -177,9 +178,9 @@ die "unable to read file: $LT_VERS\n" unless -r $file;
my $HDF5CONFIGCMAKE = $file;
$HDF5CONFIGCMAKE =~ s/[^\/]*$/..\/config\/cmake\/scripts\/HDF5config.cmake/;
die "unable to read file: $HDF5CONFIGCMAKE\n" unless -r $file;
# README.md
# README.txt
my $README = $file;
$README =~ s/[^\/]*$/..\/README.md/;
$README =~ s/[^\/]*$/..\/README.txt/;
die "unable to read file: $README\n" unless -r $file;
# release_docs/RELEASE.txt
my $RELEASE = $file;
@@ -212,7 +213,7 @@ my (@curver) = getvers $contents;
# Determine the new version number.
my @newver; #new version
if ($set) {
if ($set =~ /(\d+)\.(\d+)\.(\d+)(-([\da-zA-Z]\w*))?/) {
if ($set =~ /(\d+)\.(\d+)\.(\d+)(-([a-zA-Z]\w*))?/) {
@newver = ($1, $2, $3, $5);
} elsif ($set =~ /(\d+)\D+(\d+)\D+(\d+)(\s*\(([a-zA-Z]\w*)\))?\D*$/) {
@newver = ($1, $2, $3, $5);
@@ -302,7 +303,7 @@ if ($LT_VERS && $version_increased) {
# close FILE;
}
# Update the README.md file
# Update the README.txt file
if ($README) {
open FILE, $README or die "$README: $!\n";
my @contents = <FILE>;
@@ -376,7 +377,7 @@ if ($H5_JAVA) {
my $version_string2 = sprintf("%d, %d, %d", @newver[0,1,2]);
$data =~ s/\@version HDF5 .* <BR>/\@version HDF5 $version_string1 <BR>/;
$data =~ s/ public final static int LIB_VERSION\[\] = \{\d*,.\d*,.\d*\};/ public final static int LIB_VERSION[] = \{$version_string2\};/;
$data =~ s/ public final static int LIB_VERSION\[\] = { \d*, \d*, \d* };/ public final static int LIB_VERSION[] = { $version_string2 };/;
write_file($H5_JAVA, $data);
}
@@ -393,7 +394,7 @@ if ($TESTH5_JAVA) {
my $version_string1 = sprintf("%d, %d, %d", @newver[0,1,2]);
my $version_string2 = sprintf("int majnum = %d, minnum = %d, relnum = %d", @newver[0,1,2]);
$data =~ s/ int libversion\[\] = \{.*\};/ int libversion\[\] = \{$version_string1\};/;
$data =~ s/ int libversion\[\] = { .* };/ int libversion\[\] = { $version_string1 };/;
$data =~ s/ int majnum = \d*, minnum = \d*, relnum = \d*;/ $version_string2;/;
write_file($TESTH5_JAVA, $data);
@@ -404,7 +405,7 @@ if ($REPACK_LAYOUT_PLUGIN_VERSION) {
my $data = read_file($REPACK_LAYOUT_PLUGIN_VERSION);
my $version_string = sprintf("%d %d %d", @newver[0,1,2]);
$data =~ s/ PARAMS \{ 9 \d* \d* \d* \}/ PARAMS \{ 9 $version_string \}/g;
$data =~ s/ PARAMS { 9 \d* \d* \d* }/ PARAMS { 9 $version_string }/g;
write_file($REPACK_LAYOUT_PLUGIN_VERSION, $data);
}

View File

@@ -1,12 +1,13 @@
#!/usr/bin/env perl
#!/usr/bin/perl
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#

bin/locate_sw Executable file
View File

@@ -0,0 +1,238 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Try to locate the software named in the argument.
# This is a sequential search of all possible locations of the software.
# Usage: locate_sw <SW-Name>
# It prints a string showing the paths leading to the include, lib and bin
# directory of the software, separated by commas. E.g., if the software is
# located in /usr/sdt/*, it prints
# /usr/sdt/include,/usr/sdt/lib,/usr/sdt/bin
# Any component that is not found will be returned as an empty string. E.g.,
# if somehow the header files of the software are not found, it prints
# ,/usr/sdt/lib,/usr/sdt/bin
# Function definitions
USAGE()
{
echo "Usage: locate_sw <SW-Name>"
echo " where <SW-Name> can be hdf4, hdf5, zlib"
echo " It prints the paths leading to the header files (include),"
echo " library (lib), and tools (bin). E.g.,"
echo " /usr/sdt/include,/usr/sdt/lib,/usr/sdt/bin"
echo " Any component that is not found will be returned as an empty string. E.g.,"
echo " if somehow the header files of the software are not found, it prints"
echo " ,/usr/sdt/lib,/usr/sdt/bin"
echo "Exit code: 0 if software located; otherwise non-zero"
}
# locate hdf4 software
locate_hdf4()
{
# this default is the best guess of locating hdf4 software
swpaths_defaults="/usr/ncsa /usr/sdt /usr/local"
swpaths=
case "$OSname" in
SunOS)
case "$OSrelease" in
5.7)
swpaths="/afs/ncsa/packages/hdf/SunOS_5.7"
;;
*)
# use default
;;
esac
;;
HP-UX)
case "$OSrelease" in
B.11.00)
swpaths="/afs/ncsa/packages/hdf/HPUX_11.00"
;;
*)
# use default
;;
esac
;;
Linux)
swpaths="/afs/ncsa/packages/hdf/Linux"
;;
OSF1)
swpaths="/afs/ncsa/packages/hdf/OSF1_V4.0"
;;
*)
# just use the defaults
;;
esac
# Check if the hdf4 software is actually available.
# Accept the directory only if needed .h, .a and tools are found
# in the same place. That way, they are more likely to be of the
# same version.
#
swpaths="$swpaths $swpaths_defaults"
for sw in $swpaths; do
if [ -r $sw/include/hdf.h -a -r $sw/lib/libdf.a -a -r $sw/bin/hdp ]; then
SW_inc=$sw/include
SW_lib=$sw/lib
SW_bin=$sw/bin
SW_Location=$sw
break
fi
done
}
# locate hdf5 software
locate_hdf5()
{
# this default is the best guess of locating hdf5 software
swpaths_defaults="/usr/ncsa /usr/sdt /usr/local"
swpaths=
case "$OSname" in
SunOS)
case "$OSrelease" in
5.7)
swpaths="/afs/ncsa/packages/hdf5/SunOS_5.7"
;;
*)
# use default
;;
esac
;;
HP-UX)
case "$OSrelease" in
B.11.00)
swpaths="/afs/ncsa/packages/hdf5/HPUX_11.00"
;;
*)
# use default
;;
esac
;;
Linux)
swpaths="/afs/ncsa/packages/hdf5/Linux"
;;
FreeBSD)
swpaths="/afs/ncsa/packages/hdf5/FreeBSD"
;;
OSF1)
swpaths="/afs/ncsa/packages/hdf5/OSF1_V4.0"
;;
*)
# just use the defaults
;;
esac
# Check if the hdf5 software is actually available.
# Accept the directory only if needed .h, .a and tools are found
# in the same place. That way, they are more likely to be of the
# same version.
#
swpaths="$swpaths $swpaths_defaults"
for sw in $swpaths; do
if [ -r $sw/include/hdf5.h -a -r $sw/lib/libhdf5.a -a -r $sw/bin/h5dump ]; then
SW_inc=$sw/include
SW_lib=$sw/lib
SW_bin=$sw/bin
SW_Location=$sw
break
fi
done
}
# locate zlib software
locate_zlib()
{
# this default is the best guess of locating zlib software
swpaths_defaults="/usr /usr/local /usr/ncsa /usr/sdt"
swpaths=
# Check if the zlib software is actually available.
# Accept the directory only if needed .h, .a and tools are found
# in the same place. That way, they are more likely to be of the
# same version.
# Don't know something specific to check the bin directory. Maybe gzip?
# Just make sure it exists.
#
swpaths="$swpaths $swpaths_defaults"
for sw in $swpaths; do
if [ -r $sw/include/zlib.h -a \
\( -r $sw/lib/libz.a -o -r $sw/lib/libz.so \) -a -d $sw/bin ]; then
SW_inc=$sw/include
SW_lib=$sw/lib
SW_bin=$sw/bin
SW_Location=$sw
break
fi
done
# if none found, try HDF4 software which contains a version of zlib.
if [ x-$SW_Location = x- ]; then
locate_hdf4
fi
}
# Main
#
# Options
#
if [ $# -lt 1 ]; then
USAGE
exit 1
fi
if [ "$1" = -h ]; then
USAGE
exit 0
fi
SW=$1
shift
# locations of the software sought.
SW_inc= # include place
SW_lib= # library place
SW_bin= # binary place
SW_Location= # parent directory of all the above
OSname=`uname -s`
OSrelease=`uname -r`
case $SW in
hdf4|hdf)
locate_hdf4
;;
hdf5)
locate_hdf5
;;
zlib)
locate_zlib
;;
*)
echo "unknown software ($SW)"
USAGE
exit 1
;;
esac
# show the results located, separated by commas.
if [ -n "${SW_inc}" -a -n "${SW_lib}" -a -n "${SW_bin}" ]; then
echo ${SW_inc},${SW_lib},${SW_bin}
exit 0
else
exit 1
fi
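An illustrative run (the comma separator matches the echo above; the install location is hypothetical):

    $ bin/locate_sw zlib
    /usr/local/include,/usr/local/lib,/usr/local/bin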

View File

@@ -1,16 +1,16 @@
#!/usr/bin/env perl
#!/usr/bin/perl -w
require 5.003;
$indent=4;
use warnings;
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
@@ -31,12 +31,13 @@ sub print_copyright ($) {
print $fh "/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n";
print $fh " * Copyright by The HDF Group. *\n";
print $fh " * Copyright by the Board of Trustees of the University of Illinois. *\n";
print $fh " * All rights reserved. *\n";
print $fh " * *\n";
print $fh " * This file is part of HDF5. The full HDF5 copyright notice, including *\n";
print $fh " * terms governing use, modification, and redistribution, is contained in *\n";
print $fh " * the COPYING file, which can be found at the root of the source code *\n";
print $fh " * distribution tree, or in https://www.hdfgroup.org/licenses. *\n";
print $fh " * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *\n";
print $fh " * If you do not have access to either file, you may request a copy from *\n";
print $fh " * help\@hdfgroup.org. *\n";
print $fh " * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */\n";
@@ -62,8 +63,8 @@ sub print_startprotect ($$) {
$file =~ s/(\w*)\.h/$1/;
# Print the ifdef info
print $fh "\n#ifndef ${file}_H\n";
print $fh "#define ${file}_H\n";
print $fh "\n#ifndef _${file}_H\n";
print $fh "#define _${file}_H\n";
}
##############################################################################
@@ -243,8 +244,8 @@ sub create_init ($) {
print HEADER "/* Major error codes */\n";
print HEADER "/*********************/\n\n";
foreach $name (keys %major) {
print HEADER " "x(0*$indent),"HDassert(${name}_g==(-1));\n";
print HEADER " "x(0*$indent),"if((msg = H5E__create_msg(cls, H5E_MAJOR, \"${major{$name}}\"))==NULL)\n";
print HEADER " "x(0*$indent),"assert(${name}_g==(-1));\n";
print HEADER " "x(0*$indent),"if((msg = H5E_create_msg(cls, H5E_MAJOR, \"${major{$name}}\"))==NULL)\n";
print HEADER " "x(1*$indent),"HGOTO_ERROR(H5E_ERROR, H5E_CANTINIT, FAIL, \"error message initialization failed\")\n";
print HEADER " "x(0*$indent),"if((${name}_g = H5I_register(H5I_ERROR_MSG, msg, FALSE))<0)\n";
print HEADER " "x(1*$indent),"HGOTO_ERROR(H5E_ERROR, H5E_CANTREGISTER, FAIL, \"can't register error message\")\n";
@@ -259,8 +260,8 @@ sub create_init ($) {
# Iterate over all the minor errors in each section
for $name ( @{$section_list{$sect_name}}) {
print HEADER " "x(0*$indent),"HDassert(${name}_g==(-1));\n";
print HEADER " "x(0*$indent),"if((msg = H5E__create_msg(cls, H5E_MINOR, \"${minor{$name}}\"))==NULL)\n";
print HEADER " "x(0*$indent),"assert(${name}_g==(-1));\n";
print HEADER " "x(0*$indent),"if((msg = H5E_create_msg(cls, H5E_MINOR, \"${minor{$name}}\"))==NULL)\n";
print HEADER " "x(1*$indent),"HGOTO_ERROR(H5E_ERROR, H5E_CANTINIT, FAIL, \"error message initialization failed\")\n";
print HEADER " "x(0*$indent),"if((${name}_g = H5I_register(H5I_ERROR_MSG, msg, FALSE))<0)\n";
print HEADER " "x(1*$indent),"HGOTO_ERROR(H5E_ERROR, H5E_CANTREGISTER, FAIL, \"can't register error message\")\n";

View File

@@ -1,7 +1,6 @@
#!/usr/bin/env perl
#!/usr/bin/perl -w
require 5.003;
use strict;
use warnings;
# Global settings
@@ -10,12 +9,13 @@ my @ctypes = ( () );
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
@@ -60,12 +60,13 @@ sub print_copyright ($) {
print $fh "/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n";
print $fh " * Copyright by The HDF Group. *\n";
print $fh " * Copyright by the Board of Trustees of the University of Illinois. *\n";
print $fh " * All rights reserved. *\n";
print $fh " * *\n";
print $fh " * This file is part of HDF5. The full HDF5 copyright notice, including *\n";
print $fh " * terms governing use, modification, and redistribution, is contained in *\n";
print $fh " * the COPYING file, which can be found at the root of the source code *\n";
print $fh " * distribution tree, or in https://www.hdfgroup.org/licenses. *\n";
print $fh " * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *\n";
print $fh " * If you do not have access to either file, you may request a copy from *\n";
print $fh " * help\@hdfgroup.org. *\n";
print $fh " * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */\n";
@@ -91,8 +92,8 @@ sub print_startprotect ($$) {
$file =~ s/(\w*)\.h/$1/;
# Print the ifdef info
print $fh "\n#ifndef ${file}_H\n";
print $fh "#define ${file}_H\n";
print $fh "\n#ifndef _${file}_H\n";
print $fh "#define _${file}_H\n";
}
##############################################################################

View File

@@ -1,15 +1,14 @@
#!/usr/bin/env perl
#!/usr/bin/perl -w
require 5.003;
use warnings;
# Global settings
# (The max_idx parameter is the only thing that needs to be changed when adding
# support for a new major release. If support for a prior major release
# is added (like support for 1.4, etc), the min_sup_idx parameter will
# need to be decremented.)
# need to be decremented. - QAK)
# Max. library "index" (0 = v1.0, 1 = 1.2, 2 = 1.4, 3 = 1.6, 4 = 1.8, 5 = 1.10, 6 = 1.12, 7 = 1.14, 8 = 1.16, etc)
$max_idx = 8;
# Max. library "index" (0 = v1.0, 1 = 1.2, etc)
$max_idx = 5;
# Min. supported previous library version "index" (0 = v1.0, 1 = 1.2, etc)
$min_sup_idx = 3;
@@ -19,12 +18,13 @@ $indent = 2;
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
@@ -45,12 +45,13 @@ sub print_copyright ($) {
print $fh "/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n";
print $fh " * Copyright by The HDF Group. *\n";
print $fh " * Copyright by the Board of Trustees of the University of Illinois. *\n";
print $fh " * All rights reserved. *\n";
print $fh " * *\n";
print $fh " * This file is part of HDF5. The full HDF5 copyright notice, including *\n";
print $fh " * terms governing use, modification, and redistribution, is contained in *\n";
print $fh " * the COPYING file, which can be found at the root of the source code *\n";
print $fh " * distribution tree, or in https://www.hdfgroup.org/licenses. *\n";
print $fh " * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *\n";
print $fh " * If you do not have access to either file, you may request a copy from *\n";
print $fh " * help\@hdfgroup.org. *\n";
print $fh " * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */\n";
@@ -76,8 +77,8 @@ sub print_startprotect ($$) {
$file =~ s/(\w*)\.h/$1/;
# Print the ifdef info
print $fh "\n#ifndef ${file}_H\n";
print $fh "#define ${file}_H\n";
print $fh "\n#ifndef _${file}_H\n";
print $fh "#define _${file}_H\n";
}
##############################################################################
@@ -88,8 +89,7 @@ sub print_checkoptions ($) {
my $curr_idx; # Current API version index
# Print the option checking
print $fh "\n\n/* Issue error if contradicting macros have been defined. */\n";
print $fh "/* (Can't use an older (deprecated) API version if deprecated symbols have been disabled) */\n";
print $fh "\n/* Issue error if contradicting macros have been defined. */\n";
# Print the #ifdef
print $fh "#if (";
@@ -118,30 +118,7 @@ sub print_checkoptions ($) {
##############################################################################
# Print "global" API version macro settings
#
sub print_globalapidefvers ($) {
my $fh = shift; # File handle for output file
my $curr_idx; # Current API version index
# Print the descriptive comment
print $fh "\n\n/* If a particular default \"global\" version of the library's interfaces is\n";
print $fh " * chosen, set the corresponding version macro for API symbols.\n";
print $fh " *\n";
print $fh " */\n";
for $curr_idx ($min_sup_idx .. ($max_idx - 1)) {
# Print API version ifdef
print $fh "\n#if defined(H5_USE_1", ($curr_idx * 2), "_API_DEFAULT) && !defined(H5_USE_1", ($curr_idx * 2), "_API)\n";
# Print API version definition
print $fh " " x $indent, "#define H5_USE_1", ($curr_idx * 2), "_API 1\n";
# Print API version endif
print $fh "#endif /* H5_USE_1", ($curr_idx * 2), "_API_DEFAULT && !H5_USE_1", ($curr_idx * 2), "_API */\n";
}
}
##############################################################################
# Print "global" API symbol version macro settings
#
sub print_globalapisymbolvers ($) {
sub print_globalapivers ($) {
my $fh = shift; # File handle for output file
my $curr_idx; # Current API version index
@@ -153,6 +130,15 @@ sub print_globalapisymbolvers ($) {
print $fh " * API symbol, the individual API version macro takes priority.\n";
print $fh " */\n";
for $curr_idx ($min_sup_idx .. ($max_idx - 1)) {
# Print API version ifdef
print $fh "#if defined(H5_USE_1", ($curr_idx * 2), "_API_DEFAULT) && !defined(H5_USE_1", ($curr_idx * 2), "_API)\n";
# Print API version definition
print $fh " " x $indent, "#define H5_USE_1", ($curr_idx * 2), "_API 1\n";
# Print API version endif
print $fh "#endif /* H5_USE_1", ($curr_idx * 2), "_API_DEFAULT && !H5_USE_1", ($curr_idx * 2), "_API */\n\n";
}
# Loop over supported older library APIs and define the appropriate macros
for $curr_idx ($min_sup_idx .. ($max_idx - 1)) {
# Print API version ifdef
@@ -352,18 +338,7 @@ sub parse_line ($) {
my $vers_idx; # Index of version in array
# Do some validation on the input
# Note: v111 is allowed because H5O functions were prematurely versioned
# in HDF5 1.10. Because users were affected by this, the versioning
# was rescinded but the H5O version 2 functions were kept to be
# called directly. Now that the version macros are added in 1.12,
# along with a 3rd version of the H5O functions, the H5O function
# version for default api=v110 should be version 1 to work correctly
# with 1.10 applications that were using unversioned H5O functions,
# and the H5O function version should be version 3 for default api=v112
# (the default api version for 1.12). Allowing a v111 entry and
# incrementing its index 13 lines below allows a version 2 that is
# never accessed via the H5O function macros.
if(!( $_ =~ /v1[02468]/ || $_ =~ /v11[02468]/ || $_ =~ /v111/ )) {
if(!( $_ =~ /v1[02468]/ || $_ =~ /v11[02468]/ )) {
die "bad version information: $name";
}
if(exists($sym_versions{$_})) {
@@ -376,9 +351,6 @@ sub parse_line ($) {
#print "parse_line: _=$_\n";
# Get the index of the version
($vers_idx) = ($_ =~ /v1(\d+)/);
if($vers_idx == 11) {
$vers_idx++;
}
$vers_idx /= 2;
#print "parse_line: vers_idx='$vers_idx'\n";
push(@vers_nums, $vers_idx);
@@ -471,9 +443,8 @@ sub create_public ($) {
print_copyright(*HEADER);
print_warning(*HEADER);
print_startprotect(*HEADER, $file);
print_globalapidefvers(*HEADER);
print_checkoptions(*HEADER);
print_globalapisymbolvers(*HEADER);
print_globalapivers(*HEADER);
print_defaultapivers(*HEADER);
print_endprotect(*HEADER, $file);
@@ -507,7 +478,7 @@ for $file (@ARGV) {
}
}
close SOURCE;
# Create header files
print "Generating '", $prefix, "H5version.h'\n";
create_public($prefix);

View File

@@ -40,7 +40,6 @@ make distclean: remove all files generated by make, make check, or
make check-p: Only run parallel tests
make check-s: Only run serial tests
make check-vfd: Run tests with each virtual file driver
make check-vol: Run tests with each virtual object layer connector
HDF5 uses Automake, so any standard Automake targets not listed here
should also work.

33
bin/mkdirs Executable file
View File

@@ -0,0 +1,33 @@
#! /bin/sh
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# This is a small program which will create directories n-levels deep.
# You just call it with something like:
#
# mkdirs /tmp/foo/bar/baz
#
# and it will create all the directories from /tmp down to baz which
# don't exist.
#
chmodprog="${CHMODPROG-chmod}"
mkdirprog="${MKDIRPROG-mkdir}"
make_dir () {
if test ! -d $1; then
make_dir `echo $1 | sed -e 's#/[^/]*$##'`
$mkdirprog $1
$chmodprog 755 $1
fi
}
make_dir `echo $1 | sed -e 's#/$##'`
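The recursion strips one trailing path component per call until it reaches an existing ancestor, then creates and chmods each level on the way back up. A trace on an empty /tmp (paths illustrative):

    # bin/mkdirs /tmp/foo/bar/baz is roughly equivalent, top-down, to:
    #   mkdir /tmp/foo         && chmod 755 /tmp/foo
    #   mkdir /tmp/foo/bar     && chmod 755 /tmp/foo/bar
    #   mkdir /tmp/foo/bar/baz && chmod 755 /tmp/foo/bar/baz
    # On systems whose mkdir supports -p, "mkdir -p" has the same effect
    # (minus the explicit chmod 755).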

43
bin/newer Executable file
View File

@@ -0,0 +1,43 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Compare the modification time of file argument 1 against other file arguments.
# Return true (0) if argument 1 is newer than all others, otherwise return
# false (1). If any of the arguments is not a file, return false (1).
#
# Programmer: Albert Cheng
# Created Date: 2005/07/06
# Modification:
# Albert Cheng 2005/8/30
# Changed from two arguments to multiple arguments.
if test $# -lt 2; then
exit 1
fi
if test ! -f $1; then
exit 1
fi
f1=$1
shift
for f in $*; do
if test ! -f $f; then
exit 1
fi
if test X = X`find $f1 -newer $f -print`; then
exit 1
fi
done
# passed all tests. Must be a file newer than all others.
exit 0
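A sketch of how such a timestamp check could drive a regeneration step (the file names and the generator command are hypothetical, not taken from the repository's makefiles):

    # Rebuild a derived file only when some input is newer than it.
    if bin/newer derived.h input1.txt input2.txt; then
        echo "derived.h is up to date"
    else
        perl bin/make_derived input1.txt input2.txt   # hypothetical generator
    fi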

View File

@@ -4,7 +4,7 @@
## This file is part of HDF5. The full HDF5 copyright notice, including
## terms governing use, modification, and redistribution, is contained in
## the COPYING file, which can be found at the root of the source code
## distribution tree, or in https://www.hdfgroup.org/licenses.
## distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
## If you do not have access to either file, you may request a copy from
## help@hdfgroup.org.
@@ -61,21 +61,26 @@ STDOUT_FILTER() {
# Remove them from the stderr result file.
# $1 is the file name of the file to be filtered.
# Cases where filtering is needed:
# * LANL MPI:
# 1. MPE:
# In parallel mode, if the MPE library is used, it prints the following
# two message lines whether or not MPE tracing is used.
# Writing logfile.
# Finished writing logfile.
# 2. LANL MPI:
# The LANL MPI will print some messages like the following,
# LA-MPI: *** mpirun (1.5.10)
# LA-MPI: *** 3 process(es) on 2 host(s): 2*fln21 1*fln22
# LA-MPI: *** libmpi (1.5.10)
# LA-MPI: *** Copyright 2001-2004, ACL, Los Alamos National Laboratory
# * h5diff debug output:
# 3. h5diff debug output:
# Debug output all have prefix "h5diff debug: ".
# * AIX system prints messages like these when it is aborting:
# 4. AIX system prints messages like these when it is aborting:
# ERROR: 0031-300 Forcing all remote tasks to exit due to exit code 1 in task 0
# ERROR: 0031-250 task 4: Terminated
# ERROR: 0031-250 task 3: Terminated
# ERROR: 0031-250 task 2: Terminated
# ERROR: 0031-250 task 1: Terminated
# * LLNL Blue-Gene mpirun prints messages like these when it exits non-zero:
# 5. LLNL Blue-Gene mpirun prints messages like these when it exits non-zero:
# <Apr 12 15:01:49.075658> BE_MPI (ERROR): The error message in the job record is as follows:
# <Apr 12 15:01:49.075736> BE_MPI (ERROR): "killed by exit(1) on node 0"
STDERR_FILTER() {
@@ -86,6 +91,12 @@ STDERR_FILTER() {
cp $result_file $tmp_file
sed -e '/ BE_MPI (ERROR): /d' \
< $tmp_file > $result_file
# Filter MPE messages
if test -n "$pmode"; then
cp $result_file $tmp_file
sed -e '/^Writing logfile./d' -e '/^Finished writing logfile./d' \
< $tmp_file > $result_file
fi
# Filter LANL MPI messages
# and LLNL srun messages
# and AIX error messages
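# A minimal sketch (assumed from the message formats listed above; the
# actual sed is outside this hunk) of the LANL MPI / AIX filter:
#   cp $result_file $tmp_file
#   sed -e '/^LA-MPI: \*\*\*/d' \
#       -e '/^ERROR: 0031-/d' \
#       < $tmp_file > $result_file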

View File

@@ -1,12 +1,13 @@
#! /bin/sh
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#

View File

@@ -2,12 +2,13 @@
# makeTarFiles.pl
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#

View File

@@ -0,0 +1,215 @@
#!/usr/bin/perl
# makeInternalREADME.pl
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF4. The full HDF4 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the files COPYING and Copyright.html. COPYING can be found at the root
# of the source code distribution tree; Copyright.html can be found at the
# root level of an installed copy of the electronic HDF4 document set and
# is linked from the top-level documents page. It can also be found at
# http://hdfgroup.org/HDF4/doc/Copyright.html. If you do not have
# access to either file, you may request a copy from help@hdfgroup.org.
#
use warnings;
use strict;
my $section2="For information on compilers and settings used to build these HDF5
libraries, please refer to:
./lib/libhdf5.settings
The contents of this directory are:
COPYING - Copyright notice
README - This file
RELEASE.txt - Detailed information regarding this release
bin/ - Directory containing HDF5 pre-compiled utilities
include/ - Directory containing HDF5 include files
lib/ - Directory containing HDF5 libraries and settings
share/ - Directory containing example code in C, C++, and
Fortran using HDF5 and HDF5 HL library APIs. The
shell scripts provided with these examples will
compile and run them, and will also test the
h5cc, h5c++, and h5fc compile scripts found
in the installed bin directory.
These binaries were built with the ZLIB and SZIP (version 2.1, Encoder
ENABLED) external libraries which are included in the lib directory for
convenience.
We also provide the ZLIB and SZIP source code on our ftp server at:
ftp://ftp.hdfgroup.org/lib-external/
The official ZLIB and SZIP pages are at:
ZLIB: http://www.zlib.net/
SZIP: http://hdfgroup.org/doc_resource/SZIP/
";
my $section3 = "If using the shared libraries, you must add the HDF5 library path
to the LD_LIBRARY_PATH variable.
";
my $section4 = "We provide scripts for compiling applications with the HDF5 libraries:
bin/h5cc - for C
bin/h5fc - for F90 (if Fortran 90 library is included with the binaries)
bin/h5c++ - for C++ (if C++ library is included with the binaries)
After you have installed the binaries to their final destination, you can use
these scripts (h5cc, h5fc, h5c++) to compile. However, you must first run
./h5redeploy in the bin directory to change site specific paths in the scripts.
You may also need to change other variables in the scripts, depending
on how things are set up on your system. Here are some of the variables
to check:
prefix - Path to the HDF5 top level installation directory
CCBASE - Name of the C compiler
CLINKERBASE - Name of the linker
LIBS - Libraries your application will link with
For further details refer to the INSTALL files in
ftp://ftp.hdfgroup.org/HDF5/current/src/unpacked/release_docs/
or in the ./release_docs/ directory of the HDF5 source code, which can be found
on the HDF Group ftp server at ftp://ftp.hdfgroup.org/HDF5/current/src/.
Please send questions, comments, and suggestions to the appropriate
contact address from http://www.hdfgroup.org/about/contact.html
";
my $indirectory = ".";
$indirectory = shift;
my $linktype = "shared";
if ($indirectory =~ /static/) {
$linktype = "static";
}
my $modestring="";
if ($indirectory =~ /32/) {
$modestring = "in 32 bit mode ";
}
my $version;
my $outfile = "$indirectory/README";
open OUTFILE, ">$outfile" or die "$!Couldn't open $outfile - check permissions for $indirectory\n";
my $hostname;
my $cmd = "grep \"HDF5 Version\" $indirectory/lib/libhdf5.settings";
$_ = `$cmd`;
#print OUTFILE $_, "\n";
s/HDF5 Version://;
s/^\s+//;
chomp;
$version = $_;
#print OUTFILE $_, "\n";
my $versionstring= "This directory contains the $linktype binary distribution of HDF5-".$version;
$cmd = "grep \"Uname information:\" $indirectory/lib/libhdf5.settings";
$_ = `$cmd`;
s/Uname information://;
s/^\s+//;
#print OUTFILE $_;
chomp;
#s/(^\w+)(\s)(\S+)/$1/;
#s/(^.*)(-)(.*)(200[7-8])(.*)(\s)(\S+)/$1 $5/;
#my $platformstring = "\nthat was compiled on:" . $_ . " ";
my $platformstring = "";
my $hostnamestring = $_;
my @hostnamestring = split / /, $hostnamestring;
#print OUTFILE "Size of hostnamestring is ", scalar @hostnamestring, "\n";
#print OUTFILE $hostnamestring[0] . "\t" . $hostnamestring[2]."\t".$hostnamestring[19]."\n";
$hostname = $hostnamestring[1];
#my $size = scalar @hostnamestring;
if ($hostname =~ /loyalty/) {
$platformstring = "\nthat was compiled " . $modestring . "on: " . $hostnamestring[0]." " . $hostnamestring[2]." " . $hostnamestring[-1] . " ";
}
elsif ($hostname =~ /freedom/) {
$platformstring = "\nthat was compiled " . $modestring . "on: " . $hostnamestring[0]." " . $hostnamestring[2]." " . $hostnamestring[-1] . " ";
} elsif ($hostname =~ /emu/) {
$platformstring = "\nthat was compiled " . $modestring . "on: " . $hostnamestring[0]." " . $hostnamestring[2] . " " . $hostnamestring[-2] . " ";
} elsif ($hostname =~ /fred/) {
$platformstring = "\nthat was compiled " . $modestring . "on: " . $hostnamestring[0]." " . $hostnamestring[2] . " " . $hostnamestring[-1] . " ";
} else {
$_ = $hostnamestring[2];
my $pos = index $_, '-';
my $os = substr $_, 0, $pos;
$platformstring = "\nthat was compiled " . $modestring . "on: " . $hostnamestring[0] . " " . $os . " " . $hostnamestring[-2] . " ";
}
my $mappingstring = "";
#no mappingstring for 1.6. Put it back for 1.8.
#$cmd = "grep \"Default API Mapping:\" $indirectory/lib/libhdf5.settings";
#$_ = `$cmd`;
#s/Default API Mapping://;
#s/^\s+//;
#chomp;
#if (/v\d+/) {
# s/v//;
# s/(\d)(\d)/$1\.$2/g;
# $mappingstring = "using the default\nAPI mapping for VERSION ".$_.".";
#
#}
print OUTFILE $versionstring;
print OUTFILE $platformstring."\n\n";
#print OUTFILE $mappingstring;
#if ($hostname eq "loyalty.hdfgroup.uiuc.edu" || $hostname eq "freedom.hdfgroup.uiuc.edu") {
# print OUTFILE " It includes the C APIs,\nbuilt using the following ";
# print OUTFILE "compilers:\n\n";
#}
#else {
if ($linktype eq "shared" && !($hostname =~ /32/)) {
print OUTFILE "\n\nIt includes the C, C++, F90 and Java APIs, built using the following\n";
} else {
print OUTFILE "\n\nIt includes the C, C++, and F90 APIs, built using the following\n";
}
print OUTFILE "compilers:\n\n";
#}
# Only the gcc compiler version is in libhdf5.settings, so for now I looked
# up the versions and hardcoded them here. We will put them in libhdf5.settings
# for the next release.
if ($indirectory =~ /gnu484/) {
print OUTFILE "\tgcc, g++, and gfortran 4.8.4\n\n";
} elsif ($hostname =~ /jam/ || $hostname =~ /koala/) {
print OUTFILE "\tgcc, g++, and gfortran 4.1.2\n\n";
} elsif ($hostname =~ /platypus/) {
print OUTFILE "\tgcc, g++, and gfortran 4.4.7\n\n";
if ($linktype eq "shared" && !($hostname =~ /32/)) {
print OUTFILE "\tjava 1.8.0_51\n\n";
}
} elsif ($hostname =~ /moohan/) {
print OUTFILE "\tgcc, g++, and gfortran 4.8.5\n\n";
if ($linktype eq "shared" && !($hostname =~ /32/)) {
print OUTFILE "\tjava 1.8.0_51\n\n";
}
} elsif ($hostname =~ /emu/) {
print OUTFILE "\tSun C and C++ 5.12, Sun Fortran 95 8.6\n\n";
} elsif ($hostname =~ /loyalty/ || $hostname =~ /freedom/) {
print OUTFILE "\tgcc, g++, and gfortran 4.6.1\n\n";
} elsif ($hostname =~ /duck/) {
print OUTFILE "\tApple clang/clang++ 3.0 from Xcode 4.6.1 and gfortran 4.8.2\n\n";
} elsif ($hostname =~ /kite/) {
print OUTFILE "\tApple clang/clang++ 5.1 from Xcode 5.0.2 and gfortran 4.8.2\n\n";
} elsif ($hostname =~ /quail/) {
print OUTFILE "\tgcc, g++ 5.1 from Xcode 5.1 and gfortran 4.8.2\n\n";
} elsif ($hostname =~ /osx1010test/) {
print OUTFILE "\tgcc, g++ 5.1 from Xcode 5.1 and gfortran 4.8.2\n\n";
}
print OUTFILE $section2;
print OUTFILE $section3;
print OUTFILE $section4;

182
bin/pkgscrpts/makeOuterREADME.pl Executable file
View File

@@ -0,0 +1,182 @@
#!/usr/bin/perl
# makeOuterREADME.pl
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF4. The full HDF4 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the files COPYING and Copyright.html. COPYING can be found at the root
# of the source code distribution tree; Copyright.html can be found at the
# root level of an installed copy of the electronic HDF4 document set and
# is linked from the top-level documents page. It can also be found at
# http://hdfgroup.org/HDF4/doc/Copyright.html. If you do not have
# access to either file, you may request a copy from help@hdfgroup.org.
#
use warnings;
use strict;
my $indirectory = ".";
$indirectory = shift;
my $outdirectory = ".";
$outdirectory = shift;
my $outsubdir = shift;
my $version;
my $outfile = "$outdirectory/$outsubdir/README";
open OUTFILE, ">$outfile";
my $hostname="";
my $cmd = "grep \"HDF5 Version\" $indirectory-static/lib/libhdf5.settings";
$_ = `$cmd`;
print OUTFILE $_, "\n";
s/HDF5 Version://;
s/^\s+//;
chomp;
$version = $_;
#print OUTFILE $_, "\n";
my $versionstring= "This directory contains the precompiled HDF5 $version binary distribution\n(include files, libraries, utilities) for";
$cmd = "grep \"Uname information:\" $indirectory-static/lib/libhdf5.settings";
$_ = `$cmd`;
s/Uname information://;
s/^\s+//;
chomp;
print "String to work with is $_\n";
my $platformstring = "";
my $hostnamestring = $_;
my @hostnamestring = split / /, $hostnamestring;
$platformstring = "$hostnamestring[0] ";
if ($indirectory =~ /jam/ || $indirectory =~ /koala/) {
$hostnamestring = $hostnamestring[2];
my $pos = index $hostnamestring, "-";
if ($pos > 0) {
$platformstring .= substr $hostnamestring, 0, $pos;
} else {
$platformstring .= $hostnamestring[2];
}
$platformstring .= " ".$hostnamestring[-3];
} elsif ($indirectory =~ /linew/) {
$platformstring .= "$hostnamestring[2] $hostnamestring[-2]";
} else {
$platformstring .= "$hostnamestring[2] $hostnamestring[-1]";
}
print OUTFILE $versionstring." ".$platformstring.":\n\n";
my $line1;
my $line3;
my $line5;
my $compilerstring="";
my $compilerstring1="";
my $compilerstring2="";
print $indirectory, "\n";
if ($indirectory =~ /ostrich/) {
# $line1 = " hdf5-$version-$outsubdir-16API.tar.gz - Includes C, C++, F90 APIs (using\n";
$line3 = " hdf5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 APIs (using";
$compilerstring = " gcc, g++, and gfortran 4.4.7)";
}
elsif ($indirectory =~ /platypus/) {
# $line1 = " hdf5-$version-$outsubdir-16API.tar.gz - Includes C, C++, F90 APIs (using\n";
$line3 = " hdf5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
$compilerstring1 = " gcc, g++, and gfortran 4.4.7)\n";
if ($indirectory =~ /32/) {
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 and Java APIs (using";
$compilerstring2 = " gcc, g++, and gfortran 4.4.7)\n";
} else {
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 and Java APIs (using";
$compilerstring2 = " gcc, g++, and gfortran 4.4.7 and java 1.8.0_51)\n";
}
}
elsif ($indirectory =~ /moohan/) {
# $line1 = " hdf5-$version-$outsubdir-16API.tar.gz - Includes C, C++, F90 APIs (using\n";
$line3 = " hdf5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
$compilerstring1 = " gcc, g++, and gfortran 4.8.5)\n";
if ($indirectory =~ /32/) {
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 and Java APIs (using";
$compilerstring2 = " gcc, g++, and gfortran 4.4.7)\n";
} else {
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 and Java APIs (using";
$compilerstring2 = " gcc, g++, and gfortran 4.8.5 and java 1.8.0_51)\n";
}
}
elsif ($indirectory =~ /emu/) {
# $line1 = " hdf5-$version-$outsubdir-16API.tar.gz - Includes C, C++, F90 APIs (using\n";
$line3 = " hdf5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 APIs (using";
$compilerstring = " Sun C and C++ 5.12, Sun Fortran 95 8.6)\n";
}
elsif ($indirectory =~ /kite/) {
$line3 = " hfd5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
$line5 = " hfd5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 APIs (using";
$compilerstring = " Apple clang/clang++ 5.1 from Xcode 5.0.2,
gfortran 4.8.2)\n";
}
elsif ($indirectory =~ /quail/) {
$line3 = " hfd5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
$line5 = " hfd5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 APIs (using";
$compilerstring = " Apple clang/clang++ 6.0 from Xcode 5.1,
gfortran 4.9.2)\n";
}
elsif ($indirectory =~ /osx1010test/) {
$line3 = " hdf5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 APIs (using";
$compilerstring = " Apple clang/clang++ 6.1 from Xcode 6.1,
gfortran 4.9.2)\n";
}
elsif ($indirectory =~ /osx1011test/) {
$line3 = " hdf5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 APIs (using";
$compilerstring = " Apple clang/clang++ 7.0.2 from Xcode 7.0,
gfortran 5.2.0)\n";
}
print OUTFILE $line3;
print OUTFILE $compilerstring1."\n";
if ($line5 ne "") {
print OUTFILE $line5;
print OUTFILE $compilerstring2."\n";
}
#elsif ($indirectory =~ /-16API/) {
# print OUTFILE $line1;
# print OUTFILE $compilerstring;
# print OUTFILE " USES THE VERSION 1.6 DEFAULT API\n";
# print OUTFILE " MAPPING.\n\n";
#}
print OUTFILE " utilities/ - Directory containing the compiled HDF5 utilities.\n";
print OUTFILE " These utilities are STATICALLY linked and will run as is.\n\n";
#print OUTFILE " The tools can be downloaded separately, or ALL in one\n";
#print OUTFILE " compressed file (5-$version-$outsubdir-static-util.tar.gz).\n\n";
print OUTFILE "\n";
print OUTFILE "To obtain the HDF5 distribution, 'unzip' and 'untar' the distribution\n";
print OUTFILE "file:\n\n";
print OUTFILE " gzip -cd <gz file from above> | tar xvf -\n";
#$cmd = "grep \"Configured by:\" $indirectory/$key-static/lib/libhdf5.settings";
#$_ = `$cmd`;
#s/Configured by://;
#s/^\s+//;
#print OUTFILE $_;
#chomp;
#my $hostnamestring = $_;
#s/(^\w+)(\s)(\S+)/$1/;
#s/(^.*)(-)(.*)(200[7-8])(.*)(\s)(\S+)/$1 $5/;
#my $platformstring = $_ . ":\n\n";
#my @hostnamestring = split /@/, $hostnamestring;
#print "Size of hostnamestring is ", scalar @hostnamestring, "\n";
#print $hostnamestring[0] . "\t" . $hostnamestring[2]."\t".$hostnamestring[19]."\n";
#my $platformstring = $hostnamestring[1].":\n\n";
#$hostnamestring = $hostnamestring[1];
#my $pos = index $hostnamestring, ".";
#if ($pos > 0) {
# @hostnamestring = split /\./, $hostnamestring;
# $platformstring = $hostnamestring[0].":\n\n";
#}

View File

@@ -1,17 +1,36 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Make a release of hdf5.
#
# Programmer: Robb Matzke
# Creation date: on or before 1998-01-29.
#
# Modifications
# Robb Matzke, 1999-07-16
# The SunOS 5.6 sed *must* have slashes as delimiters. I changed things like
# `sed s+/CVS++' to `sed 's/\/CVS//'
#
# Albert Cheng, 1999-10-26
# Moved the MANIFEST checking to a separate command file so that
# it can be invoked individually.
#
# Albert Cheng, 2004-08-14
# Added the --private option.
#
# James Laird, 2005-09-07
# Added the md5 method.
# Function definitions
#
@@ -19,56 +38,49 @@
USAGE()
{
cat << EOF
Usage: $0 -d <dir> [--docver BRANCHNAME] [-h] [--private] <methods> ...
-d DIR The name of the directory where the release(s) should be
Usage: $0 -d <dir> [--docver BRANCHNAME] [-h] [--nocheck] [--private] <methods> ...
-d DIR The name of the directory where the release(s) should be
placed.
--docver BRANCHNAME This is added for 1.8 and beyond to get the correct
version of documentation files from the hdf5docs
repository. BRANCHNAME for v1.8 should be hdf5_1_8.
-h print the help page.
--private Make a private release with today's date in version information.
--nocheck Ignore errors in MANIFEST file.
--private Make a private release with today's date in version information.
This must be run at the top level of the source directory.
The other command-line options are the names of the programs to use
for compressing the resulting tar archive (if none are given then
"tar" is assumed):
tar -- use tar and don't do any compressing.
gzip -- use gzip with "-9" and append ".gz" to the output name.
bzip2 -- use bzip2 with "-9" and append ".bz2" to the output name.
zip -- convert all text files to DOS style and form a zip file for Windows use.
cmake-tgz -- create a tar file using the gzip default level with a build-unix.sh
tar -- use tar and don't do any compressing.
gzip -- use gzip with "-9" and append ".gz" to the output name.
cmake-tgz -- create a tar file using the gzip default level with a build-unix.sh
command file and all other CMake files needed to build HDF5 source
using CMake on unix machines.
cmake-zip -- convert all text files to DOS style and create a zip file including cmake
scripts and .bat files to build HDF5 source using CMake on Windows.
hpc-cmake-tgz
-- create a tar file using the gzip default level with a build-unix.sh
command file and all other CMake files needed to build HDF5 source
using CMake on unix machines, with HDF5options.cmake files for serial
and parallel builds on machines requiring batch jobs to run tests.
The default is for parallel build, with serial only build by changing
the HDF5options.cmake symlink to ser-HDF5options.cmake. More
information is available in the README_HPC file.
bzip2 -- use bzip2 with "-9" and append ".bz2" to the output name.
zip -- convert all text files to DOS style and form a zip file for Windows use.
cmake-zip -- convert all text files to DOS style and create a zip file including cmake
scripts and .bat files to build HDF5 source using CMake on Windows.
doc -- produce the latest doc tree in addition to the archive.
A sha256 checksum is produced for each archive created and stored in the sha256 file.
An md5 checksum is produced for each archive created and stored in the md5 file.
Examples:
$ bin/release -d /tmp
/tmp/hdf5-1.8.13-RELEASE.txt
/tmp/hdf5-1.8.13.sha256
/tmp/hdf5-1.8.13.md5
/tmp/hdf5-1.8.13.tar
$ bin/release -d /tmp gzip
/tmp/hdf5-1.8.13-RELEASE.txt
/tmp/hdf5-1.8.13.sha256
/tmp/hdf5-1.8.13.md5
/tmp/hdf5-1.8.13.tar.gz
$ bin/release -d /tmp tar gzip zip
/tmp/hdf5-1.8.13-RELEASE.txt
/tmp/hdf5-1.8.13.sha256
/tmp/hdf5-1.8.13.md5
/tmp/hdf5-1.8.13.tar
/tmp/hdf5-1.8.13.tar.gz
/tmp/hdf5-1.8.13.tar.zip
@@ -80,6 +92,11 @@ EOF
# Function name: tar2zip
# Convert the release tarball to a Windows zipball.
#
# Programmer: Albert Cheng
# Creation date: 2014-04-23
#
# Modifications
#
# Steps:
# 1. untar the tarball in a temporary directory;
# Note: do this in a temporary directory to avoid changing
@@ -97,8 +114,8 @@ EOF
tar2zip()
{
if [ $# -ne 3 ]; then
echo "usage: tar2zip <tarfilename> <zipfilename>"
return 1
echo "usage: tar2zip <tarfilename> <zipfilename>"
return 1
fi
ztmpdir=/tmp/ztmpdir$$
mkdir -p $ztmpdir
@@ -110,23 +127,23 @@ tar2zip()
(cd $ztmpdir; tar xf -) < $tarfile
# sanity check
if [ ! -d $ztmpdir/$version ]; then
echo "untar did not create $ztmpdir/$version source dir"
# cleanup
rm -rf $ztmpdir
return 1
echo "untar did not create $ztmpdir/$version source dir"
# cleanup
rm -rf $ztmpdir
return 1
fi
# step 2: convert text files
# There may be a simpler way to do this.
# options used in unix2dos:
# -k Keep the date stamp
# -k Keep the date stamp
# -q quiet mode
# grep redirect output to /dev/null because -q or -s are not portable.
find $ztmpdir/$version | \
while read inf; do \
if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \
unix2dos -q -k $inf; \
fi\
done
while read inf; do \
if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \
unix2dos -q -k $inf; \
fi\
done
# step 3: make zipball
# -9 maximum compression
# -y Store symbolic links as such in the zip archive
@@ -142,19 +159,24 @@ tar2zip()
# Function name: tar2cmakezip
# Convert the release tarball to a Windows zipball with files to run CMake build.
#
# Programmer: Larry Knox
# Creation date: 2017-02-20
#
# Modifications
#
# Steps:
# 1. untar the tarball in a temporary directory;
# Note: do this in a temporary directory to avoid changing
# the original source directory which may be around.
# 2. add build-unix.sh script.
# 3. add LIBAEC.tar.gz, ZLib.tar.gz and cmake files to top level directory.
# 3. add SZIP.tar.gz, ZLib.tar.gz and cmake files to top level directory.
# 4. create gzipped tar file with these contents:
# build-unix.sh script
# hdf5-<version> source code directory extracted from tar file
# CTestScript.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# HDF5config.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# HDF5options.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# LIBAEC.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake
# SZip.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake
# ZLib.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake
@@ -166,64 +188,58 @@ tar2zip()
# Returns 0 if successful; 1 otherwise
#
# need function to create another temporary directory, extract the
# $tmpdir/$HDF5_VERS.tar into it, create build-VS*.bat files,
# add CTestScript.cmake, HDF5config.cmake, LIBAEC.tar.gz
# ZLib.tar.gz, HDF5 examples, and then zip it.
# $tmpdir/$HDF5_VERS.tar into it, add (create) build-unix.sh,
# CTestScript.cmake, HDF5config.cmake, SZIP.tar.gz and ZLib.tar.gz,
# and then tar.gz it.
tar2cmakezip()
{
if [ $# -ne 3 ]; then
echo "usage: tar2cmakezip <tarfilename> <zipfilename>"
return 1
echo "usage: tar2cmakezip <tarfilename> <tgzfilename>"
return 1
fi
cmziptmpdir=/tmp/cmziptmpdir$$
cmziptmpsubdir=$cmziptmpdir/CMake-$HDF5_VERS
mkdir -p $cmziptmpsubdir
mkdir -p $cmziptmpdir
version=$1
tarfile=$2
zipfile=$3
# step 1: untar tarball in cmgztmpdir
(cd $cmziptmpsubdir; tar xf -) < $tarfile
(cd $cmziptmpdir; tar xf -) < $tarfile
# sanity check
if [ ! -d $cmziptmpsubdir/$version ]; then
echo "untar did not create $cmziptmpsubdir/$version source dir"
# cleanup
rm -rf $cmziptmpdir
return 1
if [ ! -d $cmziptmpdir/$version ]; then
echo "untar did not create $cmziptmpdir/$version source dir"
# cleanup
rm -rf $cmziptmpdir
return 1
fi
# step 2: add batch file for building CMake on window
(cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2013 -C Release -V -O hdf5.log" > build-VS2013-32.bat; chmod 755 build-VS2013-32.bat)
(cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201364 -C Release -V -O hdf5.log" > build-VS2013-64.bat; chmod 755 build-VS2013-64.bat)
(cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2015 -C Release -V -O hdf5.log" > build-VS2015-32.bat; chmod 755 build-VS2015-32.bat)
(cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201564 -C Release -V -O hdf5.log" > build-VS2015-64.bat; chmod 755 build-VS2015-64.bat)
(cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2017 -C Release -V -O hdf5.log" > build-VS2017-32.bat; chmod 755 build-VS2017-32.bat)
(cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201764 -C Release -V -O hdf5.log" > build-VS2017-64.bat; chmod 755 build-VS2017-64.bat)
(cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2019 -C Release -V -O hdf5.log" > build-VS2019-32.bat; chmod 755 build-VS2019-32.bat)
(cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201964 -C Release -V -O hdf5.log" > build-VS2019-64.bat; chmod 755 build-VS2019-64.bat)
cp /mnt/scr1/pre-release/hdf5/CMake/build-VS2013-32.bat $cmziptmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/build-VS2013-64.bat $cmziptmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/build-VS2015-32.bat $cmziptmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/build-VS2015-64.bat $cmziptmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/build-VS2017-32.bat $cmziptmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/build-VS2017-64.bat $cmziptmpdir
# step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files
cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmziptmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmziptmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.2-Source.zip $cmziptmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-master.zip $cmziptmpsubdir
cp $cmziptmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmziptmpsubdir
cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmziptmpsubdir
cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5options.cmake $cmziptmpsubdir
# step 3: add SZIP.tar.gz, ZLib.tar.gz and cmake files
cp /mnt/scr1/pre-release/hdf5/CMake/SZip.tar.gz $cmziptmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmziptmpdir
cp $cmziptmpdir/$version/config/cmake/scripts/CTestScript.cmake $cmziptmpdir
cp $cmziptmpdir/$version/config/cmake/scripts/HDF5config.cmake $cmziptmpdir
cp $cmziptmpdir/$version/config/cmake/scripts/HDF5options.cmake $cmziptmpdir
# step 4: convert text files
# There may be a simpler way to do this.
# options used in unix2dos:
# -k Keep the date stamp
# -k Keep the date stamp
# -q quiet mode
# grep redirect output to /dev/null because -q or -s are not portable.
find $cmziptmpsubdir/$version | \
while read inf; do \
if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \
unix2dos -q -k $inf; \
fi\
done
find $cmziptmpdir/$version | \
while read inf; do \
if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \
unix2dos -q -k $inf; \
fi\
done
# step 3: make zipball
# -9 maximum compression
# -y Store symbolic links as such in the zip archive
@@ -237,22 +253,26 @@ tar2cmakezip()
}
# Function name: tar2cmaketgz
# Convert the release tarball to a gzipped tar file with files to run CMake build.
#
# Programmer: Larry Knox
# Creation date: 2017-02-20
#
# Modifications
#
# Steps:
# 1. untar the tarball in a temporary directory;
# Note: do this in a temporary directory to avoid changing
# the original source directory which may be around.
# 2. add build-unix.sh script.
# 3. add LIBAEC.tar.gz, ZLib.tar.gz and cmake files to top level directory.
# 3. add SZIP.tar.gz, ZLib.tar.gz and cmake files to top level directory.
# 4. create gzipped tar file with these contents:
# build-unix.sh script
# hdf5-<version> source code directory extracted from tar file
# CTestScript.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# HDF5config.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# HDF5options.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# LIBAEC.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake
# SZip.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake
# ZLib.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake
@@ -264,129 +284,43 @@ tar2cmakezip()
# Returns 0 if successful; 1 otherwise
#
# need function to create another temporary directory, extract the
# $tmpdir/$HDF5_VERS.tar into it, create build-unix.sh,
# add CTestScript.cmake, HDF5config.cmake, LIBAEC.tar.gz
# ZLib.tar.gz, HDF5 examples, and then tar.gz it.
# $tmpdir/$HDF5_VERS.tar into it, add (create) build-unix.sh,
# CTestScript.cmake, HDF5config.cmake, SZIP.tar.gz and ZLib.tar.gz,
# and then tar.gz it.
tar2cmaketgz()
{
if [ $# -ne 3 ]; then
echo "usage: tar2cmaketgz <tarfilename> <tgzfilename>"
return 1
echo "usage: tar2cmaketgz <tarfilename> <tgzfilename>"
return 1
fi
cmgztmpdir=/tmp/cmgztmpdir$$
cmgztmpsubdir=$cmgztmpdir/CMake-$HDF5_VERS
mkdir -p $cmgztmpsubdir
mkdir -p $cmgztmpdir
version=$1
tarfile=$2
tgzfile=$3
# step 1: untar tarball in cmgztmpdir
(cd $cmgztmpsubdir; tar xf -) < $tarfile
(cd $cmgztmpdir; tar xf -) < $tarfile
# sanity check
if [ ! -d $cmgztmpsubdir/$version ]; then
echo "untar did not create $cmgztmpsubdir/$version source dir"
# cleanup
rm -rf $cmgztmpdir
return 1
if [ ! -d $cmgztmpdir/$version ]; then
echo "untar did not create $cmgztmpdir/$version source dir"
# cleanup
rm -rf $cmgztmpdir
return 1
fi
# step 2: add build-unix.sh script
(cd $cmgztmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=Unix -C Release -V -O hdf5.log" > build-unix.sh; chmod 755 build-unix.sh)
# step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files
cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.2-Source.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-master.tar.gz $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5options.cmake $cmgztmpsubdir
tar czf $DEST/CMake-$HDF5_VERS.tar.gz -C $cmgztmpdir . || exit 1
# cleanup
rm -rf $cmgztmpdir
}
# Function name: tar2hpccmaketgz
# Convert the release tarball to a gzipped tarfile with files to run CMake build
# and HDF5options.cmake files for parallel or serial only builds where build
# tests are run on compute nodes using batch scripts.
#
# Steps:
# 1. untar the tarball in a temporary directory;
# Note: do this in a temporary directory to avoid changing
# the original source directory which may be around.
# 2. add build-unix.sh script.
# 3. add LIBAEC.tar.gz, ZLib.tar.gz and cmake files to top level directory.
# 4. create gzipped tar file with these contents:
# build-unix.sh script
# hdf5-<version> source code directory extracted from tar file
# CTestScript.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# HDF5config.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# HDF5options.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# LIBAEC.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake
# ZLib.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake
#
# 5. For HPC-CMake tgz file the following are also needed in the top-level directory:
# README_HPC copied from release_docs
# ser-HDF5options.cmake copied from <hdf5 source code>/config/cmake/scripts/HPC
# par-HDF5options.cmake copied from <hdf5 source code>/config/cmake/scripts/HPC
# HDF5options.cmake symlink to par-HDF5options.cmake
#
# Parameters:
# $1 version
# $2 release tarball
# $3 output zipball file name
#
# Returns 0 if successful; 1 otherwise
#
# need function to create another temporary directory, extract the
# $tmpdir/$HDF5_VERS.tar into it, create build-unix.sh,
# add CTestScript.cmake, HDF5config.cmake, LIBAEC.tar.gz
# ZLib.tar.gz, HDF5 examples, and then tar.gz it.
tar2hpccmaketgz()
{
if [ $# -ne 3 ]; then
echo "usage: tar2hpccmaketgz <tarfilename> <tgzfilename>"
return 1
fi
cmgztmpdir=/tmp/cmgztmpdir$$
cmgztmpsubdir=$cmgztmpdir/HPC-CMake-$HDF5_VERS
mkdir -p $cmgztmpsubdir
version=$1
tarfile=$2
tgzfile=$3
# step 1: untar tarball in cmgztmpdir
(cd $cmgztmpsubdir; tar xf -) < $tarfile
# sanity check
if [ ! -d $cmgztmpsubdir/$version ]; then
echo "untar did not create $cmgztmpsubdir/$version source dir"
# cleanup
rm -rf $cmgztmpdir
return 1
fi
# step 2: add build-unix.sh script
(cd $cmgztmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=Unix -C Release -V -O hdf5.log" > build-unix.sh; chmod 755 build-unix.sh)
# step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files
cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.2-Source.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-master.tar.gz $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir
cp $cmgztmpsubdir/$version/release_docs/README_HPC $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/HPC/ser-HDF5options.cmake $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/HPC/par-HDF5options.cmake $cmgztmpsubdir
(cd $cmgztmpsubdir; ln -s par-HDF5options.cmake HDF5options.cmake)
tar czf $DEST/HPC-CMake-$HDF5_VERS.tar.gz -C $cmgztmpdir . || exit 1
(cd $cmgztmpdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=Unix -C Release -V -O hdf5.log" > build-unix.sh; chmod 755 build-unix.sh)
# step 3: add SZIP.tar.gz, ZLib.tar.gz and cmake files
cp /mnt/scr1/pre-release/hdf5/CMake/SZip.tar.gz $cmgztmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpdir
cp $cmgztmpdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpdir
cp $cmgztmpdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpdir
cp $cmgztmpdir/$version/config/cmake/scripts/HDF5options.cmake $cmgztmpdir
tar czf $DEST/CMake-$HDF5_VERS.tar.gz -C $cmgztmpdir . || exit 1
# cleanup
rm -rf $cmgztmpdir
}
@@ -404,10 +338,11 @@ VERS=`perl bin/h5vers`
VERS_OLD=
test "$VERS" || exit 1
verbose=yes
check=yes
release_date=`date +%F`
today=`date +%Y%m%d`
pmode='no'
tmpdir="../#release_tmp.$$" # tmp work directory
tmpdir="../#release_tmp.$$" # tmp work directory
DOC_URL=https://git@bitbucket.hdfgroup.org/scm/hdffv/hdf5doc.git
CPPLUS_RM_NAME=cpplus_RM
MAINT_MODE_ENABLED=""
@@ -424,11 +359,11 @@ fi
RESTORE_VERSION()
{
if [ X-${VERS_OLD} != X- ]; then
echo restoring version information back to $VERS_OLD
rm -f config/lt_vers.am
cp $tmpdir/lt_vers.am config/lt_vers.am
bin/h5vers -s $VERS_OLD
VERS_OLD=
echo restoring version information back to $VERS_OLD
rm -f config/lt_vers.am
cp $tmpdir/lt_vers.am config/lt_vers.am
bin/h5vers -s $VERS_OLD
VERS_OLD=
fi
}
@@ -438,29 +373,32 @@ while [ -n "$1" ]; do
arg=$1
shift
case "$arg" in
-d)
DEST=$1
shift
;;
-h)
USAGE
exit 0
;;
--private)
pmode=yes
;;
-d)
DEST=$1
shift
;;
--nocheck)
check=no
;;
-h)
USAGE
exit 0
;;
--private)
pmode=yes
;;
--docver)
DOCVERSION=$1
shift
;;
-*)
echo "Unknown switch: $arg" 1>&2
USAGE
exit 1
;;
*)
methods="$methods $arg"
;;
-*)
echo "Unknown switch: $arg" 1>&2
USAGE
exit 1
;;
*)
methods="$methods $arg"
;;
esac
done
@@ -469,7 +407,7 @@ if [ "X$methods" = "X" ]; then
methods="tar"
fi
# Create the temporary work directory.
if mkdir $tmpdir; then
echo "temporary work directory for release. "\
"Can be deleted after release completes." > $tmpdir/README
@@ -503,17 +441,35 @@ if [ ! -d $DEST ]; then
exit 1
fi
# Create a symlink to the source so files in the tarball have the prefix
# we want (gnu's --transform isn't portable)
ln -s `pwd` $tmpdir/$HDF5_VERS || exit 1
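# For comparison, the non-portable GNU-tar alternative alluded to above
# would be something like (illustrative only):
#   tar -cf $HDF5_VERS.tar --transform "s,^\./,$HDF5_VERS/," .
# The symlink approach works with any tar that dereferences links (-h).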
# Check the validity of the MANIFEST file.
bin/chkmanifest || fail=yes
if [ "X$fail" = "Xyes" ]; then
if [ $check = yes ]; then
echo ""
echo "Note! If you are running bin/release in a development branch"
echo "later than v 1.8 the MANIFEST check is expected to fail when"
echo "autogen.sh has not been run successfully. Either run autogen.sh "
echo "with /usr/hdf/bin/AUTOTOOLS at the beginning of PATH or add the"
echo "--nocheck argument to the bin/release command."
exit 1
else
echo "Continuing anyway..."
fi
fi
# Create a manifest that contains only files for distribution.
MANIFEST=$tmpdir/H5_MANIFEST
grep '^\.' MANIFEST | grep -v _DO_NOT_DISTRIBUTE_ >$MANIFEST
# Prepare the source tree for a release.
ln -s `pwd` $tmpdir/$HDF5_VERS || exit 1
# Save a backup copy of Makefile if exists.
test -f Makefile && mv Makefile $tmpdir/Makefile.x
cp -p Makefile.dist Makefile
# Update README.md and release_docs/RELEASE.txt with release information in
# Update README.txt and release_docs/RELEASE.txt with release information in
# line 1.
for f in README.md release_docs/RELEASE.txt; do
for f in README.txt release_docs/RELEASE.txt; do
echo "HDF5 version $VERS released on $release_date" >$f.x
sed -e 1d $f >>$f.x
mv $f.x $f
@@ -521,75 +477,74 @@ for f in README.md release_docs/RELEASE.txt; do
chmod 644 $f
done
# develop is different than branches.
# trunk is different than branches.
if [ "${DOCVERSION}" ]; then
DOC_URL="$DOC_URL -b ${DOCVERSION}"
fi
# Create the tar file
test "$verbose" && echo " Running tar..." 1>&2
(cd "$tmpdir" && exec tar -ch --exclude-vcs -f "$HDF5_VERS.tar" "./$HDF5_VERS" || exit 1 )
( \
cd $tmpdir; \
tar cf $HDF5_VERS.tar $HDF5_VERS/Makefile \
`sed 's/^\.\//hdf5-'$VERS'\//' $MANIFEST` || exit 1 \
)
# Compress
SHA256=$HDF5_VERS.sha256
cp /dev/null $DEST/$SHA256
MD5file=$HDF5_VERS.md5
cp /dev/null $DEST/$MD5file
for comp in $methods; do
case $comp in
tar)
cp -p $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.tar
(cd $DEST; sha256sum $HDF5_VERS.tar >> $SHA256)
;;
gzip)
test "$verbose" && echo " Running gzip..." 1>&2
gzip -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.gz
(cd $DEST; sha256sum $HDF5_VERS.tar.gz >> $SHA256)
;;
tar)
cp -p $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.tar
(cd $DEST; md5sum $HDF5_VERS.tar >> $MD5file)
;;
gzip)
test "$verbose" && echo " Running gzip..." 1>&2
gzip -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.gz
(cd $DEST; md5sum $HDF5_VERS.tar.gz >> $MD5file)
;;
cmake-tgz)
test "$verbose" && echo " Creating CMake tar.gz file..." 1>&2
tar2cmaketgz $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/CMake-$HDF5_VERS.tar.gz 1>&2
(cd $DEST; sha256sum CMake-$HDF5_VERS.tar.gz >> $SHA256)
;;
hpc-cmake-tgz)
test "$verbose" && echo " Creating HPC-CMake tar.gz file..." 1>&2
tar2hpccmaketgz $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/HPC-CMake-$HDF5_VERS.tar.gz 1>&2
(cd $DEST; sha256sum HPC-CMake-$HDF5_VERS.tar.gz >> $SHA256)
;;
bzip2)
test "$verbose" && echo " Running bzip2..." 1>&2
bzip2 -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.bz2
(cd $DEST; sha256sum $HDF5_VERS.tar.bz2 >> $SHA256)
;;
zip)
test "$verbose" && echo " Creating zip ball..." 1>&2
tar2zip $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2
(cd $DEST; sha256sum $HDF5_VERS.zip >> $SHA256)
test "$verbose" && echo " Creating CMake tar.gz file..." 1>&2
tar2cmaketgz $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/CMake-$HDF5_VERS.tar.gz 1>&2
(cd $DEST; md5sum CMake-$HDF5_VERS.tar.gz >> $MD5file)
;;
bzip2)
test "$verbose" && echo " Running bzip2..." 1>&2
bzip2 -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.bz2
(cd $DEST; md5sum $HDF5_VERS.tar.bz2 >> $MD5file)
;;
zip)
test "$verbose" && echo " Creating zip ball..." 1>&2
tar2zip $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2
(cd $DEST; md5sum $HDF5_VERS.zip >> $MD5file)
;;
cmake-zip)
test "$verbose" && echo " Creating CMake-zip ball..." 1>&2
tar2cmakezip $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/CMake-$HDF5_VERS.zip 1>&2
(cd $DEST; sha256sum CMake-$HDF5_VERS.zip >> $SHA256)
(cd $DEST; md5sum CMake-$HDF5_VERS.zip >> $MD5file)
;;
doc)
doc)
if [ "${DOCVERSION}" = "" ]; then
DOCVERSION=master
fi
test "$verbose" && echo " Creating docs..." 1>&2
# Check out docs from git repo
(cd $tmpdir; git clone -q $DOC_URL ${DOCVERSION} > /dev/null) || exit 1
test "$verbose" && echo " Creating docs..." 1>&2
# Check out docs from git repo
(cd $tmpdir; git clone -q $DOC_URL ${DOCVERSION} > /dev/null) || exit 1
# Create doxygen C++ RM
(cd c++/src && doxygen cpp_doc_config > /dev/null ) || exit 1
# Replace version of C++ RM with just-created version
rm -rf $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME || exit 1
mv c++/src/$CPPLUS_RM_NAME $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME || exit 1
(cd c++/src && doxygen cpp_doc_config > /dev/null ) || exit 1
# Replace version of C++ RM with just-created version
rm -rf $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME || exit 1
mv c++/src/$CPPLUS_RM_NAME $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME || exit 1
# Compress the docs and move them to the release area
mv $tmpdir/${DOCVERSION} $tmpdir/${HDF5_VERS}_docs || exit 1
(cd $tmpdir && tar cf ${HDF5_VERS}_docs.tar ${HDF5_VERS}_docs) || exit 1
mv $tmpdir/${HDF5_VERS}_docs.tar $DEST || exit 1
;;
*)
echo "***Error*** Unknown method $comp"
exit 1
;;
mv $tmpdir/${DOCVERSION} $tmpdir/${HDF5_VERS}_docs || exit 1
(cd $tmpdir && tar cf ${HDF5_VERS}_docs.tar ${HDF5_VERS}_docs) || exit 1
mv $tmpdir/${HDF5_VERS}_docs.tar $DEST || exit 1
;;
*)
echo "***Error*** Unknown method $comp"
exit 1
;;
esac
done
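# On the receiving end, the checksum files written above can be verified
# with the matching tool (file names illustrative):
#   (cd $DEST && sha256sum -c hdf5-1.8.13.sha256)
#   (cd $DEST && md5sum -c hdf5-1.8.13.md5)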
@@ -615,6 +570,4 @@ fi
# Remove temporary things
rm -rf $tmpdir
echo "DONE"
exit 0

View File

@@ -1,12 +1,12 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
@@ -48,9 +48,6 @@ rm -f bin/missing
rm -f bin/test-driver
rm -f bin/depcomp
echo "Remove files generated by autoheader"
rm -f src/H5config.h.in
echo "Remove files generated by bin/make_err"
rm -f src/H5Epubgen.h
rm -f src/H5Einit.h

View File

@@ -1,87 +0,0 @@
#!/usr/bin/env perl
require 5.003;
use warnings;
$indent=4;
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Run program in background
#
use warnings;
use strict;
use Carp;
use Time::HiRes;
use POSIX 'setsid';
my $child_pid;
my $child_proc;
my $cmd = $ARGV[0];
my $debug = 1;
print "\nStart child process\n";
start_child();
print "\nStarted child process\n";
sub start_child {
die "cannot execute cmd: $cmd" unless -x $cmd;
if ($^O eq 'MSWin32') { # Windows
require Win32::Process;
Win32::Process::Create($child_proc, $cmd, $cmd, 0, 0, ".") || confess "Could not spawn child: $!";
$child_pid = $child_proc->GetProcessID();
}
else { # Unix
$SIG{CHLD} = 'IGNORE';
$child_pid = fork();
unless (defined $child_pid) {
confess "Could not spawn child (Unix): $!";
}
if ($child_pid == 0) { # child
unless ($debug) {
open STDIN, "<", "/dev/null" or die "Can't read /dev/null: $!";
open STDOUT, ">", "/dev/null" or die "Can't write /dev/null: $!";
}
setsid or warn "setsid cannot start a new session: $!";
unless ($debug) {
open STDERR, '>&STDOUT' or die "Can't dup stdout: $!";
}
local $| = 1;
unless (exec($cmd)) {
confess "Could not start child: $cmd: $!";
CORE::exit(0);
}
}
# parent
$SIG{CHLD} = 'DEFAULT';
}
# catch early child exit, e.g. if program path is incorrect
sleep(1.0);
POSIX::waitpid(-1, POSIX::WNOHANG()); # clean up any defunct child process
if (kill(0,$child_pid)) {
print "\nStarted child process id $child_pid\n";
}
else {
warn "Child process exited quickly: $cmd: process $child_pid";
}
}
sub stop_child
{
if ($^O eq 'MSWin32') { # Windows
Win32::Process::KillProcess($child_pid,0);
}
else { # Unix
kill 9, $child_pid || warn "could not kill process $child_pid: $!";
}
print "Stopped child process id $child_pid\n";
}

966
bin/runtest Executable file
View File

@@ -0,0 +1,966 @@
#! /bin/sh
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# run the hdf5/bin/snapshot
# Usage:
# runtest run the test for the local host
# runtest <hostname> run the test for <hostname>
# runtest -all run the test for all predefined hosts
#
# Assumptions about where to find the right scripts to execute.
# 1. assume we are at the top level of the hdf5 source. So, bin/* are
# where the script files are.
# 2. after the cvs update is completed, we can go to the snapshot area
# hdf5 source tree and use the bin/* there.
# 3. Cannot use the snapshot area scripts from the beginning because,
# for one, the current directory is renamed as previous right after
# a snapshot release; and for another, some scripts may be changed
# by the cvs update while they are being used.
# local setup
DEBUGMODE=""
test -n "$DEBUGMODE" && echo "******** DEBUGMODE is $DEBUGMODE ************"
WHEREAMI='pwd'
CMD=
# the name of this program
PROGNAME="bin/runtest $DEBUGMODE"
# Setup
HOSTNAME=`hostname | cut -f1 -d.` # no domain part
TODAY=`date +%m%d%a`
WEEKDAY=`date +%a`
H5VER= # default to current CVS version
H5VERSION= # default to current CVS version
n_test=0 # Number of tests ran
n_pass=0 # Number of tests passed
n_fail=0 # Number of tests failed
n_skip=0 # Number of tests skipped
# Default to do checkout (only once) and test, no release.
# Will run test only if there are significant differences from the previous version.
# If srcdir is not used, don't launch multiple tests.
SNAPSHOT="${DEBUGMODE:+echo }bin/snapshot"
SRCDIR="srcdir"
# Default standard Snaptest commands
SNAPCMD="$SRCDIR test clean"
# Default Standard snaptest command options
STANDARD_OPT=""
ENABLE_PARALLEL="--enable-parallel"
CONFIGNAME=$HOSTNAME # Name used in the SNAPTESTCFG file
# Test host defaults to the local host.
TESTHOST=""
#################################
# Function definitions
#################################
# Print messages to stdout
# Use this to show output heading to stdout
PRINT()
{
echo "$*"
}
# Show seconds since midnight.
# This is used to calculate seconds elapsed
SecOfDay()
{
set `date '+%H %M %S'`
t_sec=`expr $1 \* 3600 + $2 \* 60 + $3`
echo $t_sec
}
# Calculate the elapsed time (in seconds) between the first
# and second time. If the second time is smaller than the first,
# we assume the clock has passed midnight and calculate appropriately.
ElapsedTime()
{
if [ $2 -lt $1 ]; then
t_sec=`expr 3600 \* 24 - $1 + $2`
else
t_sec=`expr $2 - $1`
fi
echo `expr $t_sec / 60`m `expr $t_sec % 60`s
}
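# Illustrative usage sketch (times are made up, not executed here):
#   StartTime=`SecOfDay`              # e.g. 30000 at 08:20:00
#   ... run a test ...
#   EndTime=`SecOfDay`                # e.g. 30125 at 08:22:05
#   ElapsedTime $StartTime $EndTime   # prints "2m 5s"
# If the clock passes midnight, $2 < $1 and the first branch above adds a
# full day of seconds before subtracting.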
# Report errors
# $1--an error message to be printed
REPORT_ERR()
{
ERRMSG=$1
# print it with a banner shifted right a bit
PRINT " *************************************"
PRINT " `date`"
PRINT " $ERRMSG"
PRINT " *************************************"
# report it in the FAILED-LOG file too
PRINT "$ERRMSG" >> $FAILEDLOG
}
#
# Report results of the last test done
REPORT_RESULT()
{
if [ $retcode -eq 0 ]; then
if [ $skiptest = yes ]; then
n_skip=`expr $n_skip + 1`
PRINT "SKIPPED ${HOSTNAME}: $TEST_TYPE" | tee -a $SKIPPEDLOG
else
n_pass=`expr $n_pass + 1`
PRINT "PASSED ${HOSTNAME}: $TEST_TYPE" | tee -a $PASSEDLOG
fi
else
# test failed.
n_fail=`expr $n_fail + 1`
REPORT_ERR "****FAILED ${HOSTNAME}: $TEST_TYPE****"
fi
}
# Print a blank line
PRINT_BLANK()
{
PRINT
}
# Print test trailer
PRINT_TEST_TRAILER()
{
PRINT "*** finished $TEST_TYPE tests for $HOSTNAME ***"
date; EndTime=`SecOfDay`
PRINT Total time = `ElapsedTime $StartTime $EndTime`
PRINT_BLANK
}
# Print trailer summary
PRINT_TRAILER()
{
PRINT "*** finished tests in $HOSTNAME ***"
date; TotalEndTime=`SecOfDay`
PRINT "${HOSTNAME}: Ran $n_test($n_pass/$n_fail/$n_skip) $runtest_type, Grand total test time = " \
"`ElapsedTime $TotalStartTime $TotalEndTime`" | tee -a $TIMELOG
PRINT_BLANK
}
# Figure out which remote command to use to reach a host.
# Try ssh first, then rsh since fewer machines support rsh exec.
# $1--hostname to reach.
CHECK_RSH()
{
# Figure out how to use ping command in this host.
# Some hosts use "ping host count", some use "ping -c count host".
# Test "ping -c 3 -w 5" since it has timeout feature.
# Test "ping -c ..." style before "ping host 3" because some machines
# that recognize -c treat 'ping localhost 3' as to ping host '3'.
if [ -z "$PING" ]; then
if ping -c 3 -w 5 localhost >/dev/null 2>&1; then
PING='ping -c 3 -w 5'
PINGCOUNT=
elif ping -c 3 localhost >/dev/null 2>&1; then
PING='ping -c 3'
PINGCOUNT=
elif ping localhost 3 >/dev/null 2>&1; then
PING=ping
PINGCOUNT=3
else # don't know how to use ping.
PING=no_ping
PINGCOUNT=
fi
fi
#
host=$1
# Try remote command with host if it responds to ping.
# Still try it if we don't know how to do ping.
if [ no_ping = "$PING" ] || $PING $host $PINGCOUNT >/dev/null 2>&1; then
if ssh $host -n hostname >/dev/null 2>&1; then
RSH=ssh
elif rsh $host -n hostname >/dev/null 2>&1; then
RSH=rsh
else
PRINT cannot remote command with $host
RSH="NoRemoteCommand"
fi
else
RSH="NotReachable"
fi
}
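# A minimal usage sketch (the host name is hypothetical):
#   CHECK_RSH jelly
#   case "$RSH" in
#   rsh|ssh) $RSH jelly -n uptime ;;
#   *) echo "cannot reach jelly" ;;
#   esac
# The main body below uses the same pattern when launching remote tests.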
# Wait for a file for at most number of minutes
# $1--the file
# $2--number of minutes
# WAIT_STATUS set to:
# -1 if errors encountered
# 0 if file found within time limit
# 1 if file not found within time limit
WAITFOR()
{
wait_file=$1
nminutes=$2
if [ -z "$wait_file" -o ! "$nminutes" -ge 0 ]
then
PRINT "errors in argument of WAITFOR(): wait_file($1) or nminutes($2)"
WAIT_STATUS=-1
return
fi
while [ ! -f $wait_file ]; do
if [ $nminutes -gt 0 ]; then
PRINT "Wait For $wait_file to appear"
sleep 60 #sleep 1 minute
else
WAIT_STATUS=1
return
fi
nminutes=`expr $nminutes - 1`
done
WAIT_STATUS=0
return
}
# Wait till a file disappears for at most number of minutes.
# Useful to wait till a lock is removed by another process.
# $1--the file
# $2--number of minutes
# WAIT_STATUS set to:
# -1 if errors encountered
# 0 if file disappears within time limit
# 1 if file has not disappeared within time limit
WAITTILL()
{
wait_file=$1
nminutes=$2
if [ -z "$wait_file" -o ! "$nminutes" -ge 0 ]
then
PRINT "errors in argument of WAITTILL(): wait_file($1) or nminutes($2)"
WAIT_STATUS=-1
return
fi
while [ -f $wait_file ]; do
if [ $nminutes -gt 0 ]; then
PRINT "Wait till $wait_file has disappeared"
sleep 60 #sleep 1 minute
else
WAIT_STATUS=1
return
fi
nminutes=`expr $nminutes - 1`
done
WAIT_STATUS=0
return
}
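# A minimal usage sketch of the two wait helpers (file names are hypothetical):
#   WAITFOR /tmp/update.log 90    # wait up to 90 minutes for the log to appear
#   test $WAIT_STATUS -ne 0 && exit 1
#   WAITTILL /tmp/update.lock 10  # then up to 10 minutes for the lock to clear
#   test $WAIT_STATUS -ne 0 && exit 1
# The main body below uses this pattern to wait for another host's CVS update.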
# Run one snapshot test
# $*--Types of test being run
RUNSNAPTEST()
{
SNAPCMD_OPT="$STANDARD_OPT" # snapshot test option
SRCDIRNAME=${HOSTNAME}
# restore CC, PATH in case they were changed in the last test.
CC="$CC_SAVED"
PATH=$PATH_SAVED
export PATH # DEC OSF1 needs to export PATH explicitly
TEST_TYPE=$*
retcode=0
skiptest=no
date
PRINT "*** starting $TEST_TYPE tests in $HOSTNAME ***"
PRINT "Uname -a: `uname -a`"
# Parse the test type and set options accordingly.
# See comments of SNAPTEST_CONFIG_PARSE().
while [ $# -gt 0 ]; do
case $1 in
-n32) # want -n32 option
SRCDIRNAME=${SRCDIRNAME}-n32
CC="cc -n32"
export CC
;;
-64) # want -64 option
SRCDIRNAME=${SRCDIRNAME}-64
CC="cc -64"
export CC
;;
parallel) # want parallel test
SNAPCMD_OPT="$SNAPCMD_OPT $ENABLE_PARALLEL"
SRCDIRNAME=${SRCDIRNAME}-pp
;;
standard) # standard test
;;
--*)
# option for configure
SNAPCMD_OPT="$SNAPCMD_OPT $1"
;;
op-configure)
# option for configure
SNAPCMD_OPT="$SNAPCMD_OPT $1 $2"
shift
;;
op-snapshot)
# option for snapshot
shift
SNAPCMD_OPT="$SNAPCMD_OPT $1"
;;
setenv)
# pass them along to snapshot set environment variable
shift
SNAPCMD_OPT="$SNAPCMD_OPT setenv $1 $2"
shift
;;
setenvN)
# set environment variable with $1 values
# e.g., setenvN 3 x a b c is same as setenv x="a b c".
# pass them along to snapshot set environment variable
shift
envN=$1
shift
envname=$1
SNAPCMD_OPT="$SNAPCMD_OPT setenvN $envN $envname"
envalue=
while test $envN -gt 0; do
shift
envalue="$envalue $1"
envN=`expr $envN - 1`
done
SNAPCMD_OPT="$SNAPCMD_OPT $envalue"
;;
skip)
# skip this test
skiptest=yes
;;
srcdirname)
# Use this before using parallel and -n32 since this overrides
# the others.
shift
SRCDIRNAME=$1
;;
deploy)
# deploy the built binary.
shift
SNAPCMD_OPT="$SNAPCMD_OPT deploy $1"
;;
deploydir)
# default directory for deployment.
shift
SNAPCMD_OPT="$SNAPCMD_OPT deploydir $1"
;;
*) # unknown test
PRINT "$0: unknown type of test ($1)"
retcode=1
;;
esac
shift
done
if [ $retcode -ne 0 -o $skiptest = yes ]; then
errcode=$retcode
return $retcode
fi
# Track down the zlib software
ans=`$SNAPYARD/current/bin/locate_sw zlib`
if [ $? = 0 ]; then
Z_INC=`echo $ans | cut -f1 -d,`
Z_LIB=`echo $ans | cut -f2 -d,`
SNAPCMD_OPT="$SNAPCMD_OPT zlib $Z_INC,$Z_LIB"
else
# cannot locate zlib software.
# continue the test, maybe configure can find it.
:
fi
if [ -n "${SRCDIRNAME}" ]; then
SNAPCMD_OPT="$SNAPCMD_OPT srcdirname ${SRCDIRNAME}"
fi
# Setup log file name to save test output
THIS_MINUTE=`date +%H%M`
LOGFILE=${LOGBASENAME}/${SRCDIRNAME}_${TODAY}_${THIS_MINUTE}
PRINT "Running snapshot with output saved in"
PRINT " $LOGFILE"
(date; PRINT Hostname=$HOSTNAME) >> $LOGFILE
(
cd $SNAPYARD/current
$SNAPSHOT $SNAPCMD $SNAPCMD_OPT
) >> $LOGFILE 2>&1
retcode=$?
[ $retcode -ne 0 ] && errcode=$retcode
date >> $LOGFILE
if [ $retcode -ne 0 ]; then
# Dump the first 10 lines and the last 30 lines of the LOGFILE.
( ntail=30
echo =========================
echo "Dumping logfile of ${HOSTNAME}: $TEST_TYPE"
echo "Last $ntail lines of $LOGFILE"
echo =========================
tail -$ntail $LOGFILE
echo =========================
echo Dumping done
echo =========================
echo ""
) >> $FAILEDDETAIL
fi
}
TIMELIMIT_PARSE()
{
# Function returns timeparam for timekeeper via standard out -
# any debug statements should be 'echo "Debug string" >&2' or timekeeper
# will declare timeparam to be non-numeric and ignore it.
while read x y ; do
# Scan for entry for this weekday.
xd=`echo $x | cut -f1 -d/`
if [ "$xd" = ${WEEKDAY} ]; then
# strip away the weekday/ part.
timeparam=`echo $x | cut -f2 -d/`
break
fi
case "$x" in
'' | '#'*)
# blank or comment lines. Continue.
;;
???/*)
# Ignore any entry not of this weekday.
;;
*)
timeparam="$x"
;;
esac
done
echo $timeparam
return
}
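# A hypothetical $TIMELIMIT file illustrating the format parsed above:
#   # minutes allowed for the timekeeper
#   Sat/600
#   300
# On a Saturday this echoes 600; on any other day, 300.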
# Configuration parsing.
# Takes the configuration from input.
# This should be invoked with the configure file as stdin.
# Syntax of the configure file:
# All lines started with the # are comment lines and are ignored.
# Blank lines are ignored too.
# Each config line starts with a "Scope" followed by test types.
#
# Scope can be:
# standard ... # what the standard test types are.
# <host>: <test> Do <test> for <host>
# all: <test> Do <test> for all hosts.
# <weekday>/... Use this scope if the <weekday> matches.
# <weekday> can be {Mon,Tue,Wed,Thu,Fri,Sat,Sun}
# If no <host>: input for a <host>, the standard test is used.
#
# Test types:
# standard tests defined in standard scope.
# -n32 -n32 mode. Apply to 64/32 bit OS such as IRIX64.
# parallel parallel mode.
# op-configure <option> configure option
# op-snapshot <option> snapshot option
# --* configure option
# setenv <name> <value> set environment variable <name> to <value>
# Pass along to snapshot
# setenvN <N> <name> <value> ...
# set environment variable with <N> values
# e.g., setenvN 3 x a b c is same as setenv x="a b c".
# Pass along to snapshot.
# skip skip this test
# srcdirname <name> use <name> as the build-directory.
# deploy <name> deploy the built binary at directory <name>.
# deploydir <name> use <name> as the default directory for deployment.
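# A hypothetical snaptest.cfg illustrating the syntax above (the host name
# "jelly" and the options are examples only):
#   standard --disable-hl
#   all: standard
#   jelly: parallel
#   Mon/jelly: skip
# For CONFIGNAME=jelly this yields TEST_TYPES="standard ; parallel",
# with "skip" appended on Mondays, and STANDARD_OPT="--disable-hl".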
SNAPTEST_CONFIG_PARSE()
{
while read x y ; do
# Scan for entry for this weekday.
xd=`echo $x | cut -f1 -d/`
if [ "$xd" = ${WEEKDAY} ]; then
# strip away the weekday/ part.
x=`echo $x | cut -f2 -d/`
fi
case "$x" in
'' | '#'*)
# blank or comment lines. Continue.
;;
???/*)
# Ignore any entry not of this weekday.
;;
standard)
#standard configuration
STANDARD_OPT="$y"
;;
all: | ${CONFIGNAME}:)
# types of test for all hosts or this host
if [ -n "$TEST_TYPES" ]; then
TEST_TYPES="$TEST_TYPES ; $y"
else
TEST_TYPES="$y"
fi
;;
*:) # ignore types of test for other hosts
;;
*) # unknown configuration option
PRINT $x $y
PRINT "***Unknown configuration option. Ignored.***"
;;
esac
done
}
# Snap Test configuration parsing.
# If TEST_TYPES is not set, set it to do the "standard" test.
SNAPTEST_CONFIG()
{
TEST_TYPES=
STANDARD_OPT=
if [ -f $SNAPTESTCFG ]; then
SNAPTEST_CONFIG_PARSE < $SNAPTESTCFG
fi
TEST_TYPES=${TEST_TYPES:-'standard'}
}
# Show usage page
USAGE()
{
cat <<EOF
Usage: runtest [-h] [-debug] [-r<version>] [-all] [-nocvs] [-nodiff] [<host> ...]
-h
print this help page
-debug
turn on debug mode
-r<version>
do runtest for <version>
-all
launch tests for all pre-defined testing hosts
-nocvs
do not do cvs commands
-nodiff
do not do diff commands
-setup
setup the directory structure for snapshot test
-configname <name>
use <name> as hostname in the parsing of the snaptest configure file
<host>
launch tests for <host>
-all and <host> are contradictory and whichever is specified last, is
the one to take effect. If neither are given, do the test for the
local host.
EOF
}
# Verify if directory ($1) exists. If not, create it.
CHECK_DIR()
{
dir=$1
if test ! -e $1; then
echo mkdir $1
mkdir $1
errcode=$?
elif test ! -d $1; then
echo $1 is not a directory
errcode=1
fi
}
#################################
# Main
#################################
#################################
# Set up global variables
#################################
retcode=0 # error code of individual task
errcode=0 # error code of the whole test
skiptest=no # if test is skipped
CC_SAVED="$CC" # CC & PATH maybe changed within a test.
PATH_SAVED=$PATH # These save the original values.
timelimit=300 # default time limit (minutes) for the timekeeper
#################################
# Parse options
#################################
while [ $# -gt 0 ]; do
case "$1" in
-h) # help--show usage
USAGE
exit 0
;;
-debug*)
# set debug mode
DEBUGMODE="$1"
SNAPSHOT="echo bin/snapshot"
PROGNAME="$PROGNAME $DEBUGMODE"
PRINT "******** DEBUGMODE is $DEBUGMODE ************"
;;
-r*)
# version string
H5VER="$1"
;;
-all)
# Test all hosts.
TESTHOST=-all
;;
-nocvs)
# do not do cvs commands.
NOCVS=nocvs
;;
-nodiff)
# do not do diff commands.
NODIFF=nodiff
;;
-configname)
# use <name> as hostname in the parsing of the snaptest configure file.
shift
CONFIGNAME=$1
;;
-setup)
# setup the directory structure for snapshot test.
CMD=setup
;;
-*) # Unknown option
PRINT "Unknown option ($1)"
USAGE
exit 1
;;
*)
TESTHOST=$*
break
;;
esac
shift
done
# setup H5VER if not set yet
if [ -z "$H5VER" -a -f bin/snapshot_version ]
then
. bin/snapshot_version
fi
if [ -n "$H5VER" ]
then
H5VERSION=hdf5_`echo $H5VER | sed -e s/-r// -e s/\\\./_/g`
PROGNAME="$PROGNAME $H5VER"
else
H5VERSION=hdf5
fi
#################################
# Setup snapshot test directories
#################################
BASEDIR=${HOME}/snapshots-${H5VERSION}
# initial processing of setup option if requested
if test x-$CMD = x-setup; then
CHECK_DIR $BASEDIR
test $errcode -ne 0 && exit 1
elif [ ! -d ${BASEDIR} ]; then
echo "BASEDIR ($BASEDIR) does not exist"
exit 1
fi
# Show the real physical path rather than the symbolic path
SNAPYARD=`cd $BASEDIR && /bin/pwd`
# Log file basename
LOGDIR=${SNAPYARD}/log
LOGBASENAME=${LOGDIR}
PASSEDLOG=${LOGDIR}/PASSED_LOG_${TODAY}
FAILEDLOG=${LOGDIR}/FAILED_LOG_${TODAY}
FAILEDDETAIL=${LOGDIR}/FAILED_DETAIL_${TODAY}
SKIPPEDLOG=${LOGDIR}/SKIPPED_LOG_${TODAY}
TIMELOG=${LOGDIR}/TIME_LOG_${TODAY}
TIMEKEEPERLOG=${LOGDIR}/TIMEKEEPER_LOG_${TODAY}
CVSLOG=${LOGDIR}/CVS_LOG_${TODAY}
CVSLOG_LOCK=${LOGDIR}/CVS_LOG_LOCK_${TODAY}
DIFFLOG=${LOGDIR}/DIFF_LOG_${TODAY}
COPYRIGHT_ERR=${LOGDIR}/COPYRIGHT_ERR_${TODAY}
# Snap Test hosts and Configuration files
ALLHOSTSFILE=${SNAPYARD}/allhostfile
SNAPTESTCFG=${SNAPYARD}/snaptest.cfg
TIMELIMIT=${SNAPYARD}/timelimit
TMPFILE="${LOGDIR}/#runtest.${TODAY}.$$"
# more processing of setup option if requested
if test x-$CMD = x-setup; then
CHECK_DIR $LOGDIR
test $errcode -ne 0 && exit 1
CHECK_DIR $LOGDIR/OLD
test $errcode -ne 0 && exit 1
CHECK_DIR $SNAPYARD/TestDir
test $errcode -ne 0 && exit 1
# create empty test hosts or configure files if non-existing
for f in $ALLHOSTSFILE $SNAPTESTCFG; do
if test ! -f $f; then
echo Creating $f
touch $f
fi
done
# create or update the current source.
echo update current source
$SNAPSHOT checkout
# setup completed. Exit.
exit 0
fi
#################################
# Show some host status numbers
#################################
# df sometimes hangs due to file system problems. Invoke it as a background
# process and give it 10 seconds to finish. If it hangs, just continue.
uptime
df &
sleep 10
#################################
# Setup test host(s)
#################################
if [ "$TESTHOST" = -all ]; then
if [ -f $ALLHOSTSFILE ]; then
TESTHOST=`sed -e '/^#/d;/^ *$/d' $ALLHOSTSFILE`
else
PRINT "could not access the all-hosts-file ($ALLHOSTSFILE)"
USAGE
exit 1
fi
fi
#################################
# Setup to print a trailer summary when exiting not via
# the normal end of the script.
#################################
trap PRINT_TRAILER 0
#
TotalStartTime=`SecOfDay`
# Process the configuration
SNAPTEST_CONFIG
PRINT STANDARD_OPT=$STANDARD_OPT
PRINT TEST_TYPES=$TEST_TYPES
PRINT_BLANK
# Do a checkout if one has not been done today.
# Then check the MANIFEST file and copyright notices.
if [ -z "$NOCVS" ]; then
PRINT "Running CVS checkout with output saved in"
PRINT " $CVSLOG"
# Set CVS lock first
touch $CVSLOG_LOCK
($SNAPSHOT checkout ) >> $CVSLOG 2>&1
# Save error code and remove the lock
errcode=$?
rm -f $CVSLOG_LOCK
if [ $errcode -ne 0 ]; then
# test failed.
REPORT_ERR "****FAILED ${HOSTNAME}: CVS checkout****"
exit $errcode
fi
# ===================
# Check MANIFEST file
# ===================
PRINT Checking MANIFEST file ...
(cd $SNAPYARD/current; bin/chkmanifest) > $TMPFILE 2>&1
errcode=$?
if [ $errcode -eq 0 ]; then
# test passed.
cat $TMPFILE
else
# test failed.
REPORT_ERR "****FAILED ${HOSTNAME}: MANIFEST check****"
( echo =========================
echo "MANIFEST checking failed output"
echo =========================
cat $TMPFILE
echo =========================
echo "MANIFEST checking failed output done"
echo =========================
echo ""
) >> $FAILEDDETAIL
fi
rm $TMPFILE
PRINT_BLANK
# No copyright checking until what needs a copyright notice is decided. 2006/4/7.
if false; then
# ======================
# Check Copyright notice
# ======================
PRINT Checking Copyrights notices ...
if (cd $SNAPYARD/current; bin/chkcopyright) > $TMPFILE 2>&1 ; then
echo Passed.
else
# Save the output and report some of it.
# Do not report it as failed for runtest yet.
# Send a separate report mail via hardcoding.
# Need fixes/cleanup later.
echo "Failed. See detail in another report mail"
cp $TMPFILE $COPYRIGHT_ERR
nheadlines=300
ntaillines=5 # Number of lines in report summary.
(
echo =========================
echo "Copyright checking failed. Showing first $nheadlines lines of output."
echo "Complete output is in file $COPYRIGHT_ERR"
echo =========================
nreportlines=`wc -l < $COPYRIGHT_ERR`
if [ $nreportlines -le `expr $nheadlines + $ntaillines` ]; then
# Just print the whole file.
cat $COPYRIGHT_ERR
else
# Show the first $nheadlines plus report summary
head -$nheadlines $COPYRIGHT_ERR
echo ...
tail -$ntaillines $COPYRIGHT_ERR
fi
) | Mail -s "${H5VERSION} Copyrights check Failed" hdf5lib
fi
rm $TMPFILE
PRINT_BLANK
fi
else
# make sure the cvs update, if done by another host, has completed.
# First wait for the presence of $CVSLOG which signals some host
# has started the cvs update. Then wait for the absence of $CVSLOG_LOCK
# which signals the host has completed the cvs update.
WAITFOR $CVSLOG 90
if [ $WAIT_STATUS -ne 0 ]; then
errcode=$WAIT_STATUS
REPORT_ERR "****FAILED ${HOSTNAME}: Time expired waiting CVS update to start****"
exit $errcode
fi
WAITTILL $CVSLOG_LOCK 10
if [ $WAIT_STATUS -ne 0 ]; then
errcode=$WAIT_STATUS
REPORT_ERR "****FAILED ${HOSTNAME}: Time expired waiting CVS update to finish****"
exit $errcode
fi
fi
# run a snapshot diff to see if there are any significant differences between
# the current and previous versions
if [ -z "$NODIFF" ]; then
$SNAPSHOT diff >> $DIFFLOG 2>&1
errcode=$?
# check the errcode only if NOT in DEBUG MODE
if [ -z "$DEBUGMODE" -a $errcode -eq 0 ]; then
# no need to run test
PRINT "NO TEST: no significant differences between current and previous versions" |
tee -a $PASSEDLOG
exit 0
fi
fi
# we can use the version of script in SNAPYARD/current now.
# Don't do the diff or cvs update any more.
PROGNAME="$SNAPYARD/current/$PROGNAME -nodiff -nocvs"
# Decide to do test for the local host or for remote hosts
if [ -n "$TESTHOST" -a $HOSTNAME != "$TESTHOST" ]; then
date
PRINT "*** launching tests from $HOSTNAME ***"
PRINT_BLANK
TEST_TYPE="launching"
cd ${SNAPYARD}/log
# Fork off timekeeper if concurrent tests will be used.
if [ -n "$SRCDIR" ]; then
timelimit=`TIMELIMIT_PARSE < $TIMELIMIT`
($SNAPYARD/current/bin/timekeeper $timelimit > $TIMEKEEPERLOG 2>&1 &)
PRINT " Fork off timekeeper $timelimit"
fi
runtest_type="hosts"
for h in $TESTHOST; do
# Must do CONFIGNAME before $h got changed by the second cut.
# cut returns the whole string if there is no / in the string
# at all. But that works okay for the CONFIGNAME too.
CONFIGNAME=`echo $h | cut -f2 -d/`
h=`echo $h | cut -f1 -d/`
n_test=`expr $n_test + 1`
TMP_OUTPUT="#${h}_${CONFIGNAME}.out"
(PRINT "=============="
PRINT "Testing $h"
PRINT "==============") > $TMP_OUTPUT
CHECK_RSH $h
# run the remote shell command with output to $TMP_OUTPUT
case "$RSH" in
rsh|ssh)
CMD="$RSH $h -n $PROGNAME -configname $CONFIGNAME"
PRINT $CMD
# launch concurrent tests only if srcdir is used
if [ -n "$SRCDIR" ]; then
$CMD || REPORT_ERR "****FAILED ${h}: Abnormal exit from runtest****" && PRINT_BLANK &
echo $! > PID.${h}_${CONFIGNAME}
else
$CMD || REPORT_ERR "****FAILED ${h}: Abnormal exit from runtest****" && PRINT_BLANK
fi
;;
NoRemoteCommand)
PRINT $h does not accept Remote Command "(`date`)"
;;
NotReachable)
PRINT $h is not reachable "(`date`)"
;;
*)
PRINT "CHECK_RSH for $h returned unknow result ($RSH)"
;;
esac >> $TMP_OUTPUT 2>&1
done
# wait for all launched tests to finish, then cat them back out.
wait
# Pause a moment in case the timekeeper is terminating processes.
sleep 30
for h in $TESTHOST; do
CONFIGNAME=`echo $h | cut -f2 -d/`
h=`echo $h | cut -f1 -d/`
TMP_OUTPUT="#${h}_${CONFIGNAME}.out"
cat $TMP_OUTPUT
# Verify test script did complete by checking the last lines
(tail -5 $TMP_OUTPUT | grep -s 'Grand total' > /dev/null 2>&1) ||
(REPORT_ERR "****FAILED ${h}: snaptest did not complete****" &&
PRINT_BLANK)
rm -f $TMP_OUTPUT PID.${h}_${CONFIGNAME}
done
exit 0
fi
# run the test(s)
# Note that first field is cut without -s but all subsequent cut
# must use -s. If -s is not used at all, a $TEST_TYPES that has
# no ';' (only 1 test), will pass through intact in all cut. That
# results in infinite looping.
# If -s is used with the first field, it will completely suppress
# a $TEST_TYPES that has no ';' (only 1 test). That results in no
# test at all.
# Note that n_test must start as 1.
#
n_test=1
runtest_type="tests"
TEST="`echo $TEST_TYPES | cut -f$n_test -d';'`"
while [ -n "$TEST" ]; do
StartTime=`SecOfDay`
RUNSNAPTEST $TEST
REPORT_RESULT
PRINT_TEST_TRAILER
n_test=`expr $n_test + 1`
TEST="`echo $TEST_TYPES | cut -f$n_test -s -d';'`"
done
# dec n_test to show the actual number of tests ran.
n_test=`expr $n_test - 1`
PRINT_TRAILER
# disable trailer summary printing since all trailers have been
# printed and we are exiting normally.
trap 0
exit $errcode

828
bin/snapshot Executable file
View File

@@ -0,0 +1,828 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
# This script should be run nightly from cron. It checks out the source
# from the source repository and compares it against the previous
# snapshot. If anything significant changed then a new snapshot is
# created, the minor version number is incremented, and the change is
# checked back into the source repository.
#
# function definitions
TIMESTAMP()
{
echo "=====" "$1": "`date`" "====="
}
EXIT_BANNER()
{
TIMESTAMP "Exit $PROGNAME with status=$?"
}
# Show current total disk usage.
DISKUSAGE()
{
du -ks | \
( read x y; echo "Disk Usage=$x KB" )
}
# function provided for testing software downloaded as tar files. A version of
# this function that properly extracts the downloaded files can be provided in
# the snapshots-${sw}-overrides file.
EXTRACT()
{
echo "Error: ${SWVERSION} is in source repository - does not need extraction."
}
# Standard procedure for checking out or updating source code from an hdfgroup
# git repository. Override the function for other repositories or procedures.
SOURCE_CHECKOUT()
{
if test -n "$GIT_URL"; then
if [ -n "$AUTOGEN" ]; then
echo "Creating fresh clone of $GIT_URL in $BASEDIR/current_src"
# Check out the current version from source repository.
(cd $BASEDIR; rm -rf current_src
if test -z "$GIT_BRANCH"; then
echo "Testing empty branch $GIT_BRANCH."
git clone $GIT_URL current_src
else
echo "Testing branch $GIT_BRANCH."
git clone $GIT_URL -b $GIT_BRANCH current_src
fi
) || exit 1
else
echo "Creating fresh clone of $GIT_URL in $BASEDIR/current"
# Check out the current version from source repository.
(cd $BASEDIR; rm -rf current
if test -n "$GIT_BRANCH"; then
git clone $GIT_URL -b $GIT_BRANCH current
else
git clone $GIT_URL current
fi ) || exit 1
fi
else
echo "Warning! Source directory ("current") is not checked out from git."
fi
}
# Standard procedure for running the configure command in a build (test)
# directory
RUNCONFIGURE()
{
if [ "${CURRENT}" != "${TESTDIR}" -a "$CPSRC" = "yes" ]; then
echo "Copying source files to ${TESTDIR}."
cp -pr ${CURRENT}/* ${TESTDIR}
cd ${TESTDIR}
./${CONFIGURE}
elif [ -n "${AUTOGEN}" ]; then
${CURRENTSRC}/${CONFIGURE}
else
${CURRENT}/${CONFIGURE}
fi
}
# Sometimes "make distclean" doesn't adequately remove files from the previous
# build. If a build (test) directory was used, its contents can be entirely
# deleted to provide a clean start. If the test is building in the source
# directory, the contents can't be deleted, so run "make distclean".
DISTCLEAN()
{
if [ "${srcdir}" = "yes" -a -n "${SRCDIRNAME}" -a -d ${BASEDIR}/TestDir/${SRCDIRNAME} ]; then
echo "Remove contents of $SRCDIRNAME.\n"
rm -rf ${BASEDIR}/TestDir/${SRCDIRNAME}/*
else
echo "$MAKE distclean"
(cd ${TESTDIR} && ${MAKE} distclean)
fi
}
# Several of the software packages tested do not support make check-install.
# Those that support it should have a version of this function in their
# override with the following lines:
# TIMESTAMP "check-install $1"
# ${MAKE} check-install $1
CHECKINSTALL()
{
echo "check-install is not supported for ${SWVERSION}"
}
# Function for hdf4 and hdf5 to override to check in changes after snapshot.
# Safety measure to avoid unintended checkins to other repositories.
COMMITSNAPSHOT()
{
echo "original hdf5 script committed code changes back into git."
}
DISPLAYUSAGE()
{
set -
cat <<EOF
Usage: $PROGNAME [all] [checkout] [ftp <URL>] [diff] [test] [srcdir] [release] [help]
[clean] [distclean] [echo] [deploy <dir>] [deploydir <dir>]
[zlib <zlib_path>] [releasedir <dir>] [srcdirname <dir>] [check-vfd]
[exec <command>] [module-load <module-list>] [op-configure <option>]
[--<option>]
all: Run all commands (checkout, test & release)
[Default is all]
checkout: Run source checkout
diff: Run diff on current and previous versions. Exit 0 if
no significant differences are found. Otherwise, non-zero.
deploy: deploy binary to directory <dir>
deploydir: use <dir> as the default directory for deployment
test: Run test
release: Run release
clean: Run make clean
distclean:Run make distclean
echo: Turn on echo mode (set -x)
setenv <name> <value>:
Set environment variable <name> to <value>.
setenvN <N> <name> <value> ...:
Set environment variable with <N> values.
E.g., setenvN 3 x a b c is same as setenv x="a b c".
srcdir: Use srcdir option (does not imply other commands)
"snapshot srcdir" is equivalent to "snapshot srcdir all"
"snapshot srcdir checkout" is equivalent to "snapshot checkout"
srcdirname <dir>:
Use <dir> as the srcdir testing directory if srcdir is chosen.
If <dir> starts with '-', it is appended to the default name
E.g., "snapshot srcdir srcdirname -xx" uses hostname-xx
[Default is hostname]
help: Print this message
echo: Turn on shell echo
zlib <zlib_path>:
Use <zlib_path> as the ZLIB locations
[Default is $ZLIB_default]
releasedir <dir>:
Use <dir> as the release directory
[Default is $ReleaseDir_default]
check-vfd:
Run make check-vfd instead of just make check.
exttest <testscript>:
Run <testscript>;
exec <command>:
Run <command>;
module-load <module-list>:
Load modules in comma-separated <module-list>;
op-configure <option>:
Pass <option> to the configure command
E.g., "snapshot op-configure --enable-parallel"
configures for parallel mode
--<option>:
Pass --<option> to the configure command
E.g., "snapshot --enable-parallel"
configures for parallel mode
EOF
exit $errcode
}
# MAIN
# SGI /bin/sh replaces $0 as function name if used in a function.
# Set the name here to avoid that ambiguity and better style too.
PROGNAME=$0
SNAPSHOTNAME=
HDFREPOS=
DOCVERSION=""
MODULELIST=""
if [ -f bin/snapshot_params ]; then
. bin/snapshot_params
echo "Added snapshot_params."
fi
if [ -z "$SWVER" -a -f bin/snapshot_version ]
then
. bin/snapshot_version
echo "Added snapshot_version."
fi
if [ -n "${HDFREPOS}" -a -f bin/snapshot-${HDFREPOS}-overrides ]; then
. bin/snapshot-${HDFREPOS}-overrides
echo "Added snapshot-${HDFREPOS}-overrides."
fi
echo "====================================="
echo "$PROGNAME $*"
echo "====================================="
TIMESTAMP MAIN
uname -a
# setup exit banner message
trap EXIT_BANNER 0 1 2 9 15
# Dump environment variables before option parsing
echo ===Dumping environment variables before option parsing ===
printenv | sort
echo ===Done Dumping environment variables before option parsing ===
# snapshots release directory. Default relative to $BASEDIR.
ReleaseDir_default=release_dir
# Where is the zlib library?
# At NCSA, half of the machines have it in /usr/lib, the other half at
# /usr/ncsa/lib. Leave it unset.
ZLIB_default=
ZLIB=$ZLIB_default
# What compression methods to use? (md5 does checksum). Doc was apparently
# added as a compression method to create a separate tarfile containing the
# documentation files for v 1.8 and above.
if [ "${SWVERSION}" = "hdf5_1_6" ]; then
METHODS="gzip bzip2 md5"
else
METHODS="gzip bzip2 doc"
fi
# Use User's MAKE if set. Else use generic make.
MAKE=${MAKE:-make}
# Default check action.
CHECKVAL=check
#
# Command options
cmd="all"
test_opt=""
errcode=0
AUTOGEN=""
EXTTEST=""
EXEC_CMD_ARG=""
while [ $# -gt 0 ] ; do
case "$1" in
all)
cmd="all"
;;
checkout-autogen)
cmdcheckout="checkout"
AUTOGEN="autogen"
cmd=""
;;
checkout)
cmdcheckout="checkout"
cmd=""
;;
ftp)
echo "Setting ftp flags in snapshot script"
cmdcheckout="checkout"
cmdftp="ftp"
cmd=""
shift
if [ $# -lt 1 ]; then
echo "URL missing"
errcode=1
cmd="help"
break
fi
ftp_url="$1"
echo "ftp_url is $ftp_url"
;;
diff)
cmddiff="diff"
cmd=""
;;
deploy)
# deploy the built binary.
shift
if [ $# -lt 1 ]; then
echo "deploy <dir> missing"
errcode=1
cmd="help"
break
fi
cmddeploy="deploy"
DEPLOYDIRNAME="$1"
;;
deploydir)
# default directory for deployment.
shift
if [ $# -lt 1 ]; then
echo "deploydir <dir> missing"
errcode=1
cmd="help"
break
fi
deploydir="$1"
;;
test)
cmdtest="test"
cmd=""
;;
setenv)
# set environment variable
shift
eval $1="$2"
export $1
shift
;;
setenvN)
# set environment variable with $1 values
# e.g., setenvN 3 x a b c is same as setenv x="a b c".
# a kludge now--the extra single quotes are needed
# else eval complains.
shift
envN=$1
shift
envname=$1
envalue=
while test $envN -gt 0; do
shift
envalue="$envalue $1"
envN=`expr $envN - 1`
done
eval $envname="'$envalue'"
export $envname
;;
srcdir)
#use srcdir option for test
srcdir="yes"
;;
srcdirname)
shift
if [ $# -lt 1 ]; then
echo "srcdirname <dir> missing"
errcode=1
cmd="help"
break
fi
SRCDIRNAME="$1"
;;
release)
cmdrel="release"
cmd=""
;;
autogen-release)
cmdrel="autogen-release"
cmd=""
;;
clean | distclean)
cmdclean="$1"
cmd=""
;;
help)
cmd="help"
break
;;
echo)
set -x
break
;;
zlib)
shift
if [ $# -lt 1 ]; then
echo "ZLIB information missing"
errcode=1
cmd="help"
break
fi
ZLIB="$1"
;;
releasedir)
shift
if [ $# -lt 1 ]; then
echo "Release directory name missing"
errcode=1
cmd="help"
break
fi
ReleaseDir="$1"
;;
exttest)
shift
if [ $# -lt 1 ]; then
echo "exttest script name missing"
errcode=1
cmd="help"
break
fi
cmd=""
EXTTEST="$1"
;;
exec)
shift
if [ $# -lt 1 ]; then
echo "exec command name missing"
errcode=1
cmd="help"
break
fi
cmd=""
EXEC_CMD_ARG="$@"
# exit the parsing while loop since all arguments have been consumed.
break
;;
check-vfd)
CHECKVAL=check-vfd
;;
module-load)
shift
if [ $# -lt 1 ]; then
echo "missing module list to load"
errcode=1
cmd="help"
break
fi
MODULELIST="$1"
;;
--*)
OP_CONFIGURE="$OP_CONFIGURE $1"
;;
op-configure)
shift
if [ $# -lt 1 ]; then
echo "op-configure option missing"
errcode=1
cmd="help"
break
fi
OP_CONFIGURE="$OP_CONFIGURE $1"
;;
*)
echo "Unkown option $1"
errcode=1
cmd="help"
break
;;
esac
shift
done
if [ -n "$MODULELIST" ]; then
. ~/.bashrc
module use /opt/pkgs/modules/all
# load module command will take a space separated list of modules.
# If we have a comma separated list, convert ',' to ' '.
MODULELIST="$( echo -e "$MODULELIST" | tr ',' ' ' )"
module load $MODULELIST
fi
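# For example (module names are hypothetical): "module-load gcc/8.3,openmpi"
# yields MODULELIST="gcc/8.3 openmpi", which a single "module load" accepts.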
# Dump environment variables after option parsing
echo ===Dumping environment variables after option parsing ===
printenv | sort
echo ===Done Dumping environment variables after option parsing ===
if [ "$cmd" = help ]; then
DISPLAYUSAGE
fi
# Set up the proper configure option (--with-zlib) to use the zlib library,
# provided ZLIB is non-empty.
ZLIB=${ZLIB:+"--with-zlib="$ZLIB}
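# For example (path is illustrative): "snapshot zlib /usr/local/zlib" makes
# this expansion produce --with-zlib=/usr/local/zlib; an empty ZLIB stays empty.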
# Adding --prefix as a configure option will put the path to the deploy
# directory in the initial libhdf5*.la files
if [ -n "$DEPLOYDIRNAME" ]; then
OP_CONFIGURE="$OP_CONFIGURE --prefix=${deploydir}/${DEPLOYDIRNAME}"
fi
CONFIGURE="configure $OP_CONFIGURE"
# echo "Configure command is $CONFIGURE"
# Execute the requests
snapshot=yes
BASEDIR=${HOME}/snapshots-${SNAPSHOTNAME}
if [ ! -d ${BASEDIR} ]; then
echo "BASEDIR ($BASEDIR) does not exist"
exit 1
fi
CURRENT=${BASEDIR}/current
PREVIOUS=${BASEDIR}/previous
ReleaseDir=${ReleaseDir:=${BASEDIR}/${ReleaseDir_default}}
HOSTNAME=`hostname | cut -f1 -d.` # no domain part
# Try finding a version of diff that supports the -I option too.
DIFF=diff
for d in `echo $PATH | sed -e 's/:/ /g'` ; do
test -x $d/diff && $d/diff -I XYZ /dev/null /dev/null > /dev/null 2>&1 &&
DIFF=$d/diff && break
done
#=============================
# Run source checkout
#=============================
if [ "$cmd" = "all" -o -n "$cmdcheckout" ]; then
TIMESTAMP "checkout"
# ${BASEDIR}/bin is now updated from git by EveningMaint or DailyMaint
# to avoid updating the scripts in ${BASEDIR}/bin while they are running.
if [ -z "$AUTOGEN" ]; then
# If there is a Makefile in ${CURRENT}, the last test done in it
# has not been distclean'ed. They would interfere with other
# --srcdir build since make considers the files in ${CURRENT}
# take precedence over files in its own build-directory. Run
# a "make distclean" to clean them all out. This is not really
# part of the "checkout" functions but this is the most convenient
# spot to do the distclean. We will also continue the checkout process
# regardless of the return code of distclean.
( cd ${CURRENT}; test -f Makefile && ${MAKE} distclean)
fi
# echo "cmdftp is $cmdftp; ftp_url is $ftp_url"
if [ -n "$cmdftp" ]; then
echo "Get the NetCDF4 source from their ftp server."
echo "Command executed is: 2>&1 wget -N $ftp_url"
cd ${BASEDIR};
WGET_OUTPUT="`2>&1 wget -N $ftp_url`"
errcode=$?
if [ $errcode -ne 0 ]; then
echo $0: "$WGET_OUTPUT" Exiting.
exit $errcode
fi
# echo "Wget output was $WGET_OUTPUT"
if echo "$WGET_OUTPUT" | fgrep 'not retrieving' &> /dev/null
then
echo "Snapshot unchanged"
else
echo "New snapshot downloaded"
EXTRACT
fi
else
SOURCE_CHECKOUT
fi
fi # Do source checkout
#=============================
# Run Test the HDF5 library
#=============================
if [ "$cmd" = "all" -o -n "$cmdtest" -o -n "$cmddiff" ]; then
TIMESTAMP "Run Tests"
# setup if srcdir is used.
if [ -z "$srcdir" ]; then
TESTDIR=${CURRENT}
else
#create TESTDIR if not exist yet
case "$SRCDIRNAME" in
"")
SRCDIRNAME=$HOSTNAME
;;
-*)
SRCDIRNAME="$HOSTNAME$SRCDIRNAME"
;;
esac
TESTDIR=${BASEDIR}/TestDir/${SRCDIRNAME}
test -d ${TESTDIR} || mkdir ${TESTDIR}
# set TESTDIR to use the direct path to the local test directory
# rather than the path through ${BASEDIR}.
cd ${TESTDIR}
TESTDIR=`pwd -P`
cd ${CURRENT}
fi
# Make sure current version exists and is clean
if [ -d ${TESTDIR} ]; then
DISTCLEAN
else
errcode=$?
snapshot=no
exit $errcode
fi
# Compare it with the previous version. Compare only files listed in
# the MANIFEST plus the MANIFEST itself.
if [ -d ${PREVIOUS} ]; then
if [ -z "${AUTOGEN}" ]; then
CURRENTSRC=${CURRENT}
else
CURRENTSRC=${BASEDIR}/current_src
fi
if (${DIFF} -c ${PREVIOUS}/MANIFEST ${CURRENTSRC}/MANIFEST); then
snapshot=no
for src in `grep '^\.' ${CURRENTSRC}/MANIFEST|expand|cut -f1 -d' '`; do
if ${DIFF} -I H5_VERS_RELEASE -I " released on " \
-I " currently under development" \
${PREVIOUS}/$src ${CURRENTSRC}/$src
then
: #continue
else
snapshot=yes
break
fi
done
fi
fi
# if diff is chosen, exit 0 if no significant differences are found.
# otherwise, exit 1. This includes cases of other failures.
if [ -n "$cmddiff" ]; then
if [ $snapshot = no ]; then
exit 0
else
exit 1
fi
fi
#=============================
# Execute command if defined
#=============================
#echo BEFORE EXEC command
#echo EXEC_CMD_ARG=${EXEC_CMD_ARG}
if [ -n "$EXEC_CMD_ARG" ]; then
TIMESTAMP ${EXEC_CMD_ARG}
TESTDIR=${BASEDIR}/TestDir/${SRCDIRNAME}
test -d ${TESTDIR} || mkdir ${TESTDIR}
if cd ${TESTDIR}; then
# clean up the directory before executing the command
# Do we need to clean first?
# rm -rf *
#
# If EXEC_CMD_ARG starts with a '/', it has an absolute path, else it is
# relative to the BASEDIR.
case "$EXEC_CMD_ARG" in
/*)
${EXEC_CMD_ARG}
;;
*)
${BASEDIR}/${EXEC_CMD_ARG}
;;
esac
errcode=$?
else
echo "${TESTDIR} not accessible"
errcode=1
fi
# exit snapshot since nothing else to do, for now.
exit $errcode
fi
# Build, run tests and install procedures
if [ "$snapshot" = "yes" ] && [ "$NOMAKE" != "yes" ]; then
FAIL_SECTION=""
if [ -f ${TESTDIR}/failsection ]; then
rm ${TESTDIR}/failsection
fi
if (cd ${TESTDIR} && \
TIMESTAMP "configure" && echo "configure" > ${TESTDIR}/failsection && \
RUNCONFIGURE && \
sleep 2 && \
TIMESTAMP "make" && echo "make" > ${TESTDIR}/failsection && \
${MAKE} && DISKUSAGE && \
TIMESTAMP ${CHECKVAL} && echo "make check" > ${TESTDIR}/failsection && \
${MAKE} ${CHECKVAL} && DISKUSAGE && \
TIMESTAMP "install" && echo "make install" > ${TESTDIR}/failsection && \
${MAKE} install && DISKUSAGE && \
TIMESTAMP "check-install" && echo "make check-install" > ${TESTDIR}/failsection && \
CHECKINSTALL && DISKUSAGE && \
TIMESTAMP "uninstall" && echo "make uninstall" > ${TESTDIR}/failsection && \
${MAKE} uninstall && DISKUSAGE); then
:
else
errcode=$?
FAIL_SECTION=`cat ${TESTDIR}/failsection`
echo "Failed running ${FAIL_SECTION}"
snapshot=no
exit $errcode
fi
elif [ $CPSRC ]; then
cp -pr ${CURRENT}/* ${TESTDIR}
else
cmdclean=""
fi
fi # Test the HDF5 library
# Run external test if configured
#=============================
#=============================
#if [ -d "$CURRENT" ]; then
if [ "$EXTTEST" != "" ]; then
TIMESTAMP ${EXTTEST}
TESTDIR=${BASEDIR}/TestDir/${SRCDIRNAME}
test -d ${TESTDIR} || mkdir ${TESTDIR}
cd ${TESTDIR}
sleep 1
TIMESTAMP "`pwd`"
ls
${BASEDIR}/${EXTTEST}
errcode=$?
exit $errcode
fi
#=============================
# Run deployment if requested.
#=============================
if [ -n "$DEPLOYDIRNAME" ]; then
# The daily tests deploy to .../hdf5/... or .../hdf4/... except on cobalt where the
# deploy directory is in .../HDF5/... lc will take care of this. If hdf4 or hdf5
# either upper or lower case isn't in the path, RELEASE.txt won't be found unless
# it is in $CURRENT.
POS4=`perl -e "print index(lc(\"${deploydir}/${DEPLOYDIRNAME}\"), 'hdf4')"`
POS5=`perl -e "print index(lc(\"${deploydir}/${DEPLOYDIRNAME}\"), 'hdf5')"`
if [ "${POS4}" -ge "0" ]; then
RELEASE_TXT_LOC="release_notes"
elif [ "${POS5}" -ge "0" ]; then
RELEASE_TXT_LOC="release_docs"
else
RELEASE_TXT_LOC=""
fi
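# A worked example with a made-up path: deploying to
# /mnt/scr/snapshots/hdf5/deploy gives POS5=19 (the offset of "hdf5"), so
# RELEASE_TXT_LOC becomes "release_docs"; POS4 is -1 there, and a path
# containing neither string leaves RELEASE_TXT_LOC empty.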
if [ "$snapshot" = "yes" ]; then
TIMESTAMP "deploy"
if (cd ${TESTDIR} &&
${BASEDIR}/bin/deploy ${deploydir}/${DEPLOYDIRNAME} && \
TIMESTAMP "clean" && \
${MAKE} clean && \
TIMESTAMP "check-install prefix=${deploydir}/${DEPLOYDIRNAME}" && \
CHECKINSTALL prefix=${deploydir}/${DEPLOYDIRNAME}); then
cd ${CURRENT}
cp ${RELEASE_TXT_LOC}/RELEASE.txt ${deploydir}/${DEPLOYDIRNAME}
cp COPYING ${deploydir}/${DEPLOYDIRNAME}
#: #continue
else
errcode=$?
exit $errcode
fi
fi
fi # Deploy
#=============================
# Run Release snapshot, update version, and commit to source repository
#=============================
if [ "$cmd" = "all" -o -n "$cmdrel" ]; then
if [ "$snapshot" = "yes" ]; then
TIMESTAMP "release"
DISTCLEAN
(
# Turn on exit on error in the sub-shell so that it does not
# commit source if errors encounter here.
set -e
if [ "$cmdrel" = "autogen-release" ]; then
cd ${BASEDIR}/current_src
else
cd ${CURRENT}
fi
if [ "$HDFREPOS" = "hdf4" ]; then
RELEASE_VERSION="`perl bin/h4vers -v`"
echo "Making snapshot release ($RELEASE_VERSION) to ${ReleaseDir}..."
bin/release -d $ReleaseDir $METHODS
perl bin/h4vers -i
elif [ "$HDFREPOS" = "hdf5" ]; then
RELEASE_VERSION="`perl bin/h5vers -v`"
echo "Making snapshot release ($RELEASE_VERSION) to ${ReleaseDir}..."
if [ "${DOCVERSION}" ]; then
bin/release -d $ReleaseDir --docver ${DOCVERSION} $METHODS
else
bin/release -d $ReleaseDir $METHODS
fi
perl bin/h5vers -i
else
echo "need real release steps. For now, only move current version to previous"
fi
COMMITSNAPSHOT
)
errcode=$?
fi
# Replace the previous version with the current version.
# Should check the errcode of the release process, but there
# are other failures after the release is done (e.g. h5vers or git failures)
# that should still allow the replacement to occur.
rm -rf ${PREVIOUS}
mv ${CURRENT} ${PREVIOUS}
fi #Release snapshot
#=============================
# Clean the test area. Default is no clean.
#=============================
if [ -n "$cmdclean" ] && [ "$NOMAKE" != "yes" ]; then
TIMESTAMP "clean"
# setup if srcdir is used.
if [ -z "$srcdir" ]; then
TESTDIR=${CURRENT}
else
case "$SRCDIRNAME" in
"")
SRCDIRNAME=$HOSTNAME
;;
-*)
SRCDIRNAME="$HOSTNAME$SRCDIRNAME"
;;
esac
TESTDIR=${BASEDIR}/TestDir/${SRCDIRNAME}
fi
# clean it
if (cd ${TESTDIR} && ${MAKE} $cmdclean ) then
:
else
errcode=$?
snapshot=no
exit $errcode
fi
fi # Clean the Test directory
exit $errcode

19
bin/snapshot_version Normal file
View File

@@ -0,0 +1,19 @@
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# default version for snapshot test
# H5VERSION matches a source version symbolic name. Tests will use the
# latest revision of that branch. If set to "hdf5", it uses the main
# version.
# H5VER tells runtest which version to run
H5VERSION=hdf5

View File

@@ -1,12 +1,13 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#

129
bin/timekeeper Executable file
View File

@@ -0,0 +1,129 @@
#!/bin/sh
##
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
##
# Acts as a time keeper for the remote daily test processes launched by runtest.
# It sleeps for a certain time and then wakes up to hang up those processes
# that are still around, assuming they have run too long.
#
# Programmer: Albert Cheng
# Created Date: 2004/12/23
# variable initialization
waitminutes=300 # default to 5 hours == 300 minutes
debugtimelimit=
debugflag= # no debug
# Function definitions
#
# PRINTMSG
# Print a one line message left justified in a field of 70 characters
# without newline. More output for this line later.
#
PRINTMSG() {
SPACES=" "
echo "$* $SPACES" | cut -c1-70 | tr -d '\012'
}
USAGE()
{
echo "Usage: %0 [-h] [-debug] [<time-limit>]"
echo " Run timekeeper with <time-limit> minutes, default is $waitminutes."
echo " If <time-limit> is in the form of HH:MM, it means wait till then."
echo " -h print this help page"
echo " -debug run debug mode"
}
ParseOption()
{
if [ $# -gt 0 -a "$1" = -h ]; then
shift
USAGE
exit 0
fi
if [ $# -gt 0 -a "$1" = -debug ]; then
shift
debugflag=yes
waitminutes=1 # use shorter time for debug
fi
if [ $# -gt 0 ]; then
targettime=$1
shift
# find out it is minutes to wait or HH:MM to wake up
case $targettime in
*:*) # HH:MM
currenttime=`date +%H:%M`
currenthour=`echo $currenttime | cut -f1 -d:`
currentminute=`echo $currenttime | cut -f2 -d:`
targethour=`echo $targettime | cut -f1 -d:`
targetminute=`echo $targettime | cut -f2 -d:`
waitminutes=`expr \( $targethour - $currenthour \) \* 60 + $targetminute - $currentminute`
if test $waitminutes -le 0; then
# target time is in tomorrow, add 1 day of minutes
waitminutes=`expr 24 \* 60 + $waitminutes`
fi
;;
*)
waitminutes=$targettime
;;
esac
fi
}
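# A worked example of the HH:MM arithmetic above (times are made up): at
# 23:50 with a target of 00:20, waitminutes = (0 - 23) * 60 + 20 - 50 = -1410;
# that is <= 0, so the target is tomorrow and 24 * 60 is added, giving 30.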
# Main body
echo "Timekeeper started at `date`"
ParseOption $*
waitperiod=`expr $waitminutes \* 60` # convert to seconds
if [ -z "$debugflag" ]; then
# normal time keeping mode
# sleep first
echo Timekeeper sleeping for $waitperiod seconds
sleep $waitperiod
# Look for any processes still around
echo "Timekeeper woke up at `date`, looking for processes to terminate..."
for x in PID.* ; do
if [ -f $x ]; then
pid=`cat $x`
# check if process is still around
if test X$pid \!= X && ps -p $pid > /dev/null; then
echo "terminating process $x ($pid)"
kill -HUP $pid
echo "Remote shell command ended. But some processes might still be"
echo "running in the remote machine. Login there to verify."
fi
fi
done
else
# Debug mode. Launch two rsh process, one ends before, the other after
# waitperiod. Must launch timekeeper from a subshell, else the debug
# will wait for it too.
myhostname=`hostname`
( $0 $waitminutes &)
debugtimelimit=`expr $waitperiod - 10`
echo rsh $myhostname sleep $debugtimelimit
rsh $myhostname sleep $debugtimelimit &
echo $! > PID.before
debugtimelimit=`expr $waitperiod + 10`
echo rsh $myhostname sleep $debugtimelimit
rsh $myhostname sleep $debugtimelimit &
echo $! > PID.after
wait
rm PID.before PID.after
fi
echo "Timekeeper ended at `date`"

422
bin/trace
View File

@@ -1,17 +1,17 @@
#!/usr/bin/env perl
#!/usr/bin/perl -w
##
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
##
require 5.003;
use warnings;
$Source = "";
##############################################################################
@@ -27,106 +27,56 @@ $Source = "";
# usually the same as the package name.
#
%TypeString = ("haddr_t" => "a",
"H5A_info_t" => "Ai",
"H5A_operator1_t" => "Ao",
"H5A_operator2_t" => "AO",
"hbool_t" => "b",
"H5AC_cache_config_t" => "Cc",
"H5AC_cache_image_config_t" => "CC",
"double" => "d",
"H5D_alloc_time_t" => "Da",
"H5D_append_cb_t" => "DA",
"H5FD_mpio_collective_opt_t" => "Dc",
"H5D_fill_time_t" => "Df",
"H5D_fill_value_t" => "DF",
"H5D_gather_func_t" => "Dg",
"H5FD_mpio_chunk_opt_t" => "Dh",
"H5D_mpio_actual_io_mode_t" => "Di",
"H5FD_file_image_callbacks_t" => "DI",
"H5D_chunk_index_t" => "Dk",
"H5D_layout_t" => "Dl",
"H5D_mpio_no_collective_cause_t" => "Dn",
"H5D_mpio_actual_chunk_opt_mode_t" => "Do",
"H5D_operator_t" => "DO",
"H5D_space_status_t" => "Ds",
"H5D_scatter_func_t" => "DS",
"H5FD_mpio_xfer_t" => "Dt",
"H5D_vds_view_t" => "Dv",
"H5FD_class_value_t" => "DV",
"H5D_chunk_iter_op_t" => "x",
"H5FD_mpio_xfer_t" => "Dt",
"herr_t" => "e",
"H5E_auto1_t" => "Ea",
"H5E_auto2_t" => "EA",
"H5ES_event_complete_func_t" => "EC",
"H5E_direction_t" => "Ed",
"H5E_error_t" => "Ee",
"H5ES_event_insert_func_t" => "EI",
"H5ES_status_t" => "Es",
"H5E_type_t" => "Et",
"H5FD_class_t" => "FC",
"H5F_close_degree_t" => "Fd",
"H5F_fspace_strategy_t" => "Ff",
"H5F_flush_cb_t" => "FF",
"H5F_info2_t" => "FI",
"H5F_file_space_type_t" => "Ff",
"H5F_mem_t" => "Fm",
"H5F_scope_t" => "Fs",
"H5F_file_space_type_t" => "Ft",
"H5F_fspace_type_t" => "Ft",
"H5F_libver_t" => "Fv",
"H5G_iterate_t" => "Gi",
"H5G_obj_t" => "Go",
"H5G_stat_t" => "Gs",
"hsize_t" => "h",
"H5_atclose_func_t" => "Hc",
"hssize_t" => "Hs",
"H5E_major_t" => "i", # H5E_major_t is typedef'd to hid_t
"H5E_minor_t" => "i", # H5E_minor_t is typedef'd to hid_t
"hid_t" => "i",
"H5I_future_discard_func_t" => "ID",
"H5I_free_t" => "If",
"H5_index_t" => "Ii",
"H5I_iterate_func_t" => "II",
"H5E_major_t" => "i",
"H5E_minor_t" => "i",
"H5_iter_order_t" => "Io",
"H5FD_subfiling_ioc_select_t" => "IO",
"H5I_future_realize_func_t" => "IR",
"H5_index_t" => "Ii",
"hid_t" => "i",
"int" => "Is",
"int32_t" => "Is",
"H5I_search_func_t" => "IS",
"H5I_type_t" => "It",
"unsigned" => "Iu",
"unsigned int" => "Iu",
"uint32_t" => "Iu",
"H5O_token_t" => "k",
"H5L_iterate1_t" => "Li",
"H5L_iterate2_t" => "LI",
"H5I_type_t" => "It",
"H5G_link_t" => "Ll", #Same as H5L_type_t now
"H5L_type_t" => "Ll",
"H5L_elink_traverse_t" => "Lt",
"H5MM_allocate_t" => "Ma",
"MPI_Comm" => "Mc",
"H5MM_free_t" => "Mf",
"MPI_Info" => "Mi",
"H5M_iterate_t" => 'MI',
"H5FD_mem_t" => "Mt",
"off_t" => "o",
"H5O_iterate1_t" => "Oi",
"H5O_iterate2_t" => "OI",
"H5O_mcdt_search_cb_t" => "Os",
"H5O_type_t" => "Ot",
"H5P_class_t" => "p",
"H5P_cls_create_func_t" => "Pc",
"H5P_prp_create_func_t" => "PC",
"H5P_prp_delete_func_t" => "PD",
"H5P_prp_get_func_t" => "PG",
"H5P_iterate_t" => "Pi",
"H5P_cls_close_func_t" => "Pl",
"H5P_prp_close_func_t" => "PL",
"H5P_prp_compare_func_t" => "PM",
"H5P_cls_copy_func_t" => "Po",
"H5P_prp_copy_func_t" => "PO",
"H5P_prp_set_func_t" => "PS",
"hdset_reg_ref_t" => "Rd",
"hobj_ref_t" => "Ro",
"H5R_ref_t" => "Rr",
"hobj_ref_t" => "r",
"H5R_type_t" => "Rt",
"char" => "s",
"unsigned char" => "s",
@@ -135,86 +85,79 @@ $Source = "";
"H5S_sel_type" => "St",
"htri_t" => "t",
"H5T_cset_t", => "Tc",
"H5T_conv_t" => "TC",
"H5T_direction_t", => "Td",
"H5T_pers_t" => "Te",
"H5T_conv_except_func_t" => "TE",
"H5T_norm_t" => "Tn",
"H5T_order_t" => "To",
"H5T_pad_t" => "Tp",
"H5T_pers_t" => "Te",
"H5T_sign_t" => "Ts",
"H5T_class_t" => "Tt",
"H5T_str_t" => "Tz",
"unsigned long" => "Ul",
"unsigned long long" => "UL",
"uint64_t" => "UL",
"H5VL_attr_get_t" => "Va",
"H5VL_blob_optional_t" => "VA",
"H5VL_attr_specific_t" => "Vb",
"H5VL_blob_specific_t" => "VB",
"H5VL_dataset_get_t" => "Vc",
"H5VL_class_value_t" => "VC",
"H5VL_dataset_specific_t" => "Vd",
"H5VL_datatype_get_t" => "Ve",
"H5VL_datatype_specific_t" => "Vf",
"H5VL_file_get_t" => "Vg",
"H5VL_file_specific_t" => "Vh",
"H5VL_group_get_t" => "Vi",
"H5VL_group_specific_t" => "Vj",
"H5VL_link_create_t" => "Vk",
"H5VL_link_get_t" => "Vl",
"H5VL_get_conn_lvl_t" => "VL",
"H5VL_link_specific_t" => "Vm",
"H5VL_object_get_t" => "Vn",
"H5VL_request_notify_t" => "VN",
"H5VL_object_specific_t" => "Vo",
"H5VL_request_specific_t" => "Vr",
"H5VL_attr_optional_t" => "Vs",
"H5VL_subclass_t" => "VS",
"H5VL_dataset_optional_t" => "Vt",
"H5VL_datatype_optional_t" => "Vu",
"H5VL_file_optional_t" => "Vv",
"H5VL_group_optional_t" => "Vw",
"H5VL_link_optional_t" => "Vx",
"H5VL_object_optional_t" => "Vy",
"H5VL_request_optional_t" => "Vz",
"va_list" => "x",
"void" => "x",
"FILE" => "x",
"H5A_operator_t" => "x",
"H5A_operator1_t" => "x",
"H5A_operator2_t" => "x",
"H5A_info_t" => "x",
"H5AC_cache_config_t" => "x",
"H5AC_cache_image_config_t" => "x",
"H5D_append_cb_t" => "x",
"H5D_gather_func_t" => "x",
"H5D_operator_t" => "x",
"H5D_scatter_func_t" => "x",
"H5E_auto_t" => "x",
"H5E_auto1_t" => "x",
"H5E_auto2_t" => "x",
"H5E_walk_t" => "x",
"H5E_walk1_t" => "x",
"H5E_walk2_t" => "x",
"H5F_flush_cb_t" => "x",
"H5F_info1_t" => "x",
"H5F_info2_t" => "x",
"H5F_retry_info_t" => "x",
"H5FD_t" => "x",
"H5FD_class_t" => "x",
"H5FD_stream_fapl_t" => "x",
"H5FD_file_image_callbacks_t" => "x",
"H5G_iterate_t" => "x",
"H5G_info_t" => "x",
"H5I_free_t" => "x",
"H5I_search_func_t" => "x",
"H5L_class_t" => "x",
"H5L_elink_traverse_t" => "x",
"H5L_iterate_t" => "x",
"H5MM_allocate_t" => "x",
"H5MM_free_t" => "x",
"H5O_info_t" => "x",
"H5O_iterate_t" => "x",
"H5O_mcdt_search_cb_t" => "x",
"H5P_cls_create_func_t" => "x",
"H5P_cls_copy_func_t" => "x",
"H5P_cls_close_func_t" => "x",
"H5P_iterate_t" => "x",
"H5P_prp_create_func_t" => "x",
"H5P_prp_copy_func_t" => "x",
"H5P_prp_close_func_t" => "x",
"H5P_prp_delete_func_t" => "x",
"H5P_prp_get_func_t" => "x",
"H5P_prp_set_func_t" => "x",
"H5P_prp_compare_func_t" => "x",
"H5T_cdata_t" => "x",
"H5T_conv_t" => "x",
"H5T_conv_except_func_t" => "x",
"H5Z_func_t" => "x",
"H5Z_filter_func_t" => "x",
"va_list" => "x",
"size_t" => "z",
"H5Z_SO_scale_type_t" => "Za",
"H5Z_class_t" => "Zc",
"H5Z_EDC_t" => "Ze",
"H5Z_filter_t" => "Zf",
"H5Z_filter_func_t" => "ZF",
"ssize_t" => "Zs",
# Types below must be defined here, as they appear in function arguments,
# but they are not yet supported in the H5_trace_args() routine. If
# they are used as an actual parameter type (and not just as a pointer
# to the type), they must have a "real" abbreviation added (like the ones
# above), moved to the section of entries above, and support for displaying
# the type must be added to H5_trace_args(). (A sketch of that promotion
# follows this table.)
"H5ES_err_info_t" => "#",
"H5FD_t" => "#",
"H5FD_hdfs_fapl_t" => "#",
"H5FD_mirror_fapl_t" => "#",
"H5FD_onion_fapl_t" => "#",
"H5FD_ros3_fapl_t" => "#",
"H5FD_splitter_vfd_config_t" => "#",
"H5L_class_t" => "#",
"H5VL_class_t" => "#",
"H5VL_loc_params_t" => "#",
"H5VL_request_status_t" => "#",
);
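# A minimal sketch of the promotion described in the comment above; the
# type name "H5X_example_t" and the code "Xe" are hypothetical, not part
# of HDF5:
#
#     "H5X_example_t" => "Xe",    # moved up into the main section
#
# plus a matching "Xe" case added to H5_trace_args() (in src/H5trace.c)
# so that by-value arguments of this type can be displayed.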
##############################################################################
# Maximum length of H5TRACE macro line
# If the ColumnLimit in .clang-format is changed, this value will need to be updated
#
my $max_trace_macro_line_len = 110;
##############################################################################
# Print an error message.
#
@@ -245,15 +188,8 @@ sub argstring ($$$) {
# Normalize the data type by removing redundant white space,
# certain type qualifiers, and indirection.
$atype =~ s/^\bconst\b//; # Leading const
$atype =~ s/\s*const\s*//; # const after type, possibly in the middle of '*'s
$atype =~ s/^\bstatic\b//;
$atype =~ s/^\bconst\b//;
$atype =~ s/\bH5_ATTR_UNUSED\b//g;
$atype =~ s/\bH5_ATTR_DEPRECATED_USED\b//g;
$atype =~ s/\bH5_ATTR_NDEBUG_UNUSED\b//g;
$atype =~ s/\bH5_ATTR_DEBUG_API_USED\b//g;
$atype =~ s/\bH5_ATTR_PARALLEL_UNUSED\b//g;
$atype =~ s/\bH5_ATTR_PARALLEL_USED\b//g;
$atype =~ s/\s+/ /g;
$ptr = length $1 if $atype =~ s/(\*+)//;
$atype =~ s/^\s+//;
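# Illustrative example of the normalization above (the input declaration
# is hypothetical): a type written as
#     "const H5_ATTR_UNUSED hid_t *"
# comes out as "hid_t", with $ptr set to 1 from the captured '*', ready
# for the %TypeString lookup below.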
@@ -271,65 +207,53 @@ sub argstring ($$$) {
--$ptr;
$tstr = $TypeString{"$atype*"};
} elsif (!exists $TypeString{$atype}) {
# Defer throwing error until type is actually used
# errmesg $file, $func, "untraceable type \`$atype", '*'x$ptr, "\'";
errmesg $file, $func, "untraceable type \`$atype", '*'x$ptr, "\'";
} else {
$tstr = $TypeString{$atype};
}
return ("*" x $ptr) . ($array ? "[$array]" : "") . $tstr;
return ("*" x $ptr) . ($array?"[$array]":"") . $tstr;
}
##############################################################################
# Given information about an API function, rewrite that function with
# updated tracing information.
#
my $file_api = 0;
my $file_args = 0;
my $total_api = 0;
my $total_args = 0;
sub rewrite_func ($$$$$) {
my ($file, $type, $name, $args, $body) = @_;
my ($arg, $trace, $argtrace);
my (@arg_name, @arg_str, @arg_type);
my ($arg,$trace);
my (@arg_name, @arg_str);
local $_;
# Keep copy of original arguments
my $orig_args = $args;
# Parse return value
my $rettype = argstring $file, $name, $type;
goto error if $rettype =~ /!/;
# Parse arguments
if ($args eq "void") {
$trace = "H5TRACE0(\"$rettype\", \"\");\n";
$argtrace = "H5ARG_TRACE0(\"\")";
$trace = "H5TRACE0(\"$rettype\",\"\");\n";
} else {
# Split arguments. First convert `/*in,out*/' to get rid of the
# comma and remove lines beginning with a '#', then split the arguments
# on commas.
$args =~ s/(\/\*\s*in),\s*(out\s*\*\/)/$1_$2/g; # Get rid of comma in 'in,out'
$args =~ s/H5FL_TRACK_PARAMS//g; # Remove free list macro
$args =~ s/\n#.*?\n/\n/g; # Remove lines beginning with '#'
# comma, then split the arguments on commas.
$args =~ s/(\/\*\s*in),\s*(out\s*\*\/)/$1_$2/g;
my @args = split /,[\s\n]*/, $args;
my $argno = 0;
my %names;
for $arg (@args) {
if($arg=~/\w*\.{3}\w*/){ # Skip "..." for varargs parameter
if($arg=~/\w*\.{3}\w*/){
next;
}
unless ($arg=~/^((\s*[a-z_A-Z](\w|\*)*\s+)+(\s*\*\s*|\s*const\s*|\s*volatile\s*)*)
unless ($arg=~/^(([a-z_A-Z]\w*\s+)+\**)
([a-z_A-Z]\w*)(\[.*?\])?
(\s*\/\*\s*(in|out|in_out)\s*\*\/)?\s*$/x) {
errmesg $file, $name, "unable to parse \`$arg\'";
goto error;
} else {
my ($atype, $aname, $array, $adir) = ($1, $5, $6, $8);
my ($atype, $aname, $array, $adir) = ($1, $3, $4, $6);
$names{$aname} = $argno++;
$adir ||= "in";
$atype =~ s/\s+$//;
push @arg_name, $aname;
push @arg_type, $atype;
if ($adir eq "out") {
push @arg_str, "x";
@@ -350,184 +274,71 @@ sub rewrite_func ($$$$$) {
}
}
}
# Compose the trace macro
$trace = "H5TRACE" . scalar(@arg_str) . "(\"$rettype\", \"";
$argtrace = "H5ARG_TRACE" . scalar(@arg_str) . "(__func__, \"";
$trace .= join("", @arg_str) . "\"";
$argtrace .= join("", @arg_str) . "\"";
# Add 4 for indenting the line
my $len = 4 + length($trace);
for my $i (0 .. $#arg_name) {
# Handle wrapping
# Be VERY careful here! clang-format and this script MUST agree
# on which lines get wrapped or there will be churn as each tries
# to undo the other's output.
#
# TWO cases must be handled:
# 1) The argument is the last one and ');' will be appended
# 2) The argument is NOT the last one and ',' will be appended
#
# NB: clang-format does NOT consider terminal newlines when
# counting columns for the ColumnLimit
#
# The extra '2' added after $len includes the ', ' that would be
# added BEFORE the argument.
#
my $adjust = ($i + 1 == scalar(@arg_str)) ? 2 : 1;
my $len_if_added = $len + 2 + length($arg_name[$i]) + $adjust;
# Wrap lines that will be longer than the limit
if ($len_if_added > $max_trace_macro_line_len) {
# Wrap line, with indentation
$trace .= ",\n ";
$len = 13; # Set to 13, for indentation
# Indent an extra space to account for extra digit in 'H5TRACE' macro
if (scalar(@arg_str) >= 10) {
$trace .= " ";
$len++;
}
my $len = 4 + length $trace;
for (@arg_name) {
if ($len + length >= 77) {
$trace .= ",\n $_";
$len = 13 + length;
} else {
$trace .= ", ";
$len += 2; # Add 2, for ', '
$trace .= ", $_";
$len += 1 + length;
}
# Append argument
$trace .= "$arg_name[$i]";
$argtrace .= ", $arg_name[$i]";
# Add length of appended argument name
$len += length($arg_name[$i]);
}
# Append final ');' for macro
$trace .= ");\n";
$argtrace .= ")";
}
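# For illustration, with a hypothetical 3-argument API routine that
# returns herr_t (assumed to map to "e" in %TypeString), the composed
# macros look like:
#     H5TRACE3("e", "i*sIi", loc_id, name, idx_type);
#     H5ARG_TRACE3(__func__, "i*sIi", loc_id, name, idx_type)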
# Check for API / non-API routine name
if( $name =~ /H5[A-Z]{0,2}[a-z].*/) {
# The H5TRACE() statement, for API routines
if ($body =~ /\/\*[ \t]*NO[ \t]*TRACE[ \t]*\*\//) {
# Ignored due to NO TRACE comment.
} else {
# Check for known, but unsupported type
if ( $trace =~ /(^#)|([^*]#)/ ) {
# Check for unsupported return type
if ( $type =~ /(^#)|([^*]#)/ ) {
errmesg $file, $name, "unsupported type in return type\nAdd to TypeString hash in trace script and update H5_trace_args()";
print "type = '$type'\n";
}
# Check for unsupported argument type
$index = 0;
for (@arg_str) {
if ( $_ =~ /(^#)|([^*]#)/ ) {
errmesg $file, $name, "unsupported type in args\nAdd to TypeString hash in trace script and update H5_trace_args()";
print "type = $arg_type[$index]\n";
}
$index++;
}
goto error;
}
# Check for unknown (and therefore unsupported) type
if ( $trace =~ /(^!)|([^*]!)/ ) {
# Check for unsupported return type
if ( $type =~ /(^!)|([^*]!)/ ) {
errmesg $file, $name, "unknown type in return type\nAdd to TypeString hash in trace script and also update H5_trace_args() if used by value";
print "type = '$type'\n";
}
# Check for unsupported argument type
$index = 0;
for (@arg_str) {
if ( $_ =~ /(^!)|([^*]!)/ ) {
errmesg $file, $name, "unknown type in args\nAdd to TypeString hash in trace script and also update H5_trace_args() if used by value";
print "type = $arg_type[$index]\n";
}
$index++;
}
goto error;
}
if ($body =~ s/((\n[ \t]*)H5TRACE\d+\s*\(.*?\);)\n/"$2$trace"/es) {
# Replaced an H5TRACE macro.
} elsif ($body=~s/((\n[ \t]*)FUNC_ENTER\w*[ \t]*(\(.*?\))?;??)\n/"$1$2$trace"/es) {
# Added an H5TRACE macro after a FUNC_ENTER macro.
} else {
errmesg $file, $name, "unable to insert tracing information";
print "body = ", $body, "\n";
goto error;
}
}
#Increment # of API routines modified
$file_api++;
}
# Check for H5ARG_TRACE macros in non-API routines
if ( $body =~ /H5ARG_TRACE/ ) {
# Check for untraceable type (deferred until $argtrace used)
if ( $argtrace =~ /(^!)|([^*]!)/ ) {
errmesg $file, $name, "untraceable type in args";
print "args = '$orig_args'\n";
goto error;
}
# Replace / update H5ARG_TRACE macro.
$body =~ s/(H5ARG_TRACE(\d+\s*\(.*?\))?)/"$argtrace"/esg;
#Increment # of non-API routines modified
$file_args++;
}
goto error if grep {/!/} @arg_str;
# The H5TRACE() statement
if ($body =~ /\/\*[ \t]*NO[ \t]*TRACE[ \t]*\*\//) {
# Ignored due to NO TRACE comment.
} elsif ($body =~ s/((\n[ \t]*)H5TRACE\d+\s*\(.*?\);)\n/"$2$trace"/es) {
# Replaced an H5TRACE macro.
} elsif ($body=~s/((\n[ \t]*)FUNC_ENTER\w*\s*(\(.*?\))?;??)\n/"$1$2$trace"/es) {
# Added an H5TRACE macro after a FUNC_ENTER macro.
} else {
errmesg $file, $name, "unable to insert tracing information";
print "body = ", $body, "\n";
goto error;
}
error:
return "\n$type\n$name($orig_args)\n$body";
return "\n$type\n$name($args)\n$body";
}
##############################################################################
# Process each source file, rewriting API functions with updated
# tracing information.
#
my $total_api = 0;
for $file (@ARGV) {
$file_api = 0;
$file_args = 0;
# Ignore some files that do not need tracing macros
unless ($file eq "H5FDmulti.c" or $file eq "src/H5FDmulti.c" or $file eq "H5FDstdio.c" or $file eq "src/H5FDstdio.c" or $file eq "src/H5TS.c" or $file eq "src/H5FDperform.c") {
unless ($file eq "H5FDmulti.c" or $file eq "src/H5FDmulti.c" or $file eq "H5FDstdio.c" or $file eq "src/H5FDstdio.c") {
# Snarf up the entire file
open SOURCE, $file or die "$file: $!\n";
$Source = join "", <SOURCE>;
close SOURCE;
# Make a copy of the original data
my $original = $Source;
# Make modifications
$Source =~ s/\n([A-Za-z]\w*(\s+[A-Za-z]\w*)*\s*\**)\n #type
(H5[A-Z]{0,2}_?[a-zA-Z0-9_]\w*) #name
\s*\((.*?)\)\s* #args
(\{.*?\n\}[^\n]*) #body
/rewrite_func($file,$1,$3,$4,$5)/segx;
my $original = $Source;
my $napi = $Source =~ s/\n([A-Za-z]\w*(\s+[A-Za-z]\w*)*\s*\**)\n #type
(H5[A-Z]{0,2}[^_A-Z0-9]\w*) #name
\s*\((.*?)\)\s* #args
(\{.*?\n\}[^\n]*) #body
/rewrite_func($file,$1,$3,$4,$5)/segx;
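# A sketch of what the pattern above matches (hypothetical routine;
# note the type on its own line and the name starting the next line):
#     herr_t
#     H5Xexample(hid_t loc_id, const char *name)
#     {
#         ...
#     }
# captured as $1 = type, $3 = name, $4 = args, $5 = body.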
$total_api += $napi;
# If the source changed then print out the new version
if ($original ne $Source) {
printf "%s: instrumented %d API function%s and %d argument list%s\n",
$file, $file_api, (1 == $file_api ? "" : "s"),
$file_args, (1 == $file_args ? "" : "s");
printf "%s: instrumented %d API function%s\n",
$file, $napi, 1==$napi?"":"s";
rename $file, "$file~" or die "unable to make backup";
open SOURCE, ">$file" or die "unable to modify source";
print SOURCE $Source;
close SOURCE;
$total_api += $file_api;
$total_args += $file_args;
}
}
}
@@ -539,9 +350,6 @@ if ($found_errors eq 1) {
printf "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n";
exit 1;
} else {
printf "Finished processing HDF5 API calls:\n";
printf "\tinstrumented %d API function%s and %d argument list%s\n",
$total_api, (1 == $total_api ? "" : "s"),
$total_args, (1 == $total_args ? "" : "s");
printf "Finished processing HDF5 API calls\n";
}


@@ -1,549 +0,0 @@
#!/usr/bin/env perl
require 5.003;
use warnings;
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Quincey Koziol
# 9 Aug 2013
#
# Purpose: Given an input file containing the output from a build of the
# library, gather the file names and line numbers, alias
# identical types of warnings together into a single bin, and
# report the number of warnings for each type of warning, for each
# file, and in total.
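# For a feel of the report, the per-warning histogram printed at the
# end looks like this (the counts and warning texts are made up):
#     [ 0]  142 (12) - unused parameter '-'
#     [ 1]   37 ( 4) - comparison between '-' and '-'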
# Perl modules/settings
use strict;
use Getopt::Std;
# Global variables, for accumulating information
my $totalcount = 0;
my $notecount = 0;
my $dupcount = 0;
my %warn_count = ();
my $warncount;
my %warn_file = ();
my %warn_file_line = ();
my %warn_file_line_offset = ();
my %file_count = ();
my $filecount;
my $ignorecount = 0;
my @ignorenames;
my %ignored_files = ();
my %warn_file_indices = ();
my %file_warn_indices = ();
my @warn_match_strings;
my @file_match_strings;
my %file_warn = ();
my %file_warn_line = ();
my $current_warning = 0;
my $current_file = 0;
my $warn_index;
my $genericize = 1;
# Info about last name / line / offset for file
my $last_c_name;
my $last_fort_name;
my $last_fort_line;
my $last_fort_offset;
# Display usage
sub do_help {
print "Usage: 'warnhist [-h, --help] [-t <prefix>] [-w <n>] [-W] [-f <n>] [-F] [-s <warning string list>] [-S <file string list] [-l] [-u] [-i <name list>] [file]'\n";
print "\t-h, --help\tDisplay this usage\n";
print "\t-t <prefix>\tTrim pathname prefix from filenames, <prefix>\n";
print "\t-w <n>\tDisplay files for a given warning index list, <n>\n";
print "\t\t<n> can be a single value, a range, or a comma separated list\n";
print "\t\tFor example: '0' or '0,4' or '8-10' or '0,2-4,8-10,13'\n";
print "\t-W\tDisplay files for all warnings\n";
print "\t-f <n>\tDisplay warnings for a given file index list, <n>\n";
print "\t\t<n> can be a single value, a range, or a comma separated list\n";
print "\t\tFor example: '0' or '0,4' or '8-10' or '0,2-4,8-10,13'\n";
print "\t-F\tDisplay warnings for all files\n";
print "\t-s <warning string list>\tDisplay files for warnings which contain a string, <warning string list>\n";
print "\t\t<warning string list> is a comma separated list, with no spaces\n";
print "\t\tFor example: 'Wunused-dummy-argument' or 'Wunused-dummy-argument,Wunused-variable'\n";
print "\t-S <file string list>\tDisplay warnings for files which contain a string, <file string list>\n";
print "\t\t<file string list> is a comma separated list, with no spaces\n";
print "\t\tFor example: 'H5Fint' or 'H5Fint,H5Gnode'\n";
print "\t-l\tDisplay line numbers for file/warning\n";
print "\t-u\tLeave 'unique' types in warnings, instead of genericizing them\n";
print "\t-i <name list>\tIgnore named files, <name list>\n";
print "\t\t<name list> is a comma separated list, with no spaces\n";
print "\t\tFor example: 'H5LTparse' or 'H5LTparse,H5LTanalyze'\n";
print "\tfile\tFilename containing build output\n";
print "\t\tIf no file is given, standard input is used.\n";
exit;
}
sub main::HELP_MESSAGE {
do_help();
}
# declare the Perl command line flags/options we want to allow
my %options=();
getopts("FWhut:w:f:s:S:i:l", \%options);
# Display usage, if requested
if($options{h}) {
do_help();
}
# Parse list of file names to ignore
if(exists $options{i}) {
@ignorenames = split /,/, $options{i};
#print STDERR @ignorenames;
}
# Parse list of warning indices to expand file names
if(exists $options{w}) {
my @tmp_indices;
@tmp_indices = split /,/, $options{w};
#print STDERR @tmp_indices;
for my $x (@tmp_indices) {
#print STDERR "x = '$x'\n";
if($x =~ /\-/) {
my $start_index;
my $end_index;
#print STDERR "matched = '$x'\n";
($start_index, $end_index) = split /\-/, $x;
#print STDERR "start_index = '$start_index', end_index = '$end_index'\n";
for my $y ($start_index..$end_index) {
#print STDERR "y = '$y'\n";
if(!exists $warn_file_indices{$y}) {
$warn_file_indices{$y} = $y;
}
}
}
else {
if(!exists $warn_file_indices{$x}) {
$warn_file_indices{$x} = $x;
}
}
}
#foreach (sort keys %warn_file_indices) {
# print STDERR "$_ : $warn_file_indices{$_}\n";
#}
}
# Parse list of warning strings to expand file names
if(exists $options{s}) {
@warn_match_strings = split /,/, $options{s};
# print STDERR @warn_match_strings;
}
# Parse list of file indices to expand warnings
if(exists $options{f}) {
my @tmp_indices;
@tmp_indices = split /,/, $options{f};
#print STDERR @tmp_indices;
for my $x (@tmp_indices) {
#print STDERR "x = '$x'\n";
if($x =~ /\-/) {
my $start_index;
my $end_index;
#print STDERR "matched = '$x'\n";
($start_index, $end_index) = split /\-/, $x;
#print STDERR "start_index = '$start_index', end_index = '$end_index'\n";
for my $y ($start_index..$end_index) {
#print STDERR "y = '$y'\n";
if(!exists $file_warn_indices{$y}) {
$file_warn_indices{$y} = $y;
}
}
}
else {
if(!exists $file_warn_indices{$x}) {
$file_warn_indices{$x} = $x;
}
}
}
#foreach (sort keys %warn_file_indices) {
# print STDERR "$_ : $warn_file_indices{$_}\n";
#}
}
# Parse list of warning strings for files to expand warnings
if(exists $options{S}) {
@file_match_strings = split /,/, $options{S};
# print STDERR @file_match_strings;
}
# Check if warnings should stay unique and not be "genericized"
if($options{u}) {
$genericize = 0;
}
PARSE_LINES:
while (<>) {
my $name;
my $line;
my $prev_line;
my $toss;
my $offset;
my $warning;
my $extra;
my $extra2;
# Retain last FORTRAN compile line, which comes a few lines before warning
if($_ =~ /.*\.[fF]90:.*/) {
($last_fort_name, $last_fort_line, $last_fort_offset) = split /\:/, $_;
($last_fort_line, $toss) = split /\./, $last_fort_line;
}
# Retain last C/C++ compile line, which possibly comes a few lines before warning
if($_ =~ /.*[A-Za-z0-9_]\.[cC]:.*/) {
($last_c_name, $toss) = split /\:/, $_;
}
# Retain C/C++ compile line, which comes with the line of warning
if($_ =~ /.*[A-Za-z0-9_]\.[chC]\(.*[0-9]\):.*#.*/) {
$last_c_name = $_;
}
# Skip lines that don't have the word "warning"
next if $_ !~ /[Ww]arning/;
# Skip warnings from linker
next if $_ =~ /ld: warning:/;
# Skip warnings from build_py and install_lib
next if $_ =~ /warning: (build_py|install_lib)/;
# Skip variables with the word 'warning' in them
next if $_ =~ /_warning_/;
# Skip AMD Optimizing Compiler (aocc) lines "<#> warning(s) generated."
next if $_ =~ / warnings? generated\./;
# "Hide" the C++ '::' symbol until we've parsed out the parts of the line
while($_ =~ /\:\:/) {
$_ =~ s/\:\:/@@@@/g;
}
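# Worked example (hypothetical warning line): "... 'std::vector' ..."
# temporarily becomes "... 'std@@@@vector' ..." so the ':'-delimited
# splits below do not tear apart C++ scoped names; the '::' is restored
# into $warning after parsing.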
# Check for weird formatting of warning message
$line = "??";
$offset = "??";
if($_ =~ /^cc1: warning:.*/) {
$name = $last_c_name;
($toss, $toss, $warning, $extra, $extra2) = split /\:/, $_;
# Check for CMAKE build with warning on first line and no filename
} elsif($_ =~ /^\s*[Ww]arning:.*/) {
$name = $last_c_name;
($toss, $warning, $extra, $extra2) = split /\:/, $_;
# Check for file-scope gcc Fortran warning output
} elsif($_ =~ /f\d\d\d: Warning:/) {
# These are interspersed with the "compiling a file" output
# when compiling with `make -j` and thus difficult to tie to
# any particular file. They are due to things like inappropriate
# build options and don't have a line number.
#
# They start with f, as in f951
$name = "(generic)";
$line = int(rand(1000000)); # Hack to avoid counting as duplicates
($warning) = $_ =~ /\[(.*)\]/x;
# Check for FORTRAN warning output
} elsif($_ =~ /^Warning:.*/) {
$name = $last_fort_name;
$line = $last_fort_line;
$offset = $last_fort_offset;
($toss, $warning, $extra, $extra2) = split /\:/, $_;
# Check for improperly parsed filename or line
if($name =~ /^$/) {
print "Filename is a null string! Input line #$. is: '$_'";
next
}
if($line =~ /^$/) {
print "Line is a null string! Input line #$. is: '$_'";
next
}
# Check for non-GCC warning (Solaris/Oracle?)
} elsif($_ =~ /^\".*, line [0-9]+: *[Ww]arning:.*/) {
($name, $toss, $warning, $extra, $extra2) = split /\:/, $_;
($name, $line) = split /\,/, $name;
$name =~ s/^\"//g;
$name =~ s/\"$//g;
$line =~ s/^\s*line\s*//g;
# Check for Intel icc warning
} elsif($_ =~ /.*[A-Za-z0-9_]\.[chC]\(.*[0-9]\):.*#.*/) {
($last_c_name, $toss, $warning) = split /\:/, $last_c_name;
($name, $line) = split /\(/, $last_c_name;
$line =~ s/\)//g;
} else {
# Check for 'character offset' field appended to file & line #
# (This is probably specific to GCC)
if($_ =~ /^.*[0-9]+\:[0-9]+\:/) {
($name, $line, $offset, $toss, $warning, $extra, $extra2) = split /\:/, $_;
} else {
($name, $line, $toss, $warning, $extra, $extra2) = split /\:/, $_;
}
}
# Check for extra ':' followed by more text in original warning string,
# and append the ':' and text back onto the parsed warning
# (Use 'length $extra' idiom to avoid warning when $extra is undefined)
if(length $extra ) {
$warning = join ':', $warning, $extra;
}
if(length $extra2 ) {
$warning = join ':', $warning, $extra2;
}
# Restore the C++ '::' symbol now that we've parsed out the parts of the line
while($warning =~ /@@@@/) {
$warning =~ s/@@@@/\:\:/g;
}
# Trim leading '..' paths from filename
while($name =~ /^\.\.\//) {
$name =~ s/^\.\.\///g;
}
# Check for trimming prefix
if((exists $options{t}) && ($name =~ /$options{t}/)) {
$name =~ s/^$options{t}\///g;
}
# Check for ignored file
if(exists $options{i}) {
for my $x (@ignorenames) {
if($name =~ /$x/) {
$ignorecount++;
if(!(exists $ignored_files{$name})) {
$ignored_files{$name} = $name;
}
next PARSE_LINES;
}
}
}
# Check for improperly parsed warning (usually an undefined warning string)
if(!defined $warning) {
print "Warning Undefined! Input line is: '$_'";
next
}
# Get rid of leading & trailing whitespace
$warning =~ s/^\s//g;
$warning =~ s/\s$//g;
# Check for improperly parsed warning
if($warning =~ /^$/) {
print "Warning is a null string! Input line is: '$_'";
next
}
# Convert all quotes to '
$warning =~ s/‘/'/g;
$warning =~ s/’/'/g;
$warning =~ s/"/'/g;
#
# These skipped messages & "genericizations" may be specific to GCC
# Skip supplemental warning message
if($warning =~ /near initialization for/) {
$notecount++;
next
}
# Skip C++ supplemental warning message
if($warning =~ /in call to/) {
$notecount++;
next
}
# Skip GCC warning that should be a note
if($_ =~ /\(this will be reported only once per input file\)/) {
$notecount++;
next
}
if($genericize) {
# Eliminate C/C++ "{aka <some type>}" and "{aka '<some type>'}" info
if($warning =~ /\s(\{|\()aka '?[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#]+[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#\ ]*'?(\}|\))/) {
$warning =~ s/\s(\{|\()aka '?[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#]+[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#\ ]*'?(\}|\))//g;
}
# Genericize C/C++ '<some type>', printf format '%<some format>', and
# "unknown warning group" into '-'
if($warning =~ /'[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#\-\=]+[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#\-\=\ ]*'/) {
$warning =~ s/'[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#\-\=]+[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#\-\=\ ]*'/'-'/g;
}
if($warning =~ /'%[\#0\-\ \+]*[,;\:_]?[0-9\*]*\.?[0-9\*]*[hjltzL]*[aAcdeEfFgGinosuxX]'/) {
$warning =~ s/'%[\#0\-\ \+]*[,;\:_]?[0-9\*]*\.?[0-9\*]*[hjltzL]*[aAcdeEfFgGinosuxX]'/'-'/g;
}
# Genericize C/C++ "<macro>" warnings into "-"
if($warning =~ /"[A-Za-z_0-9]*"/) {
$warning =~ s/"[A-Za-z_0-9]*"/"-"/g;
}
# Genericize [GCC?] C/C++ warning text about suggested attribute
if($warning =~ /attribute=[A-Za-z_0-9]*\]/) {
$warning =~ s/=[A-Za-z_0-9]*\]/=-\]/g;
}
# Genericize FORTRAN "at (<n>)" into "at (-)", "REAL(<n>)" into "REAL(-)",
# and "INTEGER(<n>)" into "INTEGER(-)"
if($warning =~ /.*at\s\([0-9]+\).*/) {
$warning =~ s/at\s\([0-9]+\)/at \(-\)/g;
}
if($warning =~ /.*REAL\([0-9]+\).*/) {
$warning =~ s/REAL\([0-9]+\)/REAL\(-\)/g;
}
if($warning =~ /.*INTEGER\([0-9]+\).*/) {
$warning =~ s/INTEGER\([0-9]+\)/INTEGER\(-\)/g;
}
# Genericize standalone numbers in warnings
if($warning =~ /(\s|')-?[0-9]+(\s|')/) {
$warning =~ s/-?[0-9]+/-/g;
}
# Genericize unusual GCC/G++/GFORTRAN warnings that aren't handled above
if($warning =~ /\[deprecation\] [A-Za-z_0-9]*\([A-Za-z_,0-9]*\) in [A-Za-z_0-9]* has been deprecated.*/) {
$warning =~ s/[A-Za-z_0-9]*\([A-Za-z_,0-9]*\) in [A-Za-z_0-9]*/-\(-\) in -/g;
}
}
# <end possible GCC-specific code>
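# Worked example of the genericizing (hypothetical warning text):
#     before: comparison between 'unsigned int' {aka 'unsigned'} and 'int'
#     after:  comparison between '-' and '-'
# so the same diagnostic bins together no matter which types triggered it.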
# Check if we've already seen this warning on this line in this file
# (Can happen for warnings from inside header files)
if( !exists $warn_file_line_offset{$warning}{$name}{$line}{$offset} ) {
# Increment count for [generic] warning
$warn_count{$warning}++;
$warn_file{$warning}{$name}++;
$warn_file_line{$warning}{$name}{$line}++;
$warn_file_line_offset{$warning}{$name}{$line}{$offset}++;
# Increment count for filename
$file_count{$name}++;
$file_warn{$name}{$warning}++;
$file_warn_line{$name}{$warning}{$line}++;
# Increment total count of warnings
$totalcount++;
}
else {
# Increment count of duplicate warnings
$dupcount++;
}
# print STDERR "name = $name\n";
# print STDERR "line = $line\n";
# print STDERR "offset = $offset\n";
# print STDERR "warning = \"$warning\"\n";
}
print "Total unique [non-ignored] warnings: $totalcount\n";
print "Ignored notes / supplemental warning lines [not counted in unique warnings]: $notecount\n";
print "Duplicated warning lines [not counted in unique warnings]: $dupcount\n";
print "Total ignored warnings: $ignorecount\n";
$warncount = keys %warn_count;
print "Total unique kinds of warnings: $warncount\n";
$filecount = keys %file_count;
print "Total files with warnings: $filecount\n\n";
# Print warnings in decreasing frequency
print "# of Warnings by frequency (file count)\n";
print "=======================================\n";
for my $x (sort {$warn_count{$b} <=> $warn_count{$a}} keys(%warn_count)) {
printf ("[%2d] %4d (%2d) - %s\n", $current_warning++, $warn_count{$x}, scalar(keys %{$warn_file{$x}}), $x);
if((exists $options{W}) || (exists $options{w}) || (exists $options{s})) {
my $curr_index = $current_warning - 1;
my $match = 0;
# Check for string from list in current warning
if(exists $options{s}) {
for my $y (@warn_match_strings) {
# print STDERR "y = '$y'\n";
if($x =~ /$y/) {
# print STDERR "matched warning = '$x'\n";
$match = 1;
last;
}
}
}
# Check if current warning index matches
if((exists $warn_file_indices{$curr_index}) && $curr_index == $warn_file_indices{$curr_index}) {
$match = 1;
}
if($match) {
for my $y (sort {$warn_file{$x}{$b} <=> $warn_file{$x}{$a}} keys(%{$warn_file{$x}})) {
printf ("\t%4d - %s\n", $warn_file{$x}{$y}, $y);
if(exists $options{l}) {
my $lines = join ", ", sort {$a <=> $b} keys %{$warn_file_line{$x}{$y}};
printf("\t\tLines: $lines \n");
}
}
}
}
}
# Print warnings in decreasing frequency, by filename
print "\n# of Warnings by filename (warning type)\n";
print "========================================\n";
for my $x (sort {$file_count{$b} <=> $file_count{$a}} keys(%file_count)) {
printf ("[%3d] %4d (%2d) - %s\n", $current_file++, $file_count{$x}, scalar(keys %{$file_warn{$x}}), $x);
if((exists $options{F}) || (exists $options{f}) || (exists $options{S})) {
my $curr_index = $current_file - 1;
my $match = 0;
# Check for string from list in current file
if(exists $options{S}) {
for my $y (@file_match_strings) {
# print STDERR "y = '$y'\n";
if($x =~ /$y/) {
# print STDERR "matched warning = '$x'\n";
$match = 1;
last;
}
}
}
# Check if current file index matches
if((exists $file_warn_indices{$curr_index}) && $curr_index == $file_warn_indices{$curr_index}) {
$match = 1;
}
if($match) {
for my $y (sort {$file_warn{$x}{$b} <=> $file_warn{$x}{$a}} keys(%{$file_warn{$x}})) {
printf ("\t%4d - %s\n", $file_warn{$x}{$y}, $y);
if(exists $options{l}) {
my $lines = join ", ", sort {$a <=> $b} keys %{$file_warn_line{$x}{$y}};
printf("\t\tLines: $lines \n");
}
}
}
}
}
# Print names of files that were ignored
# Check for ignored file
if(exists $options{i}) {
print "\nIgnored filenames\n";
print "=================\n";
for my $x (sort keys(%ignored_files)) {
print "$x\n";
}
}

bin/yodconfigure Executable file

@@ -0,0 +1,76 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Fix a configure file so that it launches the configure test executables
# via the proper launching command, e.g., yod (hence the name
# yodconfigure).
#
# Author: Albert Cheng
if [ "$1" = -f ]; then
FORCEMODE=yes
echo turn FORCEMODE to $FORCEMODE
shift
fi
if [ $# -ne 1 ]; then
echo "Usage: $0 [-f] <configure file>"
echo " -f apply the change even if it has been applied already."
exit 1
fi
confile=$1
if [ ! -w $confile ]; then
echo "$0: $confile is not writable"
exit 1
fi
ACTRY_NAME="ACTRY()"
if grep ^"$ACTRY_NAME"$ $confile >/dev/null 2>&1 && [ "$FORCEMODE" != yes ]; then
echo "$0: $confile is already yodconfigure ready. Use -f to force yodconfigure again."
exit 1
fi
# Insert the ACTRY function after the 1st line, which is the #!/bin/sh.
# Change all "eval $ac_try" commands to call ACTRY.
# Autoconf has changed the ac_try syntax from 'eval $ac_try' to
# 'eval "$ac_try"', thus requiring two very similar global substitutions.
# The single quotes around EOF tell the shell NOT to expand or do
# substitution in the body of the ed input.
#
ed - $confile <<'EOF'
1a
# ===inserted by yodconfigure ====
# ACTRY will figure out when it is appropriate to run the command by the
# $RUNSERIAL launcher (e.g., yod -sz 1) and when to just run it as is.
# So far, ./a.out and ./conftest are names of real executables that should
# be run by $RUNSERIAL.
#
# (uncomment the echo line if you want to see what is going on.)
ACTRY()
{
#echo ACTRY: args are: $* > /dev/tty
if [ "$1" = ./a.out -o "$1" = ./conftest ]; then
# echo $RUNSERIAL $* > /dev/tty
$RUNSERIAL $*
else
$*
fi
}
# === end of ACTRY inserted by yodconfigure ====
.
g/eval $ac_try/s/eval/eval ACTRY/
g/eval "$ac_try"/s/eval/eval ACTRY/
w
q
EOF
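# For illustration, the two global substitutions above turn a configure
# line such as
#     eval "$ac_try"
# into
#     eval ACTRY "$ac_try"
# so that ACTRY can decide whether to prepend the $RUNSERIAL launcher.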


@@ -1,18 +1,44 @@
cmake_minimum_required (VERSION 3.18)
project (HDF5_CPP CXX)
cmake_minimum_required (VERSION 3.2.2)
PROJECT (HDF5_CPP)
add_subdirectory (src)
#-----------------------------------------------------------------------------
# Apply Definitions to compiler in this directory and below
#-----------------------------------------------------------------------------
add_definitions (${HDF_EXTRA_C_FLAGS})
#-----------------------------------------------------------------------------
# Generate configure file
#-----------------------------------------------------------------------------
configure_file (${HDF_RESOURCES_DIR}/H5cxx_config.h.in
${HDF5_BINARY_DIR}/H5cxx_pubconf.h
)
#-----------------------------------------------------------------------------
# Setup Include directories
#-----------------------------------------------------------------------------
INCLUDE_DIRECTORIES (${HDF5_CPP_SOURCE_DIR}/src)
INCLUDE_DIRECTORIES (${HDF5_BINARY_DIR})
#-----------------------------------------------------------------------------
# Parallel/MPI, prevent spurious cpp/cxx warnings
#-----------------------------------------------------------------------------
if (H5_HAVE_PARALLEL)
add_definitions ("-DMPICH_SKIP_MPICXX")
add_definitions ("-DMPICH_IGNORE_CXX_SEEK")
endif ()
add_subdirectory (${HDF5_CPP_SOURCE_DIR}/src ${HDF5_CPP_BINARY_DIR}/src)
#-----------------------------------------------------------------------------
# Build the CPP Examples
#-----------------------------------------------------------------------------
if (HDF5_BUILD_EXAMPLES)
add_subdirectory (examples)
add_subdirectory (${HDF5_CPP_SOURCE_DIR}/examples ${HDF5_CPP_BINARY_DIR}/examples)
endif ()
#-----------------------------------------------------------------------------
# Build the CPP unit tests
#-----------------------------------------------------------------------------
if (BUILD_TESTING)
add_subdirectory (test)
add_subdirectory (${HDF5_CPP_SOURCE_DIR}/test ${HDF5_CPP_BINARY_DIR}/test)
endif ()

c++/COPYING Normal file

@@ -0,0 +1,13 @@
Copyright by The HDF Group and
The Board of Trustees of the University of Illinois.
All rights reserved.
The files and subdirectories in this directory are part of HDF5.
The full HDF5 copyright notice, including terms governing use,
modification, and redistribution, is contained in the COPYING file
which can be found at the root of the source code distribution tree
or in https://support.hdfgroup.org/ftp/HDF5/releases. If you do
not have access to either file, you may request a copy from
help@hdfgroup.org.


@@ -1,11 +1,12 @@
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
##
@@ -13,20 +14,13 @@
## Run automake to generate a Makefile.in from this file.
#
# Top-level HDF5-C++ Makefile(.in)
#
#
include $(top_srcdir)/config/commence.am
if BUILD_TESTS_CONDITIONAL
TEST_DIR = test
else
TEST_DIR=
endif
## Only recurse into subdirectories if C++ interface is enabled.
if BUILD_CXX_CONDITIONAL
SUBDIRS=src $(TEST_DIR)
SUBDIRS=src test
endif
DIST_SUBDIRS = src test examples


@@ -1,5 +1,5 @@
cmake_minimum_required (VERSION 3.18)
project (HDF5_CPP_EXAMPLES CXX)
cmake_minimum_required (VERSION 3.2.2)
PROJECT (HDF5_CPP_EXAMPLES)
# --------------------------------------------------------------------
# Notes: When creating examples they should be prefixed
@@ -34,54 +34,20 @@ set (tutr_examples
foreach (example ${examples})
add_executable (cpp_ex_${example} ${HDF5_CPP_EXAMPLES_SOURCE_DIR}/${example}.cpp)
target_include_directories (cpp_ex_${example} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (cpp_ex_${example} STATIC)
target_link_libraries (cpp_ex_${example} PRIVATE ${HDF5_CPP_LIB_TARGET} ${HDF5_LIB_TARGET})
else ()
TARGET_C_PROPERTIES (cpp_ex_${example} SHARED)
target_link_libraries (cpp_ex_${example} PRIVATE ${HDF5_CPP_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
if (MINGW AND HDF5_MINGW_STATIC_GCC_LIBS)
target_link_options (${HDF5_CPP_LIBSH_TARGET}
PRIVATE -static-libgcc -static-libstdc++
)
endif ()
endif ()
TARGET_NAMING (cpp_ex_${example} STATIC)
TARGET_C_PROPERTIES (cpp_ex_${example} STATIC " " " ")
target_link_libraries (cpp_ex_${example} ${HDF5_CPP_LIB_TARGET} ${HDF5_LIB_TARGET})
set_target_properties (cpp_ex_${example} PROPERTIES FOLDER examples/cpp)
#-----------------------------------------------------------------------------
# Add Target to clang-format
#-----------------------------------------------------------------------------
if (HDF5_ENABLE_FORMATTERS)
clang_format (HDF5_CPP_EXAMPLES_${example}_FORMAT cpp_ex_${example})
endif ()
endforeach ()
foreach (example ${tutr_examples})
add_executable (cpp_ex_${example} ${HDF5_CPP_EXAMPLES_SOURCE_DIR}/${example}.cpp)
target_include_directories (cpp_ex_${example} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (cpp_ex_${example} STATIC)
target_link_libraries (cpp_ex_${example} PRIVATE ${HDF5_CPP_LIB_TARGET} ${HDF5_LIB_TARGET})
else ()
TARGET_C_PROPERTIES (cpp_ex_${example} SHARED)
target_link_libraries (cpp_ex_${example} PRIVATE ${HDF5_CPP_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
if (MINGW AND HDF5_MINGW_STATIC_GCC_LIBS)
target_link_options (${HDF5_CPP_LIBSH_TARGET}
PRIVATE -static-libgcc -static-libstdc++
)
endif ()
endif ()
TARGET_NAMING (cpp_ex_${example} STATIC)
TARGET_C_PROPERTIES (cpp_ex_${example} STATIC " " " ")
target_link_libraries (cpp_ex_${example} ${HDF5_CPP_LIB_TARGET} ${HDF5_LIB_TARGET})
set_target_properties (cpp_ex_${example} PROPERTIES FOLDER examples/cpp)
#-----------------------------------------------------------------------------
# Add Target to clang-format
#-----------------------------------------------------------------------------
if (HDF5_ENABLE_FORMATTERS)
clang_format (HDF5_CPP_EXAMPLES_${example}_FORMAT cpp_ex_${example})
endif ()
endforeach ()
if (BUILD_TESTING AND HDF5_TEST_CPP AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL)
if (BUILD_TESTING)
include (CMakeTests.cmake)
endif ()


@@ -5,7 +5,7 @@
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
@@ -16,107 +16,82 @@
##############################################################################
##############################################################################
# Remove any output file left over from previous test run
set (CPP_EX_CLEANFILES
Group.h5
SDS.h5
SDScompound.h5
SDSextendible.h5
Select.h5
)
add_test (
NAME CPP_ex-clear-objects
COMMAND ${CMAKE_COMMAND}
-E remove ${CPP_EX_CLEANFILES}
)
set_tests_properties (CPP_ex-clear-objects PROPERTIES
FIXTURES_SETUP clear_cppex
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
)
add_test (
NAME CPP_ex-clean-objects
COMMAND ${CMAKE_COMMAND}
-E remove ${CPP_EX_CLEANFILES}
)
set_tests_properties (CPP_ex-clean-objects PROPERTIES
FIXTURES_CLEANUP clear_cppex
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
)
add_test (
NAME CPP_ex-clear-objects
COMMAND ${CMAKE_COMMAND}
-E remove
Group.h5
SDS.h5
SDScompound.h5
SDSextendible.h5
Select.h5
)
if (NOT "${last_test}" STREQUAL "")
set_tests_properties (CPP_ex-clear-objects PROPERTIES DEPENDS ${last_test})
endif ()
set (last_test "CPP_ex-clear-objects")
foreach (example ${examples})
if (HDF5_ENABLE_USING_MEMCHECKER)
add_test (NAME CPP_ex_${example} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $<TARGET_FILE:cpp_ex_${example}>)
else ()
add_test (NAME CPP_ex_${example} COMMAND "${CMAKE_COMMAND}"
-D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}"
-D "TEST_PROGRAM=$<TARGET_FILE:cpp_ex_${example}>"
-D "TEST_ARGS:STRING="
-D "TEST_EXPECT=0"
-D "TEST_SKIP_COMPARE=TRUE"
-D "TEST_OUTPUT=cpp_ex_${example}.txt"
#-D "TEST_REFERENCE=cpp_ex_${example}.out"
-D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
-P "${HDF_RESOURCES_DIR}/runTest.cmake"
)
endif ()
set_tests_properties (CPP_ex_${example} PROPERTIES FIXTURES_REQUIRED clear_cppex)
if (last_test)
set_tests_properties (CPP_ex_${example} PROPERTIES DEPENDS ${last_test})
endif ()
set (last_test "CPP_ex_${example}")
endforeach ()
#the following dependencies are handled by the order of the files
foreach (example ${examples})
if (HDF5_ENABLE_USING_MEMCHECKER)
add_test (NAME CPP_ex_${example} COMMAND $<TARGET_FILE:cpp_ex_${example}>)
else ()
add_test (NAME CPP_ex_${example} COMMAND "${CMAKE_COMMAND}"
-D "TEST_PROGRAM=$<TARGET_FILE:cpp_ex_${example}>"
-D "TEST_ARGS:STRING="
-D "TEST_EXPECT=0"
-D "TEST_SKIP_COMPARE=TRUE"
-D "TEST_OUTPUT=cpp_ex_${example}.txt"
#-D "TEST_REFERENCE=cpp_ex_${example}.out"
-D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
-P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake"
)
endif ()
if (NOT "${last_test}" STREQUAL "")
set_tests_properties (CPP_ex_${example} PROPERTIES DEPENDS ${last_test})
endif ()
set (last_test "CPP_ex_${example}")
endforeach ()
#the following dependencies are handled by the order of the files
# SET_TESTS_PROPERTIES(CPP_ex_readdata PROPERTIES DEPENDS CPP_ex_create)
# SET_TESTS_PROPERTIES(CPP_ex_chunks PROPERTIES DEPENDS CPP_ex_extend_ds)
set (CPP_EX_TUTR_CLEANFILES
h5tutr_cmprss.h5
h5tutr_dset.h5
h5tutr_extend.h5
h5tutr_group.h5
h5tutr_groups.h5
h5tutr_subset.h5
)
add_test (
NAME CPP_ex_tutr-clear-objects
COMMAND ${CMAKE_COMMAND}
-E remove ${CPP_EX_TUTR_CLEANFILES}
)
set_tests_properties (CPP_ex_tutr-clear-objects PROPERTIES
FIXTURES_SETUP clear_cppex_tutr
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
)
add_test (
NAME CPP_ex_tutr-clean-objects
COMMAND ${CMAKE_COMMAND}
-E remove ${CPP_EX_TUTR_CLEANFILES}
)
set_tests_properties (CPP_ex_tutr-clean-objects PROPERTIES
FIXTURES_CLEANUP clear_cppex_tutr
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
)
foreach (example ${tutr_examples})
if (HDF5_ENABLE_USING_MEMCHECKER)
add_test (NAME CPP_ex_${example} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $<TARGET_FILE:cpp_ex_${example}>)
else ()
add_test (NAME CPP_ex_${example} COMMAND "${CMAKE_COMMAND}"
-D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}"
-D "TEST_PROGRAM=$<TARGET_FILE:cpp_ex_${example}>"
-D "TEST_ARGS:STRING="
-D "TEST_EXPECT=0"
-D "TEST_SKIP_COMPARE=TRUE"
-D "TEST_OUTPUT=tutr_cpp_ex_${example}.txt"
#-D "TEST_REFERENCE=cpp_ex_tutr_${example}.out"
-D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
-P "${HDF_RESOURCES_DIR}/runTest.cmake"
)
add_test (
NAME CPP_ex_tutr-clear-objects
COMMAND ${CMAKE_COMMAND}
-E remove
h5tutr_cmprss.h5
h5tutr_dset.h5
h5tutr_extend.h5
h5tutr_group.h5
h5tutr_groups.h5
h5tutr_subset.h5
)
if (NOT "${last_test}" STREQUAL "")
set_tests_properties (CPP_ex_tutr-clear-objects PROPERTIES DEPENDS ${last_test})
endif ()
set_tests_properties (CPP_ex_${example} PROPERTIES FIXTURES_REQUIRED clear_cppex_tutr)
if (last_test)
set_tests_properties (CPP_ex_${example} PROPERTIES DEPENDS ${last_test})
endif ()
set (last_test "CPP_ex_${example}")
endforeach ()
#the following dependencies are handled by the order of the files
set (last_test "CPP_ex_tutr-clear-objects")
foreach (example ${tutr_examples})
if (HDF5_ENABLE_USING_MEMCHECKER)
add_test (NAME CPP_ex_${example} COMMAND $<TARGET_FILE:cpp_ex_${example}>)
else ()
add_test (NAME CPP_ex_${example} COMMAND "${CMAKE_COMMAND}"
-D "TEST_PROGRAM=$<TARGET_FILE:cpp_ex_${example}>"
-D "TEST_ARGS:STRING="
-D "TEST_EXPECT=0"
-D "TEST_SKIP_COMPARE=TRUE"
-D "TEST_OUTPUT=cpp_ex_${example}.txt"
#-D "TEST_REFERENCE=cpp_ex_${example}.out"
-D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
-P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake"
)
endif ()
if (NOT "${last_test}" STREQUAL "")
set_tests_properties (CPP_ex_${example} PROPERTIES DEPENDS ${last_test})
endif ()
set (last_test "CPP_ex_${example}")
endforeach ()
#the following dependencies are handled by the order of the files
# SET_TESTS_PROPERTIES(CPP_ex_h5tutr_crtatt PROPERTIES DEPENDS CPP_ex_h5tutr_crtdat)
# SET_TESTS_PROPERTIES(CPP_ex_h5tutr_rdwt PROPERTIES DEPENDS CPP_ex_h5tutr_crtdat)
# SET_TESTS_PROPERTIES(CPP_ex_h5tutr_crtgrpd PROPERTIES DEPENDS CPP_ex_h5tutr_crtgrpar)


@@ -1,11 +1,12 @@
#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
##
@@ -48,8 +49,8 @@ CXX_API=yes
# Where to install examples
# Note: no '/' after DESTDIR. Explanation in commence.am
EXAMPLEDIR=${DESTDIR}$(examplesdir)/c++
EXAMPLETOPDIR=${DESTDIR}$(examplesdir)
EXAMPLEDIR=${DESTDIR}$(exec_prefix)/share/hdf5_examples/c++
EXAMPLETOPDIR=${DESTDIR}$(exec_prefix)/share/hdf5_examples
# How to build programs using h5c++
$(EXTRA_PROG): $(H5CPP)


@@ -1,11 +1,12 @@
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
* distribution tree, or in https://www.hdfgroup.org/licenses. *
* distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -15,7 +16,11 @@
* We will read from the file created by extend.cpp
*/
#ifdef OLD_HEADER_FILENAME
#include <iostream.h>
#else
#include <iostream>
#endif
using std::cout;
using std::endl;
@@ -23,195 +28,203 @@ using std::endl;
#include "H5Cpp.h"
using namespace H5;
const H5std_string FILE_NAME("SDSextendible.h5");
const H5std_string DATASET_NAME("ExtendibleArray");
const int NX = 10;
const int NY = 5;
const int RANK = 2;
const int RANKC = 1;
const H5std_string FILE_NAME( "SDSextendible.h5" );
const H5std_string DATASET_NAME( "ExtendibleArray" );
const int NX = 10;
const int NY = 5;
const int RANK = 2;
const int RANKC = 1;
int
main(void)
int main (void)
{
hsize_t i, j;
hsize_t i, j;
// Try block to detect exceptions raised by any of the calls inside it
try {
/*
* Turn off the auto-printing when failure occurs so that we can
* handle the errors appropriately
*/
Exception::dontPrint();
try
{
/*
* Turn off the auto-printing when failure occurs so that we can
* handle the errors appropriately
*/
Exception::dontPrint();
/*
* Open the file and the dataset.
*/
H5File file(FILE_NAME, H5F_ACC_RDONLY);
DataSet dataset = file.openDataSet(DATASET_NAME);
/*
* Open the file and the dataset.
*/
H5File file( FILE_NAME, H5F_ACC_RDONLY );
DataSet dataset = file.openDataSet( DATASET_NAME );
/*
* Get filespace for rank and dimension
*/
DataSpace filespace = dataset.getSpace();
/*
* Get filespace for rank and dimension
*/
DataSpace filespace = dataset.getSpace();
/*
* Get number of dimensions in the file dataspace
*/
int rank = filespace.getSimpleExtentNdims();
/*
* Get number of dimensions in the file dataspace
*/
int rank = filespace.getSimpleExtentNdims();
/*
* Get and print the dimension sizes of the file dataspace
*/
hsize_t dims[2]; // dataset dimensions
rank = filespace.getSimpleExtentDims(dims);
cout << "dataset rank = " << rank << ", dimensions " << (unsigned long)(dims[0]) << " x "
<< (unsigned long)(dims[1]) << endl;
/*
* Get and print the dimension sizes of the file dataspace
*/
hsize_t dims[2]; // dataset dimensions
rank = filespace.getSimpleExtentDims( dims );
cout << "dataset rank = " << rank << ", dimensions "
<< (unsigned long)(dims[0]) << " x "
<< (unsigned long)(dims[1]) << endl;
/*
* Define the memory space to read dataset.
*/
DataSpace mspace1(RANK, dims);
/*
* Define the memory space to read dataset.
*/
DataSpace mspace1(RANK, dims);
/*
* Read dataset back and display.
*/
int data_out[NX][NY]; // buffer for dataset to be read
dataset.read(data_out, PredType::NATIVE_INT, mspace1, filespace);
/*
* Read dataset back and display.
*/
int data_out[NX][NY]; // buffer for dataset to be read
dataset.read( data_out, PredType::NATIVE_INT, mspace1, filespace );
cout << "\n";
cout << "Dataset: \n";
for (j = 0; j < dims[0]; j++) {
for (i = 0; i < dims[1]; i++)
cout << data_out[j][i] << " ";
cout << endl;
}
cout << "\n";
cout << "Dataset: \n";
for (j = 0; j < dims[0]; j++)
{
for (i = 0; i < dims[1]; i++)
cout << data_out[j][i] << " ";
cout << endl;
}
/*
* dataset rank 2, dimensions 10 x 5
* chunk rank 2, dimensions 2 x 5
/*
* dataset rank 2, dimensions 10 x 5
* chunk rank 2, dimensions 2 x 5
* Dataset:
* 1 1 1 3 3
* 1 1 1 3 3
* 1 1 1 0 0
* 2 0 0 0 0
* 2 0 0 0 0
* 2 0 0 0 0
* 2 0 0 0 0
* 2 0 0 0 0
* 2 0 0 0 0
* 2 0 0 0 0
*/
* Dataset:
* 1 1 1 3 3
* 1 1 1 3 3
* 1 1 1 0 0
* 2 0 0 0 0
* 2 0 0 0 0
* 2 0 0 0 0
* 2 0 0 0 0
* 2 0 0 0 0
* 2 0 0 0 0
* 2 0 0 0 0
*/
/*
* Read the third column from the dataset.
* First define memory dataspace, then define hyperslab
* and read it into column array.
*/
hsize_t col_dims[1];
col_dims[0] = 10;
DataSpace mspace2(RANKC, col_dims);
/*
* Read the third column from the dataset.
* First define memory dataspace, then define hyperslab
* and read it into column array.
*/
hsize_t col_dims[1];
col_dims[0] = 10;
DataSpace mspace2( RANKC, col_dims );
/*
* Define the column (hyperslab) to read.
*/
hsize_t offset[2] = {0, 2};
hsize_t count[2] = {10, 1};
int column[10]; // buffer for column to be read
/*
* Define the column (hyperslab) to read.
*/
hsize_t offset[2] = { 0, 2 };
hsize_t count[2] = { 10, 1 };
int column[10]; // buffer for column to be read
/*
* Define hyperslab and read.
*/
filespace.selectHyperslab(H5S_SELECT_SET, count, offset);
dataset.read(column, PredType::NATIVE_INT, mspace2, filespace);
/*
* Define hyperslab and read.
*/
filespace.selectHyperslab( H5S_SELECT_SET, count, offset );
dataset.read( column, PredType::NATIVE_INT, mspace2, filespace );
cout << endl;
cout << "Third column: " << endl;
for (i = 0; i < 10; i++)
cout << column[i] << endl;
cout << endl;
cout << "Third column: " << endl;
for (i = 0; i < 10; i++)
cout << column[i] << endl;
/*
* Third column:
* 1
* 1
* 1
* 0
* 0
* 0
* 0
* 0
* 0
* 0
*/
/*
* Third column:
* 1
* 1
* 1
* 0
* 0
* 0
* 0
* 0
* 0
* 0
*/
/*
* Get creation properties list.
*/
DSetCreatPropList cparms = dataset.getCreatePlist();
/*
* Get creation properties list.
*/
DSetCreatPropList cparms = dataset.getCreatePlist();
/*
* Check if dataset is chunked.
*/
hsize_t chunk_dims[2];
int rank_chunk;
if (H5D_CHUNKED == cparms.getLayout()) {
/*
* Get chunking information: rank and dimensions
*/
rank_chunk = cparms.getChunk(2, chunk_dims);
cout << "chunk rank " << rank_chunk << "dimensions " << (unsigned long)(chunk_dims[0]) << " x "
<< (unsigned long)(chunk_dims[1]) << endl;
/*
* Check if dataset is chunked.
*/
hsize_t chunk_dims[2];
int rank_chunk;
if( H5D_CHUNKED == cparms.getLayout() )
{
/*
* Get chunking information: rank and dimensions
*/
rank_chunk = cparms.getChunk( 2, chunk_dims);
cout << "chunk rank " << rank_chunk << "dimensions "
<< (unsigned long)(chunk_dims[0]) << " x "
<< (unsigned long)(chunk_dims[1]) << endl;
/*
* Define the memory space to read a chunk.
*/
DataSpace mspace3(rank_chunk, chunk_dims);
/*
* Define the memory space to read a chunk.
*/
DataSpace mspace3( rank_chunk, chunk_dims );
/*
* Define chunk in the file (hyperslab) to read.
*/
offset[0] = 2;
offset[1] = 0;
count[0] = chunk_dims[0];
count[1] = chunk_dims[1];
filespace.selectHyperslab(H5S_SELECT_SET, count, offset);
/*
* Define chunk in the file (hyperslab) to read.
*/
offset[0] = 2;
offset[1] = 0;
count[0] = chunk_dims[0];
count[1] = chunk_dims[1];
filespace.selectHyperslab( H5S_SELECT_SET, count, offset );
/*
* Read chunk back and display.
*/
int chunk_out[2][5]; // buffer for chunk to be read
dataset.read(chunk_out, PredType::NATIVE_INT, mspace3, filespace);
cout << endl;
cout << "Chunk:" << endl;
for (j = 0; j < chunk_dims[0]; j++) {
for (i = 0; i < chunk_dims[1]; i++)
cout << chunk_out[j][i] << " ";
cout << endl;
}
/*
* Chunk:
* 1 1 1 0 0
* 2 0 0 0 0
*/
}
} // end of try block
/*
* Read chunk back and display.
*/
int chunk_out[2][5]; // buffer for chunk to be read
dataset.read( chunk_out, PredType::NATIVE_INT, mspace3, filespace );
cout << endl;
cout << "Chunk:" << endl;
for (j = 0; j < chunk_dims[0]; j++)
{
for (i = 0; i < chunk_dims[1]; i++)
cout << chunk_out[j][i] << " ";
cout << endl;
}
/*
* Chunk:
* 1 1 1 0 0
* 2 0 0 0 0
*/
}
} // end of try block
// catch failure caused by the H5File operations
catch (FileIException error) {
error.printErrorStack();
return -1;
catch( FileIException error )
{
error.printErrorStack();
return -1;
}
// catch failure caused by the DataSet operations
catch (DataSetIException error) {
error.printErrorStack();
return -1;
catch( DataSetIException error )
{
error.printErrorStack();
return -1;
}
// catch failure caused by the DataSpace operations
catch (DataSpaceIException error) {
error.printErrorStack();
return -1;
catch( DataSpaceIException error )
{
error.printErrorStack();
return -1;
}
return 0;
}


@@ -1,11 +1,12 @@
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Copyright by The HDF Group. *
* Copyright by the Board of Trustees of the University of Illinois. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
* distribution tree, or in https://www.hdfgroup.org/licenses. *
* distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -16,7 +17,11 @@
* and read back fields' subsets.
*/
#ifdef OLD_HEADER_FILENAME
#include <iostream.h>
#else
#include <iostream>
#endif
using std::cout;
using std::endl;
@@ -24,170 +29,175 @@ using std::endl;
#include "H5Cpp.h"
using namespace H5;
const H5std_string FILE_NAME("SDScompound.h5");
const H5std_string DATASET_NAME("ArrayOfStructures");
const H5std_string MEMBER1("a_name");
const H5std_string MEMBER2("b_name");
const H5std_string MEMBER3("c_name");
const int LENGTH = 10;
const int RANK = 1;
const H5std_string FILE_NAME( "SDScompound.h5" );
const H5std_string DATASET_NAME( "ArrayOfStructures" );
const H5std_string MEMBER1( "a_name" );
const H5std_string MEMBER2( "b_name" );
const H5std_string MEMBER3( "c_name" );
const int LENGTH = 10;
const int RANK = 1;
int
main(void)
{
    /* First structure and dataset */
    typedef struct s1_t {
        int    a;
        float  b;
        double c;
    } s1_t;

    /* Second structure (subset of s1_t) and dataset */
    typedef struct s2_t {
        double c;
        int    a;
    } s2_t;

    // Try block to detect exceptions raised by any of the calls inside it
    try {
        /*
         * Initialize the data
         */
        int  i;
        s1_t s1[LENGTH];
        for (i = 0; i < LENGTH; i++) {
            s1[i].a = i;
            s1[i].b = i * i;
            s1[i].c = 1. / (i + 1);
        }

        /*
         * Turn off the auto-printing when failure occurs so that we can
         * handle the errors appropriately
         */
        Exception::dontPrint();

        /*
         * Create the data space.
         */
        hsize_t dim[] = {LENGTH}; /* Dataspace dimensions */
        DataSpace space(RANK, dim);

        /*
         * Create the file.
         */
        H5File *file = new H5File(FILE_NAME, H5F_ACC_TRUNC);

        /*
         * Create the memory datatype.
         */
        CompType mtype1(sizeof(s1_t));
        mtype1.insertMember(MEMBER1, HOFFSET(s1_t, a), PredType::NATIVE_INT);
        mtype1.insertMember(MEMBER3, HOFFSET(s1_t, c), PredType::NATIVE_DOUBLE);
        mtype1.insertMember(MEMBER2, HOFFSET(s1_t, b), PredType::NATIVE_FLOAT);

        /*
         * Create the dataset.
         */
        DataSet *dataset;
        dataset = new DataSet(file->createDataSet(DATASET_NAME, mtype1, space));

        /*
         * Write data to the dataset.
         */
        dataset->write(s1, mtype1);

        /*
         * Release resources
         */
        delete dataset;
        delete file;

        /*
         * Open the file and the dataset.
         */
        file    = new H5File(FILE_NAME, H5F_ACC_RDONLY);
        dataset = new DataSet(file->openDataSet(DATASET_NAME));

        /*
         * Create a datatype for s2
         */
        CompType mtype2(sizeof(s2_t));
        mtype2.insertMember(MEMBER3, HOFFSET(s2_t, c), PredType::NATIVE_DOUBLE);
        mtype2.insertMember(MEMBER1, HOFFSET(s2_t, a), PredType::NATIVE_INT);

        /*
         * Read two fields c and a from s1 dataset. Fields in the file
         * are found by their names "c_name" and "a_name".
         */
        s2_t s2[LENGTH];
        dataset->read(s2, mtype2);

        /*
         * Display the fields
         */
        cout << endl << "Field c : " << endl;
        for (i = 0; i < LENGTH; i++)
            cout << s2[i].c << " ";
        cout << endl;

        cout << endl << "Field a : " << endl;
        for (i = 0; i < LENGTH; i++)
            cout << s2[i].a << " ";
        cout << endl;

        /*
         * Create a datatype for s3.
         */
        CompType mtype3(sizeof(float));
        mtype3.insertMember(MEMBER2, 0, PredType::NATIVE_FLOAT);

        /*
         * Read field b from s1 dataset. Field in the file is found by its name.
         */
        float s3[LENGTH]; // Third "structure" - used to read float field of s1
        dataset->read(s3, mtype3);

        /*
         * Display the field
         */
        cout << endl << "Field b : " << endl;
        for (i = 0; i < LENGTH; i++)
            cout << s3[i] << " ";
        cout << endl;

        /*
         * Release resources
         */
        delete dataset;
        delete file;
    } // end of try block

    // catch failure caused by the H5File operations
    catch (FileIException error) {
        error.printErrorStack();
        return -1;
    }

    // catch failure caused by the DataSet operations
    catch (DataSetIException error) {
        error.printErrorStack();
        return -1;
    }

    // catch failure caused by the DataSpace operations
    catch (DataSpaceIException error) {
        error.printErrorStack();
        return -1;
    }

    // catch failure caused by the DataType operations
    catch (DataTypeIException error) {
        error.printErrorStack();
        return -1;
    }

    return 0;
}
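
Because HDF5 matches compound members by name rather than by offset, a reader can project out any single field with a one-member memory type, exactly as mtype3 does above for the float member. A minimal standalone sketch of the same idea for the integer member; the file name SDScompound.h5 is an assumption, since the FILE_NAME constant is defined above this hunk:

#include <iostream>
#include "H5Cpp.h"
using namespace H5;

int
read_a_only(void)
{
    // Open the file and dataset written by the example above.
    H5File  file("SDScompound.h5", H5F_ACC_RDONLY); // assumed FILE_NAME
    DataSet dataset = file.openDataSet("ArrayOfStructures");

    // One-member compound type: HDF5 extracts just the "a_name" field on read.
    CompType atype(sizeof(int));
    atype.insertMember("a_name", 0, PredType::NATIVE_INT);

    int a_vals[10]; // LENGTH in the example above
    dataset.read(a_vals, atype);

    for (int i = 0; i < 10; i++)
        std::cout << a_vals[i] << " ";
    std::cout << std::endl;
    return 0;
}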

View File

@@ -1,11 +1,12 @@
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Copyright by The HDF Group. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
* distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -14,105 +15,115 @@
* This example writes a dataset to a new HDF5 file.
*/
#ifdef OLD_HEADER_FILENAME
#include <iostream.h>
#else
#include <iostream>
#endif
#include <string>
#include "H5Cpp.h"
using namespace H5;
const H5std_string FILE_NAME("SDS.h5");
const H5std_string DATASET_NAME("IntArray");
const int NX = 5; // dataset dimensions
const int NY = 6;
const int RANK = 2;
const H5std_string FILE_NAME( "SDS.h5" );
const H5std_string DATASET_NAME( "IntArray" );
const int NX = 5; // dataset dimensions
const int NY = 6;
const int RANK = 2;
int
main(void)
{
    /*
     * Data initialization.
     */
    int i, j;
    int data[NX][NY]; // buffer for data to write
    for (j = 0; j < NX; j++) {
        for (i = 0; i < NY; i++)
            data[j][i] = i + j;
    }
    /*
     * 0 1 2 3 4 5
     * 1 2 3 4 5 6
     * 2 3 4 5 6 7
     * 3 4 5 6 7 8
     * 4 5 6 7 8 9
     */

    // Try block to detect exceptions raised by any of the calls inside it
    try {
        /*
         * Turn off the auto-printing when failure occurs so that we can
         * handle the errors appropriately
         */
        Exception::dontPrint();

        /*
         * Create a new file using H5F_ACC_TRUNC access,
         * default file creation properties, and default file
         * access properties.
         */
        H5File file(FILE_NAME, H5F_ACC_TRUNC);

        /*
         * Define the size of the array and create the data space for fixed
         * size dataset.
         */
        hsize_t dimsf[2]; // dataset dimensions
        dimsf[0] = NX;
        dimsf[1] = NY;
        DataSpace dataspace(RANK, dimsf);

        /*
         * Define datatype for the data in the file.
         * We will store little endian INT numbers.
         */
        IntType datatype(PredType::NATIVE_INT);
        datatype.setOrder(H5T_ORDER_LE);

        /*
         * Create a new dataset within the file using defined dataspace and
         * datatype and default dataset creation properties.
         */
        DataSet dataset = file.createDataSet(DATASET_NAME, datatype, dataspace);

        /*
         * Write the data to the dataset using default memory space, file
         * space, and transfer properties.
         */
        dataset.write(data, PredType::NATIVE_INT);
    } // end of try block

    // catch failure caused by the H5File operations
    catch (FileIException error) {
        error.printErrorStack();
        return -1;
    }

    // catch failure caused by the DataSet operations
    catch (DataSetIException error) {
        error.printErrorStack();
        return -1;
    }

    // catch failure caused by the DataSpace operations
    catch (DataSpaceIException error) {
        error.printErrorStack();
        return -1;
    }

    // catch failure caused by the DataType operations
    catch (DataTypeIException error) {
        error.printErrorStack();
        return -1;
    }

    return 0; // successfully terminated
}
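
Reading the dataset back is symmetric: open the file read-only and let the library convert the stored little-endian integers to the native type. A minimal sketch reusing the constants above (it additionally assumes <iostream> for std::cout):

int
read_sds(void)
{
    H5File  file(FILE_NAME, H5F_ACC_RDONLY);
    DataSet dataset = file.openDataSet(DATASET_NAME);

    int data_out[NX][NY];
    dataset.read(data_out, PredType::NATIVE_INT); // converts the H5T_ORDER_LE file data to native int

    for (int j = 0; j < NX; j++) {
        for (int i = 0; i < NY; i++)
            std::cout << data_out[j][i] << " ";
        std::cout << std::endl;
    }
    return 0;
}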

View File

@@ -1,11 +1,12 @@
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Copyright by The HDF Group. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
* distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -17,7 +18,11 @@
*
*/
#ifdef OLD_HEADER_FILENAME
#include <iostream.h>
#else
#include <iostream>
#endif
#include <string>
using std::cout;
@@ -27,195 +32,201 @@ using std::endl;
#include "H5Cpp.h"
using namespace H5;
const H5std_string FILE_NAME("SDSextendible.h5");
const H5std_string DATASET_NAME("ExtendibleArray");
const int NX = 10;
const int NY = 5;
const int RANK = 2;
const H5std_string FILE_NAME( "SDSextendible.h5" );
const H5std_string DATASET_NAME( "ExtendibleArray" );
const int NX = 10;
const int NY = 5;
const int RANK = 2;
int
main(void)
{
    /*
     * Try block to detect exceptions raised by any of the calls inside it
     */
    try {
        /*
         * Turn off the auto-printing when failure occurs so that we can
         * handle the errors appropriately
         */
        Exception::dontPrint();

        /*
         * Create the data space with unlimited dimensions.
         */
        hsize_t dims[2]    = {3, 3}; // dataset dimensions at creation
        hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
        DataSpace mspace1(RANK, dims, maxdims);

        /*
         * Create a new file. If the file exists, its contents will be overwritten.
         */
        H5File file(FILE_NAME, H5F_ACC_TRUNC);

        /*
         * Modify dataset creation properties, i.e. enable chunking.
         */
        DSetCreatPropList cparms;

        hsize_t chunk_dims[2] = {2, 5};
        cparms.setChunk(RANK, chunk_dims);

        /*
         * Set fill value for the dataset
         */
        int fill_val = 0;
        cparms.setFillValue(PredType::NATIVE_INT, &fill_val);

        /*
         * Create a new dataset within the file using cparms
         * creation properties.
         */
        DataSet dataset = file.createDataSet(DATASET_NAME, PredType::NATIVE_INT, mspace1, cparms);

        /*
         * Extend the dataset. This call assures that the dataset is at least 3 x 3.
         */
        hsize_t size[2];
        size[0] = 3;
        size[1] = 3;
        dataset.extend(size);

        /*
         * Select a hyperslab.
         */
        DataSpace fspace1 = dataset.getSpace();
        hsize_t   offset[2];
        offset[0] = 0;
        offset[1] = 0;
        hsize_t dims1[2] = {3, 3}; /* data1 dimensions */
        fspace1.selectHyperslab(H5S_SELECT_SET, dims1, offset);

        /*
         * Write the data to the hyperslab.
         */
        int data1[3][3] = {{1, 1, 1}, /* data to write */
                           {1, 1, 1},
                           {1, 1, 1}};
        dataset.write(data1, PredType::NATIVE_INT, mspace1, fspace1);

        /*
         * Extend the dataset. Dataset becomes 10 x 3.
         */
        hsize_t dims2[2] = {7, 1}; /* data2 dimensions */
        dims[0] = dims1[0] + dims2[0];
        size[0] = dims[0];
        size[1] = dims[1];
        dataset.extend(size);

        /*
         * Select a hyperslab.
         */
        DataSpace fspace2 = dataset.getSpace();
        offset[0] = 3;
        offset[1] = 0;
        fspace2.selectHyperslab(H5S_SELECT_SET, dims2, offset);

        /*
         * Define memory space
         */
        DataSpace mspace2(RANK, dims2);

        /*
         * Write the data to the hyperslab.
         */
        int data2[7] = {2, 2, 2, 2, 2, 2, 2};
        dataset.write(data2, PredType::NATIVE_INT, mspace2, fspace2);

        /*
         * Extend the dataset. Dataset becomes 10 x 5.
         */
        hsize_t dims3[2] = {2, 2}; /* data3 dimensions */
        dims[1] = dims1[1] + dims3[1];
        size[0] = dims[0];
        size[1] = dims[1];
        dataset.extend(size);

        /*
         * Select a hyperslab
         */
        DataSpace fspace3 = dataset.getSpace();
        offset[0] = 0;
        offset[1] = 3;
        fspace3.selectHyperslab(H5S_SELECT_SET, dims3, offset);

        /*
         * Define memory space.
         */
        DataSpace mspace3(RANK, dims3);

        /*
         * Write the data to the hyperslab.
         */
        int data3[2][2] = {{3, 3}, {3, 3}};
        dataset.write(data3, PredType::NATIVE_INT, mspace3, fspace3);

        /*
         * Read the data from this dataset and display it.
         */
        int i, j;
        int data_out[NX][NY];
        for (i = 0; i < NX; i++) {
            for (j = 0; j < NY; j++)
                data_out[i][j] = 0;
        }
        dataset.read(data_out, PredType::NATIVE_INT);

        /*
         * Resulting dataset
         *
         * 1 1 1 3 3
         * 1 1 1 3 3
         * 1 1 1 0 0
         * 2 0 0 0 0
         * 2 0 0 0 0
         * 2 0 0 0 0
         * 2 0 0 0 0
         * 2 0 0 0 0
         * 2 0 0 0 0
         * 2 0 0 0 0
         */
        /*
         * Display the result.
         */
        for (i = 0; i < NX; i++) {
            for (j = 0; j < NY; j++)
                cout << data_out[i][j] << " ";
            cout << endl;
        }
    } // end of try block

    // catch failure caused by the H5File operations
    catch (FileIException error) {
        error.printErrorStack();
        return -1;
    }

    // catch failure caused by the DataSet operations
    catch (DataSetIException error) {
        error.printErrorStack();
        return -1;
    }

    // catch failure caused by the DataSpace operations
    catch (DataSpaceIException error) {
        error.printErrorStack();
        return -1;
    }

    // catch failure caused by the DataType operations
    catch (DataTypeIException error) {
        error.printErrorStack();
        return -1;
    }

    return 0;
}
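
A later reader of an extendible dataset usually cannot hard-code the extent, since it grows over time; DataSpace::getSimpleExtentDims reports both the current and the maximum dimensions. A minimal sketch reusing the constants above (std::cout assumes <iostream>):

int
print_extent(void)
{
    H5File  file(FILE_NAME, H5F_ACC_RDONLY);
    DataSet dataset = file.openDataSet(DATASET_NAME);

    DataSpace fspace = dataset.getSpace();
    hsize_t   cur[2], max[2];
    fspace.getSimpleExtentDims(cur, max); // fills current and maximum extents

    std::cout << "current extent: " << cur[0] << " x " << cur[1] << std::endl;
    if (max[0] == H5S_UNLIMITED)
        std::cout << "rows are unlimited" << std::endl;
    return 0;
}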

View File

@@ -1,11 +1,12 @@
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Copyright by The HDF Group. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
* distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -19,7 +20,11 @@
* the C version is used in this example.
*/
#ifdef OLD_HEADER_FILENAME
#include <iostream.h>
#else
#include <iostream>
#endif
using std::cout;
using std::endl;
@@ -27,176 +32,185 @@ using std::endl;
#include "H5Cpp.h"
using namespace H5;
const H5std_string FILE_NAME("Group.h5");
const int RANK = 2;
const H5std_string FILE_NAME( "Group.h5" );
const int RANK = 2;
// Operator function
extern "C" herr_t file_info(hid_t loc_id, const char *name, const H5L_info2_t *linfo, void *opdata);
extern "C" herr_t file_info(hid_t loc_id, const char *name, const H5L_info_t *linfo,
void *opdata);
int
main(void)
int main(void)
{
    hsize_t dims[2];
    hsize_t cdims[2];

    // Try block to detect exceptions raised by any of the calls inside it
    try {
        /*
         * Turn off the auto-printing when failure occurs so that we can
         * handle the errors appropriately
         */
        Exception::dontPrint();

        /*
         * Create the named file, truncating the existing one if any,
         * using default create and access property lists.
         */
        H5File *file = new H5File(FILE_NAME, H5F_ACC_TRUNC);

        /*
         * Create a group in the file
         */
        Group *group = new Group(file->createGroup("/Data"));

        /*
         * Create dataset "Compressed Data" in the group using absolute
         * name. Dataset creation property list is modified to use
         * GZIP compression with the compression effort set to 6.
         * Note that compression can be used only when dataset is chunked.
         */
        dims[0]  = 1000;
        dims[1]  = 20;
        cdims[0] = 20;
        cdims[1] = 20;
        DataSpace *dataspace = new DataSpace(RANK, dims); // create new dspace
        DSetCreatPropList ds_creatplist;                  // create dataset creation prop list
        ds_creatplist.setChunk(2, cdims);                 // then modify it for compression
        ds_creatplist.setDeflate(6);

        /*
         * Create the first dataset.
         */
        DataSet *dataset = new DataSet(
            file->createDataSet("/Data/Compressed_Data", PredType::NATIVE_INT, *dataspace, ds_creatplist));

        /*
         * Close the first dataset.
         */
        delete dataset;
        delete dataspace;

        /*
         * Create the second dataset.
         */
        dims[0]   = 500;
        dims[1]   = 20;
        dataspace = new DataSpace(RANK, dims); // create second dspace
        dataset   = new DataSet(file->createDataSet("/Data/Float_Data", PredType::NATIVE_FLOAT, *dataspace));

        delete dataset;
        delete dataspace;
        delete group;
        delete file;

        /*
         * Now reopen the file and group in the file.
         */
        file  = new H5File(FILE_NAME, H5F_ACC_RDWR);
        group = new Group(file->openGroup("Data"));

        /*
         * Access "Compressed_Data" dataset in the group.
         */
        try { // to determine if the dataset exists in the group
            dataset = new DataSet(group->openDataSet("Compressed_Data"));
        }
        catch (GroupIException not_found_error) {
            cout << " Dataset is not found." << endl;
        }
        cout << "dataset \"/Data/Compressed_Data\" is open" << endl;

        /*
         * Close the dataset.
         */
        delete dataset;

        /*
         * Create hard link to the Data group.
         */
        file->link(H5L_TYPE_HARD, "Data", "Data_new");

        /*
         * We can access "Compressed_Data" dataset using created
         * hard link "Data_new".
         */
        try { // to determine if the dataset exists in the file
            dataset = new DataSet(file->openDataSet("/Data_new/Compressed_Data"));
        }
        catch (FileIException not_found_error) {
            cout << " Dataset is not found." << endl;
        }
        cout << "dataset \"/Data_new/Compressed_Data\" is open" << endl;

        /*
         * Close the dataset.
         */
        delete dataset;

        /*
         * Use iterator to see the names of the objects in the file
         * root directory.
         */
        cout << endl << "Iterating over elements in the file" << endl;
        herr_t idx = H5Literate2(file->getId(), H5_INDEX_NAME, H5_ITER_INC, NULL, file_info, NULL);
        cout << endl;

        /*
         * Unlink name "Data" and use iterator to see the names
         * of the objects in the file root directory.
         */
        cout << "Unlinking..." << endl;
        try { // attempt to unlink the name
            file->unlink("Data");
        }
        catch (FileIException unlink_error) {
            cout << " unlink failed." << endl;
        }
        cout << "\"Data\" is unlinked" << endl;

        cout << endl << "Iterating over elements in the file again" << endl;
        idx = H5Literate2(file->getId(), H5_INDEX_NAME, H5_ITER_INC, NULL, file_info, NULL);
        cout << endl;

        /*
         * Close the group and file.
         */
        delete group;
        delete file;
    } // end of try block

    // catch failure caused by the H5File operations
    catch (FileIException error) {
        error.printErrorStack();
        return -1;
    }

    // catch failure caused by the DataSet operations
    catch (DataSetIException error) {
        error.printErrorStack();
        return -1;
    }

    // catch failure caused by the DataSpace operations
    catch (DataSpaceIException error) {
        error.printErrorStack();
        return -1;
    }

    // catch failure caused by the Attribute operations
    catch (AttributeIException error) {
        error.printErrorStack();
        return -1;
    }
return 0;
}
@@ -205,7 +219,7 @@ main(void)
* Operator function.
*/
herr_t
file_info(hid_t loc_id, const char *name, const H5L_info2_t *linfo, void *opdata)
{
hid_t group;
@@ -222,3 +236,4 @@ file_info(hid_t loc_id, const char *name, const H5L_info2_t *linfo, void *opdata
H5Gclose(group);
return 0;
}
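
The body of file_info is elided by this hunk; the visible fragment only shows that it opens and closes a group. For orientation, a minimal sketch of an H5Literate2-style callback that just prints each link name — an illustration, not the original body:

extern "C" herr_t
print_name(hid_t loc_id, const char *name, const H5L_info2_t *linfo, void *opdata)
{
    (void)loc_id; // unused in this sketch
    (void)linfo;
    (void)opdata;
    cout << "Name : " << name << endl;
    return 0; // zero tells H5Literate2 to continue iterating
}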

View File

@@ -1,11 +1,12 @@
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Copyright by The HDF Group. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
* distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -23,128 +24,132 @@ using std::endl;
#include "H5Cpp.h"
using namespace H5;
const H5std_string FILE_NAME("h5tutr_cmprss.h5");
const H5std_string DATASET_NAME("Compressed_Data");
const int DIM0 = 100;
const int DIM1 = 20;
const H5std_string FILE_NAME("h5tutr_cmprss.h5");
const H5std_string DATASET_NAME("Compressed_Data");
const int DIM0 = 100;
const int DIM1 = 20;
int
main(void)
int main (void)
{
hsize_t dims[2] = {DIM0, DIM1}; // dataset dimensions
hsize_t chunk_dims[2] = {20, 20}; // chunk dimensions
int i, j, buf[DIM0][DIM1];
hsize_t dims[2] = { DIM0, DIM1 }; // dataset dimensions
hsize_t chunk_dims[2] = { 20, 20 }; // chunk dimensions
int i,j, buf[DIM0][DIM1];
// Try block to detect exceptions raised by any of the calls inside it
try {
// Turn off the auto-printing when failure occurs so that we can
// handle the errors appropriately
Exception::dontPrint();
try
{
// Turn off the auto-printing when failure occurs so that we can
// handle the errors appropriately
Exception::dontPrint();
// Create a new file using the default property lists.
H5File file(FILE_NAME, H5F_ACC_TRUNC);
// Create a new file using the default property lists.
H5File file(FILE_NAME, H5F_ACC_TRUNC);
// Create the data space for the dataset.
DataSpace *dataspace = new DataSpace(2, dims);
// Create the data space for the dataset.
DataSpace *dataspace = new DataSpace(2, dims);
// Modify dataset creation property to enable chunking
DSetCreatPropList *plist = new DSetCreatPropList;
plist->setChunk(2, chunk_dims);
// Modify dataset creation property to enable chunking
DSetCreatPropList *plist = new DSetCreatPropList;
plist->setChunk(2, chunk_dims);
// Set ZLIB (DEFLATE) Compression using level 6.
// To use SZIP compression comment out this line.
plist->setDeflate(6);
// Set ZLIB (DEFLATE) Compression using level 6.
// To use SZIP compression comment out this line.
plist->setDeflate(6);
// Uncomment these lines to set SZIP Compression
// unsigned szip_options_mask = H5_SZIP_NN_OPTION_MASK;
// unsigned szip_pixels_per_block = 16;
// plist->setSzip(szip_options_mask, szip_pixels_per_block);
// Uncomment these lines to set SZIP Compression
// unsigned szip_options_mask = H5_SZIP_NN_OPTION_MASK;
// unsigned szip_pixels_per_block = 16;
// plist->setSzip(szip_options_mask, szip_pixels_per_block);
// Create the dataset.
DataSet *dataset = new DataSet(file.createDataSet( DATASET_NAME,
PredType::STD_I32BE, *dataspace, *plist) );
// Create the dataset.
DataSet *dataset =
new DataSet(file.createDataSet(DATASET_NAME, PredType::STD_I32BE, *dataspace, *plist));
for (i = 0; i< DIM0; i++)
for (j=0; j<DIM1; j++)
buf[i][j] = i+j;
for (i = 0; i < DIM0; i++)
for (j = 0; j < DIM1; j++)
buf[i][j] = i + j;
// Write data to dataset.
dataset->write(buf, PredType::NATIVE_INT);
// Write data to dataset.
dataset->write(buf, PredType::NATIVE_INT);
// Close objects and file. Either approach will close the HDF5 item.
delete dataspace;
delete dataset;
delete plist;
file.close();
// Close objects and file. Either approach will close the HDF5 item.
delete dataspace;
delete dataset;
delete plist;
file.close();
// -----------------------------------------------
// Re-open the file and dataset, retrieve filter
// information for dataset and read the data back.
// -----------------------------------------------
int rbuf[DIM0][DIM1];
int numfilt;
size_t nelmts={1}, namelen={1};
unsigned flags, filter_info, cd_values[1], idx;
char name[1];
H5Z_filter_t filter_type;
// -----------------------------------------------
// Re-open the file and dataset, retrieve filter
// information for dataset and read the data back.
// -----------------------------------------------
// Open the file and the dataset in the file.
file.openFile(FILE_NAME, H5F_ACC_RDONLY);
dataset = new DataSet(file.openDataSet( DATASET_NAME));
int rbuf[DIM0][DIM1];
int numfilt;
size_t nelmts = {1}, namelen = {1};
unsigned flags, filter_info, cd_values[1], idx;
char name[1];
H5Z_filter_t filter_type;
// Get the create property list of the dataset.
plist = new DSetCreatPropList(dataset->getCreatePlist ());
// Open the file and the dataset in the file.
file.openFile(FILE_NAME, H5F_ACC_RDONLY);
dataset = new DataSet(file.openDataSet(DATASET_NAME));
// Get the number of filters associated with the dataset.
numfilt = plist->getNfilters();
cout << "Number of filters associated with dataset: " << numfilt << endl;
// Get the create property list of the dataset.
plist = new DSetCreatPropList(dataset->getCreatePlist());
for (idx=0; idx < numfilt; idx++) {
nelmts = 0;
// Get the number of filters associated with the dataset.
numfilt = plist->getNfilters();
cout << "Number of filters associated with dataset: " << numfilt << endl;
filter_type = plist->getFilter(idx, flags, nelmts, cd_values, namelen, name , filter_info);
for (idx = 0; idx < numfilt; idx++) {
nelmts = 0;
cout << "Filter Type: ";
filter_type = plist->getFilter(idx, flags, nelmts, cd_values, namelen, name, filter_info);
switch (filter_type) {
case H5Z_FILTER_DEFLATE:
cout << "H5Z_FILTER_DEFLATE" << endl;
break;
case H5Z_FILTER_SZIP:
cout << "H5Z_FILTER_SZIP" << endl;
break;
default:
cout << "Other filter type included." << endl;
}
}
cout << "Filter Type: ";
// Read data.
dataset->read(rbuf, PredType::NATIVE_INT);
switch (filter_type) {
case H5Z_FILTER_DEFLATE:
cout << "H5Z_FILTER_DEFLATE" << endl;
break;
case H5Z_FILTER_SZIP:
cout << "H5Z_FILTER_SZIP" << endl;
break;
default:
cout << "Other filter type included." << endl;
}
}
delete plist;
delete dataset;
file.close(); // can be skipped
// Read data.
dataset->read(rbuf, PredType::NATIVE_INT);
delete plist;
delete dataset;
file.close(); // can be skipped
} // end of try block
} // end of try block
// catch failure caused by the H5File operations
catch (FileIException error) {
error.printErrorStack();
return -1;
catch(FileIException error)
{
error.printErrorStack();
return -1;
}
// catch failure caused by the DataSet operations
catch (DataSetIException error) {
error.printErrorStack();
return -1;
catch(DataSetIException error)
{
error.printErrorStack();
return -1;
}
// catch failure caused by the DataSpace operations
catch (DataSpaceIException error) {
error.printErrorStack();
return -1;
catch(DataSpaceIException error)
{
error.printErrorStack();
return -1;
}
return 0; // successfully terminated
return 0; // successfully terminated
}
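
Deflate is available in any HDF5 build configured with zlib, but SZIP often is not; a defensive writer can probe for a filter before requesting it. A minimal sketch mixing the C-level availability check with the C++ property list (an assumption worth noting: H5Cpp.h pulls in the underlying C headers, so H5Zfilter_avail can be called directly):

DSetCreatPropList plist;
plist.setChunk(2, chunk_dims);
if (H5Zfilter_avail(H5Z_FILTER_SZIP))
    plist.setSzip(H5_SZIP_NN_OPTION_MASK, 16); // 16 pixels per block
else
    plist.setDeflate(6); // fall back to zlib, present in nearly every build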

View File

@@ -1,11 +1,12 @@
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Copyright by The HDF Group. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
* distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
@@ -20,62 +21,69 @@
#include "H5Cpp.h"
using namespace H5;
const H5std_string FILE_NAME("h5tutr_dset.h5");
const H5std_string DATASET_NAME("dset");
const H5std_string ATTR_NAME("Units");
const H5std_string FILE_NAME( "h5tutr_dset.h5" );
const H5std_string DATASET_NAME( "dset" );
const H5std_string ATTR_NAME( "Units" );
const int DIM1 = 2;
const int DIM1 = 2;
int
main(void)
int main (void)
{
int attr_data[2] = {100, 200};
hsize_t dims[1] = {DIM1};
int attr_data[2] = { 100, 200};
hsize_t dims[1] = { DIM1 };
// Try block to detect exceptions raised by any of the calls inside it
try {
// Turn off the auto-printing when failure occurs so that we can
// handle the errors appropriately
Exception::dontPrint();
// Try block to detect exceptions raised by any of the calls inside it
try
{
// Turn off the auto-printing when failure occurs so that we can
// handle the errors appropriately
Exception::dontPrint();
// Open an existing file and dataset.
H5File file(FILE_NAME, H5F_ACC_RDWR);
DataSet dataset = file.openDataSet(DATASET_NAME);
// Open an existing file and dataset.
H5File file( FILE_NAME, H5F_ACC_RDWR );
DataSet dataset = file.openDataSet( DATASET_NAME );
// Create the data space for the attribute.
DataSpace attr_dataspace = DataSpace(1, dims);
// Create the data space for the attribute.
DataSpace attr_dataspace = DataSpace (1, dims );
// Create a dataset attribute.
Attribute attribute = dataset.createAttribute(ATTR_NAME, PredType::STD_I32BE, attr_dataspace);
// Create a dataset attribute.
Attribute attribute = dataset.createAttribute( ATTR_NAME, PredType::STD_I32BE,
attr_dataspace);
// Write the attribute data.
attribute.write( PredType::NATIVE_INT, attr_data);
// Write the attribute data.
attribute.write(PredType::NATIVE_INT, attr_data);
} // end of try block
} // end of try block
// catch failure caused by the H5File operations
catch( DataSpaceIException error )
{
error.printErrorStack();
return -1;
}
// catch failure caused by the H5File operations
catch (DataSpaceIException error) {
error.printErrorStack();
return -1;
}
// catch failure caused by the H5File operations
catch( AttributeIException error )
{
error.printErrorStack();
return -1;
}
// catch failure caused by the H5File operations
catch (AttributeIException error) {
error.printErrorStack();
return -1;
}
// catch failure caused by the H5File operations
catch( FileIException error )
{
error.printErrorStack();
return -1;
}
// catch failure caused by the H5File operations
catch (FileIException error) {
error.printErrorStack();
return -1;
}
// catch failure caused by the DataSet operations
catch( DataSetIException error )
{
error.printErrorStack();
return -1;
}
// catch failure caused by the DataSet operations
catch (DataSetIException error) {
error.printErrorStack();
return -1;
}
return 0; // successfully terminated
return 0; // successfully terminated
}
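
Reading the attribute back mirrors the write: open the attribute on the dataset and read into a native buffer, letting HDF5 convert from the stored big-endian type. A minimal sketch reusing the constants above (std::cout assumes <iostream>):

H5File    file(FILE_NAME, H5F_ACC_RDONLY);
DataSet   dataset   = file.openDataSet(DATASET_NAME);
Attribute attribute = dataset.openAttribute(ATTR_NAME);

int attr_out[2];
attribute.read(PredType::NATIVE_INT, attr_out); // STD_I32BE in the file -> native int
std::cout << attr_out[0] << " " << attr_out[1] << std::endl;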

Some files were not shown because too many files have changed in this diff.