Compare commits
471 Commits
feature/pa
...
develop
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9d596b4a59 | ||
|
|
2e8ea68d97 | ||
|
|
48a3d92d04 | ||
|
|
d61fd4aba7 | ||
|
|
509fe962d4 | ||
|
|
2c9de7a9ae | ||
|
|
720e04e196 | ||
|
|
5543d6eb36 | ||
|
|
06c12f97cd | ||
|
|
90d20764f4 | ||
|
|
54590f3ebd | ||
|
|
337a882af8 | ||
|
|
423ee69c1e | ||
|
|
c2e3c732f0 | ||
|
|
83f81a6422 | ||
|
|
d326f6fae8 | ||
|
|
c0b3646193 | ||
|
|
3da2551adb | ||
|
|
d261272ffa | ||
|
|
23ccd7ee3b | ||
|
|
298c7efe76 | ||
|
|
1cf9ecccf5 | ||
|
|
cfd5059e77 | ||
|
|
9a694db450 | ||
|
|
ab0a778593 | ||
|
|
57af2ff798 | ||
|
|
9f5cf0a457 | ||
|
|
976e8c9750 | ||
|
|
c1e44d32e6 | ||
|
|
b9244a85d9 | ||
|
|
70cf2c390b | ||
|
|
149b8e9769 | ||
|
|
8102fa8c97 | ||
|
|
784061b15e | ||
|
|
6b6bcdead6 | ||
|
|
0e76abaf64 | ||
|
|
4bbda3038a | ||
|
|
138daa8846 | ||
|
|
10c693a04f | ||
|
|
0cb5808087 | ||
|
|
1b06e868fd | ||
|
|
93938b3a8d | ||
|
|
a1c81eda20 | ||
|
|
b7511c19d0 | ||
|
|
502b32b0f2 | ||
|
|
a9036005c3 | ||
|
|
b9f4c001ad | ||
|
|
4a8be219e0 | ||
|
|
ef4f1f2bd4 | ||
|
|
0b4e9cf976 | ||
|
|
dcccc35526 | ||
|
|
64514392fb | ||
|
|
0a2006d9c6 | ||
|
|
2376723d4a | ||
|
|
281984b3e3 | ||
|
|
c1c131260a | ||
|
|
24700e8f06 | ||
|
|
88b24c258b | ||
|
|
4e0277c35a | ||
|
|
96a4e10102 | ||
|
|
cf232a757f | ||
|
|
158dff6b50 | ||
|
|
eac2cd54e2 | ||
|
|
3e762b07aa | ||
|
|
ee4d92e01b | ||
|
|
9dd36f016a | ||
|
|
169896aedb | ||
|
|
719d800499 | ||
|
|
03cc221016 | ||
|
|
0f94940f1a | ||
|
|
34ec3bb7bc | ||
|
|
1750b4b0af | ||
|
|
659bc99fd1 | ||
|
|
99487d9e45 | ||
|
|
5985d0e0b1 | ||
|
|
a8942c7413 | ||
|
|
d93c6fae43 | ||
|
|
18c438bdf0 | ||
|
|
adc467992e | ||
|
|
0f30852242 | ||
|
|
2025d047e2 | ||
|
|
81a6f6874f | ||
|
|
018f093c09 | ||
|
|
40a0e3c7b5 | ||
|
|
55e3f667d4 | ||
|
|
6924d590cc | ||
|
|
e4fb9aa96e | ||
|
|
94119211a7 | ||
|
|
11dfa25910 | ||
|
|
71595e9ea3 | ||
|
|
2b8139f34c | ||
|
|
9acd203041 | ||
|
|
4d0501f48c | ||
|
|
a6c1dff09a | ||
|
|
0173706826 | ||
|
|
9c61f7b1e1 | ||
|
|
ad2d77397a | ||
|
|
ba1fe3d1df | ||
|
|
7d1997ff7b | ||
|
|
23e237e728 | ||
|
|
ea902f9ace | ||
|
|
f98c93ab19 | ||
|
|
4e4c7cf66d | ||
|
|
7997b53589 | ||
|
|
93754cae33 | ||
|
|
a898cef6c2 | ||
|
|
889dc1e9a7 | ||
|
|
0fd867390a | ||
|
|
afb92a3e22 | ||
|
|
5021fd9d4d | ||
|
|
80f80d61db | ||
|
|
edc3790f5c | ||
|
|
cffaf47c15 | ||
|
|
64e69d9291 | ||
|
|
306db409d4 | ||
|
|
e80079fd21 | ||
|
|
a322eb6147 | ||
|
|
c114bd64b0 | ||
|
|
1fb83da215 | ||
|
|
ce67e4eb05 | ||
|
|
79bdc6183e | ||
|
|
1bec0ce2d7 | ||
|
|
4184bac2ec | ||
|
|
1c26386c38 | ||
|
|
dcd332d231 | ||
|
|
853a592d2d | ||
|
|
100b22e6c2 | ||
|
|
d491c33a72 | ||
|
|
6d89634445 | ||
|
|
a9d8b469f4 | ||
|
|
16aa2dbaa0 | ||
|
|
45178c87a3 | ||
|
|
dcf3b54b6e | ||
|
|
b5598575bb | ||
|
|
fe9c07fd90 | ||
|
|
05a0411140 | ||
|
|
2d4a96b32c | ||
|
|
ecc687c4ff | ||
|
|
99f17c7271 | ||
|
|
1804e0e0c4 | ||
|
|
c8fdd92cd4 | ||
|
|
6e8aaab1b4 | ||
|
|
ccdcec273a | ||
|
|
5de9d63ff4 | ||
|
|
5719187a34 | ||
|
|
5097ff016f | ||
|
|
08aebb6d1a | ||
|
|
e1d6157c5d | ||
|
|
ee55dd777b | ||
|
|
88ed5b69d4 | ||
|
|
f020d129b2 | ||
|
|
21ec337855 | ||
|
|
801edb4f52 | ||
|
|
ec3737ab69 | ||
|
|
f24dff5157 | ||
|
|
3015e3d7ab | ||
|
|
10eb43d0c2 | ||
|
|
3e38a001a1 | ||
|
|
2b786ffe5a | ||
|
|
281db5876e | ||
|
|
7f6261a677 | ||
|
|
de46019a86 | ||
|
|
8cd97c4793 | ||
|
|
9dfcca0bbb | ||
|
|
a3fd350483 | ||
|
|
e6f16c0ec2 | ||
|
|
9304f277e8 | ||
|
|
cf482563cc | ||
|
|
d201907a4b | ||
|
|
e4de06057a | ||
|
|
07efecd9f4 | ||
|
|
ecc9bbe4b6 | ||
|
|
b54d01fcb5 | ||
|
|
1483eeef34 | ||
|
|
dc9562ee77 | ||
|
|
afc54d75a1 | ||
|
|
2bba853375 | ||
|
|
d6927e49c0 | ||
|
|
191c27c0a9 | ||
|
|
951566a4ad | ||
|
|
75f4d9b042 | ||
|
|
10e4dd4ef1 | ||
|
|
25ef534039 | ||
|
|
029cde6111 | ||
|
|
c58558e872 | ||
|
|
8d5fcbcdce | ||
|
|
b745b54a09 | ||
|
|
ec5100bf26 | ||
|
|
29c8937f60 | ||
|
|
a3e37c587b | ||
|
|
174505ca3c | ||
|
|
e794dc5ec4 | ||
|
|
f04312a792 | ||
|
|
4c73c284e0 | ||
|
|
9777d05848 | ||
|
|
36ae75fb93 | ||
|
|
6c5b5e0759 | ||
|
|
95ceec786a | ||
|
|
ef33ac8bac | ||
|
|
b84241e57a | ||
|
|
60ba2356de | ||
|
|
5d63aca604 | ||
|
|
57926d549e | ||
|
|
956282fa6f | ||
|
|
7127d8912c | ||
|
|
043f66afd2 | ||
|
|
597ce3cdbf | ||
|
|
b22984600a | ||
|
|
54f116b42d | ||
|
|
bf07e0f2c9 | ||
|
|
a71534fcc2 | ||
|
|
fcf41b3cd6 | ||
|
|
ea13de1bb0 | ||
|
|
ecab3d9c2a | ||
|
|
47fee5bfd7 | ||
|
|
76bc0a77b1 | ||
|
|
9c21d38ab8 | ||
|
|
0b3220636d | ||
|
|
8ed84bf4f0 | ||
|
|
ce4c41f89f | ||
|
|
ab948ff9f9 | ||
|
|
33f6ade6e7 | ||
|
|
05077d3784 | ||
|
|
21aca4f380 | ||
|
|
ae414872f5 | ||
|
|
213eac2588 | ||
|
|
96edaa304c | ||
|
|
0636395851 | ||
|
|
b0240030f5 | ||
|
|
df1ce61f1b | ||
|
|
b49c284718 | ||
|
|
a4a8e1d23f | ||
|
|
7983a584a0 | ||
|
|
27bb358f7a | ||
|
|
32caa567a2 | ||
|
|
1cfca8fd93 | ||
|
|
eb2cf08bac | ||
|
|
363e3ecef8 | ||
|
|
4461678fb5 | ||
|
|
96c4e0521c | ||
|
|
8225778fae | ||
|
|
5c39998224 | ||
|
|
e3800e1235 | ||
|
|
43f052352c | ||
|
|
ba5385ffc2 | ||
|
|
69ddcd02f1 | ||
|
|
f4c9931abd | ||
|
|
079df9d216 | ||
|
|
08b797de73 | ||
|
|
9b53b2f6ca | ||
|
|
c9785cc5fc | ||
|
|
8c6a3ce1d7 | ||
|
|
65a24325af | ||
|
|
7c9e725134 | ||
|
|
6be07134de | ||
|
|
c835d16a59 | ||
|
|
385ae6ac5b | ||
|
|
b0d9fa058a | ||
|
|
b2363a8195 | ||
|
|
c62b026609 | ||
|
|
af014fa61c | ||
|
|
dae6064103 | ||
|
|
b9d1d66ab8 | ||
|
|
663321087a | ||
|
|
63ce6839b5 | ||
|
|
e269f90364 | ||
|
|
0412d3f292 | ||
|
|
fa7caf8435 | ||
|
|
f599e2ac7f | ||
|
|
460ee73a64 | ||
|
|
be5de999a9 | ||
|
|
e0c2443179 | ||
|
|
ff089f8e36 | ||
|
|
7e1bde1cb5 | ||
|
|
aa688c68c4 | ||
|
|
a45ade8886 | ||
|
|
6bc41fb11c | ||
|
|
74c80f0bbf | ||
|
|
52d6626254 | ||
|
|
fe413fc6ff | ||
|
|
0c0d3bf736 | ||
|
|
424bbd16eb | ||
|
|
a7f1ce46b9 | ||
|
|
9cd0d042f0 | ||
|
|
8937054d5a | ||
|
|
6aa7aeeda4 | ||
|
|
9e97a22b8b | ||
|
|
3ed53d17fb | ||
|
|
41526f68f3 | ||
|
|
015240e790 | ||
|
|
552833213c | ||
|
|
72db748dd9 | ||
|
|
89fae896ae | ||
|
|
03ed7a3d79 | ||
|
|
cb424b2c45 | ||
|
|
9e00089307 | ||
|
|
57a850f897 | ||
|
|
249008d4ce | ||
|
|
1a0fe1f6b7 | ||
|
|
896304d2da | ||
|
|
9e55d51f46 | ||
|
|
c064d3481b | ||
|
|
d6f05069c1 | ||
|
|
8bfed8aae4 | ||
|
|
8b2e7b32b5 | ||
|
|
55f4cc0caa | ||
|
|
8b54201509 | ||
|
|
acb6e2da30 | ||
|
|
a124f02f2c | ||
|
|
ca9bbc1908 | ||
|
|
081b3902bf | ||
|
|
e7b184e79e | ||
|
|
a30342e1d7 | ||
|
|
b9dc83934d | ||
|
|
d0ea8a1d8c | ||
|
|
110f6a1c7d | ||
|
|
01b7165df5 | ||
|
|
cea0dc4565 | ||
|
|
779ab44aa0 | ||
|
|
ca737ece9a | ||
|
|
1b413fe61a | ||
|
|
13985a7f52 | ||
|
|
cdf837d0a1 | ||
|
|
c4cd250408 | ||
|
|
838d050a63 | ||
|
|
19a59c29b9 | ||
|
|
08861aa903 | ||
|
|
dad94a100e | ||
|
|
b3ef8044cf | ||
|
|
2ba7a01bff | ||
|
|
5eba258d90 | ||
|
|
7e586e0e44 | ||
|
|
c4d70e3571 | ||
|
|
ffd311cf36 | ||
|
|
180a7f9c71 | ||
|
|
0ea3be4dc1 | ||
|
|
d7565f316d | ||
|
|
417ee1393b | ||
|
|
0fffb26c01 | ||
|
|
0fba6cbcda | ||
|
|
336ead720e | ||
|
|
2b323159a7 | ||
|
|
c8b87fa320 | ||
|
|
a6876a9745 | ||
|
|
ac9c21dd6f | ||
|
|
2fcfc42d67 | ||
|
|
822fa43b12 | ||
|
|
823bc7381c | ||
|
|
03c3a54695 | ||
|
|
32ef796e47 | ||
|
|
14da68ebff | ||
|
|
f875a73711 | ||
|
|
17be6c2574 | ||
|
|
a80897ee49 | ||
|
|
463ef39bc7 | ||
|
|
9131d5289c | ||
|
|
aaa6614c48 | ||
|
|
b793e5005c | ||
|
|
ea26955d18 | ||
|
|
852ed69f4f | ||
|
|
f2a8ac16d7 | ||
|
|
6a4d37f189 | ||
|
|
ab69f5df77 | ||
|
|
29de83ab8e | ||
|
|
d433f0f7a6 | ||
|
|
95909be9c3 | ||
|
|
41936183d7 | ||
|
|
fe6b2b8e55 | ||
|
|
ead0f95877 | ||
|
|
a0aeb114e9 | ||
|
|
a2b2148e73 | ||
|
|
da562164ce | ||
|
|
b6398dd60e | ||
|
|
f15afef6b3 | ||
|
|
e8ea850004 | ||
|
|
870ee8feee | ||
|
|
ab48ed6df0 | ||
|
|
82bf2e97f7 | ||
|
|
9ed2613789 | ||
|
|
3a906eb3e5 | ||
|
|
23ab623955 | ||
|
|
b86be5df35 | ||
|
|
71af37d38f | ||
|
|
0d3182c7da | ||
|
|
80f9af144c | ||
|
|
42b767fc67 | ||
|
|
25ef608e2f | ||
|
|
7d9b850574 | ||
|
|
be36ac1d33 | ||
|
|
abad1f320a | ||
|
|
fb29cd77ad | ||
|
|
96398e47af | ||
|
|
34442f3740 | ||
|
|
b9846d1749 | ||
|
|
872b87d5d6 | ||
|
|
1f9228316c | ||
|
|
01209d2777 | ||
|
|
1fb3743f04 | ||
|
|
b9470be379 | ||
|
|
d5ddf61803 | ||
|
|
16f482de8f | ||
|
|
44d9926840 | ||
|
|
ba032bb28b | ||
|
|
7c18723404 | ||
|
|
94d01f5377 | ||
|
|
ebf89bb231 | ||
|
|
c72904c6b3 | ||
|
|
b7d8177404 | ||
|
|
8e0b427bf2 | ||
|
|
80954e8272 | ||
|
|
f46e5db184 | ||
|
|
e19e7e3faa | ||
|
|
2db4c770f8 | ||
|
|
758e97c1e5 | ||
|
|
5a5a770402 | ||
|
|
3107fafa2c | ||
|
|
c302773438 | ||
|
|
705ba09e76 | ||
|
|
42c419e266 | ||
|
|
feda52b084 | ||
|
|
cfe4198b6c | ||
|
|
08ec5b45a5 | ||
|
|
bbb0acf353 | ||
|
|
d25d05d740 | ||
|
|
97b8fad8ed | ||
|
|
96cf19499b | ||
|
|
4ce6373a0b | ||
|
|
7bfa873983 | ||
|
|
0f847f30bc | ||
|
|
01092658a3 | ||
|
|
c9347450c1 | ||
|
|
36d9a165e4 | ||
|
|
856de93673 | ||
|
|
1d598182d6 | ||
|
|
86ef00fd67 | ||
|
|
bcf9565535 | ||
|
|
2b3bb3659c | ||
|
|
4ac6ccc021 | ||
|
|
ad71539d30 | ||
|
|
b5eed1b563 | ||
|
|
44de59b642 | ||
|
|
5fb634ca26 | ||
|
|
5222abdd19 | ||
|
|
4d45d05078 | ||
|
|
99d3962a83 | ||
|
|
d45124d708 | ||
|
|
dd6ad33c75 | ||
|
|
6c184e28d8 | ||
|
|
1a9662e04c | ||
|
|
bd37363332 | ||
|
|
8dc72928b8 | ||
|
|
2f34c433dc | ||
|
|
8b3c09f780 | ||
|
|
e56b0a3d50 | ||
|
|
166ce5b027 | ||
|
|
5fffd563de | ||
|
|
4aabac62f8 | ||
|
|
a13bf24a39 | ||
|
|
80017cba4f | ||
|
|
54237d777a | ||
|
|
f8f1fefe53 | ||
|
|
03217eca81 | ||
|
|
8430e3d661 | ||
|
|
bf27121b5d | ||
|
|
58e2157c7e | ||
|
|
a564ec39af | ||
|
|
9e6de287e7 | ||
|
|
86b8062699 | ||
|
|
227d7d927b | ||
|
|
6fc53b39a2 | ||
|
|
cf1aab28fb |
@@ -1,5 +1,4 @@
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
@@ -1,7 +1,20 @@
|
||||
---
|
||||
Language: Cpp
|
||||
BasedOnStyle: LLVM
|
||||
AlignConsecutiveAssignments: true
|
||||
BraceWrapping:
|
||||
AfterFunction: true
|
||||
#llvm10-11: AfterControlStatement: false - Never
|
||||
BeforeCatch: true
|
||||
BeforeElse: true
|
||||
#llvm11: BeforeLambdaBody: false
|
||||
#llvm11: BeforeWhile: false
|
||||
BreakBeforeBraces: Stroustrup
|
||||
BreakAfterJavaFieldAnnotations: true
|
||||
BreakStringLiterals: true
|
||||
ColumnLimit: 110 # Update $max_trace_macro_line_len in bin/trace also
|
||||
IndentWidth: 4
|
||||
---
|
||||
Language: Cpp
|
||||
#llvm11: AlignConsecutiveBitFields: false
|
||||
AlignConsecutiveDeclarations: true
|
||||
AlignConsecutiveMacros: true
|
||||
@@ -22,17 +35,6 @@ AlwaysBreakAfterReturnType: AllDefinitions
|
||||
# - H5_ATTR_CONST
|
||||
# - H5_ATTR_PURE
|
||||
# - H5_ATTR_FALLTHROUGH
|
||||
BraceWrapping:
|
||||
AfterFunction: true
|
||||
#llvm10-11: AfterControlStatement: false - Never
|
||||
BeforeCatch: true
|
||||
BeforeElse: true
|
||||
#llvm11: BeforeLambdaBody: false
|
||||
#llvm11: BeforeWhile: false
|
||||
BreakBeforeBraces: Stroustrup
|
||||
BreakAfterJavaFieldAnnotations: true
|
||||
BreakStringLiterals: true
|
||||
ColumnLimit: 110 # Update $max_trace_macro_line_len in bin/trace also
|
||||
ForEachMacros: ['ALL_MEMBERS', 'UNIQUE_MEMBERS']
|
||||
IncludeCategories:
|
||||
- Regex: '^"(llvm|llvm-c|clang|clang-c)/'
|
||||
@@ -55,7 +57,6 @@ IndentCaseLabels: true
|
||||
#llvm11: IndentCaseBlocks: false
|
||||
IndentGotoLabels: false
|
||||
#llvm11: IndentExternBlock: AfterExternBlock
|
||||
IndentWidth: 4
|
||||
#llvm11: InsertTrailingCommas: None
|
||||
MacroBlockBegin: "^BEGIN_FUNC"
|
||||
MacroBlockEnd: "^END_FUNC"
|
||||
@@ -92,5 +93,8 @@ StatementMacros:
|
||||
#llvm11: WhitespaceSensitiveMacros:
|
||||
#llvm11: - STRINGIZE
|
||||
#llvm11: - PP_STRINGIZE
|
||||
---
|
||||
Language: Java
|
||||
BreakAfterJavaFieldAnnotations: true
|
||||
JavaImportGroups: ['java', 'hdf', 'hdf.hdf5lib', 'org']
|
||||
...
|
||||
|
||||
|
||||
6
.gitattributes
vendored
6
.gitattributes
vendored
@@ -192,12 +192,12 @@ java/examples/testfiles/examples.intro.H5_CreateGroup.txt -text
|
||||
java/examples/testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt -text
|
||||
java/examples/testfiles/examples.intro.H5_CreateGroupDataset.txt -text
|
||||
java/examples/testfiles/examples.intro.H5_ReadWrite.txt -text
|
||||
java/lib/ext/slf4j-nop-1.7.25.jar -text svneol=unset#application/zip
|
||||
java/lib/ext/slf4j-simple-1.7.25.jar -text svneol=unset#application/zip
|
||||
java/lib/ext/slf4j-nop-1.7.33.jar -text svneol=unset#application/zip
|
||||
java/lib/ext/slf4j-simple-1.7.33.jar -text svneol=unset#application/zip
|
||||
java/lib/hamcrest-core.jar -text svneol=unset#application/java-archive
|
||||
java/lib/junit.jar -text svneol=unset#application/java-archive
|
||||
java/lib/simplelogger.properties -text
|
||||
java/lib/slf4j-api-1.7.25.jar -text svneol=unset#application/zip
|
||||
java/lib/slf4j-api-1.7.33.jar -text svneol=unset#application/zip
|
||||
java/src/CMakeLists.txt -text
|
||||
java/src/Makefile.am -text
|
||||
java/src/hdf/CMakeLists.txt -text
|
||||
|
||||
37
.github/CODEOWNERS
vendored
37
.github/CODEOWNERS
vendored
@@ -2,41 +2,10 @@
|
||||
# Each line is a file pattern followed by one or more owners.
|
||||
|
||||
# These owners will be the default owners for everything in the repo.
|
||||
* @lrknox
|
||||
* @lrknox @derobins @byrnHDF @fortnern @jhendersonHDF @qkoziol @vchoi-hdfgroup @bmribler @raylu-hdf @mattjala @brtnfld
|
||||
|
||||
# Order is important. The last matching pattern has the most precedence.
|
||||
# So if a pull request only touches javascript files, only these owners
|
||||
# will be requested to review.
|
||||
*.cmake @byrnHDF @derobins
|
||||
CMakeLists.txt @byrnHDF @derobins
|
||||
CMakeTests.* @byrnHDF @derobins
|
||||
|
||||
/bin/ @lrknox @derobins @qkoziol
|
||||
|
||||
/c++/ @bmribler @byrnHDF @derobins
|
||||
|
||||
/config/ @lrknox @derobins @qkoziol @byrnHDF
|
||||
|
||||
/doc/ @gnuoyd @jrmainzer
|
||||
|
||||
/examples/ @lrknox @derobins @bmribler
|
||||
|
||||
/fortran/ @brtnfld @epourmal
|
||||
|
||||
/hl/ @bmribler @byrnHDF @derobins
|
||||
|
||||
/java/ @jhendersonHDF @byrnHDF
|
||||
|
||||
/m4/ @lrknox @derobins
|
||||
|
||||
/release_docs/ @lrknox @bmribler @byrnHDF
|
||||
|
||||
/src/ @jhendersonHDF @derobins @fortnern @qkoziol @soumagne @vchoi-hdfgroup @jrmainzer
|
||||
|
||||
/test/ @jhendersonHDF @derobins @fortnern @qkoziol @soumagne @vchoi-hdfgroup @jrmainzer
|
||||
|
||||
/testpar/ @jhendersonHDF @rawarren @jrmainzer @qkoziol
|
||||
|
||||
/tools/ @byrnHDF @bmribler @derobins
|
||||
|
||||
/utils/ @lrknox @byrnHDF @derobins
|
||||
/fortran/ @brtnfld @derobins
|
||||
/java/ @jhendersonHDF @byrnHDF @derobins
|
||||
|
||||
3
.github/FUNDING.yml
vendored
Normal file
3
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
custom: "https://hdfgroup.org/about-us/donate-to-the-hdf-group/"
|
||||
25
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
25
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Report a problem with HDF5
|
||||
title: "[BUG]"
|
||||
labels: bug
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Platform (please complete the following information)**
|
||||
- HDF5 version (if building from a maintenance branch, please include the commit hash)
|
||||
- OS and version
|
||||
- Compiler and version
|
||||
- Build system (e.g. CMake, Autotools) and version
|
||||
- Any configure options you specified
|
||||
- MPI library and version (parallel HDF5)
|
||||
|
||||
**Additional context**
|
||||
Add any other context about the problem here.
|
||||
20
.github/ISSUE_TEMPLATE/feature-request.md
vendored
Normal file
20
.github/ISSUE_TEMPLATE/feature-request.md
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: Suggest an improvement to HDF5
|
||||
title: "[Feature Request]"
|
||||
labels: enhancement
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Is your feature request related to a problem? Please describe.**
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
|
||||
**Describe the solution you'd like**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Describe alternatives you've considered**
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
||||
8
.github/PULL_REQUEST_TEMPLATE/pull_request_template.md
vendored
Normal file
8
.github/PULL_REQUEST_TEMPLATE/pull_request_template.md
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
## Describe your changes
|
||||
|
||||
## Issue ticket number (GitHub or JIRA)
|
||||
|
||||
## Checklist before requesting a review
|
||||
- [ ] My code conforms to the guidelines in CONTRIBUTING.md
|
||||
- [ ] I made an entry in release_docs/RELEASE.txt (bug fixes, new features)
|
||||
- [ ] I added a test (bug fixes, new features)
|
||||
10
.github/workflows/clang-format-check.yml
vendored
10
.github/workflows/clang-format-check.yml
vendored
@@ -7,12 +7,12 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
if: "!contains(github.event.head_commit.message, 'skip-ci')"
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Run clang-format style check for C programs.
|
||||
uses: DoozyX/clang-format-lint-action@v0.11
|
||||
- uses: actions/checkout@v3
|
||||
- name: Run clang-format style check for C and Java programs.
|
||||
uses: DoozyX/clang-format-lint-action@v0.13
|
||||
with:
|
||||
source: '.'
|
||||
extensions: 'c,h,cpp,hpp'
|
||||
clangFormatVersion: 10
|
||||
extensions: 'c,h,cpp,hpp,java'
|
||||
clangFormatVersion: 13
|
||||
style: file
|
||||
exclude: './config ./hl/src/H5LTanalyze.c ./hl/src/H5LTparse.c ./hl/src/H5LTparse.h ./src/H5Epubgen.h ./src/H5Einit.h ./src/H5Eterm.h ./src/H5Edefin.h ./src/H5version.h ./src/H5overflow.h'
|
||||
|
||||
10
.github/workflows/clang-format-fix.yml
vendored
10
.github/workflows/clang-format-fix.yml
vendored
@@ -8,13 +8,13 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
if: "!contains(github.event.head_commit.message, 'skip-ci')"
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Run clang-format style check for C programs.
|
||||
uses: DoozyX/clang-format-lint-action@v0.11
|
||||
- uses: actions/checkout@v3
|
||||
- name: Run clang-format style check for C and Java programs.
|
||||
uses: DoozyX/clang-format-lint-action@v0.13
|
||||
with:
|
||||
source: '.'
|
||||
extensions: 'c,h,cpp,hpp'
|
||||
clangFormatVersion: 10
|
||||
extensions: 'c,h,cpp,hpp,java'
|
||||
clangFormatVersion: 13
|
||||
inplace: True
|
||||
style: file
|
||||
exclude: './config ./hl/src/H5LTanalyze.c ./hl/src/H5LTparse.c ./hl/src/H5LTparse.h ./src/H5Epubgen.h ./src/H5Einit.h ./src/H5Eterm.h ./src/H5Edefin.h ./src/H5version.h ./src/H5overflow.h'
|
||||
|
||||
6
.github/workflows/codespell.yml
vendored
6
.github/workflows/codespell.yml
vendored
@@ -8,8 +8,8 @@ jobs:
|
||||
name: Check for spelling errors
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
- uses: codespell-project/actions-codespell@master
|
||||
with:
|
||||
skip: ./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c
|
||||
ignore_words_list: isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,minnum
|
||||
skip: ./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE,./tools/test/h5repack/testfiles/*.dat
|
||||
ignore_words_list: isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,minnum,ake,gord,numer,ro,oce
|
||||
|
||||
605
.github/workflows/main.yml
vendored
605
.github/workflows/main.yml
vendored
@@ -4,216 +4,493 @@ name: hdf5 dev CI
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches: [ develop, hdf5_1_12, hdf5_1_10, hdf5_1_8 ]
|
||||
pull_request:
|
||||
branches: [ develop, hdf5_1_14, hdf5_1_12, hdf5_1_10, hdf5_1_8 ]
|
||||
paths-ignore:
|
||||
- '.github/**'
|
||||
- 'doc/**'
|
||||
- 'release_docs/**'
|
||||
- '.github/CODEOWNERS'
|
||||
- '.github/FUNDING.yml'
|
||||
- 'doc/**'
|
||||
- 'release_docs/**'
|
||||
- 'ACKNOWLEDGEMENTS'
|
||||
- 'COPYING**'
|
||||
- '**.md'
|
||||
|
||||
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
|
||||
# A workflow run is made up of one or more jobs that can run sequentially or
|
||||
# in parallel. We just have one job, but the matrix items defined below will
|
||||
# run in parallel.
|
||||
jobs:
|
||||
# This workflow contains a single job called "build"
|
||||
build:
|
||||
|
||||
# A workflow that builds the library and runs all the tests
|
||||
build_and_test:
|
||||
|
||||
strategy:
|
||||
# fail-fast: false
|
||||
|
||||
# The current matrix has three dimensions:
|
||||
#
|
||||
# * config name
|
||||
# * thread-safety on/off
|
||||
# * release vs. debug build
|
||||
#
|
||||
# Most configuration information is added via the 'include' mechanism,
|
||||
# which will append the key-value pairs in the configuration where the
|
||||
# names match.
|
||||
|
||||
matrix:
|
||||
name: ["Windows Latest MSVC", "Ubuntu Latest GCC", "Ubuntu Debug GCC", "macOS Latest Clang", "Ubuntu Autotools GCC", "Windows TS MSVC", "Ubuntu TS GCC", "TS Debug GCC", "macOS TS Clang", "TS Autotools GCC"]
|
||||
|
||||
name:
|
||||
- "Windows MSVC CMake"
|
||||
- "Ubuntu gcc CMake"
|
||||
- "Ubuntu gcc Autotools"
|
||||
- "Ubuntu gcc Autotools parallel (build only)"
|
||||
- "MacOS Clang CMake"
|
||||
|
||||
thread_safety:
|
||||
- enabled: true
|
||||
text: " TS"
|
||||
- enabled: false
|
||||
text: ""
|
||||
|
||||
build_mode:
|
||||
- text: " REL"
|
||||
cmake: "Release"
|
||||
autotools: "production"
|
||||
- text: " DBG"
|
||||
cmake: "Debug"
|
||||
autotools: "debug"
|
||||
|
||||
# This is where we list the bulk of the options for each configuration.
|
||||
# The key-value pair values are usually appropriate for being CMake or
|
||||
# Autotools configure values, so be aware of that.
|
||||
|
||||
include:
|
||||
- name: "Windows Latest MSVC"
|
||||
artifact: "Windows-MSVC.tar.xz"
|
||||
|
||||
# Windows w/ MSVC + CMake
|
||||
#
|
||||
# No Fortran, parallel, or VFDs that rely on POSIX things
|
||||
- name: "Windows MSVC CMake"
|
||||
os: windows-2022
|
||||
build_type: "Release"
|
||||
toolchain: ""
|
||||
cpp: ON
|
||||
fortran: OFF
|
||||
java: ON
|
||||
ts: OFF
|
||||
hl: ON
|
||||
parallel: OFF
|
||||
mirror_vfd: OFF
|
||||
direct_vfd: OFF
|
||||
generator: "-G \"Visual Studio 17 2022\" -A x64"
|
||||
- name: "Ubuntu Latest GCC"
|
||||
artifact: "Linux.tar.xz"
|
||||
run_tests: true
|
||||
|
||||
# Linux (Ubuntu) w/ gcc + CMake
|
||||
#
|
||||
# We might think about adding Clang, but MacOS already tests that
|
||||
# so it's not critical
|
||||
- name: "Ubuntu gcc CMake"
|
||||
os: ubuntu-latest
|
||||
build_type: "Release"
|
||||
cpp: ON
|
||||
fortran: ON
|
||||
java: ON
|
||||
ts: OFF
|
||||
hl: ON
|
||||
parallel: OFF
|
||||
mirror_vfd: ON
|
||||
direct_vfd: ON
|
||||
toolchain: "config/toolchain/GCC.cmake"
|
||||
generator: "-G Ninja"
|
||||
- name: "macOS Latest Clang"
|
||||
artifact: "macOS.tar.xz"
|
||||
os: macos-latest
|
||||
build_type: "Release"
|
||||
cpp: ON
|
||||
fortran: OFF
|
||||
java: ON
|
||||
ts: OFF
|
||||
hl: ON
|
||||
parallel: OFF
|
||||
toolchain: "config/toolchain/clang.cmake"
|
||||
generator: "-G Ninja"
|
||||
- name: "Ubuntu Debug GCC"
|
||||
artifact: "LinuxDBG.tar.xz"
|
||||
run_tests: true
|
||||
|
||||
# Linux (Ubuntu) w/ gcc + Autotools
|
||||
#
|
||||
# Keep this identical to the CMake configs. Note the difference in
|
||||
# the values.
|
||||
- name: "Ubuntu gcc Autotools"
|
||||
os: ubuntu-latest
|
||||
build_type: "Debug"
|
||||
cpp: ON
|
||||
fortran: OFF
|
||||
java: OFF
|
||||
ts: OFF
|
||||
hl: ON
|
||||
parallel: OFF
|
||||
toolchain: "config/toolchain/GCC.cmake"
|
||||
generator: "-G Ninja"
|
||||
- name: "Ubuntu Autotools GCC"
|
||||
artifact: "LinuxA.tar.xz"
|
||||
os: ubuntu-latest
|
||||
build_type: "Release"
|
||||
cpp: enable
|
||||
fortran: enable
|
||||
java: enable
|
||||
ts: disable
|
||||
hl: enable
|
||||
parallel: disable
|
||||
mirror_vfd: enable
|
||||
direct_vfd: enable
|
||||
deprec_sym: enable
|
||||
default_api: v114
|
||||
szip: yes
|
||||
toolchain: ""
|
||||
generator: "autogen"
|
||||
# Threadsafe runs
|
||||
- name: "Windows TS MSVC"
|
||||
artifact: "Windows-MSVCTS.tar.xz"
|
||||
os: windows-2019
|
||||
build_type: "Release"
|
||||
toolchain: ""
|
||||
cpp: OFF
|
||||
fortran: OFF
|
||||
java: OFF
|
||||
ts: ON
|
||||
hl: OFF
|
||||
parallel: OFF
|
||||
generator: "-G \"Visual Studio 16 2019\" -A x64"
|
||||
- name: "Ubuntu TS GCC"
|
||||
artifact: "LinuxTS.tar.xz"
|
||||
flags: ""
|
||||
run_tests: true
|
||||
|
||||
# Parallel Linux (Ubuntu) w/ gcc + Autotools
|
||||
#
|
||||
# The GitHub runners are inadequate for running parallel HDF5 tests,
|
||||
# so we catch most issues in daily testing. What we have here is just
|
||||
# a compile check to make sure nothing obvious is broken.
|
||||
- name: "Ubuntu gcc Autotools parallel (build only)"
|
||||
os: ubuntu-latest
|
||||
build_type: "Release"
|
||||
cpp: OFF
|
||||
cpp: disable
|
||||
fortran: enable
|
||||
java: disable
|
||||
parallel: enable
|
||||
mirror_vfd: disable
|
||||
direct_vfd: disable
|
||||
deprec_sym: enable
|
||||
default_api: v114
|
||||
szip: yes
|
||||
toolchain: ""
|
||||
generator: "autogen"
|
||||
flags: "CC=mpicc"
|
||||
run_tests: false
|
||||
|
||||
# MacOS w/ Clang + CMake
|
||||
#
|
||||
# We could also build with the Autotools via brew installing them,
|
||||
# but that seems unnecessary
|
||||
- name: "MacOS Clang CMake"
|
||||
os: macos-11
|
||||
cpp: ON
|
||||
fortran: OFF
|
||||
java: OFF
|
||||
ts: ON
|
||||
hl: OFF
|
||||
parallel: OFF
|
||||
toolchain: "config/toolchain/GCC.cmake"
|
||||
generator: "-G Ninja"
|
||||
- name: "macOS TS Clang"
|
||||
artifact: "macOSTS.tar.xz"
|
||||
os: macos-latest
|
||||
build_type: "Release"
|
||||
cpp: OFF
|
||||
fortran: OFF
|
||||
java: OFF
|
||||
ts: ON
|
||||
hl: OFF
|
||||
java: ON
|
||||
parallel: OFF
|
||||
mirror_vfd: ON
|
||||
direct_vfd: OFF
|
||||
toolchain: "config/toolchain/clang.cmake"
|
||||
generator: "-G Ninja"
|
||||
- name: "TS Debug GCC"
|
||||
artifact: "LinuxTSDBG.tar.xz"
|
||||
run_tests: true
|
||||
|
||||
|
||||
#
|
||||
# SPECIAL AUTOTOOLS BUILDS
|
||||
#
|
||||
# These do not run tests and are not built into the matrix and instead
|
||||
# become NEW configs as their name would clobber one of the matrix
|
||||
# names (so make sure the names are UNIQUE).
|
||||
#
|
||||
|
||||
- name: "Ubuntu gcc Autotools v1.6 default API (build only)"
|
||||
os: ubuntu-latest
|
||||
build_type: "Debug"
|
||||
cpp: OFF
|
||||
fortran: OFF
|
||||
java: OFF
|
||||
ts: ON
|
||||
hl: OFF
|
||||
parallel: OFF
|
||||
toolchain: "config/toolchain/GCC.cmake"
|
||||
generator: "-G Ninja"
|
||||
- name: "TS Autotools GCC"
|
||||
artifact: "LinuxATS.tar.xz"
|
||||
os: ubuntu-latest
|
||||
build_type: "Release"
|
||||
cpp: disable
|
||||
fortran: disable
|
||||
java: disable
|
||||
ts: enable
|
||||
hl: disable
|
||||
cpp: enable
|
||||
fortran: enable
|
||||
java: enable
|
||||
parallel: disable
|
||||
mirror_vfd: enable
|
||||
direct_vfd: enable
|
||||
deprec_sym: enable
|
||||
default_api: v16
|
||||
szip: yes
|
||||
toolchain: ""
|
||||
generator: "autogen"
|
||||
# - name: "Ubuntu Parallel GCC"
|
||||
# artifact: "LinuxPar.tar.xz"
|
||||
# os: ubuntu-latest
|
||||
# build_type: "Release"
|
||||
# cpp: OFF
|
||||
# fortran: OFF
|
||||
# parallel: ON
|
||||
# toolchain: "config/toolchain/GCC.cmake"
|
||||
# generator: "-G Ninja"
|
||||
flags: ""
|
||||
run_tests: false
|
||||
thread_safety:
|
||||
enabled: false
|
||||
text: ""
|
||||
build_mode:
|
||||
text: " DBG"
|
||||
cmake: "Debug"
|
||||
autotools: "debug"
|
||||
|
||||
- name: "Ubuntu gcc Autotools v1.8 default API (build only)"
|
||||
os: ubuntu-latest
|
||||
cpp: enable
|
||||
fortran: enable
|
||||
java: enable
|
||||
parallel: disable
|
||||
mirror_vfd: enable
|
||||
direct_vfd: enable
|
||||
deprec_sym: enable
|
||||
default_api: v18
|
||||
szip: yes
|
||||
toolchain: ""
|
||||
generator: "autogen"
|
||||
flags: ""
|
||||
run_tests: false
|
||||
thread_safety:
|
||||
enabled: false
|
||||
text: ""
|
||||
build_mode:
|
||||
text: " DBG"
|
||||
cmake: "Debug"
|
||||
autotools: "debug"
|
||||
|
||||
- name: "Ubuntu gcc Autotools v1.10 default API (build only)"
|
||||
os: ubuntu-latest
|
||||
cpp: enable
|
||||
fortran: enable
|
||||
java: enable
|
||||
parallel: disable
|
||||
mirror_vfd: enable
|
||||
direct_vfd: enable
|
||||
deprec_sym: enable
|
||||
default_api: v110
|
||||
szip: yes
|
||||
toolchain: ""
|
||||
generator: "autogen"
|
||||
flags: ""
|
||||
run_tests: false
|
||||
thread_safety:
|
||||
enabled: false
|
||||
text: ""
|
||||
build_mode:
|
||||
text: " DBG"
|
||||
cmake: "Debug"
|
||||
autotools: "debug"
|
||||
|
||||
- name: "Ubuntu gcc Autotools v1.12 default API (build only)"
|
||||
os: ubuntu-latest
|
||||
cpp: enable
|
||||
fortran: enable
|
||||
java: enable
|
||||
parallel: disable
|
||||
mirror_vfd: enable
|
||||
direct_vfd: enable
|
||||
deprec_sym: enable
|
||||
default_api: v112
|
||||
szip: yes
|
||||
toolchain: ""
|
||||
generator: "autogen"
|
||||
flags: ""
|
||||
run_tests: false
|
||||
thread_safety:
|
||||
enabled: false
|
||||
text: ""
|
||||
build_mode:
|
||||
text: " DBG"
|
||||
cmake: "Debug"
|
||||
autotools: "debug"
|
||||
|
||||
- name: "Ubuntu gcc Autotools v1.14 default API (build only)"
|
||||
os: ubuntu-latest
|
||||
cpp: enable
|
||||
fortran: enable
|
||||
java: enable
|
||||
parallel: disable
|
||||
mirror_vfd: enable
|
||||
direct_vfd: enable
|
||||
deprec_sym: enable
|
||||
default_api: v114
|
||||
szip: yes
|
||||
toolchain: ""
|
||||
generator: "autogen"
|
||||
flags: ""
|
||||
run_tests: false
|
||||
thread_safety:
|
||||
enabled: false
|
||||
text: ""
|
||||
build_mode:
|
||||
text: " DBG"
|
||||
cmake: "Debug"
|
||||
autotools: "debug"
|
||||
|
||||
- name: "Ubuntu gcc Autotools no deprecated symbols (build only)"
|
||||
os: ubuntu-latest
|
||||
cpp: enable
|
||||
fortran: enable
|
||||
java: enable
|
||||
parallel: disable
|
||||
mirror_vfd: enable
|
||||
direct_vfd: enable
|
||||
deprec_sym: disable
|
||||
default_api: default
|
||||
szip: yes
|
||||
toolchain: ""
|
||||
generator: "autogen"
|
||||
flags: ""
|
||||
run_tests: false
|
||||
thread_safety:
|
||||
enabled: false
|
||||
text: ""
|
||||
build_mode:
|
||||
text: " DBG"
|
||||
cmake: "Debug"
|
||||
autotools: "debug"
|
||||
|
||||
- name: "Ubuntu gcc Autotools -Werror (build only)"
|
||||
os: ubuntu-latest
|
||||
cpp: enable
|
||||
fortran: disable
|
||||
java: disable
|
||||
parallel: disable
|
||||
mirror_vfd: disable
|
||||
direct_vfd: enable
|
||||
deprec_sym: enable
|
||||
default_api: v114
|
||||
szip: yes
|
||||
toolchain: ""
|
||||
generator: "autogen"
|
||||
flags: "CFLAGS=-Werror"
|
||||
run_tests: false
|
||||
thread_safety:
|
||||
enabled: false
|
||||
text: ""
|
||||
build_mode:
|
||||
text: " DBG"
|
||||
cmake: "Debug"
|
||||
autotools: "debug"
|
||||
|
||||
- name: "Ubuntu gcc Autotools -Werror (build only)"
|
||||
os: ubuntu-latest
|
||||
cpp: enable
|
||||
fortran: disable
|
||||
java: disable
|
||||
parallel: disable
|
||||
mirror_vfd: disable
|
||||
direct_vfd: enable
|
||||
deprec_sym: enable
|
||||
default_api: v114
|
||||
szip: yes
|
||||
toolchain: ""
|
||||
generator: "autogen"
|
||||
flags: "CFLAGS=-Werror"
|
||||
run_tests: false
|
||||
thread_safety:
|
||||
enabled: false
|
||||
text: ""
|
||||
build_mode:
|
||||
text: " REL"
|
||||
cmake: "Release"
|
||||
autotools: "production"
|
||||
|
||||
# Sets the job's name from the properties
|
||||
name: "${{ matrix.name }}${{ matrix.build_mode.text }}${{ matrix.thread_safety.text }}"
|
||||
|
||||
# Don't run the action if the commit message says to skip CI
|
||||
if: "!contains(github.event.head_commit.message, 'skip-ci')"
|
||||
|
||||
name: ${{ matrix.name }}
|
||||
# The type of runner that the job will run on
|
||||
runs-on: ${{ matrix.os }}
|
||||
if: "!contains(github.event.head_commit.message, 'skip-ci')"
|
||||
|
||||
# Steps represent a sequence of tasks that will be executed as part of the job
|
||||
steps:
|
||||
- name: Install Dependencies (Linux)
|
||||
run: sudo apt-get install ninja-build
|
||||
if: matrix.os == 'ubuntu-latest'
|
||||
- name: Install Autotools Dependencies (Linux)
|
||||
run: sudo apt-get install automake autoconf libtool libtool-bin
|
||||
if: matrix.generator == 'autogen'
|
||||
- name: Install Dependencies (Windows)
|
||||
run: choco install ninja
|
||||
if: matrix.os == 'windows-latest'
|
||||
- name: Install Dependencies (macOS)
|
||||
run: brew install ninja
|
||||
if: matrix.os == 'macos-latest'
|
||||
- name: Set environment for MSVC (Windows)
|
||||
if: matrix.os == 'windows-latest'
|
||||
run: |
|
||||
# Set these env vars so cmake picks the correct compiler
|
||||
echo "CXX=cl.exe" >> $GITHUB_ENV
|
||||
echo "CC=cl.exe" >> $GITHUB_ENV
|
||||
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- name: Get Sources
|
||||
uses: actions/checkout@v2
|
||||
#
|
||||
# SETUP
|
||||
#
|
||||
|
||||
- name: Autotools Configure
|
||||
if: matrix.generator == 'autogen'
|
||||
run: |
|
||||
sh ./autogen.sh
|
||||
sh ./bin/chkmanifest
|
||||
mkdir "${{ runner.workspace }}/build"
|
||||
cd "${{ runner.workspace }}/build"
|
||||
$GITHUB_WORKSPACE/configure --enable-shared --${{ matrix.ts }}-threadsafe --${{ matrix.hl }}-hl --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java
|
||||
shell: bash
|
||||
#Useful for debugging
|
||||
- name: Dump matrix context
|
||||
run: echo '${{ toJSON(matrix) }}'
|
||||
|
||||
- name: Configure
|
||||
if: matrix.generator != 'autogen'
|
||||
run: |
|
||||
mkdir "${{ runner.workspace }}/build"
|
||||
cd "${{ runner.workspace }}/build"
|
||||
cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=${{ matrix.ts }} -DHDF5_BUILD_HL_LIB:BOOL=${{ matrix.hl }} -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} $GITHUB_WORKSPACE
|
||||
shell: bash
|
||||
- name: Install CMake Dependencies (Linux)
|
||||
run: sudo apt-get install ninja-build
|
||||
if: matrix.os == 'ubuntu-latest'
|
||||
|
||||
- name: Autotools Build
|
||||
if: matrix.generator == 'autogen'
|
||||
run: make
|
||||
working-directory: ${{ runner.workspace }}/build
|
||||
- name: Install Autotools Dependencies (Linux, serial)
|
||||
run: |
|
||||
sudo apt update
|
||||
sudo apt install automake autoconf libtool libtool-bin
|
||||
sudo apt install gcc-11 g++-11 gfortran-11
|
||||
echo "CC=gcc-11" >> $GITHUB_ENV
|
||||
echo "CXX=g++-11" >> $GITHUB_ENV
|
||||
echo "FC=gfortran-11" >> $GITHUB_ENV
|
||||
sudo apt install libaec0 libaec-dev
|
||||
if: (matrix.generator == 'autogen') && (matrix.parallel != 'enable')
|
||||
|
||||
- name: Build
|
||||
if: matrix.generator != 'autogen'
|
||||
run: cmake --build . --config ${{ matrix.build_type }}
|
||||
working-directory: ${{ runner.workspace }}/build
|
||||
- name: Install Autotools Dependencies (Linux, parallel)
|
||||
run: |
|
||||
sudo apt update
|
||||
sudo apt install automake autoconf libtool libtool-bin
|
||||
sudo apt install openmpi-bin openmpi-common mpi-default-dev
|
||||
echo "CC=mpicc" >> $GITHUB_ENV
|
||||
echo "FC=mpif90" >> $GITHUB_ENV
|
||||
sudo apt install libaec0 libaec-dev
|
||||
if: (matrix.generator == 'autogen') && (matrix.parallel == 'enable')
|
||||
|
||||
- name: Autotools Test
|
||||
if: matrix.generator == 'autogen'
|
||||
run: make check
|
||||
working-directory: ${{ runner.workspace }}/build
|
||||
- name: Install Dependencies (Windows)
|
||||
run: choco install ninja
|
||||
if: matrix.os == 'windows-latest'
|
||||
|
||||
- name: Test
|
||||
if: matrix.generator != 'autogen'
|
||||
run: ctest --build . -C ${{ matrix.build_type }} -V
|
||||
working-directory: ${{ runner.workspace }}/build
|
||||
- name: Install Dependencies (macOS)
|
||||
run: brew install ninja
|
||||
if: matrix.os == 'macos-11'
|
||||
|
||||
- name: Set environment for MSVC (Windows)
|
||||
run: |
|
||||
# Set these environment variables so CMake picks the correct compiler
|
||||
echo "CXX=cl.exe" >> $GITHUB_ENV
|
||||
echo "CC=cl.exe" >> $GITHUB_ENV
|
||||
if: matrix.os == 'windows-latest'
|
||||
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- name: Get Sources
|
||||
uses: actions/checkout@v3
|
||||
|
||||
#
|
||||
# AUTOTOOLS CONFIGURE
|
||||
#
|
||||
|
||||
- name: Autotools Configure
|
||||
run: |
|
||||
sh ./autogen.sh
|
||||
mkdir "${{ runner.workspace }}/build"
|
||||
cd "${{ runner.workspace }}/build"
|
||||
${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --${{ matrix.deprec_sym }}-deprecated-symbols --with-default-api-version=${{ matrix.default_api }} --enable-shared --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --with-szlib=${{ matrix.szip }}
|
||||
shell: bash
|
||||
if: (matrix.generator == 'autogen') && (! matrix.thread_safe.enabled)
|
||||
|
||||
- name: Autotools Configure (Thread-Safe)
|
||||
run: |
|
||||
sh ./autogen.sh
|
||||
mkdir "${{ runner.workspace }}/build"
|
||||
cd "${{ runner.workspace }}/build"
|
||||
${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --enable-shared --enable-threadsafe --disable-hl --${{ matrix.parallel }}-parallel --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --with-szlib=${{ matrix.szip }}
|
||||
shell: bash
|
||||
if: (matrix.generator == 'autogen') && (matrix.thread_safe.enabled)
|
||||
|
||||
#
|
||||
# CMAKE CONFIGURE
|
||||
#
|
||||
|
||||
- name: CMake Configure
|
||||
run: |
|
||||
mkdir "${{ runner.workspace }}/build"
|
||||
cd "${{ runner.workspace }}/build"
|
||||
cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE
|
||||
shell: bash
|
||||
if: (matrix.generator != 'autogen') && (! matrix.thread_safe.enabled)
|
||||
|
||||
|
||||
- name: CMake Configure (Thread-Safe)
|
||||
run: |
|
||||
mkdir "${{ runner.workspace }}/build"
|
||||
cd "${{ runner.workspace }}/build"
|
||||
cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_HL_LIB:BOOL=OFF -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE
|
||||
shell: bash
|
||||
if: (matrix.generator != 'autogen') && (matrix.thread_safe.enabled)
|
||||
|
||||
#
|
||||
# BUILD
|
||||
#
|
||||
|
||||
- name: Autotools Build
|
||||
run: make
|
||||
working-directory: ${{ runner.workspace }}/build
|
||||
if: matrix.generator == 'autogen'
|
||||
|
||||
- name: CMake Build
|
||||
run: cmake --build . --config ${{ matrix.build_mode.cmake }}
|
||||
working-directory: ${{ runner.workspace }}/build
|
||||
if: matrix.generator != 'autogen'
|
||||
|
||||
#
|
||||
# RUN TESTS
|
||||
#
|
||||
|
||||
- name: Autotools Run Tests
|
||||
run: make check
|
||||
working-directory: ${{ runner.workspace }}/build
|
||||
if: (matrix.generator == 'autogen') && (matrix.run_tests)
|
||||
|
||||
- name: CMake Run Tests
|
||||
run: ctest --build . -C ${{ matrix.build_mode.cmake }} -V
|
||||
working-directory: ${{ runner.workspace }}/build
|
||||
# Skip Debug MSVC while we investigate H5L Java test timeouts
|
||||
if: (matrix.generator != 'autogen') && (matrix.run_tests) && ! ((matrix.name == 'Windows MSVC CMake') && (matrix.build_mode.cmake == 'Debug'))
|
||||
|
||||
#
|
||||
# INSTALL (note that this runs even when we don't run the tests)
|
||||
#
|
||||
|
||||
- name: Autotools Install
|
||||
run: make install
|
||||
working-directory: ${{ runner.workspace }}/build
|
||||
if: (matrix.generator == 'autogen')
|
||||
|
||||
- name: Autotools Verify Install
|
||||
run: make check-install
|
||||
working-directory: ${{ runner.workspace }}/build
|
||||
if: (matrix.generator == 'autogen')
|
||||
|
||||
214
.github/workflows/pr-check.yml
vendored
214
.github/workflows/pr-check.yml
vendored
@@ -1,214 +0,0 @@
|
||||
name: hdf5 dev CI
|
||||
|
||||
# Controls when the action will run. Triggers the workflow on push or pull request
|
||||
on:
|
||||
pull_request:
|
||||
branches: [ develop, hdf5_1_12, hdf5_1_10, hdf5_1_8 ]
|
||||
|
||||
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
|
||||
jobs:
|
||||
# This workflow contains a single job called "build"
|
||||
build:
|
||||
strategy:
|
||||
# fail-fast: false
|
||||
matrix:
|
||||
name: ["Windows Latest MSVC", "Ubuntu Latest GCC", "Ubuntu Debug GCC", "macOS Latest Clang", "Ubuntu Autotools GCC", "Windows TS MSVC", "Ubuntu TS GCC", "TS Debug GCC", "macOS TS Clang", "TS Autotools GCC"]
|
||||
include:
|
||||
- name: "Windows Latest MSVC"
|
||||
artifact: "Windows-MSVC.tar.xz"
|
||||
os: windows-2022
|
||||
build_type: "Release"
|
||||
toolchain: ""
|
||||
cpp: ON
|
||||
fortran: OFF
|
||||
java: ON
|
||||
ts: OFF
|
||||
hl: ON
|
||||
parallel: OFF
|
||||
generator: "-G \"Visual Studio 17 2022\" -A x64"
|
||||
- name: "Ubuntu Latest GCC"
|
||||
artifact: "Linux.tar.xz"
|
||||
os: ubuntu-latest
|
||||
build_type: "Release"
|
||||
cpp: ON
|
||||
fortran: ON
|
||||
java: ON
|
||||
ts: OFF
|
||||
hl: ON
|
||||
parallel: OFF
|
||||
toolchain: "config/toolchain/GCC.cmake"
|
||||
generator: "-G Ninja"
|
||||
- name: "macOS Latest Clang"
|
||||
artifact: "macOS.tar.xz"
|
||||
os: macos-latest
|
||||
build_type: "Release"
|
||||
cpp: ON
|
||||
fortran: OFF
|
||||
java: ON
|
||||
ts: OFF
|
||||
hl: ON
|
||||
parallel: OFF
|
||||
toolchain: "config/toolchain/clang.cmake"
|
||||
generator: "-G Ninja"
|
||||
- name: "Ubuntu Debug GCC"
|
||||
artifact: "LinuxDBG.tar.xz"
|
||||
os: ubuntu-latest
|
||||
build_type: "Debug"
|
||||
cpp: ON
|
||||
fortran: OFF
|
||||
java: OFF
|
||||
ts: OFF
|
||||
hl: ON
|
||||
parallel: OFF
|
||||
toolchain: "config/toolchain/GCC.cmake"
|
||||
generator: "-G Ninja"
|
||||
- name: "Ubuntu Autotools GCC"
|
||||
artifact: "LinuxA.tar.xz"
|
||||
os: ubuntu-latest
|
||||
build_type: "Release"
|
||||
cpp: enable
|
||||
fortran: enable
|
||||
java: enable
|
||||
ts: disable
|
||||
hl: enable
|
||||
parallel: disable
|
||||
toolchain: ""
|
||||
generator: "autogen"
|
||||
# Threadsafe runs
|
||||
- name: "Windows TS MSVC"
|
||||
artifact: "Windows-MSVCTS.tar.xz"
|
||||
os: windows-2019
|
||||
build_type: "Release"
|
||||
toolchain: ""
|
||||
cpp: OFF
|
||||
fortran: OFF
|
||||
java: OFF
|
||||
ts: ON
|
||||
hl: OFF
|
||||
parallel: OFF
|
||||
generator: "-G \"Visual Studio 16 2019\" -A x64"
|
||||
- name: "Ubuntu TS GCC"
|
||||
artifact: "LinuxTS.tar.xz"
|
||||
os: ubuntu-latest
|
||||
build_type: "Release"
|
||||
cpp: OFF
|
||||
fortran: OFF
|
||||
java: OFF
|
||||
ts: ON
|
||||
hl: OFF
|
||||
parallel: OFF
|
||||
toolchain: "config/toolchain/GCC.cmake"
|
||||
generator: "-G Ninja"
|
||||
- name: "macOS TS Clang"
|
||||
artifact: "macOSTS.tar.xz"
|
||||
os: macos-latest
|
||||
build_type: "Release"
|
||||
cpp: OFF
|
||||
fortran: OFF
|
||||
java: OFF
|
||||
ts: ON
|
||||
hl: OFF
|
||||
parallel: OFF
|
||||
toolchain: "config/toolchain/clang.cmake"
|
||||
generator: "-G Ninja"
|
||||
- name: "TS Debug GCC"
|
||||
artifact: "LinuxTSDBG.tar.xz"
|
||||
os: ubuntu-latest
|
||||
build_type: "Debug"
|
||||
cpp: OFF
|
||||
fortran: OFF
|
||||
java: OFF
|
||||
ts: ON
|
||||
hl: OFF
|
||||
parallel: OFF
|
||||
toolchain: "config/toolchain/GCC.cmake"
|
||||
generator: "-G Ninja"
|
||||
- name: "TS Autotools GCC"
|
||||
artifact: "LinuxATS.tar.xz"
|
||||
os: ubuntu-latest
|
||||
build_type: "Release"
|
||||
cpp: disable
|
||||
fortran: disable
|
||||
java: disable
|
||||
ts: enable
|
||||
hl: disable
|
||||
parallel: disable
|
||||
toolchain: ""
|
||||
generator: "autogen"
|
||||
# - name: "Ubuntu Parallel GCC"
|
||||
# artifact: "LinuxPar.tar.xz"
|
||||
# os: ubuntu-latest
|
||||
# build_type: "Release"
|
||||
# cpp: OFF
|
||||
# fortran: OFF
|
||||
# parallel: ON
|
||||
# toolchain: "config/toolchain/GCC.cmake"
|
||||
# generator: "-G Ninja"
|
||||
|
||||
name: ${{ matrix.name }}
|
||||
# The type of runner that the job will run on
|
||||
runs-on: ${{ matrix.os }}
|
||||
if: "!contains(github.event.head_commit.message, 'skip-ci')"
|
||||
|
||||
# Steps represent a sequence of tasks that will be executed as part of the job
|
||||
steps:
|
||||
- name: Install Dependencies (Linux)
|
||||
run: sudo apt-get install ninja-build
|
||||
if: matrix.os == 'ubuntu-latest'
|
||||
- name: Install Autotools Dependencies (Linux)
|
||||
run: sudo apt-get install automake autoconf libtool libtool-bin
|
||||
if: matrix.generator == 'autogen'
|
||||
- name: Install Dependencies (Windows)
|
||||
run: choco install ninja
|
||||
if: matrix.os == 'windows-latest'
|
||||
- name: Install Dependencies (macOS)
|
||||
run: brew install ninja
|
||||
if: matrix.os == 'macos-latest'
|
||||
- name: Set environment for MSVC (Windows)
|
||||
if: matrix.os == 'windows-latest'
|
||||
run: |
|
||||
# Set these env vars so cmake picks the correct compiler
|
||||
echo "CXX=cl.exe" >> $GITHUB_ENV
|
||||
echo "CC=cl.exe" >> $GITHUB_ENV
|
||||
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- name: Get Sources
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Autotools Configure
|
||||
if: matrix.generator == 'autogen'
|
||||
run: |
|
||||
sh ./autogen.sh
|
||||
sh ./bin/chkmanifest
|
||||
mkdir "${{ runner.workspace }}/build"
|
||||
cd "${{ runner.workspace }}/build"
|
||||
$GITHUB_WORKSPACE/configure --enable-shared --${{ matrix.ts }}-threadsafe --${{ matrix.hl }}-hl --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java
|
||||
shell: bash
|
||||
|
||||
- name: Configure
|
||||
if: matrix.generator != 'autogen'
|
||||
run: |
|
||||
mkdir "${{ runner.workspace }}/build"
|
||||
cd "${{ runner.workspace }}/build"
|
||||
cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=${{ matrix.ts }} -DHDF5_BUILD_HL_LIB:BOOL=${{ matrix.hl }} -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} $GITHUB_WORKSPACE
|
||||
shell: bash
|
||||
|
||||
- name: Autotools Build
|
||||
if: matrix.generator == 'autogen'
|
||||
run: make
|
||||
working-directory: ${{ runner.workspace }}/build
|
||||
|
||||
- name: Build
|
||||
if: matrix.generator != 'autogen'
|
||||
run: cmake --build . --config ${{ matrix.build_type }}
|
||||
working-directory: ${{ runner.workspace }}/build
|
||||
|
||||
- name: Autotools Test
|
||||
if: matrix.generator == 'autogen'
|
||||
run: make check
|
||||
working-directory: ${{ runner.workspace }}/build
|
||||
|
||||
- name: Test
|
||||
if: matrix.generator != 'autogen'
|
||||
run: ctest --build . -C ${{ matrix.build_type }} -V
|
||||
working-directory: ${{ runner.workspace }}/build
|
||||
@@ -1,5 +1,4 @@
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
@@ -19,7 +19,7 @@ include (FetchContent)
|
||||
set (HDF5_ALLOW_EXTERNAL_SUPPORT "NO" CACHE STRING "Allow External Library Building (NO GIT TGZ)")
|
||||
set_property (CACHE HDF5_ALLOW_EXTERNAL_SUPPORT PROPERTY STRINGS NO GIT TGZ)
|
||||
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
|
||||
option (ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" 1)
|
||||
option (ZLIB_USE_EXTERNAL "Use External Library Building for HDF5_ZLIB" 1)
|
||||
option (SZIP_USE_EXTERNAL "Use External Library Building for SZIP" 1)
|
||||
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT")
|
||||
set (ZLIB_URL ${ZLIB_GIT_URL} CACHE STRING "Path to zlib git repository")
|
||||
@@ -33,9 +33,7 @@ if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MAT
|
||||
set (ZLIB_URL ${TGZPATH}/${ZLIB_TGZ_NAME})
|
||||
if (NOT EXISTS "${ZLIB_URL}")
|
||||
set (HDF5_ENABLE_Z_LIB_SUPPORT OFF CACHE BOOL "" FORCE)
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "Filter ZLIB file ${ZLIB_URL} not found")
|
||||
endif ()
|
||||
message (VERBOSE "Filter ZLIB file ${ZLIB_URL} not found")
|
||||
endif ()
|
||||
set (SZIP_URL ${TGZPATH}/${SZIP_TGZ_NAME})
|
||||
if (USE_LIBAEC)
|
||||
@@ -43,9 +41,7 @@ if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MAT
|
||||
endif ()
|
||||
if (NOT EXISTS "${SZIP_URL}")
|
||||
set (HDF5_ENABLE_SZIP_SUPPORT OFF CACHE BOOL "" FORCE)
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "Filter SZIP file ${SZIP_URL} not found")
|
||||
endif ()
|
||||
message (VERBOSE "Filter SZIP file ${SZIP_URL} not found")
|
||||
endif ()
|
||||
else ()
|
||||
set (ZLIB_USE_EXTERNAL 0)
|
||||
@@ -81,9 +77,7 @@ if (HDF5_ENABLE_Z_LIB_SUPPORT)
|
||||
set (H5_HAVE_FILTER_DEFLATE 1)
|
||||
set (H5_HAVE_ZLIB_H 1)
|
||||
set (H5_HAVE_LIBZ 1)
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "Filter ZLIB is built")
|
||||
endif ()
|
||||
message (VERBOSE "Filter HDF5_ZLIB is built")
|
||||
else ()
|
||||
message (FATAL_ERROR " ZLib is Required for ZLib support in HDF5")
|
||||
endif ()
|
||||
@@ -99,9 +93,7 @@ if (HDF5_ENABLE_Z_LIB_SUPPORT)
|
||||
endif ()
|
||||
set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${ZLIB_STATIC_LIBRARY})
|
||||
INCLUDE_DIRECTORIES (${ZLIB_INCLUDE_DIRS})
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "Filter ZLIB is ON")
|
||||
endif ()
|
||||
message (VERBOSE "Filter HDF5_ZLIB is ON")
|
||||
endif ()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
@@ -142,16 +134,12 @@ if (HDF5_ENABLE_SZIP_SUPPORT)
|
||||
set (H5_HAVE_FILTER_SZIP 1)
|
||||
set (H5_HAVE_SZLIB_H 1)
|
||||
set (H5_HAVE_LIBSZ 1)
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "Filter SZIP is built")
|
||||
endif ()
|
||||
message (VERBOSE "Filter SZIP is built")
|
||||
if (USE_LIBAEC)
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "... with library AEC")
|
||||
endif ()
|
||||
set (SZ_PACKAGE_NAME ${LIBAEC_PACKAGE_NAME})
|
||||
message (VERBOSE "... with library AEC")
|
||||
set (SZIP_PACKAGE_NAME ${LIBAEC_PACKAGE_NAME})
|
||||
else ()
|
||||
set (SZ_PACKAGE_NAME ${SZIP_PACKAGE_NAME})
|
||||
set (SZIP_PACKAGE_NAME ${SZIP_PACKAGE_NAME})
|
||||
endif ()
|
||||
else ()
|
||||
message (FATAL_ERROR "SZIP is Required for SZIP support in HDF5")
|
||||
@@ -159,9 +147,7 @@ if (HDF5_ENABLE_SZIP_SUPPORT)
|
||||
endif ()
|
||||
set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_STATIC_LIBRARY})
|
||||
INCLUDE_DIRECTORIES (${SZIP_INCLUDE_DIRS})
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "Filter SZIP is ON")
|
||||
endif ()
|
||||
message (VERBOSE "Filter SZIP is ON")
|
||||
if (H5_HAVE_FILTER_SZIP)
|
||||
set (EXTERNAL_FILTERS "${EXTERNAL_FILTERS} DECODE")
|
||||
endif ()
|
||||
|
||||
@@ -52,7 +52,7 @@ endif ()
|
||||
# Set includes needed for build
|
||||
#-----------------------------------------------------------------------------
|
||||
set (HDF5_INCLUDES_BUILD_TIME
|
||||
${HDF5_SRC_DIR} ${HDF5_CPP_SRC_DIR} ${HDF5_HL_SRC_DIR}
|
||||
${HDF5_SRC_INCLUDE_DIRS} ${HDF5_CPP_SRC_DIR} ${HDF5_HL_SRC_DIR}
|
||||
${HDF5_TOOLS_SRC_DIR} ${HDF5_SRC_BINARY_DIR}
|
||||
)
|
||||
|
||||
@@ -182,7 +182,7 @@ if (HDF5_PACK_EXAMPLES)
|
||||
endif ()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Configure the README.txt file for the binary package
|
||||
# Configure the README.md file for the binary package
|
||||
#-----------------------------------------------------------------------------
|
||||
HDF_README_PROPERTIES(HDF5_BUILD_FORTRAN)
|
||||
|
||||
@@ -242,7 +242,7 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED)
|
||||
endif ()
|
||||
install (
|
||||
FILES ${release_files}
|
||||
DESTINATION ${HDF5_INSTALL_DATA_DIR}
|
||||
DESTINATION ${HDF5_INSTALL_DOC_DIR}
|
||||
COMPONENT hdfdocuments
|
||||
)
|
||||
endif ()
|
||||
@@ -273,7 +273,7 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES)
|
||||
else ()
|
||||
set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}/${CPACK_PACKAGE_NAME}/${CPACK_PACKAGE_VERSION}")
|
||||
endif ()
|
||||
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_EXT_DIR}/hdf.bmp")
|
||||
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_DIR}/hdf.bmp")
|
||||
|
||||
set (CPACK_GENERATOR "TGZ")
|
||||
if (WIN32)
|
||||
@@ -296,10 +296,10 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES)
|
||||
endif ()
|
||||
# set the install/unistall icon used for the installer itself
|
||||
# There is a bug in NSI that does not handle full unix paths properly.
|
||||
set (CPACK_NSIS_MUI_ICON "${HDF_RESOURCES_EXT_DIR}\\\\hdf.ico")
|
||||
set (CPACK_NSIS_MUI_UNIICON "${HDF_RESOURCES_EXT_DIR}\\\\hdf.ico")
|
||||
set (CPACK_NSIS_MUI_ICON "${HDF_RESOURCES_DIR}\\\\hdf.ico")
|
||||
set (CPACK_NSIS_MUI_UNIICON "${HDF_RESOURCES_DIR}\\\\hdf.ico")
|
||||
# set the package header icon for MUI
|
||||
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_EXT_DIR}\\\\hdf.bmp")
|
||||
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_DIR}\\\\hdf.bmp")
|
||||
set (CPACK_NSIS_DISPLAY_NAME "${CPACK_NSIS_PACKAGE_NAME}")
|
||||
if (OVERRIDE_INSTALL_VERSION)
|
||||
set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}\\\\${CPACK_PACKAGE_NAME}\\\\${OVERRIDE_INSTALL_VERSION}")
|
||||
@@ -329,7 +329,7 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES)
|
||||
set (CPACK_RESOURCE_FILE_LICENSE "${HDF5_BINARY_DIR}/COPYING.txt")
|
||||
# .. variable:: CPACK_WIX_PRODUCT_ICON
|
||||
# The Icon shown next to the program name in Add/Remove programs.
|
||||
set(CPACK_WIX_PRODUCT_ICON "${HDF_RESOURCES_EXT_DIR}\\\\hdf.ico")
|
||||
set(CPACK_WIX_PRODUCT_ICON "${HDF_RESOURCES_DIR}\\\\hdf.ico")
|
||||
#
|
||||
# .. variable:: CPACK_WIX_UI_BANNER
|
||||
#
|
||||
@@ -360,14 +360,14 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES)
|
||||
list (APPEND CPACK_GENERATOR "DragNDrop")
|
||||
set (CPACK_COMPONENTS_ALL_IN_ONE_PACKAGE ON)
|
||||
set (CPACK_PACKAGING_INSTALL_PREFIX "/${CPACK_PACKAGE_INSTALL_DIRECTORY}")
|
||||
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_EXT_DIR}/hdf.icns")
|
||||
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_DIR}/hdf.icns")
|
||||
|
||||
option (HDF5_PACK_MACOSX_FRAMEWORK "Package the HDF5 Library in a Frameworks" OFF)
|
||||
if (HDF5_PACK_MACOSX_FRAMEWORK AND HDF5_BUILD_FRAMEWORKS)
|
||||
set (CPACK_BUNDLE_NAME "${HDF5_PACKAGE_STRING}")
|
||||
set (CPACK_BUNDLE_LOCATION "/") # make sure CMAKE_INSTALL_PREFIX ends in /
|
||||
set (CMAKE_INSTALL_PREFIX "/${CPACK_BUNDLE_NAME}.framework/Versions/${CPACK_PACKAGE_VERSION}/${CPACK_PACKAGE_NAME}/")
|
||||
set (CPACK_BUNDLE_ICON "${HDF_RESOURCES_EXT_DIR}/hdf.icns")
|
||||
set (CPACK_BUNDLE_ICON "${HDF_RESOURCES_DIR}/hdf.icns")
|
||||
set (CPACK_BUNDLE_PLIST "${HDF5_BINARY_DIR}/CMakeFiles/Info.plist")
|
||||
set (CPACK_SHORT_VERSION_STRING "${CPACK_PACKAGE_VERSION}")
|
||||
#-----------------------------------------------------------------------------
|
||||
@@ -382,7 +382,7 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES)
|
||||
${HDF5_BINARY_DIR}/CMakeFiles/PkgInfo @ONLY
|
||||
)
|
||||
configure_file (
|
||||
${HDF_RESOURCES_EXT_DIR}/version.plist.in
|
||||
${HDF_RESOURCES_DIR}/version.plist.in
|
||||
${HDF5_BINARY_DIR}/CMakeFiles/version.plist @ONLY
|
||||
)
|
||||
install (
|
||||
@@ -395,13 +395,19 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES)
|
||||
set (CPACK_PACKAGING_INSTALL_PREFIX "/${CPACK_PACKAGE_INSTALL_DIRECTORY}")
|
||||
set (CPACK_COMPONENTS_ALL_IN_ONE_PACKAGE ON)
|
||||
|
||||
list (APPEND CPACK_GENERATOR "DEB")
|
||||
set (CPACK_DEBIAN_PACKAGE_SECTION "Libraries")
|
||||
set (CPACK_DEBIAN_PACKAGE_MAINTAINER "${HDF5_PACKAGE_BUGREPORT}")
|
||||
|
||||
# list (APPEND CPACK_GENERATOR "RPM")
|
||||
list (APPEND CPACK_GENERATOR "RPM")
|
||||
set (CPACK_RPM_PACKAGE_RELEASE "1")
|
||||
set (CPACK_RPM_PACKAGE_RELEASE_DIST ON)
|
||||
set (CPACK_RPM_COMPONENT_INSTALL ON)
|
||||
set (CPACK_RPM_PACKAGE_RELOCATABLE ON)
|
||||
set (CPACK_RPM_FILE_NAME "RPM-DEFAULT")
|
||||
set (CPACK_RPM_PACKAGE_NAME "${CPACK_PACKAGE_NAME}")
|
||||
set (CPACK_RPM_PACKAGE_VERSION "${CPACK_PACKAGE_VERSION}")
|
||||
set (CPACK_RPM_PACKAGE_VENDOR "${CPACK_PACKAGE_VENDOR}")
|
||||
set (CPACK_RPM_PACKAGE_LICENSE "BSD-style")
|
||||
set (CPACK_RPM_PACKAGE_GROUP "Development/Libraries")
|
||||
set (CPACK_RPM_PACKAGE_URL "${HDF5_PACKAGE_URL}")
|
||||
@@ -442,11 +448,11 @@ The HDF5 data model, file format, API, library, and tools are open and distribut
|
||||
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
|
||||
if (ZLIB_FOUND AND ZLIB_USE_EXTERNAL)
|
||||
if (WIN32)
|
||||
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;ALL;/")
|
||||
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;ALL;/")
|
||||
else ()
|
||||
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;libraries;/")
|
||||
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;headers;/")
|
||||
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;configinstall;/")
|
||||
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;libraries;/")
|
||||
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;headers;/")
|
||||
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;configinstall;/")
|
||||
endif ()
|
||||
endif ()
|
||||
if (SZIP_FOUND AND SZIP_USE_EXTERNAL)
|
||||
|
||||
221
CMakeLists.txt
221
CMakeLists.txt
@@ -1,14 +1,22 @@
|
||||
cmake_minimum_required (VERSION 3.12)
|
||||
cmake_minimum_required (VERSION 3.18)
|
||||
project (HDF5 C)
|
||||
|
||||
if (POLICY CMP0074)
|
||||
# find_package() uses <PackageName>_ROOT variables.
|
||||
cmake_policy (SET CMP0074 NEW)
|
||||
endif ()
|
||||
|
||||
if (POLICY CMP0083)
|
||||
# To control generation of Position Independent Executable (PIE) or not,
|
||||
# some flags are required at link time.
|
||||
cmake_policy (SET CMP0083 NEW)
|
||||
endif ()
|
||||
|
||||
# Avoid warning about DOWNLOAD_EXTRACT_TIMESTAMP in CMake 3.24:
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.24.0")
|
||||
cmake_policy(SET CMP0135 NEW)
|
||||
endif()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Instructions for use : Normal Build
|
||||
#
|
||||
@@ -36,16 +44,6 @@ if (CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR)
|
||||
)
|
||||
endif ()
|
||||
|
||||
# CMake version 3.14 added option --ignore-eol to compare files
|
||||
# cmake -E compare_files --ignore-eol file1 file2
|
||||
set (CMAKE_IGNORE_EOL "--ignore-eol")
|
||||
if (CMAKE_VERSION VERSION_LESS "3.14.0")
|
||||
set (CMAKE_IGNORE_EOL "")
|
||||
if (WIN32)
|
||||
message (FATAL_ERROR "Windows builds require a minimum of CMake 3.14")
|
||||
endif()
|
||||
endif ()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Instructions for use : Sub-Project Build
|
||||
#
|
||||
@@ -212,7 +210,6 @@ set (HDF5_HL_F90_C_LIBSH_TARGET "${HDF5_HL_F90_C_LIB_CORENAME}-shared")
|
||||
#-----------------------------------------------------------------------------
|
||||
set (HDF_CONFIG_DIR ${HDF5_SOURCE_DIR}/config)
|
||||
set (HDF_RESOURCES_DIR ${HDF5_SOURCE_DIR}/config/cmake)
|
||||
set (HDF_RESOURCES_EXT_DIR ${HDF5_SOURCE_DIR}/config/cmake_ext_mod)
|
||||
set (HDF5_SRC_DIR ${HDF5_SOURCE_DIR}/src)
|
||||
set (HDF5_TEST_SRC_DIR ${HDF5_SOURCE_DIR}/test)
|
||||
set (HDF5_CPP_SRC_DIR ${HDF5_SOURCE_DIR}/c++)
|
||||
@@ -229,11 +226,13 @@ set (HDF5_JAVA_JNI_SRC_DIR ${HDF5_SOURCE_DIR}/java/src/jni)
|
||||
set (HDF5_JAVA_HDF5_SRC_DIR ${HDF5_SOURCE_DIR}/java/src/hdf)
|
||||
set (HDF5_JAVA_TEST_SRC_DIR ${HDF5_SOURCE_DIR}/java/test)
|
||||
set (HDF5_JAVA_LIB_DIR ${HDF5_SOURCE_DIR}/java/lib)
|
||||
set (HDF5_JAVA_LOGGING_JAR ${HDF5_SOURCE_DIR}/java/lib/slf4j-api-1.7.25.jar)
|
||||
set (HDF5_JAVA_LOGGING_NOP_JAR ${HDF5_SOURCE_DIR}/java/lib/ext/slf4j-nop-1.7.25.jar)
|
||||
set (HDF5_JAVA_LOGGING_SIMPLE_JAR ${HDF5_SOURCE_DIR}/java/lib/ext/slf4j-simple-1.7.25.jar)
|
||||
set (HDF5_JAVA_LOGGING_JAR ${HDF5_SOURCE_DIR}/java/lib/slf4j-api-1.7.33.jar)
|
||||
set (HDF5_JAVA_LOGGING_NOP_JAR ${HDF5_SOURCE_DIR}/java/lib/ext/slf4j-nop-1.7.33.jar)
|
||||
set (HDF5_JAVA_LOGGING_SIMPLE_JAR ${HDF5_SOURCE_DIR}/java/lib/ext/slf4j-simple-1.7.33.jar)
|
||||
set (HDF5_DOXYGEN_DIR ${HDF5_SOURCE_DIR}/doxygen)
|
||||
|
||||
set (HDF5_SRC_INCLUDE_DIRS ${HDF5_SRC_DIR})
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# parse the full version number from H5public.h and include in H5_VERS_INFO
|
||||
#-----------------------------------------------------------------------------
|
||||
@@ -246,9 +245,7 @@ string (REGEX REPLACE ".*#define[ \t]+H5_VERS_RELEASE[ \t]+([0-9]*).*$"
|
||||
"\\1" H5_VERS_RELEASE ${_h5public_h_contents})
|
||||
string (REGEX REPLACE ".*#define[ \t]+H5_VERS_SUBRELEASE[ \t]+\"([0-9A-Za-z._-]*)\".*$"
|
||||
"\\1" H5_VERS_SUBRELEASE ${_h5public_h_contents})
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (TRACE "VERSION: ${H5_VERS_MAJOR}.${H5_VERS_MINOR}.${H5_VERS_RELEASE}-${H5_VERS_SUBRELEASE}")
|
||||
endif ()
|
||||
message (TRACE "VERSION: ${H5_VERS_MAJOR}.${H5_VERS_MINOR}.${H5_VERS_RELEASE}-${H5_VERS_SUBRELEASE}")
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# parse the full soversion number from config/lt_vers.am and include in H5_SOVERS_INFO
|
||||
@@ -261,9 +258,7 @@ string (REGEX REPLACE ".*LT_VERS_REVISION[ \t]+=[ \t]+([0-9]*).*$"
|
||||
string (REGEX REPLACE ".*LT_VERS_AGE[ \t]+=[ \t]+([0-9]*).*$"
|
||||
"\\1" H5_LIB_SOVERS_RELEASE ${_lt_vers_am_contents})
|
||||
math (EXPR H5_LIB_SOVERS_MAJOR ${H5_LIB_SOVERS_INTERFACE}-${H5_LIB_SOVERS_RELEASE})
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "SOVERSION: ${H5_LIB_SOVERS_MAJOR}.${H5_LIB_SOVERS_RELEASE}.${H5_LIB_SOVERS_MINOR}")
|
||||
endif ()
|
||||
message (VERBOSE "SOVERSION: ${H5_LIB_SOVERS_MAJOR}.${H5_LIB_SOVERS_RELEASE}.${H5_LIB_SOVERS_MINOR}")
|
||||
string (REGEX MATCH ".*LT_TOOLS_VERS_INTERFACE[ \t]+=[ \t]+([0-9]*).*$" H5_TOOLS_SOVERS_EXISTS ${_lt_vers_am_contents})
|
||||
if (H5_TOOLS_SOVERS_EXISTS)
|
||||
string (REGEX REPLACE ".*LT_TOOLS_VERS_INTERFACE[ \t]+=[ \t]+([0-9]*).*$"
|
||||
@@ -273,9 +268,7 @@ if (H5_TOOLS_SOVERS_EXISTS)
|
||||
string (REGEX REPLACE ".*LT_TOOLS_VERS_AGE[ \t]+=[ \t]+([0-9]*).*$"
|
||||
"\\1" H5_TOOLS_SOVERS_RELEASE ${_lt_vers_am_contents})
|
||||
math (EXPR H5_TOOLS_SOVERS_MAJOR ${H5_TOOLS_SOVERS_INTERFACE}-${H5_TOOLS_SOVERS_RELEASE})
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "SOVERSION_TOOLS: ${H5_TOOLS_SOVERS_MAJOR}.${H5_TOOLS_SOVERS_RELEASE}.${H5_TOOLS_SOVERS_MINOR}")
|
||||
endif ()
|
||||
message (VERBOSE "SOVERSION_TOOLS: ${H5_TOOLS_SOVERS_MAJOR}.${H5_TOOLS_SOVERS_RELEASE}.${H5_TOOLS_SOVERS_MINOR}")
|
||||
endif ()
|
||||
string (REGEX MATCH ".*LT_CXX_VERS_INTERFACE[ \t]+=[ \t]+([0-9]*).*$" H5_CXX_SOVERS_EXISTS ${_lt_vers_am_contents})
|
||||
if (H5_CXX_SOVERS_EXISTS)
|
||||
@@ -286,9 +279,7 @@ if (H5_CXX_SOVERS_EXISTS)
|
||||
string (REGEX REPLACE ".*LT_CXX_VERS_AGE[ \t]+=[ \t]+([0-9]*).*$"
|
||||
"\\1" H5_CXX_SOVERS_RELEASE ${_lt_vers_am_contents})
|
||||
math (EXPR H5_CXX_SOVERS_MAJOR ${H5_CXX_SOVERS_INTERFACE}-${H5_CXX_SOVERS_RELEASE})
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "SOVERSION_CXX: ${H5_CXX_SOVERS_MAJOR}.${H5_CXX_SOVERS_RELEASE}.${H5_CXX_SOVERS_MINOR}")
|
||||
endif ()
|
||||
message (VERBOSE "SOVERSION_CXX: ${H5_CXX_SOVERS_MAJOR}.${H5_CXX_SOVERS_RELEASE}.${H5_CXX_SOVERS_MINOR}")
|
||||
endif ()
|
||||
string (REGEX MATCH ".*LT_F_VERS_INTERFACE[ \t]+=[ \t]+([0-9]*).*$" H5_F_SOVERS_EXISTS ${_lt_vers_am_contents})
|
||||
if (H5_F_SOVERS_EXISTS)
|
||||
@@ -299,9 +290,7 @@ if (H5_F_SOVERS_EXISTS)
|
||||
string (REGEX REPLACE ".*LT_F_VERS_AGE[ \t]+=[ \t]+([0-9]*).*$"
|
||||
"\\1" H5_F_SOVERS_RELEASE ${_lt_vers_am_contents})
|
||||
math (EXPR H5_F_SOVERS_MAJOR ${H5_F_SOVERS_INTERFACE}-${H5_F_SOVERS_RELEASE})
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "SOVERSION_F: ${H5_F_SOVERS_MAJOR}.${H5_F_SOVERS_RELEASE}.${H5_F_SOVERS_MINOR}")
|
||||
endif ()
|
||||
message (VERBOSE "SOVERSION_F: ${H5_F_SOVERS_MAJOR}.${H5_F_SOVERS_RELEASE}.${H5_F_SOVERS_MINOR}")
|
||||
endif ()
|
||||
string (REGEX MATCH ".*LT_HL_VERS_INTERFACE[ \t]+=[ \t]+([0-9]*).*$" H5_HL_SOVERS_EXISTS ${_lt_vers_am_contents})
|
||||
if (H5_HL_SOVERS_EXISTS)
|
||||
@@ -312,9 +301,7 @@ if (H5_HL_SOVERS_EXISTS)
|
||||
string (REGEX REPLACE ".*LT_HL_VERS_AGE[ \t]+=[ \t]+([0-9]*).*$"
|
||||
"\\1" H5_HL_SOVERS_RELEASE ${_lt_vers_am_contents})
|
||||
math (EXPR H5_HL_SOVERS_MAJOR ${H5_HL_SOVERS_INTERFACE}-${H5_HL_SOVERS_RELEASE})
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "SOVERSION_HL: ${H5_HL_SOVERS_MAJOR}.${H5_HL_SOVERS_RELEASE}.${H5_HL_SOVERS_MINOR}")
|
||||
endif ()
|
||||
message (VERBOSE "SOVERSION_HL: ${H5_HL_SOVERS_MAJOR}.${H5_HL_SOVERS_RELEASE}.${H5_HL_SOVERS_MINOR}")
|
||||
endif ()
|
||||
string (REGEX MATCH ".*LT_HL_CXX_VERS_INTERFACE[ \t]+=[ \t]+([0-9]*).*$" H5_HL_CXX_SOVERS_EXISTS ${_lt_vers_am_contents})
|
||||
if (H5_HL_CXX_SOVERS_EXISTS)
|
||||
@@ -325,9 +312,7 @@ if (H5_HL_CXX_SOVERS_EXISTS)
|
||||
string (REGEX REPLACE ".*LT_HL_CXX_VERS_AGE[ \t]+=[ \t]+([0-9]*).*$"
|
||||
"\\1" H5_HL_CXX_SOVERS_RELEASE ${_lt_vers_am_contents})
|
||||
math (EXPR H5_HL_CXX_SOVERS_MAJOR ${H5_HL_CXX_SOVERS_INTERFACE}-${H5_HL_CXX_SOVERS_RELEASE})
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "SOVERSION_HL_CXX: ${H5_HL_CXX_SOVERS_MAJOR}.${H5_HL_CXX_SOVERS_RELEASE}.${H5_HL_CXX_SOVERS_MINOR}")
|
||||
endif ()
|
||||
message (VERBOSE "SOVERSION_HL_CXX: ${H5_HL_CXX_SOVERS_MAJOR}.${H5_HL_CXX_SOVERS_RELEASE}.${H5_HL_CXX_SOVERS_MINOR}")
|
||||
endif ()
|
||||
string (REGEX MATCH ".*LT_HL_F_VERS_INTERFACE[ \t]+=[ \t]+([0-9]*).*$" H5_HL_F_SOVERS_EXISTS ${_lt_vers_am_contents})
|
||||
if (H5_HL_F_SOVERS_EXISTS)
|
||||
@@ -338,9 +323,7 @@ if (H5_HL_F_SOVERS_EXISTS)
|
||||
string (REGEX REPLACE ".*LT_HL_F_VERS_AGE[ \t]+=[ \t]+([0-9]*).*$"
|
||||
"\\1" H5_HL_F_SOVERS_RELEASE ${_lt_vers_am_contents})
|
||||
math (EXPR H5_HL_F_SOVERS_MAJOR ${H5_HL_F_SOVERS_INTERFACE}-${H5_HL_F_SOVERS_RELEASE})
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "SOVERSION_HL_F: ${H5_HL_F_SOVERS_MAJOR}.${H5_HL_F_SOVERS_RELEASE}.${H5_HL_F_SOVERS_MINOR}")
|
||||
endif ()
|
||||
message (VERBOSE "SOVERSION_HL_F: ${H5_HL_F_SOVERS_MAJOR}.${H5_HL_F_SOVERS_RELEASE}.${H5_HL_F_SOVERS_MINOR}")
|
||||
endif ()
|
||||
string (REGEX MATCH ".*LT_JAVA_VERS_INTERFACE[ \t]+=[ \t]+([0-9]*).*$" H5_JAVA_SOVERS_EXISTS ${_lt_vers_am_contents})
|
||||
if(H5_JAVA_SOVERS_EXISTS)
|
||||
@@ -351,9 +334,7 @@ if(H5_JAVA_SOVERS_EXISTS)
|
||||
string (REGEX REPLACE ".*LT_JAVA_VERS_AGE[ \t]+=[ \t]+([0-9]*).*$"
|
||||
"\\1" H5_JAVA_SOVERS_RELEASE ${_lt_vers_am_contents})
|
||||
math (EXPR H5_JAVA_SOVERS_MAJOR ${H5_JAVA_SOVERS_INTERFACE}-${H5_JAVA_SOVERS_RELEASE})
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "SOVERSION_JAVA: ${H5_JAVA_SOVERS_MAJOR}.${H5_JAVA_SOVERS_RELEASE}.${H5_JAVA_SOVERS_MINOR}")
|
||||
endif ()
|
||||
message (VERBOSE "SOVERSION_JAVA: ${H5_JAVA_SOVERS_MAJOR}.${H5_JAVA_SOVERS_RELEASE}.${H5_JAVA_SOVERS_MINOR}")
|
||||
endif ()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
@@ -430,11 +411,11 @@ set (HDF5_PACKAGE_BUGREPORT "help@hdfgroup.org")
|
||||
#-----------------------------------------------------------------------------
|
||||
# Include some macros for reusable code
|
||||
#-----------------------------------------------------------------------------
|
||||
include (${HDF_RESOURCES_EXT_DIR}/HDFMacros.cmake)
|
||||
include (${HDF_RESOURCES_DIR}/HDFMacros.cmake)
|
||||
|
||||
HDF_DIR_PATHS(${HDF5_PACKAGE_NAME})
|
||||
|
||||
include (${HDF_RESOURCES_EXT_DIR}/HDFLibMacros.cmake)
|
||||
include (${HDF_RESOURCES_DIR}/HDFLibMacros.cmake)
|
||||
include (${HDF_RESOURCES_DIR}/HDF5PluginMacros.cmake)
|
||||
include (${HDF_RESOURCES_DIR}/HDF5Macros.cmake)
|
||||
|
||||
@@ -562,15 +543,6 @@ if (HDF5_ENABLE_COVERAGE)
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Option to indicate using dmalloc
|
||||
#-----------------------------------------------------------------------------
|
||||
# option (HDF5_ENABLE_USING_DMALLOC "Indicate that dmalloc is used" OFF)
|
||||
# if (HDF5_ENABLE_USING_DMALLOC)
|
||||
# find_package (DMALLOC)
|
||||
# set (H5_HAVE_DMALLOC DMALLOC_FOUND)
|
||||
# endif ()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Option to indicate using a memory checker
|
||||
#-----------------------------------------------------------------------------
|
||||
@@ -579,14 +551,6 @@ if (HDF5_ENABLE_USING_MEMCHECKER)
|
||||
set (H5_USING_MEMCHECKER 1)
|
||||
endif ()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Option to indicate internal memory allocation sanity checks are enabled
|
||||
#-----------------------------------------------------------------------------
|
||||
option (HDF5_MEMORY_ALLOC_SANITY_CHECK "Indicate that internal memory allocation sanity checks are enabled" OFF)
|
||||
if (HDF5_MEMORY_ALLOC_SANITY_CHECK)
|
||||
set (H5_MEMORY_ALLOC_SANITY_CHECK 1)
|
||||
endif ()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Option to enable/disable using pread/pwrite for VFDs
|
||||
#-----------------------------------------------------------------------------
|
||||
@@ -654,6 +618,13 @@ if (${HDF_CFG_NAME} MATCHES "Debug")
|
||||
mark_as_advanced (HDF5_ENABLE_INSTRUMENT)
|
||||
endif ()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Add some definitions for Developer Builds
|
||||
#-----------------------------------------------------------------------------
|
||||
if (${HDF_CFG_NAME} MATCHES "Developer")
|
||||
include (${HDF_RESOURCES_DIR}/HDF5DeveloperBuild.cmake)
|
||||
endif ()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Option to embed library info into executables
|
||||
#-----------------------------------------------------------------------------
|
||||
@@ -667,7 +638,7 @@ else ()
|
||||
endif ()
|
||||
|
||||
include (${HDF_RESOURCES_DIR}/HDFCompilerFlags.cmake)
|
||||
set (CMAKE_MODULE_PATH ${HDF_RESOURCES_DIR} ${HDF_RESOURCES_EXT_DIR} ${CMAKE_MODULE_PATH})
|
||||
set (CMAKE_MODULE_PATH ${HDF_RESOURCES_DIR} ${HDF_RESOURCES_DIR} ${CMAKE_MODULE_PATH})
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Option to Enable HDFS
|
||||
@@ -708,10 +679,14 @@ if (HDF5_ENABLE_PARALLEL)
|
||||
|
||||
# Used by Parallel Compression feature
|
||||
set (PARALLEL_FILTERED_WRITES ON)
|
||||
CHECK_SYMBOL_EXISTS (MPI_Mprobe "mpi.h" H5_HAVE_MPI_Mprobe)
|
||||
CHECK_SYMBOL_EXISTS (MPI_Imrecv "mpi.h" H5_HAVE_MPI_Imrecv)
|
||||
if (NOT H5_HAVE_MPI_Mprobe OR NOT H5_HAVE_MPI_Imrecv)
|
||||
message (WARNING "The MPI_Mprobe and/or MPI_Imrecv functions could not be located.
|
||||
CHECK_SYMBOL_EXISTS (MPI_Ibarrier "mpi.h" H5_HAVE_MPI_Ibarrier)
|
||||
CHECK_SYMBOL_EXISTS (MPI_Issend "mpi.h" H5_HAVE_MPI_Issend)
|
||||
CHECK_SYMBOL_EXISTS (MPI_Iprobe "mpi.h" H5_HAVE_MPI_Iprobe)
|
||||
CHECK_SYMBOL_EXISTS (MPI_Irecv "mpi.h" H5_HAVE_MPI_Irecv)
|
||||
if (H5_HAVE_MPI_Ibarrier AND H5_HAVE_MPI_Issend AND H5_HAVE_MPI_Iprobe AND H5_HAVE_MPI_Irecv)
|
||||
set (H5_HAVE_PARALLEL_FILTERED_WRITES 1)
|
||||
else ()
|
||||
message (WARNING "The MPI_Ibarrier/MPI_Issend/MPI_Iprobe/MPI_Irecv functions could not be located.
|
||||
Parallel writes of filtered data will be disabled.")
|
||||
set (PARALLEL_FILTERED_WRITES OFF)
|
||||
endif ()
|
||||
@@ -738,6 +713,56 @@ if (H5_HAVE_PARALLEL)
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
# Determine whether to build the HDF5 Subfiling VFD
|
||||
set (H5FD_SUBFILING_DIR ${HDF5_SRC_DIR}/H5FDsubfiling)
|
||||
set (HDF5_SRC_INCLUDE_DIRS
|
||||
${HDF5_SRC_INCLUDE_DIRS}
|
||||
${H5FD_SUBFILING_DIR}
|
||||
)
|
||||
option (HDF5_ENABLE_SUBFILING_VFD "Build Parallel HDF5 Subfiling VFD" OFF)
|
||||
if (HDF5_ENABLE_SUBFILING_VFD)
|
||||
if (NOT HDF5_ENABLE_PARALLEL)
|
||||
message (FATAL_ERROR "Subfiling VFD requires a parallel HDF5 build")
|
||||
else ()
|
||||
# Check for MPI_Comm_split_type
|
||||
CHECK_SYMBOL_EXISTS (MPI_Comm_split_type "mpi.h" H5_HAVE_MPI_Comm_split_type)
|
||||
if (NOT H5_HAVE_MPI_Comm_split_type)
|
||||
message (FATAL_ERROR "Subfiling VFD requires MPI-3 support for MPI_Comm_split_type")
|
||||
endif ()
|
||||
endif()
|
||||
|
||||
if (NOT DEFINED Threads_FOUND)
|
||||
set (THREADS_PREFER_PTHREAD_FLAG ON)
|
||||
find_package (Threads REQUIRED)
|
||||
endif ()
|
||||
|
||||
# For now, make sure we're using pthreads. Once Subfiling can be
|
||||
# supported on Windows, we should allow Win32 threads as well
|
||||
if (NOT ${Threads_FOUND} OR NOT ${CMAKE_USE_PTHREADS_INIT})
|
||||
message (FATAL_ERROR "Subfiling requires pthreads for system thread library")
|
||||
endif ()
|
||||
|
||||
CHECK_INCLUDE_FILE("stdatomic.h" HAVE_STDATOMIC_H)
|
||||
if (NOT HAVE_STDATOMIC_H)
|
||||
message (FATAL_ERROR "Subfiling VFD requires atomic operations support. C11 stdatomic.h NOT available.")
|
||||
else()
|
||||
set (H5_HAVE_STDATOMIC_H 1)
|
||||
endif()
|
||||
|
||||
set (H5_HAVE_SUBFILING_VFD 1)
|
||||
# IOC VFD is currently only built when subfiling is enabled
|
||||
set (H5_HAVE_IOC_VFD 1)
|
||||
|
||||
message (STATUS "Setting up to use Mercury components")
|
||||
set (H5FD_SUBFILING_MERCURY_DIR ${H5FD_SUBFILING_DIR}/mercury/src/util)
|
||||
set (HDF5_SRC_INCLUDE_DIRS
|
||||
${HDF5_SRC_INCLUDE_DIRS}
|
||||
${H5FD_SUBFILING_MERCURY_DIR}
|
||||
)
|
||||
set (H5_HAVE_MERCURY_H 1)
|
||||
set (CMAKE_REQUIRED_INCLUDES "${H5FD_SUBFILING_MERCURY_DIR}")
|
||||
endif()
|
||||
|
||||
#option (DEFAULT_API_VERSION "Enable v1.14 API (v16, v18, v110, v112, v114)" "v114")
|
||||
set (DEFAULT_API_VERSION "v114" CACHE STRING "Enable v1.14 API (v16, v18, v110, v112, v114)")
|
||||
set_property (CACHE DEFAULT_API_VERSION PROPERTY STRINGS v16 v18 v110 v112 v114)
|
||||
@@ -787,6 +812,17 @@ if (DEFAULT_API_VERSION MATCHES "v114")
|
||||
set (H5_USE_114_API_DEFAULT 1)
|
||||
endif ()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Option to use 1.16.x API
|
||||
#-----------------------------------------------------------------------------
|
||||
if (NOT DEFAULT_API_VERSION)
|
||||
set (DEFAULT_API_VERSION "v116")
|
||||
endif ()
|
||||
set (H5_USE_116_API_DEFAULT 0)
|
||||
if (DEFAULT_API_VERSION MATCHES "v116")
|
||||
set (H5_USE_116_API_DEFAULT 1)
|
||||
endif ()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Include user macros
|
||||
#-----------------------------------------------------------------------------
|
||||
@@ -821,51 +857,32 @@ option (HDF5_ENABLE_THREADSAFE "Enable thread-safety" OFF)
|
||||
if (HDF5_ENABLE_THREADSAFE)
|
||||
# check for unsupported options
|
||||
if (WIN32)
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE " **** thread-safety option not supported with static library **** ")
|
||||
message (VERBOSE " **** thread-safety option will not be used building static library **** ")
|
||||
endif ()
|
||||
endif ()
|
||||
if (HDF5_ENABLE_PARALLEL)
|
||||
if (NOT ALLOW_UNSUPPORTED)
|
||||
message (FATAL_ERROR " **** parallel and thread-safety options are not supported, override with ALLOW_UNSUPPORTED option **** ")
|
||||
else ()
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE " **** Allowing unsupported parallel and thread-safety options **** ")
|
||||
endif ()
|
||||
endif ()
|
||||
message (VERBOSE " **** thread-safety option not supported with static library **** ")
|
||||
message (VERBOSE " **** thread-safety option will not be used building static library **** ")
|
||||
endif ()
|
||||
if (HDF5_BUILD_FORTRAN)
|
||||
if (NOT ALLOW_UNSUPPORTED)
|
||||
message (FATAL_ERROR " **** Fortran and thread-safety options are not supported, override with ALLOW_UNSUPPORTED option **** ")
|
||||
else ()
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE " **** Allowing unsupported Fortran and thread-safety options **** ")
|
||||
endif ()
|
||||
message (VERBOSE " **** Allowing unsupported Fortran and thread-safety options **** ")
|
||||
endif ()
|
||||
endif ()
|
||||
if (HDF5_BUILD_CPP_LIB)
|
||||
if (NOT ALLOW_UNSUPPORTED)
|
||||
message (FATAL_ERROR " **** C++ and thread-safety options are not supported, override with ALLOW_UNSUPPORTED option **** ")
|
||||
else ()
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE " **** Allowing unsupported C++ and thread-safety options **** ")
|
||||
endif ()
|
||||
message (VERBOSE " **** Allowing unsupported C++ and thread-safety options **** ")
|
||||
endif ()
|
||||
endif ()
|
||||
if (HDF5_BUILD_HL_LIB)
|
||||
if (NOT ALLOW_UNSUPPORTED)
|
||||
message (FATAL_ERROR " **** HL and thread-safety options are not supported, override with ALLOW_UNSUPPORTED option **** ")
|
||||
else ()
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE " **** Allowing unsupported HL and thread-safety options **** ")
|
||||
endif ()
|
||||
message (VERBOSE " **** Allowing unsupported HL and thread-safety options **** ")
|
||||
endif ()
|
||||
endif ()
|
||||
if (H5_HAVE_IOEO)
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE " **** Win32 threads requires WINVER>=0x600 (Windows Vista/7/8) **** ")
|
||||
endif ()
|
||||
message (VERBOSE " **** Win32 threads requires WINVER>=0x600 (Windows Vista/7/8) **** ")
|
||||
set (H5_HAVE_WIN_THREADS 1)
|
||||
else ()
|
||||
if (NOT H5_HAVE_PTHREAD_H)
|
||||
@@ -895,10 +912,10 @@ add_subdirectory (src)
|
||||
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
|
||||
if (ZLIB_FOUND AND ZLIB_USE_EXTERNAL)
|
||||
if (NOT ONLY_SHARED_LIBS)
|
||||
add_dependencies (${HDF5_LIB_TARGET} ZLIB)
|
||||
add_dependencies (${HDF5_LIB_TARGET} HDF5_ZLIB)
|
||||
endif ()
|
||||
if (BUILD_SHARED_LIBS)
|
||||
add_dependencies (${HDF5_LIBSH_TARGET} ZLIB)
|
||||
add_dependencies (${HDF5_LIBSH_TARGET} HDF5_ZLIB)
|
||||
endif ()
|
||||
endif ()
|
||||
if (SZIP_FOUND AND SZIP_USE_EXTERNAL)
|
||||
@@ -958,8 +975,13 @@ if (BUILD_TESTING)
|
||||
mark_as_advanced (HDF5_TEST_FHEAP_PASSTHROUGH VOL)
|
||||
endif ()
|
||||
|
||||
option (HDF_TEST_EXPRESS "Control testing framework (0-3)" "0")
|
||||
set (H5_TEST_EXPRESS_LEVEL_DEFAULT "3")
|
||||
set (HDF_TEST_EXPRESS "${H5_TEST_EXPRESS_LEVEL_DEFAULT}"
|
||||
CACHE STRING "Control testing framework (0-3) (0 = exhaustive testing; 3 = quicker testing)")
|
||||
mark_as_advanced (HDF_TEST_EXPRESS)
|
||||
if (NOT "${HDF_TEST_EXPRESS}" STREQUAL "")
|
||||
set (H5_TEST_EXPRESS_LEVEL_DEFAULT "${HDF_TEST_EXPRESS}")
|
||||
endif ()
|
||||
|
||||
enable_testing ()
|
||||
include (CTest)
|
||||
@@ -1092,11 +1114,9 @@ set (H5_FC_FUNC_ "H5_FC_FUNC_(name,NAME) name ## _")
|
||||
if (EXISTS "${HDF5_SOURCE_DIR}/fortran" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/fortran")
|
||||
option (HDF5_BUILD_FORTRAN "Build FORTRAN support" OFF)
|
||||
if (HDF5_BUILD_FORTRAN)
|
||||
include (${HDF_RESOURCES_EXT_DIR}/HDFUseFortran.cmake)
|
||||
include (${HDF_RESOURCES_DIR}/HDFUseFortran.cmake)
|
||||
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE "Fortran compiler ID is ${CMAKE_Fortran_COMPILER_ID}")
|
||||
endif ()
|
||||
message (VERBOSE "Fortran compiler ID is ${CMAKE_Fortran_COMPILER_ID}")
|
||||
include (${HDF_RESOURCES_DIR}/HDFFortranCompilerFlags.cmake)
|
||||
include (${HDF_RESOURCES_DIR}/HDF5UseFortran.cmake)
|
||||
set (LINK_Fortran_LIBS ${LINK_LIBS})
|
||||
@@ -1107,6 +1127,7 @@ if (EXISTS "${HDF5_SOURCE_DIR}/fortran" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/for
|
||||
|
||||
# Parallel IO usage requires MPI to be Linked and Included
|
||||
if (H5_HAVE_PARALLEL)
|
||||
find_package(MPI REQUIRED COMPONENTS Fortran)
|
||||
set (LINK_Fortran_LIBS ${LINK_Fortran_LIBS} ${MPI_Fortran_LIBRARIES})
|
||||
if (MPI_Fortran_LINK_FLAGS)
|
||||
set (CMAKE_Fortran_EXE_LINKER_FLAGS "${MPI_Fortran_LINK_FLAGS} ${CMAKE_EXE_LINKER_FLAGS}")
|
||||
@@ -1145,9 +1166,7 @@ if (EXISTS "${HDF5_SOURCE_DIR}/c++" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/c++")
|
||||
if (NOT ALLOW_UNSUPPORTED)
|
||||
message (FATAL_ERROR " **** Parallel and C++ options are mutually exclusive, override with ALLOW_UNSUPPORTED option **** ")
|
||||
else ()
|
||||
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
|
||||
message (VERBOSE " **** Allowing unsupported Parallel and C++ options **** ")
|
||||
endif ()
|
||||
message (VERBOSE " **** Allowing unsupported Parallel and C++ options **** ")
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
|
||||
128
CODE_OF_CONDUCT.md
Normal file
128
CODE_OF_CONDUCT.md
Normal file
@@ -0,0 +1,128 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
We as members, contributors, and leaders pledge to make participation in our
|
||||
community a harassment-free experience for everyone, regardless of age, body
|
||||
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||
identity and expression, level of experience, education, socio-economic status,
|
||||
nationality, personal appearance, race, religion, or sexual identity
|
||||
and orientation.
|
||||
|
||||
We pledge to act and interact in ways that contribute to an open, welcoming,
|
||||
diverse, inclusive, and healthy community.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to a positive environment for our
|
||||
community include:
|
||||
|
||||
* Demonstrating empathy and kindness toward other people
|
||||
* Being respectful of differing opinions, viewpoints, and experiences
|
||||
* Giving and gracefully accepting constructive feedback
|
||||
* Accepting responsibility and apologizing to those affected by our mistakes,
|
||||
and learning from the experience
|
||||
* Focusing on what is best not just for us as individuals, but for the
|
||||
overall community
|
||||
|
||||
Examples of unacceptable behavior include:
|
||||
|
||||
* The use of sexualized language or imagery, and sexual attention or
|
||||
advances of any kind
|
||||
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or email
|
||||
address, without their explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Enforcement Responsibilities
|
||||
|
||||
Community leaders are responsible for clarifying and enforcing our standards of
|
||||
acceptable behavior and will take appropriate and fair corrective action in
|
||||
response to any behavior that they deem inappropriate, threatening, offensive,
|
||||
or harmful.
|
||||
|
||||
Community leaders have the right and responsibility to remove, edit, or reject
|
||||
comments, commits, code, wiki edits, issues, and other contributions that are
|
||||
not aligned to this Code of Conduct, and will communicate reasons for moderation
|
||||
decisions when appropriate.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies within all community spaces, and also applies when
|
||||
an individual is officially representing the community in public spaces.
|
||||
Examples of representing our community include using an official e-mail address,
|
||||
posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported to the community leaders responsible for enforcement at
|
||||
help@hdfgroup.org.
|
||||
All complaints will be reviewed and investigated promptly and fairly.
|
||||
|
||||
All community leaders are obligated to respect the privacy and security of the
|
||||
reporter of any incident.
|
||||
|
||||
## Enforcement Guidelines
|
||||
|
||||
Community leaders will follow these Community Impact Guidelines in determining
|
||||
the consequences for any action they deem in violation of this Code of Conduct:
|
||||
|
||||
### 1. Correction
|
||||
|
||||
**Community Impact**: Use of inappropriate language or other behavior deemed
|
||||
unprofessional or unwelcome in the community.
|
||||
|
||||
**Consequence**: A private, written warning from community leaders, providing
|
||||
clarity around the nature of the violation and an explanation of why the
|
||||
behavior was inappropriate. A public apology may be requested.
|
||||
|
||||
### 2. Warning
|
||||
|
||||
**Community Impact**: A violation through a single incident or series
|
||||
of actions.
|
||||
|
||||
**Consequence**: A warning with consequences for continued behavior. No
|
||||
interaction with the people involved, including unsolicited interaction with
|
||||
those enforcing the Code of Conduct, for a specified period of time. This
|
||||
includes avoiding interactions in community spaces as well as external channels
|
||||
like social media. Violating these terms may lead to a temporary or
|
||||
permanent ban.
|
||||
|
||||
### 3. Temporary Ban
|
||||
|
||||
**Community Impact**: A serious violation of community standards, including
|
||||
sustained inappropriate behavior.
|
||||
|
||||
**Consequence**: A temporary ban from any sort of interaction or public
|
||||
communication with the community for a specified period of time. No public or
|
||||
private interaction with the people involved, including unsolicited interaction
|
||||
with those enforcing the Code of Conduct, is allowed during this period.
|
||||
Violating these terms may lead to a permanent ban.
|
||||
|
||||
### 4. Permanent Ban
|
||||
|
||||
**Community Impact**: Demonstrating a pattern of violation of community
|
||||
standards, including sustained inappropriate behavior, harassment of an
|
||||
individual, or aggression toward or disparagement of classes of individuals.
|
||||
|
||||
**Consequence**: A permanent ban from any sort of public interaction within
|
||||
the community.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||
version 2.0, available at
|
||||
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
|
||||
|
||||
Community Impact Guidelines were inspired by [Mozilla's code of conduct
|
||||
enforcement ladder](https://github.com/mozilla/diversity).
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
|
||||
For answers to common questions about this code of conduct, see the FAQ at
|
||||
https://www.contributor-covenant.org/faq. Translations are available at
|
||||
https://www.contributor-covenant.org/translations.
|
||||
142
CONTRIBUTING.md
Normal file
142
CONTRIBUTING.md
Normal file
@@ -0,0 +1,142 @@
|
||||
# How to contribute to HDF5
|
||||
|
||||
The HDF Group encourages community members to contribute to the HDF5 project. We accept and are very grateful for any contributions,
|
||||
from minor typos and bug fixes to new features. The HDF Group is committed to work with the code contributors and make contribution
|
||||
process enjoyable and straightforward.
|
||||
|
||||
This document describes guiding principles for the HDF5 code contributors and does not pretend to address any possible
|
||||
contribution. If in doubt, please do not hesitate to ask us for guidance.
|
||||
***Note that no contribution may be accepted unless the donor agrees with the HDF Group software license terms
|
||||
found in the COPYING file in every branch's top source directory.***
|
||||
|
||||
|
||||
> We will assume that you are familiar with `git` and `GitHub`. If not, you may go through the GitHub tutorial found at
|
||||
[https://guides.github.com/activities/hello-world/](https://guides.github.com/activities/hello-world/). This tutorial should only take
|
||||
around 10 minutes.
|
||||
|
||||
## Table of Contents
|
||||
|
||||
* [Workflow](#workflow)
|
||||
* [Acceptance criteria for a pull request](#criteria)
|
||||
* [Release Note](#releasenote)
|
||||
* [Check List](#checklist)
|
||||
|
||||
# Workflow <A NAME="workflow"></A>
|
||||
|
||||
The process for contributing code to HDF5 is as follows:
|
||||
|
||||
* Open an issue on [HDF5 GitHub](https://github.com/HDFGroup/hdf5/issues).
|
||||
|
||||
> This step is ***required*** unless the change is minor (e.g., typo fix).
|
||||
|
||||
* Fork the [HDF5](https://github.com/HDFGroup/hdf5) repository.
|
||||
* Make the desired changes to the HDF5 software.
|
||||
* New features should always go to _develop_ branch first and later should be merged to the appropriate maintenance branches.
|
||||
* Bug fixes should go to all appropriate branches (_develop_ and maintenance).
|
||||
* Build and test your changes. Detailed instructions on building and testing HDF5 can be found in the `INSTALL*` files in the `release_docs` directory.
|
||||
* Push your changes to GitHub.
|
||||
* Issue a pull request and address any code formatting and testing issues reported.
|
||||
|
||||
Once a pull request is correctly formatted and passes **ALL** CI tests, it will be reviewed and evaluated by The HDF Group developers and HDF5
|
||||
community members who can approve pull requests. The HDF Group developers will work with you to ensure that the pull request satisfies the acceptance
|
||||
criteria described in the next section.
|
||||
|
||||
# Acceptance criteria for a pull request <A NAME="criteria"></A>
|
||||
|
||||
We appreciate every contribution we receive, but we may not accept them all. Those that we *do* satisfy the following criteria:
|
||||
|
||||
* **The pull request has a clear purpose** - What does the pull request address? How does it benefit the HDF5 community?
|
||||
If the pull request does not have a clear purpose and benefits, it will not be accepted.
|
||||
|
||||
* **The pull request is documented** - The HDF5 developers must understand not only *what* a change is doing, but *how* it is doing it.
|
||||
Documenting the code makes it easier for us to understand your patch and maintain the code in the future.
|
||||
|
||||
* **The pull request passes HDF5 regression testing** - Any issue fixed or functionality added should be accompanied by the corresponding
|
||||
tests and pass HDF5 regression testing run by The HDF Group. We do not expect you to perform comprehensive testing across multiple platforms
|
||||
before we accept the pull request. If the pull request does not pass regression testing after the merge, The HDF Group developers will work
|
||||
with you on the fixes.
|
||||
|
||||
* **The pull request does not compromise the principles behind HDF5** - HDF5 has a 100% commitment to backward compatibility.
|
||||
* Any file ever created with HDF5 must be readable by any future version of HDF5.
|
||||
If your patch's purpose is to modify the HDF5 data model or file format,
|
||||
**please** discuss this with us first. File format changes and features required by those changes can be introduced only in a new major release.
|
||||
* HDF5 has a commitment to remaining *machine-independent*; data created on one platform/environment/architecture **must** remain readable by HDF5 on any other.
|
||||
* For binary compatibility, no changes are allowed to public APIs and data structures in the maintenance releases; new APIs can be added.
|
||||
|
||||
* **New features are documented** - Any new features should have proper documentation; talk to us if you have any questions.
|
||||
|
||||
* **When to Write a Release Note** - Generally, a release note must be written for every change that is made to the code for which
|
||||
users might see a change in the way the software works. In other words, if a user might see a difference in the way the software works,
|
||||
a note should be written. By code we mean the text that will be compiled into one of the company's software products. The code includes
|
||||
configuration changes and changes to tools users might work with to configure and build our software.
|
||||
|
||||
* Notes should be added for known problems. Known problems are issues that we know about and have not yet been able to fix.
|
||||
|
||||
* Any change made to address a user-reported problem should be described in a release note.
|
||||
|
||||
* A release note does not need to be written for changes to the code that users will not see. Here are some examples. If you add a
|
||||
comment, you do not need to write a release note describing the comment you added. If you rewrite some code to make it read more
|
||||
clearly and if there is no change in functionality or performance, then you do not need to write a release note. If you change the
|
||||
process by which user software is made, you may not need to write a release note since the change was not made to the code.
|
||||
|
||||
* Users. We have different kinds of users. A release note may be written to be helpful to
|
||||
application developers and not system administrators. Users who may find the RELEASE.txt file helpful include the following:
|
||||
application developers, library developers, and system administrators.
|
||||
|
||||
|
||||
# Release Note <A NAME="releasenote"></A>
|
||||
|
||||
* **Entry Syntax**
|
||||
The release note entry syntax is shown below.
|
||||
|
||||
```
|
||||
- Title/Problem
|
||||
|
||||
Problem/Solution
|
||||
|
||||
Signature
|
||||
```
|
||||
|
||||
* **Entry Elements** - The elements of the entry - title, problem, solution, and signature - are described in more detail in the table
|
||||
below. Descriptions of the problem and the solution should be clear without any ambiguities and should be short without losing clarity or specifics.
|
||||
|
||||
* **Title** - The title or tag should identify one or more categories that will help readers decide if the entry is something they need to study. Can be combined with the `Problem` element
|
||||
* **Problem** - Describe the problem and how users might see the problem in a paragraph.
|
||||
You might also consider the following as you describe the problem:
|
||||
* Under what specific conditions does this issue arise?
|
||||
* Under what specific conditions are we sure this issue will not arise?
|
||||
* For a performance issue, instead of saying something is a performance issue, describe what the performance impact of issue is?
|
||||
* **Solution** - Describe the solution in another paragraph.
|
||||
You might also consider the following as you describe the solution:
|
||||
* What was done to resolve the issue?
|
||||
* What is the functional impact?
|
||||
* Is there a workaround – a way for users design their software so as not to encounter the issue? If so, what is the workaround?
|
||||
* For a performance fix, how has the performance improved? Links to published documentation would be good.
|
||||
* **Signature** - Each entry must be signed with the initials of the author, the date in YYYY/MM/DD format, and the JIRA ticket number. The
|
||||
following is an example entry written by developer Xavier Zolo on April 16, 2014 about JIRA ticket HDFFV-5555: (XYZ - 2014/04/16, HDFFV-5555). The
|
||||
signature is enclosed in parentheses. JIRA or Github numbers should not be used in the description of the problem or the solution. They are like
|
||||
abbreviations that customers and external users will not be able to interpret.
|
||||
|
||||
# Checklist <A NAME="checklist"></A>
|
||||
|
||||
Please make sure that you check the items applicable to your pull request:
|
||||
|
||||
* Code
|
||||
* [ ] Does the pull request have a corresponding GitHub issue and clear purpose?
|
||||
* [ ] Does the pull request follow HDF5 best practices (naming conventions, code portability, code structure, etc.)? <<TODO: link to the document>>
|
||||
* [ ] If changes were done to Autotools build, were they added to CMake and vice versa?
|
||||
* [ ] Is the pull request applicable to any other branches? If yes, which ones? Please document it in the GitHub issue.
|
||||
* [ ] Is the new code sufficiently documented for future maintenance?
|
||||
* [ ] Does the new feature require a change to an existing API? See "API Compatibility Macros" document (https://portal.hdfgroup.org/display/HDF5/API+Compatibility+Macros)
|
||||
* Documentation
|
||||
* [ ] Was the change described in the release_docs/RELEASE.txt file?
|
||||
* [ ] Was MANIFEST updated if new files had been added to the source?
|
||||
* [ ] Was the new function documented in the corresponding public header file using [Doxygen](https://docs.hdfgroup.org/hdf5/develop/_r_m_t.html)?
|
||||
* [ ] Was new functionality documented for the HDF5 community (the level of documentation depends on the feature; ask us what would be appropriate)
|
||||
* Testing
|
||||
* [ ] Does the pull request have tests?
|
||||
* [ ] Does the pull request affect HDF5 library performance?
|
||||
|
||||
We want as many contributions as we can get, and we are here to help. Feel free to reach out to us if you have any questions
|
||||
|
||||
Thank you for your contribution!
|
||||
@@ -12,7 +12,7 @@
|
||||
## This file should be placed in the root directory of your project.
|
||||
## Then modify the CMakeLists.txt file in the root directory of your
|
||||
## project to incorporate the testing dashboard.
|
||||
## # The following are required to uses Dart and the Cdash dashboard
|
||||
## # The following are required to use Dart and the CDash dashboard
|
||||
## ENABLE_TESTING()
|
||||
## INCLUDE(CTest)
|
||||
set (CTEST_PROJECT_NAME "HDF5")
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
# Top-level distributed Makefile -*- makefile -*-
|
||||
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
117
README.md
Normal file
117
README.md
Normal file
@@ -0,0 +1,117 @@
|
||||
HDF5 version 1.15.0 currently under development
|
||||
|
||||

|
||||
|
||||
[](https://github.com/HDFGroup/hdf5/actions?query=branch%3Adevelop)
|
||||
[](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_14)
|
||||
[](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_12)
|
||||
[](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_10)
|
||||
[](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_8)
|
||||
[](https://github.com/HDFGroup/hdf5/blob/develop/COPYING)
|
||||
|
||||
*Please refer to the release_docs/INSTALL file for installation instructions.*
|
||||
|
||||
This repository contains a high-performance library's source code and a file format
|
||||
specification that implement the HDF5® data model. The model has been adopted across
|
||||
many industries and this implementation has become a de facto data management standard
|
||||
in science, engineering, and research communities worldwide.
|
||||
|
||||
The HDF Group is the developer, maintainer, and steward of HDF5 software. Find more
|
||||
information about The HDF Group, the HDF5 Community, and other HDF5 software projects,
|
||||
tools, and services at The HDF Group's website.
|
||||
|
||||
https://www.hdfgroup.org/
|
||||
|
||||
|
||||
DOCUMENTATION
|
||||
-------------
|
||||
This release is fully functional for the API described in the documentation.
|
||||
|
||||
https://portal.hdfgroup.org/display/HDF5/The+HDF5+API
|
||||
|
||||
Full Documentation and Programming Resources for this release can be found at
|
||||
|
||||
https://portal.hdfgroup.org/display/HDF5
|
||||
|
||||
See the RELEASE.txt file in the release_docs/ directory for information specific
|
||||
to the features and updates included in this release of the library.
|
||||
|
||||
Several more files are located within the release_docs/ directory with specific
|
||||
details for several common platforms and configurations.
|
||||
|
||||
INSTALL - Start Here. General instructions for compiling and installing the library
|
||||
INSTALL_CMAKE - instructions for building with CMake (Kitware.com)
|
||||
INSTALL_parallel - instructions for building and configuring Parallel HDF5
|
||||
INSTALL_Windows and INSTALL_Cygwin - MS Windows installations.
|
||||
|
||||
|
||||
|
||||
HELP AND SUPPORT
|
||||
----------------
|
||||
Information regarding Help Desk and Support services is available at
|
||||
|
||||
https://portal.hdfgroup.org/display/support/The+HDF+Help+Desk
|
||||
|
||||
|
||||
|
||||
FORUM and NEWS
|
||||
--------------
|
||||
The following public forums are provided for public announcements and discussions
|
||||
of interest to the general HDF5 Community.
|
||||
|
||||
- Homepage of the Forum
|
||||
https://forum.hdfgroup.org
|
||||
|
||||
- News and Announcement
|
||||
https://forum.hdfgroup.org/c/news-and-announcements-from-the-hdf-group
|
||||
|
||||
- HDF5 and HDF4 Topics
|
||||
https://forum.hdfgroup.org/c/hdf5
|
||||
|
||||
These forums are provided as an open and public service for searching and reading.
|
||||
Posting requires completing a simple registration and allows one to join in the
|
||||
conversation. Please read the following instructions pertaining to the Forum's
|
||||
use and configuration
|
||||
https://forum.hdfgroup.org/t/quickstart-guide-welcome-to-the-new-hdf-forum
|
||||
|
||||
|
||||
RELEASE SCHEDULE
|
||||
----------------
|
||||
|
||||

|
||||
|
||||
HDF5 does not release on a regular schedule. Instead, releases are driven by
|
||||
new features and bug fixes, though we try to have at least one release of each
|
||||
maintenance branch per year. Future HDF5 releases indicated on this schedule
|
||||
are tentative.
|
||||
|
||||
**NOTE**: HDF5 1.12 is being retired early due to its incomplete and incompatible VOL
|
||||
layer.
|
||||
|
||||
| Release | New Features |
|
||||
| ------- | ------------ |
|
||||
| 1.8.23 | last HDF5 1.8 release |
|
||||
| 1.10.10 | CVE fixes, performance improvements, H5Dchunk\_iter() |
|
||||
| 1.12.3 | CVE fixes, performance improvements, H5Dchunk\_iter(), last HDF5 1.12 release |
|
||||
| 1.14.1 | selection I/O with datatype conversion |
|
||||
| 2.0.0 | TBD |
|
||||
| TBD | VFD SWMR |
|
||||
|
||||
This list of feature release versions is also tentative, and the specific release
|
||||
in which a feature is introduced may change.
|
||||
|
||||
|
||||
SNAPSHOTS, PREVIOUS RELEASES AND SOURCE CODE
|
||||
--------------------------------------------
|
||||
Periodically development code snapshots are provided at the following URL:
|
||||
|
||||
https://gamma.hdfgroup.org/ftp/pub/outgoing/hdf5/snapshots/
|
||||
|
||||
Source packages for current and previous releases are located at:
|
||||
|
||||
https://portal.hdfgroup.org/display/support/Downloads
|
||||
|
||||
Development code is available at our Github location:
|
||||
|
||||
https://github.com/HDFGroup/hdf5.git
|
||||
|
||||
80
README.txt
80
README.txt
@@ -1,80 +0,0 @@
|
||||
HDF5 version 1.13.1-1 currently under development
|
||||
|
||||
------------------------------------------------------------------------------
|
||||
Please refer to the release_docs/INSTALL file for installation instructions.
|
||||
------------------------------------------------------------------------------
|
||||
|
||||
THE HDF GROUP
|
||||
---------------
|
||||
|
||||
The HDF Group is the developer of HDF5®, a high-performance software library and
|
||||
data format that has been adopted across multiple industries and has become a
|
||||
de facto standard in scientific and research communities.
|
||||
|
||||
More information about The HDF Group, the HDF5 Community and the HDF5 software
|
||||
project, tools and services can be found at the Group's website.
|
||||
|
||||
https://www.hdfgroup.org/
|
||||
|
||||
|
||||
DOCUMENTATION
|
||||
-------------
|
||||
This release is fully functional for the API described in the documentation.
|
||||
https://portal.hdfgroup.org/display/HDF5/The+HDF5+API
|
||||
|
||||
Full Documentation and Programming Resources for this release can be found at
|
||||
https://portal.hdfgroup.org/display/HDF5
|
||||
|
||||
See the RELEASE.txt file in the release_docs/ directory for information specific
|
||||
to the features and updates included in this release of the library.
|
||||
|
||||
Several more files are located within the release_docs/ directory with specific
|
||||
details for several common platforms and configurations.
|
||||
|
||||
INSTALL - Start Here. General instructions for compiling and installing the library
|
||||
INSTALL_CMAKE - instructions for building with CMake (Kitware.com)
|
||||
INSTALL_parallel - instructions for building and configuring Parallel HDF5
|
||||
INSTALL_Windows and INSTALL_Cygwin - MS Windows installations.
|
||||
|
||||
|
||||
|
||||
HELP AND SUPPORT
|
||||
----------------
|
||||
Information regarding Help Desk and Support services is available at
|
||||
|
||||
https://portal.hdfgroup.org/display/support/The+HDF+Help+Desk
|
||||
|
||||
|
||||
|
||||
FORUM and NEWS
|
||||
--------------
|
||||
The following public forums are provided for public announcements and discussions
|
||||
of interest to the general HDF5 Community.
|
||||
|
||||
Homepage of the Forum
|
||||
https://forum.hdfgroup.org
|
||||
|
||||
News and Announcement
|
||||
https://forum.hdfgroup.org/c/news-and-announcements-from-the-hdf-group
|
||||
|
||||
HDF5 and HDF4 Topics
|
||||
https://forum.hdfgroup.org/c/hdf5
|
||||
|
||||
These forums are provided as an open and public service for searching and reading.
|
||||
Posting requires completing a simple registration and allows one to join in the
|
||||
conversation. Please read the following instructions pertaining to the Forum's
|
||||
use and configuration
|
||||
https://forum.hdfgroup.org/t/quickstart-guide-welcome-to-the-new-hdf-forum
|
||||
|
||||
|
||||
SNAPSHOTS, PREVIOUS RELEASES AND SOURCE CODE
|
||||
--------------------------------------------
|
||||
Periodically development code snapshots are provided at the following URL:
|
||||
https://gamma.hdfgroup.org/ftp/pub/outgoing/hdf5/snapshots/
|
||||
|
||||
Source packages for current and previous releases are located at:
|
||||
https://portal.hdfgroup.org/display/support/Downloads
|
||||
|
||||
Development code is available at our Github location:
|
||||
https://github.com/HDFGroup/hdf5.git
|
||||
|
||||
@@ -2,7 +2,6 @@ dnl -------------------------------------------------------------------------
|
||||
dnl -------------------------------------------------------------------------
|
||||
dnl
|
||||
dnl Copyright by The HDF Group.
|
||||
dnl Copyright by the Board of Trustees of the University of Illinois.
|
||||
dnl All rights reserved.
|
||||
dnl
|
||||
dnl This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
143
autogen.sh
143
autogen.sh
@@ -3,7 +3,7 @@
|
||||
# Copyright by The HDF Group.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
@@ -50,17 +50,7 @@
|
||||
# Note that aclocal will attempt to include libtool's share/aclocal
|
||||
# directory.
|
||||
#
|
||||
# This script takes two potential options:
|
||||
#
|
||||
# -p
|
||||
#
|
||||
# When this is selected, the autotools versions are set to the paths
|
||||
# and versions used by The HDF Group to produce the released versions
|
||||
# of the library.
|
||||
#
|
||||
# NOTE: This is probably temporary. Once we update our dev machines
|
||||
# to have recent versions of the autotools this option will probably
|
||||
# be removed.
|
||||
# Aside from -h for help, this script takes one potential option:
|
||||
#
|
||||
# -v
|
||||
#
|
||||
@@ -72,9 +62,6 @@ echo "* HDF5 autogen.sh script *"
|
||||
echo "**************************"
|
||||
echo
|
||||
|
||||
# Default is not production
|
||||
production=false
|
||||
|
||||
# Default is not verbose output
|
||||
verbose=false
|
||||
|
||||
@@ -86,10 +73,6 @@ while getopts "$optspec" optchar; do
|
||||
echo
|
||||
echo " -h Print this help message."
|
||||
echo
|
||||
echo " -p Used by THG to use hard-codes autotools"
|
||||
echo " paths on THG machines. Not for non-HDF-Group"
|
||||
echo " users!"
|
||||
echo
|
||||
echo " -v Show more verbose output."
|
||||
echo
|
||||
echo " NOTE: Each tool can be set via an environment variable."
|
||||
@@ -97,11 +80,6 @@ while getopts "$optspec" optchar; do
|
||||
echo
|
||||
exit 0
|
||||
;;
|
||||
p)
|
||||
echo "Setting THG production mode..."
|
||||
echo
|
||||
production=true
|
||||
;;
|
||||
v)
|
||||
echo "Setting verbosity: high"
|
||||
echo
|
||||
@@ -117,84 +95,52 @@ while getopts "$optspec" optchar; do
|
||||
esac
|
||||
done
|
||||
|
||||
if [ "$production" = true ] ; then
|
||||
|
||||
# Production mode
|
||||
#
|
||||
# Hard-code canonical HDF Group tool locations.
|
||||
|
||||
# If paths to tools are not specified, assume they are
|
||||
# located in /usr/hdf/bin/AUTOTOOLS and set paths accordingly.
|
||||
if test -z ${HDF5_AUTOCONF}; then
|
||||
HDF5_AUTOCONF=/usr/hdf/bin/AUTOTOOLS/autoconf
|
||||
fi
|
||||
if test -z ${HDF5_AUTOMAKE}; then
|
||||
HDF5_AUTOMAKE=/usr/hdf/bin/AUTOTOOLS/automake
|
||||
fi
|
||||
if test -z ${HDF5_AUTOHEADER}; then
|
||||
HDF5_AUTOHEADER=/usr/hdf/bin/AUTOTOOLS/autoheader
|
||||
fi
|
||||
if test -z ${HDF5_ACLOCAL}; then
|
||||
HDF5_ACLOCAL=/usr/hdf/bin/AUTOTOOLS/aclocal
|
||||
fi
|
||||
if test -z ${HDF5_LIBTOOL}; then
|
||||
HDF5_LIBTOOL=/usr/hdf/bin/AUTOTOOLS/libtool
|
||||
fi
|
||||
if test -z ${HDF5_M4}; then
|
||||
HDF5_M4=/usr/hdf/bin/AUTOTOOLS/m4
|
||||
fi
|
||||
|
||||
else
|
||||
|
||||
# Not in production mode
|
||||
#
|
||||
# If paths to autotools are not specified, use whatever the system
|
||||
# has installed as the default. We use 'which <tool>' to
|
||||
# show exactly what's being used.
|
||||
if test -z ${HDF5_AUTOCONF}; then
|
||||
HDF5_AUTOCONF=$(which autoconf)
|
||||
fi
|
||||
if test -z ${HDF5_AUTOMAKE}; then
|
||||
HDF5_AUTOMAKE=$(which automake)
|
||||
fi
|
||||
if test -z ${HDF5_AUTOHEADER}; then
|
||||
HDF5_AUTOHEADER=$(which autoheader)
|
||||
fi
|
||||
if test -z ${HDF5_ACLOCAL}; then
|
||||
HDF5_ACLOCAL=$(which aclocal)
|
||||
fi
|
||||
if test -z ${HDF5_LIBTOOL}; then
|
||||
case "`uname`" in
|
||||
Darwin*)
|
||||
# libtool on OS-X is non-gnu
|
||||
HDF5_LIBTOOL=$(which glibtool)
|
||||
;;
|
||||
*)
|
||||
HDF5_LIBTOOL=$(which libtool)
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
if test -z ${HDF5_M4}; then
|
||||
HDF5_M4=$(which m4)
|
||||
fi
|
||||
|
||||
fi # production
|
||||
# If paths to autotools are not specified, use whatever the system
|
||||
# has installed as the default. We use 'command -v <tool>' to
|
||||
# show exactly what's being used (shellcheck complains that 'which'
|
||||
# is non-standard and deprecated).
|
||||
if test -z "${HDF5_AUTOCONF}"; then
|
||||
HDF5_AUTOCONF="$(command -v autoconf)"
|
||||
fi
|
||||
if test -z "${HDF5_AUTOMAKE}"; then
|
||||
HDF5_AUTOMAKE="$(command -v automake)"
|
||||
fi
|
||||
if test -z "${HDF5_AUTOHEADER}"; then
|
||||
HDF5_AUTOHEADER="$(command -v autoheader)"
|
||||
fi
|
||||
if test -z "${HDF5_ACLOCAL}"; then
|
||||
HDF5_ACLOCAL="$(command -v aclocal)"
|
||||
fi
|
||||
if test -z "${HDF5_LIBTOOL}"; then
|
||||
case "$(uname)" in
|
||||
Darwin*)
|
||||
# libtool on OS-X is non-gnu
|
||||
HDF5_LIBTOOL="$(command -v glibtool)"
|
||||
;;
|
||||
*)
|
||||
HDF5_LIBTOOL="$(command -v libtool)"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
if test -z "${HDF5_M4}"; then
|
||||
HDF5_M4="$(command -v m4)"
|
||||
fi
|
||||
|
||||
|
||||
# Make sure that these versions of the autotools are in the path
|
||||
AUTOCONF_DIR=`dirname ${HDF5_AUTOCONF}`
|
||||
LIBTOOL_DIR=`dirname ${HDF5_LIBTOOL}`
|
||||
M4_DIR=`dirname ${HDF5_M4}`
|
||||
AUTOCONF_DIR=$(dirname "${HDF5_AUTOCONF}")
|
||||
LIBTOOL_DIR=$(dirname "${HDF5_LIBTOOL}")
|
||||
M4_DIR=$(dirname "${HDF5_M4}")
|
||||
PATH=${AUTOCONF_DIR}:${LIBTOOL_DIR}:${M4_DIR}:$PATH
|
||||
|
||||
# Make libtoolize match the specified libtool
|
||||
case "`uname`" in
|
||||
case "$(uname)" in
|
||||
Darwin*)
|
||||
# On OS X, libtoolize could be named glibtoolize or
|
||||
# libtoolize. Try the former first, then fall back
|
||||
# to the latter if it's not found.
|
||||
HDF5_LIBTOOLIZE="${LIBTOOL_DIR}/glibtoolize"
|
||||
if [ ! -f $HDF5_LIBTOOLIZE ] ; then
|
||||
if [ ! -f "$HDF5_LIBTOOLIZE" ] ; then
|
||||
HDF5_LIBTOOLIZE="${LIBTOOL_DIR}/libtoolize"
|
||||
fi
|
||||
;;
|
||||
@@ -249,7 +195,7 @@ echo
|
||||
|
||||
# LIBTOOLIZE
|
||||
libtoolize_cmd="${HDF5_LIBTOOLIZE} --copy --force"
|
||||
echo ${libtoolize_cmd}
|
||||
echo "${libtoolize_cmd}"
|
||||
if [ "$verbose" = true ] ; then
|
||||
${HDF5_LIBTOOLIZE} --version
|
||||
fi
|
||||
@@ -264,7 +210,7 @@ if test -e "${LIBTOOL_DIR}/../share/aclocal" ; then
|
||||
aclocal_include="-I ${LIBTOOL_DIR}/../share/aclocal"
|
||||
fi
|
||||
aclocal_cmd="${HDF5_ACLOCAL} --force -I m4 ${aclocal_include}"
|
||||
echo ${aclocal_cmd}
|
||||
echo "${aclocal_cmd}"
|
||||
if [ "$verbose" = true ] ; then
|
||||
${HDF5_ACLOCAL} --version
|
||||
fi
|
||||
@@ -273,7 +219,7 @@ echo
|
||||
|
||||
# AUTOHEADER
|
||||
autoheader_cmd="${HDF5_AUTOHEADER} --force"
|
||||
echo ${autoheader_cmd}
|
||||
echo "${autoheader_cmd}"
|
||||
if [ "$verbose" = true ] ; then
|
||||
${HDF5_AUTOHEADER} --version
|
||||
fi
|
||||
@@ -282,7 +228,7 @@ echo
|
||||
|
||||
# AUTOMAKE
|
||||
automake_cmd="${HDF5_AUTOMAKE} --copy --add-missing --force-missing"
|
||||
echo ${automake_cmd}
|
||||
echo "${automake_cmd}"
|
||||
if [ "$verbose" = true ] ; then
|
||||
${HDF5_AUTOMAKE} --version
|
||||
fi
|
||||
@@ -290,8 +236,11 @@ ${automake_cmd} || exit 1
|
||||
echo
|
||||
|
||||
# AUTOCONF
|
||||
autoconf_cmd="${HDF5_AUTOCONF} --force"
|
||||
echo ${autoconf_cmd}
|
||||
# The "obsolete" warnings category flags our Java macros as obsolete.
|
||||
# Since there is no clear way to upgrade them (Java support in the Autotools
|
||||
# is not great) and they work well enough for now, we suppress those warnings.
|
||||
autoconf_cmd="${HDF5_AUTOCONF} --force --warnings=no-obsolete"
|
||||
echo "${autoconf_cmd}"
|
||||
if [ "$verbose" = true ] ; then
|
||||
${HDF5_AUTOCONF} --version
|
||||
fi
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
The daily tests run copies of some of the scripts in this directory from another repository, notably snapshot and runtest. The copies in this directory should work, but are not used in daily tests, though they should be tested occasionally.
|
||||
|
||||
31
bin/README.md
Normal file
31
bin/README.md
Normal file
@@ -0,0 +1,31 @@
|
||||
# Scripts in `bin` and their purpose
|
||||
|
||||
Programs run via `autogen.sh` (or the equivalent in CMake) are indicated.
|
||||
|
||||
|Program|Purpose|
|
||||
|-------|-------|
|
||||
|`buildhdf5`|Convenience script to build HDF5 using the Autotools|
|
||||
|`checkapi`|Checks if public API calls are used in internal functions|
|
||||
|`checkposix`|Checks if C/POSIX calls are prefixed with `HD`|
|
||||
|`chkcopyright`|Checks if files have appropriate copyright statements|
|
||||
|`cmakehdf5`|Convenience script to build HDF5 using CMake|
|
||||
|`debug-ohdr`|Examines debug output from `H5O_open/close` to look for open objects|
|
||||
|`format_source`|Runs `clang-format` over the source files, applying our rules|
|
||||
|`genparser`|Creates the flex/bison-based parser files in the high-level library|
|
||||
|`h5cc.in`|Input file from which h5cc is created|
|
||||
|`h5redeploy.in`|Input file from which h5redeploy is created|
|
||||
|`h5vers`|Updates the library version number|
|
||||
|`make_err`|Generates the H5E header files (called in `autogen.sh`)|
|
||||
|`make_vers`|Generates H5version.h (called in `autogen.sh`)|
|
||||
|`make_overflow`|Generates H5overflow.h (called in `autogen.sh`)|
|
||||
|`output_filter`|Used in the tools test code to strip extraneous output before we diff files|
|
||||
|`restore.sh`|Removes files generated by `autogen.sh`|
|
||||
|`runbkprog`|Used by CMake to run test programs in the background|
|
||||
|`switch_maint_mode`|Switches maintainer mode on/off in `configure.ac`|
|
||||
|`trace`|Adds `TRACE` macros to HDF5 C library source files (run by `autogen.sh`)|
|
||||
|`warnhist`|Generates compiler warning statistics for gcc/clang when fed output of make|
|
||||
|
||||
## TODO
|
||||
|
||||
* chkcopyright is currently semi-broken as it doesn't handle the full variety of copyright headers we need. We're leaving it in place, though, in the hopes that someone will update it in the future.
|
||||
* Extending warnhist to better understand the output of additional compilers/languages would be nice.
|
||||
@@ -11,11 +11,13 @@ CTEST_CMD=`which ctest`
|
||||
|
||||
cd @HDF5_BINARY_DIR@
|
||||
if [[ $SUMMARY_FILE == *"ctestS"* ]]; then
|
||||
CMD="${CTEST_CMD} . -E MPI_TEST_ -C Release -j 32 -T test"
|
||||
CMD="${CTEST_CMD} -S ctest_serial.cmake"
|
||||
qsub -t 60 -n 1 -q debug-flat-quad -A ${ACCOUNT_ID} ${CMD} >& ${SUMMARY_FILE}
|
||||
echo "Done running ctest serial command."
|
||||
touch ctestS.done
|
||||
else
|
||||
CMD="${CTEST_CMD} . -R MPI_TEST_ ${SKIP_TESTS} -C Release -T test"
|
||||
CMD="${CTEST_CMD} -S ctest_parallel.cmake"
|
||||
qsub -t 60 -n 1 -q debug-flat-quad -A ${ACCOUNT_ID} ${CMD} >& ${SUMMARY_FILE}
|
||||
echo "Done running ctest parallel command."
|
||||
touch ctestP.done
|
||||
fi
|
||||
|
||||
qsub -t 60 -n 1 -q debug-flat-quad -A ${ACCOUNT_ID} ${CMD} >& ${SUMMARY_FILE}
|
||||
|
||||
echo "Done running ctest parallel command."
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
|
||||
cd @HDF5_BINARY_DIR@
|
||||
echo "Run parallel test command. Test output will be in build/ctestP.out"
|
||||
ctest . -R MPI_TEST_ -C Release -T test >& ctestP.out
|
||||
ctest -S ctest_parallel.cmake >& ctestP.out
|
||||
|
||||
echo "Done running ctest parallel command."
|
||||
touch ctestP.done
|
||||
|
||||
@@ -8,7 +8,8 @@
|
||||
#SBATCH --job-name=h5_ctestP
|
||||
|
||||
cd @HDF5_BINARY_DIR@
|
||||
ctest . -R MPI_TEST_ -C Release -T test >& ctestP.out
|
||||
|
||||
echo "Done running ctestP.sl"
|
||||
echo "Run parallel test command. Test output will be in build/ctestP.out"
|
||||
ctest -S ctest_parallel.cmake >& ctestP.out
|
||||
|
||||
echo "Done running ctest parallel command."
|
||||
touch ctestP.done
|
||||
|
||||
@@ -11,8 +11,7 @@
|
||||
|
||||
cd @HDF5_BINARY_DIR@
|
||||
echo "Run command. Test output will be in build/ctestS.out"
|
||||
ctest . -E MPI_TEST_ -C Release -j 32 -T test >& ctestS.out
|
||||
ctest -S ctest_serial.cmake >& ctestS.out
|
||||
|
||||
##$CMD >& ctestS.out
|
||||
echo "Done running command."
|
||||
|
||||
touch ctestS.done
|
||||
|
||||
@@ -8,8 +8,8 @@
|
||||
#SBATCH --job-name=h5_ctestS
|
||||
|
||||
cd @HDF5_BINARY_DIR@
|
||||
CMD="ctest . -E MPI_TEST_ -C Release -j 32 -T test"
|
||||
echo "Run command. Test output will be in build/ctestS.out"
|
||||
ctest -S ctest_serial.cmake >& ctestS.out
|
||||
|
||||
echo "Run $CMD. Test output will be in build/ctestS.out"
|
||||
$CMD >& ctestS.out
|
||||
echo "Done running $CMD"
|
||||
echo "Done running command."
|
||||
touch ctestS.done
|
||||
|
||||
12
bin/batch/ctest_parallel.cmake.in
Normal file
12
bin/batch/ctest_parallel.cmake.in
Normal file
@@ -0,0 +1,12 @@
|
||||
if(NOT "$ENV{CI_SITE_NAME}" STREQUAL "")
|
||||
set(CTEST_SITE "$ENV{CI_SITE_NAME}")
|
||||
endif()
|
||||
if(NOT "$ENV{CI_BUILD_NAME}" STREQUAL "")
|
||||
set(CTEST_BUILD_NAME "$ENV{CI_BUILD_NAME}")
|
||||
endif()
|
||||
|
||||
ctest_start ("$ENV{CI_MODEL}" "@HDF5_SOURCE_DIR@" "@HDF5_BINARY_DIR@" APPEND)
|
||||
ctest_test (BUILD "@HDF5_BINARY_DIR@" APPEND INCLUDE MPI_TEST_ RETURN_VALUE res)
|
||||
if (${res} LESS 0 OR ${res} GREATER 0)
|
||||
file (APPEND ${CTEST_SCRIPT_DIRECTORY}/FailedCTest.txt "Failed Tests: ${res}\n")
|
||||
endif ()
|
||||
12
bin/batch/ctest_serial.cmake.in
Normal file
12
bin/batch/ctest_serial.cmake.in
Normal file
@@ -0,0 +1,12 @@
|
||||
if(NOT "$ENV{CI_SITE_NAME}" STREQUAL "")
|
||||
set(CTEST_SITE "$ENV{CI_SITE_NAME}")
|
||||
endif()
|
||||
if(NOT "$ENV{CI_BUILD_NAME}" STREQUAL "")
|
||||
set(CTEST_BUILD_NAME "$ENV{CI_BUILD_NAME}")
|
||||
endif()
|
||||
|
||||
ctest_start ("$ENV{CI_MODEL}" "@HDF5_SOURCE_DIR@" "@HDF5_BINARY_DIR@" APPEND)
|
||||
ctest_test (BUILD "@HDF5_BINARY_DIR@" APPEND EXCLUDE MPI_TEST_ PARALLEL_LEVEL 32 RETURN_VALUE res)
|
||||
if (${res} LESS 0 OR ${res} GREATER 0)
|
||||
file (APPEND ${CTEST_SCRIPT_DIRECTORY}/FailedCTest.txt "Failed Tests: ${res}\n")
|
||||
endif ()
|
||||
@@ -9,8 +9,8 @@
|
||||
#SBATCH --job-name=h5_ctestP
|
||||
|
||||
cd @HDF5_BINARY_DIR@
|
||||
#run parallel tests except t_cache_image test
|
||||
ctest . -R MPI_TEST_ -C Release -T test >& ctestP.out
|
||||
|
||||
echo "Done running $CMD"
|
||||
echo "Run parallel test command. Test output will be in build/ctestP.out"
|
||||
ctest -S ctest_parallel.cmake >& ctestP.out
|
||||
|
||||
echo "Done running ctest parallel command."
|
||||
touch ctestP.done
|
||||
|
||||
@@ -9,9 +9,8 @@
|
||||
#SBATCH --job-name=h5_ctestS
|
||||
|
||||
cd @HDF5_BINARY_DIR@
|
||||
CMD="ctest . -E MPI_TEST_ -C Release -j 32 -T test"
|
||||
|
||||
echo "Run $CMD. Test output will be in build/ctestS.out"
|
||||
$CMD >& ctestS.out
|
||||
echo "Done running $CMD"
|
||||
echo "Run command. Test output will be in build/ctestS.out"
|
||||
ctest -S ctest_serial.cmake >& ctestS.out
|
||||
|
||||
echo "Done running command."
|
||||
touch ctestS.done
|
||||
|
||||
@@ -15,6 +15,8 @@
|
||||
|
||||
cd @HDF5_BINARY_DIR@
|
||||
echo "Run parallel test command. Test output will be in build/ctestP.out"
|
||||
ctest . -R 'MPI_TEST_' -C Release -T test >& ctestP.out
|
||||
ctest -S ctest_parallel.cmake >& ctestP.out
|
||||
|
||||
echo "Done running ctest parallel command."
|
||||
touch ctestP.done
|
||||
~
|
||||
|
||||
@@ -11,8 +11,7 @@
|
||||
|
||||
cd @HDF5_BINARY_DIR@
|
||||
echo "Run command. Test output will be in build/ctestS.out"
|
||||
ctest . -E 'MPI_TEST_' -C Release -j 32 -T test >& ctestS.out
|
||||
ctest -S ctest_serial.cmake >& ctestS.out
|
||||
|
||||
##$CMD >& ctestS.out
|
||||
echo "Done running command."
|
||||
|
||||
touch ctestS.done
|
||||
|
||||
258
bin/bbrelease
258
bin/bbrelease
@@ -1,7 +1,6 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
@@ -14,32 +13,13 @@
|
||||
|
||||
# Make a release of hdf5.
|
||||
#
|
||||
# Programmer: Robb Matzke
|
||||
# Creation date: on or before 1998-01-29.
|
||||
# NOTE:
|
||||
# This script differs from bin/release in that this has an added
|
||||
# --revision option to create private releases with the code revision
|
||||
# hash in the version strings.
|
||||
#
|
||||
# Modifications
|
||||
# Robb Matzke, 1999-07-16
|
||||
# The SunOS 5.6 sed *must* have slashes as delimiters. I changed things like
|
||||
# `sed s+/CVS++' to `sed 's/\/CVS//'
|
||||
#
|
||||
# Albert Cheng, 1999-10-26
|
||||
# Moved the MANIFEST checking to a separate command file so that
|
||||
# it can be invoked individually.
|
||||
#
|
||||
# Albert Cheng, 2004-08-14
|
||||
# Added the --private option.
|
||||
#
|
||||
# James Laird, 2005-09-07
|
||||
# Added the md5 method.
|
||||
#
|
||||
# Larry Knox, 2016-08-30
|
||||
# Added the --revision option to create private releases with the
|
||||
# code revision hash in the version strings. Currently the version
|
||||
# of this script with the --revision option is named bbrelease. It
|
||||
# can probably be merged into the original release script in the
|
||||
# future.
|
||||
# Commands to get the revision hash have now been converted to git
|
||||
# to match the source repository change.
|
||||
# This script can probably be merged into the original release script in
|
||||
# the future.
|
||||
|
||||
# Function definitions
|
||||
#
|
||||
@@ -47,27 +27,27 @@
|
||||
USAGE()
|
||||
{
|
||||
cat << EOF
|
||||
Usage: $0 -d <dir> [--docver BRANCHNAME] [-h] [--nocheck] [--private] <methods> ...
|
||||
-d DIR The name of the directory where the release(s) should be
|
||||
Usage: $0 -d <dir> [-h] [--private] [--revision [--branch BRANCHNAME]] <methods> ...
|
||||
-d DIR The name of the directory where the release(s) should be
|
||||
placed.
|
||||
--docver BRANCHNAME This is added for 1.8 and beyond to get the correct
|
||||
version of documentation files from the hdf5docs
|
||||
--branch BRANCHNAME This is to get the correct version of the branch name from the
|
||||
repository. BRANCHNAME for v1.8 should be hdf5_1_8.
|
||||
-h print the help page.
|
||||
--nocheck Ignore errors in MANIFEST file.
|
||||
--private Make a private release with today's date in version information.
|
||||
--revision Make a private release with the code revision number in version information.
|
||||
--private Make a private release with today's date in version information.
|
||||
--revision Make a private release with the code revision number in version information.
|
||||
This allows --branch to be used for the file name.
|
||||
--branch BRANCHNAME This is to get the correct version of the branch name from the
|
||||
repository. BRANCHNAME for v1.8 should be hdf5_1_8.
|
||||
|
||||
This must be run at the top level of the source directory.
|
||||
The other command-line options are the names of the programs to use
|
||||
for compressing the resulting tar archive (if none are given then
|
||||
"tar" is assumed):
|
||||
|
||||
tar -- use tar and don't do any compressing.
|
||||
gzip -- use gzip with "-9" and append ".gz" to the output name.
|
||||
tar -- use tar and don't do any compressing.
|
||||
gzip -- use gzip with "-9" and append ".gz" to the output name.
|
||||
bzip2 -- use bzip2 with "-9" and append ".bz2" to the output name.
|
||||
zip -- convert all text files to DOS style and form a zip file for Windows use.
|
||||
doc -- produce the latest doc tree in addition to the archive.
|
||||
zip -- convert all text files to DOS style and form a zip file for Windows use.
|
||||
|
||||
An md5 checksum is produced for each archive created and stored in the md5 file.
|
||||
|
||||
@@ -97,15 +77,10 @@ EOF
|
||||
# Function name: tar2zip
|
||||
# Convert the release tarball to a Windows zipball.
|
||||
#
|
||||
# Programmer: Albert Cheng
|
||||
# Creation date: 2014-04-23
|
||||
#
|
||||
# Modifications
|
||||
#
|
||||
# Steps:
|
||||
# 1. untar the tarball in a temporary directory;
|
||||
# Note: do this in a temporary directory to avoid changing
|
||||
# the original source directory which maybe around.
|
||||
# the original source directory which may be around.
|
||||
# 2. convert all its text files to DOS (LF-CR) style;
|
||||
# 3. form a zip file which is usable by Windows users.
|
||||
#
|
||||
@@ -119,8 +94,8 @@ EOF
|
||||
tar2zip()
|
||||
{
|
||||
if [ $# -ne 3 ]; then
|
||||
echo "usage: tar2zip <tarfilename> <zipfilename>"
|
||||
return 1
|
||||
echo "usage: tar2zip <tarfilename> <zipfilename>"
|
||||
return 1
|
||||
fi
|
||||
ztmpdir=/tmp/tmpdir$$
|
||||
mkdir -p $ztmpdir
|
||||
@@ -132,10 +107,10 @@ tar2zip()
|
||||
(cd $ztmpdir; tar xf -) < $tarfile
|
||||
# sanity check
|
||||
if [ ! -d $ztmpdir/$version ]; then
|
||||
echo "untar did not create $ztmpdir/$version source dir"
|
||||
# cleanup
|
||||
rm -rf $ztmpdir
|
||||
return 1
|
||||
echo "untar did not create $ztmpdir/$version source dir"
|
||||
# cleanup
|
||||
rm -rf $ztmpdir
|
||||
return 1
|
||||
fi
|
||||
# step 2: convert text files
|
||||
# There maybe a simpler way to do this.
|
||||
@@ -144,11 +119,11 @@ tar2zip()
|
||||
# -q quiet mode
|
||||
# grep redirect output to /dev/null because -q or -s are not portable.
|
||||
find $ztmpdir/$version | \
|
||||
while read inf; do \
|
||||
if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \
|
||||
unix2dos -q -k $inf; \
|
||||
fi\
|
||||
done
|
||||
while read inf; do \
|
||||
if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \
|
||||
unix2dos -q -k $inf; \
|
||||
fi\
|
||||
done
|
||||
# step 3: make zipball
|
||||
# -9 maximum compression
|
||||
# -y Store symbolic links as such in the zip archive
|
||||
@@ -163,14 +138,6 @@ tar2zip()
|
||||
|
||||
# This command must be run at the top level of the hdf5 source directory.
|
||||
# Verify this requirement.
|
||||
# Since we are running bbrelease to create an HDF5 source tarfile for buildbot
|
||||
# testing with source that is not for release, there is not a file named
|
||||
# "configure" but there will be one named "configure.ac". The "configure"
|
||||
# file will be created when autogen.sh runs. There probably will always
|
||||
# be a bin/release file, but just in case it is removed, we can check for
|
||||
# this script, bbrelease, in the bin directory. The bin/release script should
|
||||
# continue to check for "configure" because it should be present in release
|
||||
# source.
|
||||
if [ ! \( -f configure.ac -a -f bin/bbrelease \) ]; then
|
||||
echo "$0 must be run at the top level of the hdf5 source directory"
|
||||
exit 1
|
||||
@@ -182,24 +149,22 @@ VERS=`perl bin/h5vers`
|
||||
VERS_OLD=
|
||||
test "$VERS" || exit 1
|
||||
verbose=yes
|
||||
check=yes
|
||||
release_date=`date +%F`
|
||||
today=`date +%Y%m%d`
|
||||
pmode='no'
|
||||
revmode='no'
|
||||
tmpdir="../#release_tmp.$$" # tmp work directory
|
||||
DOC_URL=https://git@bitbucket.hdfgroup.org/scm/hdffv/hdf5doc.git
|
||||
tmpdir="../#release_tmp.$$" # tmp work directory
|
||||
CPPLUS_RM_NAME=cpplus_RM
|
||||
|
||||
# Restore previous Version information
|
||||
RESTORE_VERSION()
|
||||
{
|
||||
if [ X-${VERS_OLD} != X- ]; then
|
||||
echo restoring version information back to $VERS_OLD
|
||||
rm -f config/lt_vers.am
|
||||
cp $tmpdir/lt_vers.am config/lt_vers.am
|
||||
bin/h5vers -s $VERS_OLD
|
||||
VERS_OLD=
|
||||
echo restoring version information back to $VERS_OLD
|
||||
rm -f config/lt_vers.am
|
||||
cp $tmpdir/lt_vers.am config/lt_vers.am
|
||||
bin/h5vers -s $VERS_OLD
|
||||
VERS_OLD=
|
||||
fi
|
||||
}
|
||||
|
||||
@@ -209,35 +174,32 @@ while [ -n "$1" ]; do
|
||||
arg=$1
|
||||
shift
|
||||
case "$arg" in
|
||||
-d)
|
||||
DEST=$1
|
||||
shift
|
||||
;;
|
||||
--nocheck)
|
||||
check=no
|
||||
;;
|
||||
-h)
|
||||
USAGE
|
||||
exit 0
|
||||
;;
|
||||
--private)
|
||||
pmode=yes
|
||||
;;
|
||||
-d)
|
||||
DEST=$1
|
||||
shift
|
||||
;;
|
||||
-h)
|
||||
USAGE
|
||||
exit 0
|
||||
;;
|
||||
--private)
|
||||
pmode=yes
|
||||
;;
|
||||
--revision)
|
||||
revmode=yes
|
||||
;;
|
||||
--docver)
|
||||
DOCVERSION=$1
|
||||
--branch)
|
||||
BRANCHNAME=$1
|
||||
shift
|
||||
;;
|
||||
-*)
|
||||
echo "Unknown switch: $arg" 1>&2
|
||||
USAGE
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
methods="$methods $arg"
|
||||
;;
|
||||
-*)
|
||||
echo "Unknown switch: $arg" 1>&2
|
||||
USAGE
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
methods="$methods $arg"
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
@@ -276,14 +238,17 @@ if [ X$revmode = Xyes ]; then
|
||||
# Copy old version of config/lt_vers.am, since it's hard to
|
||||
# "undo" changes to it.
|
||||
cp config/lt_vers.am $tmpdir
|
||||
branch=`git branch | grep '*' | awk '{print $NF}'`
|
||||
if [ "${BRANCHNAME}" = "" ]; then
|
||||
BRANCHNAME=`git symbolic-ref -q --short HEAD`
|
||||
fi
|
||||
revision=`git rev-parse --short HEAD`
|
||||
# Set version information to m.n.r-r$revision.
|
||||
# Set version information to m.n.r-r$revision.
|
||||
# (h5vers does not correctly handle just m.n.r-$today.)
|
||||
VERS=`echo $VERS | sed -e s/-.*//`-$revision
|
||||
echo Private release of $VERS
|
||||
HDF5_VERS=hdf5-$BRANCHNAME-$revision
|
||||
echo file base of $HDF5_VERS
|
||||
bin/h5vers -s $VERS
|
||||
HDF5_VERS=hdf5-$branch-$revision
|
||||
# use a generic directory name for revision releases
|
||||
HDF5_IN_VERS=hdfsrc
|
||||
else
|
||||
@@ -299,30 +264,17 @@ if [ ! -d $DEST ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check the validity of the MANIFEST file.
|
||||
bin/chkmanifest || fail=yes
|
||||
if [ "X$fail" = "Xyes" ]; then
|
||||
if [ $check = yes ]; then
|
||||
exit 1
|
||||
else
|
||||
echo "Continuing anyway..."
|
||||
fi
|
||||
fi
|
||||
|
||||
# Create a manifest that contains only files for distribution.
|
||||
MANIFEST=$tmpdir/H5_MANIFEST
|
||||
grep '^\.' MANIFEST | grep -v _DO_NOT_DISTRIBUTE_ >$MANIFEST
|
||||
|
||||
# Prepare the source tree for a release.
|
||||
#ln -s `pwd` $tmpdir/$HDF5_VERS || exit 1
|
||||
# Create a symlink to the source so files in the tarball have the prefix
|
||||
# we want (gnu's --transform isn't portable)
|
||||
ln -s `pwd` $tmpdir/$HDF5_IN_VERS || exit 1
|
||||
|
||||
# Save a backup copy of Makefile if exists.
|
||||
test -f Makefile && mv Makefile $tmpdir/Makefile.x
|
||||
cp -p Makefile.dist Makefile
|
||||
|
||||
# Update README.txt and release_docs/RELEASE.txt with release information in
|
||||
# Update README.md and release_docs/RELEASE.txt with release information in
|
||||
# line 1.
|
||||
for f in README.txt release_docs/RELEASE.txt; do
|
||||
for f in README.md release_docs/RELEASE.txt; do
|
||||
echo "HDF5 version $VERS released on $release_date" >$f.x
|
||||
sed -e 1d $f >>$f.x
|
||||
mv $f.x $f
|
||||
@@ -330,64 +282,38 @@ for f in README.txt release_docs/RELEASE.txt; do
|
||||
chmod 644 $f
|
||||
done
|
||||
|
||||
# trunk is different than branches.
|
||||
if [ "${DOCVERSION}" ]; then
|
||||
DOC_URL=https://git@bitbucket.hdfgroup.org/scm/hdffv/hdf5doc.git -b ${DOCVERSION}
|
||||
fi
|
||||
|
||||
# Create the tar file
|
||||
test "$verbose" && echo " Running tar..." 1>&2
|
||||
( \
|
||||
cd $tmpdir; \
|
||||
tar cf $HDF5_VERS.tar $HDF5_IN_VERS/Makefile \
|
||||
`sed 's/^\.\//'$HDF5_IN_VERS'\//' $MANIFEST` || exit 1 \
|
||||
)
|
||||
(cd "$tmpdir" && exec tar -ch --exclude-vcs -f "$HDF5_VERS.tar" "./$HDF5_IN_VERS" || exit 1 )
|
||||
|
||||
# Compress
|
||||
MD5file=$HDF5_VERS.md5
|
||||
cp /dev/null $DEST/$MD5file
|
||||
for comp in $methods; do
|
||||
case $comp in
|
||||
tar)
|
||||
cp -p $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.tar
|
||||
(cd $DEST; md5sum $HDF5_VERS.tar >> $MD5file)
|
||||
;;
|
||||
gzip)
|
||||
test "$verbose" && echo " Running gzip..." 1>&2
|
||||
gzip -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.gz
|
||||
(cd $DEST; md5sum $HDF5_VERS.tar.gz >> $MD5file)
|
||||
;;
|
||||
bzip2)
|
||||
test "$verbose" && echo " Running bzip2..." 1>&2
|
||||
bzip2 -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.bz2
|
||||
(cd $DEST; md5sum $HDF5_VERS.tar.bz2 >> $MD5file)
|
||||
;;
|
||||
zip)
|
||||
test "$verbose" && echo " Creating zip ball..." 1>&2
|
||||
tar2zip $HDF5_IN_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2
|
||||
(cd $DEST; md5sum $HDF5_VERS.zip >> $MD5file)
|
||||
;;
|
||||
doc)
|
||||
if [ "${DOCVERSION}" = "" ]; then
|
||||
DOCVERSION=master
|
||||
fi
|
||||
test "$verbose" && echo " Creating docs..." 1>&2
|
||||
# Check out docs from git repo
|
||||
(cd $tmpdir; git clone $DOC_URL > /dev/null) || exit 1
|
||||
# Create doxygen C++ RM
|
||||
(cd c++/src && doxygen cpp_doc_config > /dev/null ) || exit 1
|
||||
# Replace version of C++ RM with just-created version
|
||||
rm -rf $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME
|
||||
mv c++/src/$CPPLUS_RM_NAME $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME
|
||||
# Compress the docs and move them to the release area
|
||||
mv $tmpdir/$DOCVERSION $tmpdir/${HDF5_VERS}_docs
|
||||
(cd $tmpdir && tar cf ${HDF5_VERS}_docs.tar ${HDF5_VERS}_docs)
|
||||
mv $tmpdir/${HDF5_VERS}_docs.tar $DEST
|
||||
;;
|
||||
*)
|
||||
echo "***Error*** Unknown method $comp"
|
||||
exit 1
|
||||
;;
|
||||
tar)
|
||||
cp -p $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.tar
|
||||
(cd $DEST; md5sum $HDF5_VERS.tar >> $MD5file)
|
||||
;;
|
||||
gzip)
|
||||
test "$verbose" && echo " Running gzip..." 1>&2
|
||||
gzip -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.gz
|
||||
(cd $DEST; md5sum $HDF5_VERS.tar.gz >> $MD5file)
|
||||
;;
|
||||
bzip2)
|
||||
test "$verbose" && echo " Running bzip2..." 1>&2
|
||||
bzip2 -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.bz2
|
||||
(cd $DEST; md5sum $HDF5_VERS.tar.bz2 >> $MD5file)
|
||||
;;
|
||||
zip)
|
||||
test "$verbose" && echo " Creating zip ball..." 1>&2
|
||||
tar2zip $HDF5_IN_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2
|
||||
(cd $DEST; md5sum $HDF5_VERS.zip >> $MD5file)
|
||||
;;
|
||||
*)
|
||||
echo "***Error*** Unknown method $comp"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
@@ -408,4 +334,6 @@ fi
|
||||
# Remove temporary things
|
||||
rm -rf $tmpdir
|
||||
|
||||
echo "DONE"
|
||||
|
||||
exit 0
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
#!/usr/bin/env perl
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
@@ -4,7 +4,6 @@ use warnings;
|
||||
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
@@ -115,7 +114,7 @@ foreach $arg (@ARGV) {
|
||||
|
||||
# These are really HDF5 functions/macros even though they don't
|
||||
# start with `h' or `H'.
|
||||
next if $name =~ /^FUNC_(ENTER|LEAVE)(_(NO)?API|_PACKAGE|_STATIC)?(_NOFS|_NOCLEAR|_NOINIT|_NOPUSH)?(_NOFUNC|_TAG)?$/;
|
||||
next if $name =~ /^FUNC_(ENTER|LEAVE)(_(NO)?API|_PACKAGE|_STATIC)?(_NAMECHECK_ONLY|_NOFS|_NOCLEAR|_NOINIT|_NOPUSH)?(_NOFUNC|_TAG)?$/;
|
||||
next if $name =~ /^(BEGIN|END)_FUNC$/;
|
||||
next if $name =~ /^U?INT(8|16|32|64)(ENCODE|DECODE)(_VAR)?$/;
|
||||
next if $name =~ /^CI_(PRINT_STATS|INC_SRC|INC_DST)$/;
|
||||
@@ -123,17 +122,20 @@ foreach $arg (@ARGV) {
|
||||
next if $name =~ /^(MIN3?|MAX3?|NELMTS|POWER_OF_TWO|REGION_OVERFLOW)$/;
|
||||
next if $name =~ /^(SIZE_OVERFLOW|UNIQUE_MEMBERS|S_ISDIR)$/;
|
||||
next if $name =~ /^addr_defined$/;
|
||||
next if $name =~ /^TERMINATOR$/;
|
||||
|
||||
# These functions/macros are exempt.
|
||||
# op, cb, and OP are often spuriously flagged so ignore them.
|
||||
next if $name =~ /^(main|op|cb|OP)$/;
|
||||
# Ignore callback invocation
|
||||
next if $name =~ /^(op|cb|OP|iter_op|func)$/;
|
||||
|
||||
# Ignore main
|
||||
next if $name =~ /^main$/;
|
||||
|
||||
# This often appears in preprocessor lines that span multiple lines
|
||||
next if $name =~ /^(defined)$/;
|
||||
|
||||
# These are Windows system calls. Ignore them.
|
||||
next if $name =~ /^(_get_osfhandle|GetFileInformationByHandle|SetFilePointer|GetLastError|SetEndOfFile)$/;
|
||||
next if $name =~ /^(FindNextFile|FindClose|_tzset|Wgettimeofday|GetSystemTimeAsFileTime|Wgetlogin|GetUserName)$/;
|
||||
next if $name =~ /^(FindNextFile|FindClose|_tzset|Wgettimeofday|GetSystemTimeAsFileTime|GetUserName)$/;
|
||||
next if $name =~ /^(DeleteCriticalSection|TlsFree|TlsGetValue|CreateThread)$/;
|
||||
next if $name =~ /^(ExpandEnvironmentStringsA|LockFileEx|UnlockFileEx)$/;
|
||||
next if $name =~ /^(DllMain|LocalAlloc|LocalFree)$/;
|
||||
@@ -141,7 +143,7 @@ foreach $arg (@ARGV) {
|
||||
next if $name =~ /^(_beginthread|(Initialize|Enter|Leave)CriticalSection|TlsAlloc)$/;
|
||||
|
||||
# These are MPI function calls. Ignore them.
|
||||
next if $name =~ /^(MPI_|MPE_)/;
|
||||
next if $name =~ /^(MPI_)/;
|
||||
|
||||
# These are POSIX threads function calls. Ignore them.
|
||||
next if $name =~ /^pthread_/;
|
||||
|
||||
@@ -1,82 +0,0 @@
|
||||
#!/bin/sh
|
||||
##
|
||||
## Copyright by the Board of Trustees of the University of Illinois.
|
||||
## All rights reserved.
|
||||
##
|
||||
## This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
## terms governing use, modification, and redistribution, is contained in
|
||||
## the COPYING file, which can be found at the root of the source code
|
||||
## distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
## If you do not have access to either file, you may request a copy from
|
||||
## help@hdfgroup.org.
|
||||
##
|
||||
# Check that all the configure files are properly generated.
|
||||
#
|
||||
# Programmer: Albert Cheng
|
||||
# Created Date: 2004/12/07
|
||||
|
||||
#
|
||||
# Configure: should be generated by autoconf version 2.69.
|
||||
# autoconf: should be of version 2.69.
|
||||
|
||||
# variable initialization
|
||||
nerrors=0
|
||||
AUTOCONFVERSION=2.69
|
||||
AUTOCONFVERSIONLEAD='Generated by GNU Autoconf'
|
||||
CONFIGUREFILES="configure"
|
||||
|
||||
|
||||
# Function definitions
|
||||
#
|
||||
# PRINTMSG
|
||||
# Print a one line message left justified in a field of 70 characters
|
||||
# without newline. More output for this line later.
|
||||
#
|
||||
PRINTMSG() {
|
||||
SPACES=" "
|
||||
echo "$* $SPACES" | cut -c1-70 | tr -d '\012'
|
||||
}
|
||||
|
||||
# print result passed.
|
||||
PASSED() {
|
||||
echo " PASSED"
|
||||
}
|
||||
|
||||
# print result failed.
|
||||
FAILED() {
|
||||
echo "*FAILED*"
|
||||
}
|
||||
|
||||
|
||||
# Main body
|
||||
|
||||
# Check configure files
|
||||
# The autoconf version should be among the first 5 lines.
|
||||
echo "Check autoconf version. Should be version $AUTOCONFVERSION"
|
||||
for xf in $CONFIGUREFILES; do
|
||||
PRINTMSG $xf
|
||||
if [ ! -f $xf ]; then
|
||||
FAILED
|
||||
echo File not found
|
||||
nerrors=`expr $nerrors + 1`
|
||||
continue
|
||||
fi
|
||||
autoconf_version=`head -5 $xf | grep "$AUTOCONFVERSIONLEAD"`
|
||||
echo $autoconf_version | grep "$AUTOCONFVERSIONLEAD $AUTOCONFVERSION" > /dev/null 2>&1
|
||||
if [ $? -eq 0 ]; then
|
||||
PASSED
|
||||
else
|
||||
FAILED
|
||||
echo "Expected: $AUTOCONFVERSIONLEAD $AUTOCONFVERSION"
|
||||
echo "Got: $autoconf_version"
|
||||
nerrors=`expr $nerrors + 1`
|
||||
fi
|
||||
done
|
||||
|
||||
|
||||
# Summary
|
||||
echo $0 found $nerrors errors
|
||||
if [ $nerrors != 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
exit 0
|
||||
@@ -1,7 +1,6 @@
|
||||
#! /bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
@@ -10,7 +9,6 @@
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
#
|
||||
|
||||
# Check Copyright notice.
|
||||
# Check that all the files have the proper copyright notice.
|
||||
@@ -39,8 +37,8 @@ NFIXEDFILES=0 # Number of files fixed.
|
||||
NFIXFAILEDFILES=0 # Number of files fix failed.
|
||||
NUMBEGINLINES=60 # Copyright notice should be located within the
|
||||
# this number of lines at the beginning of the file.
|
||||
UICOPYRIGHTSTR="Copyright by the Board of Trustees of the University of Illinois"
|
||||
THGCOPYRIGHTSTR="Copyright by The HDF Group."
|
||||
UICOPYRIGHTSTR="Copyright by the Board of Trustees of the University of Illinois"
|
||||
|
||||
PASSEDLOG=/tmp/h5chkright_passed.$$
|
||||
SKIPPEDLOG=/tmp/h5chkright_skipped.$$
|
||||
@@ -111,113 +109,92 @@ BUILDCOPYRIGHT()
|
||||
# C and C++ source Copyright notice
|
||||
cat > ${C_COPYRIGHT} << \EOF
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of HDF5. The full HDF5 copyright notice, including *
|
||||
* terms governing use, modification, and redistribution, is contained in *
|
||||
* the files COPYING and Copyright.html. COPYING can be found at the root *
|
||||
* of the source code distribution tree; Copyright.html can be found at the *
|
||||
* root level of an installed copy of the electronic HDF5 document set and *
|
||||
* is linked from the top-level documents page. It can also be found at *
|
||||
* http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
|
||||
* access to either file, you may request a copy from help@hdfgroup.org. *
|
||||
* the COPYING file, which can be found at the root of the source code *
|
||||
* distribution tree, or in https://www.hdfgroup.org/licenses. *
|
||||
* If you do not have access to either file, you may request a copy from *
|
||||
* help@hdfgroup.org. *
|
||||
EOF
|
||||
|
||||
# Fortran9X source Copyright notice
|
||||
cat > ${FTN_COPYRIGHT} << \EOF
|
||||
! Copyright by The HDF Group. *
|
||||
! Copyright by the Board of Trustees of the University of Illinois. *
|
||||
! All rights reserved. *
|
||||
! *
|
||||
! This file is part of HDF5. The full HDF5 copyright notice, including *
|
||||
! terms governing use, modification, and redistribution, is contained in *
|
||||
! the files COPYING and Copyright.html. COPYING can be found at the root *
|
||||
! of the source code distribution tree; Copyright.html can be found at the *
|
||||
! root level of an installed copy of the electronic HDF5 document set and *
|
||||
! is linked from the top-level documents page. It can also be found at *
|
||||
! http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
|
||||
! access to either file, you may request a copy from help@hdfgroup.org. *
|
||||
! the COPYING file, which can be found at the root of the source code *
|
||||
! distribution tree, or in https://www.hdfgroup.org/licenses. *
|
||||
! If you do not have access to either file, you may request a copy from *
|
||||
! help@hdfgroup.org. *
|
||||
EOF
|
||||
|
||||
# HTML file Copyright notice
|
||||
cat > ${HTM_COPYRIGHT} << \EOF
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of HDF5. The full HDF5 copyright notice, including *
|
||||
* terms governing use, modification, and redistribution, is contained in *
|
||||
* the files COPYING and Copyright.html. COPYING can be found at the root *
|
||||
* of the source code distribution tree; Copyright.html can be found at the *
|
||||
* root level of an installed copy of the electronic HDF5 document set and *
|
||||
* is linked from the top-level documents page. It can also be found at *
|
||||
* http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
|
||||
* access to either file, you may request a copy from help@hdfgroup.org. *
|
||||
* the COPYING file, which can be found at the root of the source code *
|
||||
* distribution tree, or in https://www.hdfgroup.org/licenses. *
|
||||
* If you do not have access to either file, you may request a copy from *
|
||||
* help@hdfgroup.org. *
|
||||
EOF
|
||||
|
||||
# Shell style Copyright notice
|
||||
cat > ${SH_COPYRIGHT} << \EOF
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the files COPYING and Copyright.html. COPYING can be found at the root
|
||||
# of the source code distribution tree; Copyright.html can be found at the
|
||||
# root level of an installed copy of the electronic HDF5 document set and
|
||||
# is linked from the top-level documents page. It can also be found at
|
||||
# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
|
||||
# access to either file, you may request a copy from help@hdfgroup.org.
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
EOF
|
||||
|
||||
# Shell style Copyright notice (2nd type)
|
||||
cat > ${SH_COPYRIGHT2} << \EOF
|
||||
## Copyright by The HDF Group.
|
||||
## Copyright by the Board of Trustees of the University of Illinois.
|
||||
## All rights reserved.
|
||||
##
|
||||
## This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
## terms governing use, modification, and redistribution, is contained in
|
||||
## the files COPYING and Copyright.html. COPYING can be found at the root
|
||||
## of the source code distribution tree; Copyright.html can be found at the
|
||||
## root level of an installed copy of the electronic HDF5 document set and
|
||||
## is linked from the top-level documents page. It can also be found at
|
||||
## http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
|
||||
## access to either file, you may request a copy from help@hdfgroup.org.
|
||||
## the COPYING file, which can be found at the root of the source code
|
||||
## distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
## If you do not have access to either file, you may request a copy from
|
||||
## help@hdfgroup.org.
|
||||
EOF
|
||||
|
||||
# Windows Batch file Copyright notice
|
||||
cat > ${WINBAT_COPYRIGHT} << \EOF
|
||||
@REM Copyright by The HDF Group.
|
||||
@REM Copyright by the Board of Trustees of the University of Illinois.
|
||||
@REM All rights reserved.
|
||||
@REM
|
||||
@REM This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
@REM terms governing use, modification, and redistribution, is contained in
|
||||
@REM the files COPYING and Copyright.html. COPYING can be found at the root
|
||||
@REM of the source code distribution tree; Copyright.html can be found at the
|
||||
@REM root level of an installed copy of the electronic HDF5 document set and
|
||||
@REM is linked from the top-level documents page. It can also be found at
|
||||
@REM http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
|
||||
@REM access to either file, you may request a copy from help@hdfgroup.org.
|
||||
@REM the COPYING file, which can be found at the root of the source code
|
||||
@REM distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
@REM If you do not have access to either file, you may request a copy from
|
||||
@REM help@hdfgroup.org.
|
||||
EOF
|
||||
|
||||
# configure.ac file Copyright notice
|
||||
cat > ${CONFIGURE_AC_COPYRIGHT} << \EOF
|
||||
dnl Copyright by The HDF Group.
|
||||
dnl Copyright by the Board of Trustees of the University of Illinois.
|
||||
dnl All rights reserved.
|
||||
dnl
|
||||
dnl This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
dnl terms governing use, modification, and redistribution, is contained in
|
||||
dnl the files COPYING and Copyright.html. COPYING can be found at the root
|
||||
dnl of the source code distribution tree; Copyright.html can be found at the
|
||||
dnl root level of an installed copy of the electronic HDF5 document set and
|
||||
dnl is linked from the top-level documents page. It can also be found at
|
||||
dnl http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
|
||||
dnl access to either file, you may request a copy from help@hdfgroup.org.
|
||||
dnl the COPYING file, which can be found at the root of the source code
|
||||
dnl distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
dnl If you do not have access to either file, you may request a copy from
|
||||
dnl help@hdfgroup.org.
|
||||
EOF
|
||||
|
||||
}
|
||||
@@ -364,7 +341,7 @@ FindLineInFile()
|
||||
# $1 file which contains the expected copyright notice.
|
||||
# $2 file in which to look for the copyright notice.
|
||||
# Copyright notice must be found within the beginning $NUMBEGINLINES of lines.
|
||||
# Hunt for the particular string $UICOPYRIGHTSTR which signifies the beginning
|
||||
# Hunt for the particular string $THGCOPYRIGHTSTR which signifies the beginning
|
||||
# of the copyright notice.
|
||||
#
|
||||
MATCH_COPYRIGHT()
|
||||
@@ -379,7 +356,7 @@ MATCH_COPYRIGHT()
|
||||
nlines=`wc -l ${COPYRIGHTFILE} | cut -f1 -d' '`
|
||||
# Find a line that contains the copyright string and its line number in
|
||||
# the file.
|
||||
begin=`FindLineInFile "${UICOPYRIGHTSTR}" $f`
|
||||
begin=`FindLineInFile "${THGCOPYRIGHTSTR}" $f`
|
||||
if [ "$begin" -le 0 ] ; then
|
||||
# Not found, generate an empty dummy file
|
||||
cp /dev/null ${EXTRACTEDFILE}
|
||||
@@ -404,7 +381,7 @@ MATCH_COPYRIGHT()
|
||||
# $1 file which contains the expected copyright notice.
|
||||
# $2 file in which to look for the copyright notice.
|
||||
# Copyright notice must be found within the beginning $NUMBEGINLINES of lines.
|
||||
# Hunt for the particular string $UICOPYRIGHTSTR which signifies the beginning
|
||||
# Hunt for the particular string $THGCOPYRIGHTSTR which signifies the beginning
|
||||
# of the copyright notice.
|
||||
#
|
||||
FIX_COPYRIGHT()
|
||||
@@ -427,7 +404,12 @@ FIX_COPYRIGHT()
|
||||
# the file.
|
||||
insertbegin=`FindLineInFile "${THGCOPYRIGHTSTR}" $f`
|
||||
if [ $insertbegin -gt 0 ]; then
|
||||
insertend=`expr $insertbegin + $nlines` # no need to -1. See below.
|
||||
insertUIbegin=`FindLineInFile "${UICOPYRIGHTSTR}" $f`
|
||||
if [ $insertUIbegin -gt 0 ]; then
|
||||
insertend=`expr $insertbegin + $nlines + 1`
|
||||
else
|
||||
insertend=`expr $insertbegin + $nlines`
|
||||
fi
|
||||
else
|
||||
insertbegin=`FindLineInFile "${UICOPYRIGHTSTR}" $f`
|
||||
if [ $insertbegin -gt 0 ]; then
|
||||
|
||||
154
bin/chkmanifest
154
bin/chkmanifest
@@ -1,154 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
#
|
||||
|
||||
# Check that all the files in MANIFEST exist and (if this is a
|
||||
# GIT checkout) that all the GIT-managed files appear in the
|
||||
# MANIFEST.
|
||||
#
|
||||
|
||||
verbose=yes
|
||||
MANIFEST=/tmp/HD_MANIFEST.$$
|
||||
AUTOGEN=./autogen.sh
|
||||
AUTOGEN_LOG=/tmp/autogen.log.$$
|
||||
|
||||
# Main
|
||||
test "$verbose" && echo " Checking MANIFEST..." 1>&2
|
||||
# clean up $MANIFEST file when exits
|
||||
trap "rm -f $MANIFEST" 0
|
||||
|
||||
# Only split lines on newline, not whitespace
|
||||
set -f
|
||||
IFS='
|
||||
'
|
||||
|
||||
# First make sure i am in the directory in which there is an MANIFEST file
|
||||
# and then do the checking from there. Will try the following,
|
||||
# current directory, parent directory, the directory this command resides.
|
||||
if [ -f MANIFEST ]; then
|
||||
continue
|
||||
elif [ -f ../MANIFEST ]; then
|
||||
cd ..
|
||||
else
|
||||
commanddir=`dirname $0`
|
||||
if [ -d "$commanddir" -a -f $commanddir/MANIFEST ]; then
|
||||
cd $commanddir
|
||||
continue
|
||||
else
|
||||
echo MANIFEST file not found. Abort.
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Do an autogen if generated files (e.g., configure) is not present
|
||||
if [ ! -f configure ]; then
|
||||
echo " running $AUTOGEN"
|
||||
$AUTOGEN > $AUTOGEN_LOG 2>&1
|
||||
if [ $? -ne 0 ]; then
|
||||
echo $AUTOGEN encountered error. Abort.
|
||||
echo output from $AUTOGEN:
|
||||
cat $AUTOGEN_LOG
|
||||
exit 1
|
||||
fi
|
||||
rm $AUTOGEN_LOG
|
||||
fi
|
||||
|
||||
# Check for duplicate entries. This can be done at any time, but it may as
|
||||
# well be sooner so that if something else fails the presence of duplicates
|
||||
# will already be known.
|
||||
errcode=0
|
||||
DUPLICATES=`perl -ne 's/#.*//; next if /^\s*$/; if ($uniq{$_}++) { print $_; }' MANIFEST`
|
||||
if [ "$DUPLICATES" ]; then
|
||||
cat 1>&2 <<EOF
|
||||
These entries appear more than once in the MANIFEST:
|
||||
$DUPLICATES
|
||||
Please remove the duplicate lines and try again.
|
||||
|
||||
EOF
|
||||
errcode=1
|
||||
fi
|
||||
|
||||
# Copy the manifest file to get a list of file names.
|
||||
grep '^\.' MANIFEST | expand | cut -f1 -d' ' >$MANIFEST
|
||||
|
||||
for file in `cat $MANIFEST`; do
|
||||
if [ ! -f $file ]; then
|
||||
echo "- $file"
|
||||
fail=yes
|
||||
fi
|
||||
done
|
||||
|
||||
# Get the list of files under version control and check that they are
|
||||
# present.
|
||||
#
|
||||
# First get a list of all the pending files with git status and
|
||||
# check those.
|
||||
git_stat=`git status -s`
|
||||
for file in $git_stat; do
|
||||
|
||||
# Newly added files are not listed by git ls-files, which
|
||||
# we check later.
|
||||
|
||||
# The line listing new files starts with 'A'.
|
||||
letter=`echo $file | head -c 1`
|
||||
if [ "$letter" = "A" ]; then
|
||||
# Convert the git status columns to './' so it matches
|
||||
# the manifest file name.
|
||||
#
|
||||
# There is a space between the status columns and file name, hence
|
||||
# the '3'.
|
||||
path=`echo $file | sed 's/^.\{3\}/\.\//g'`
|
||||
# Ignore directories
|
||||
if [ ! -d $path ]; then
|
||||
if (grep ^$path$ $MANIFEST >/dev/null); then
|
||||
:
|
||||
else
|
||||
echo "- $path"
|
||||
fail=yes
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
# Next check git ls-files, which gets a list of all files that are
|
||||
# checked in.
|
||||
git_ls=`git ls-files`
|
||||
for file in $git_ls; do
|
||||
path="./${file}"
|
||||
# Ignore directories
|
||||
if [ ! -d $path ]; then
|
||||
if (grep ^$path$ $MANIFEST >/dev/null); then
|
||||
:
|
||||
else
|
||||
echo "+ $path"
|
||||
fail=yes
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
# Finish up
|
||||
if [ "X$fail" = "Xyes" ]; then
|
||||
cat 1>&2 <<EOF
|
||||
The MANIFEST is out of date. Files marked with a minus sign (-) no
|
||||
longer exist; files marked with a plus sign (+) are GIT-managed but do
|
||||
not appear in the MANIFEST. Please remedy the situation and try again.
|
||||
EOF
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ $errcode -ne 0 ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
test "$verbose" && echo " The MANIFEST is up to date." 1>&2
|
||||
exit 0
|
||||
@@ -1,7 +1,6 @@
|
||||
#!/usr/bin/env perl
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
#!/usr/bin/env perl
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
#
|
||||
use warnings;
|
||||
|
||||
my $depend_file;
|
||||
my $new_depend_file;
|
||||
my $srcdir;
|
||||
my $top_srcdir;
|
||||
my $top_builddir;
|
||||
|
||||
while ($_ = shift @ARGV) {
|
||||
if (/^--top_srcdir=([^ \t\n]*)/) {
|
||||
$top_srcdir = $1;
|
||||
$top_srcdir =~ s/\+/\\\+/g;
|
||||
$top_srcdir =~ s/\./\\\./g;
|
||||
} elsif (/^--top_builddir=([^ \t\n]*)/) {
|
||||
$top_builddir = $1;
|
||||
$top_builddir =~ s/\+/\\\+/g;
|
||||
$top_builddir =~ s/\./\\\./g;
|
||||
} else {
|
||||
$depend_file = $_;
|
||||
$new_depend_file = "$_.new";
|
||||
last;
|
||||
}
|
||||
}
|
||||
|
||||
open(DEPEND, "<$depend_file") || die "cannot open file $depend_file: $!\n";
|
||||
open(NEW, ">$new_depend_file") || die "cannot open file $new_depend_file: $!\n";
|
||||
|
||||
while (<DEPEND>) {
|
||||
s/\.o(\b)/\.lo$1/g;
|
||||
s/ $top_srcdir/ \$\(top_srcdir\)/g;
|
||||
s/ $top_builddir/ \$\(top_builddir\)/g;
|
||||
print NEW $_;
|
||||
}
|
||||
|
||||
close(DEPEND);
|
||||
close(NEW);
|
||||
|
||||
`mv $new_depend_file $depend_file`;
|
||||
58
bin/deploy
58
bin/deploy
@@ -1,58 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
#
|
||||
# Deploy the HDF5 binary.
|
||||
#
|
||||
# Programmer: Albert Cheng
|
||||
# Created Date: 2004/12/15
|
||||
#
|
||||
# Modifications
|
||||
|
||||
# Function definitions
|
||||
#
|
||||
# Print Usage page
|
||||
USAGE()
|
||||
{
|
||||
cat << EOF
|
||||
Usage: $0 <dir>
|
||||
Install the binary to directory <dir>
|
||||
|
||||
Examples:
|
||||
|
||||
$ bin/deploy /usr/local/hdf5
|
||||
....
|
||||
|
||||
EOF
|
||||
|
||||
}
|
||||
|
||||
|
||||
# Variables
|
||||
|
||||
if [ $# != 1 ]; then
|
||||
USAGE
|
||||
exit 1
|
||||
fi
|
||||
|
||||
installdir=$1
|
||||
# create installdir if it does not exist yet.
|
||||
if [ -d $installdir ] || mkdir $installdir ; then
|
||||
${MAKE:-gmake} install prefix=$installdir && \
|
||||
( cd $installdir/bin; ./h5redeploy -force)
|
||||
exit $?
|
||||
else
|
||||
echo $installdir is not a valid directory
|
||||
USAGE
|
||||
exit 1
|
||||
fi
|
||||
|
||||
27
bin/distdep
27
bin/distdep
@@ -1,27 +0,0 @@
|
||||
#!/bin/sh
|
||||
#! -*-perl-*-
|
||||
eval 'exec perl -p -x -S $0 ${1+"$@"}'
|
||||
if 0;
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
#
|
||||
|
||||
# Usage: $0 [<].depend
|
||||
|
||||
# Takes dependency info and generates on stdout dependencies suitable for
|
||||
# distribution by removing all the system include files from the list and
|
||||
# removing all but the base name of other include files (since the Makefiles
|
||||
# contain the logic for searching).
|
||||
|
||||
($h,$_)=/\s*\\/?$h.$`:("",$h.$_);
|
||||
s|( +/\S*)*( *)|$2?" \\\n ":""|eg;
|
||||
#s|(([-\w\.]+)/)+([-\w\.]+)|\3|g;
|
||||
139
bin/errors
139
bin/errors
@@ -1,139 +0,0 @@
|
||||
#!/usr/bin/env perl
|
||||
require 5.003;
|
||||
use warnings;
|
||||
use Text::Tabs;
|
||||
|
||||
# NOTE: THE FORMAT OF HRETURN_ERROR AND HGOTO_ERROR MACROS HAS
|
||||
# CHANGED. THIS SCRIPT NO LONGER WORKS! --rpm
|
||||
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
#
|
||||
# Robb Matzke
|
||||
# 30 Aug 1997
|
||||
#
|
||||
# Purpose: This script will read standard input which should be a
|
||||
# function prologue followed by a C function and will emit
|
||||
# on standard output the same source code with the function
|
||||
# prologue containing documentation for the various errors
|
||||
# that occur in the function.
|
||||
#
|
||||
# Errors are raised by calling HGOTO_ERROR() or
|
||||
# HRETURN_ERROR(). The reason for the error message is a
|
||||
# comment which appears immediately after the error macro
|
||||
# call and is contained entirely on one line:
|
||||
#
|
||||
# HRETURN_ERROR (...); /*entry not found*/
|
||||
#
|
||||
# If such a comment doesn't exist, then the previous comment
|
||||
# is used, subject to the constraint that raising an error
|
||||
# clears the previous comment.
|
||||
#
|
||||
# /* Entry not found */
|
||||
# HGOTO_ERROR (...);
|
||||
#
|
||||
# Emacs users can use this script interactively with the
|
||||
# c-mark-function and shell-command-on-region functions which
|
||||
# are normally bound to M-C-h and M-|.
|
||||
|
||||
|
||||
# Split STDIN into the prolog and the function body. Preserve leading
|
||||
# white space.
|
||||
$_ = join "", <STDIN>;
|
||||
my ($head, $prolog, $body) = (/^(\s*)(\/\*(.*?)\*\/)?(.*)/s)[0,2,3];
|
||||
$prolog = "" unless $prolog;
|
||||
|
||||
# Find each error and the comment that goes with it.
|
||||
for ($_=$body,$comment=""; /\/\*|H(RETURN|GOTO)_ERROR/s;) {
|
||||
$_ = $&.$';
|
||||
|
||||
if (/^H(RETURN|GOTO)_ERROR\s*\(\s*H5E_(\w+)\s*,\s*H5E_(\w+)\s*,/s) {
|
||||
($major, $minor, $_) = ($2, $3, $');
|
||||
$comment=$1 if /^.*?\)\s*;\s*\/\*\s*(.*?)\s*\*\//;
|
||||
$comment =~ s/^\s*\*+\s*/ /mg; # leading asterisks.
|
||||
$comment =~ s/^\s+//s; # leading white space.
|
||||
$comment =~ s/\s+$//s; # trailing white space.
|
||||
$comment =~ s/(\w)$/$1./s; # punctuation.
|
||||
$comment ||= "***NO COMMENT***";
|
||||
$errors{"$major\000$minor\000\u$comment"} = 1;
|
||||
$comment = "";
|
||||
|
||||
} else {
|
||||
($comment) = /^\/\*\s*(.*?)\s*\*\//s;
|
||||
$_ = $';
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
# Format an error so it isn't too wide.
|
||||
sub fmt_error ($) {
|
||||
local ($_) = @_;
|
||||
|
||||
my ($prefix,$space,$err) = /^((.*?)([A-Z_0-9]+\s+[A-Z_0-9]+\s+))/;
|
||||
$_ = $';
|
||||
tr/\n / /s;
|
||||
my $w = 70 - length expand $prefix;
|
||||
s/(.{$w}\S+)\s+(\S)/$1."\n".$space.' 'x(length $err).$2/eg;
|
||||
return $prefix . $_."\n";
|
||||
}
|
||||
|
||||
|
||||
|
||||
# Sort the errors by major, then minor, then comment. Duplicate
|
||||
# triplets have already been removed.
|
||||
sub by_triplet {
|
||||
my ($a_maj, $a_min, $a_com) = split /\000/, $a;
|
||||
my ($b_maj, $b_min, $b_com) = split /\000/, $b;
|
||||
$a_maj cmp $b_maj || $a_min cmp $b_min || $a_com cmp $b_com;
|
||||
}
|
||||
@errors = map {sprintf "%-9s %-13s %s\n", split /\000/}
|
||||
sort by_triplet keys %errors;
|
||||
|
||||
|
||||
|
||||
# Add the list of errors to the prologue depending on the type of
|
||||
# prolog.
|
||||
if (($front, $back) = $prolog=~/^(.*?Errors:\s*?(?=\n)).*?\n\s*\*\s*\n(.*)/s) {
|
||||
#| * Errors: |#
|
||||
#| * __list_of_error_messages__ (zero or more lines) |#
|
||||
#| * |#
|
||||
print $head, "/*", $front, "\n";
|
||||
map {print fmt_error " *\t\t".$_} @errors;
|
||||
print " *\n", $back, "*/", $body;
|
||||
|
||||
} elsif (($front,$back) = $prolog =~
|
||||
/(.*?\n\s*ERRORS:?\s*?(?=\n)).*?\n\s*\n(.*)/s) {
|
||||
#| ERRORS |#
|
||||
#| __list_of_error_messages__ (zero or more lines) |#
|
||||
#| |#
|
||||
print $head, "/*", $front, "\n";
|
||||
map {print fmt_error " ".$_} @errors;
|
||||
print "\n", $back, "*/", $body;
|
||||
|
||||
} elsif ($prolog eq "") {
|
||||
# No prolog present.
|
||||
print $head;
|
||||
print "\n/*", "-"x73, "\n * Function:\t\n *\n * Purpose:\t\n *\n";
|
||||
print " * Errors:\n";
|
||||
map {print fmt_error " *\t\t".$_} @errors;
|
||||
print " *\n * Return:\tSuccess:\t\n *\n *\t\tFailure:\t\n *\n";
|
||||
print " * Programmer:\t\n *\n * Modifications:\n *\n *", '-'x73, "\n";
|
||||
print " */\n", $body;
|
||||
|
||||
} else {
|
||||
# Prolog format not recognized.
|
||||
print $head, "/*", $prolog, "*/\n\n";
|
||||
print "/*\n * Errors returned by this function...\n";
|
||||
map {print fmt_error " *\t".$_} @errors;
|
||||
print " */\n", $body;
|
||||
}
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ find . \( -type d -path ./config -prune -and -not -path ./config \) \
|
||||
-or -name H5version.h \
|
||||
-or -name H5overflow.h \
|
||||
\) \) \
|
||||
-and \( -iname *.h -or -iname *.c -or -iname *.cpp -or -iname *.hpp \) \) \
|
||||
-and \( -iname *.h -or -iname *.c -or -iname *.cpp -or -iname *.hpp -or -iname *.java \) \) \
|
||||
| xargs clang-format -style=file -i -fallback-style=none
|
||||
|
||||
exit 0
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Recursively format all C & C++ sources and header files, except those in the
|
||||
# 'config' directory and generated files, such as H5LTanalyze.c, etc.
|
||||
#
|
||||
# Note that any files or directories that are excluded here should also be
|
||||
# added to the 'exclude' list in .github/workflows/clang-format-check.yml
|
||||
#
|
||||
# (Remember to update both bin/format_source and bin/format_source_patch)
|
||||
|
||||
find . \( -type d -path ./config -prune -and -not -path ./config \) \
|
||||
-or \( \( \! \( \
|
||||
-name H5LTanalyze.c \
|
||||
-or -name H5LTparse.c \
|
||||
-or -name H5LTparse.h \
|
||||
-or -name H5Epubgen.h \
|
||||
-or -name H5Einit.h \
|
||||
-or -name H5Eterm.h \
|
||||
-or -name H5Edefin.h \
|
||||
-or -name H5version.h \
|
||||
-or -name H5overflow.h \
|
||||
\) \) \
|
||||
-and \( -iname *.h -or -iname *.c -or -iname *.cpp -or -iname *.hpp \) \) \
|
||||
| xargs clang-format -style=file -i -fallback-style=none
|
||||
|
||||
git diff > clang_format.patch
|
||||
|
||||
# Delete if 0 size
|
||||
if [ ! -s clang_format.patch ]
|
||||
then
|
||||
rm clang_format.patch
|
||||
fi
|
||||
|
||||
exit 0
|
||||
@@ -1,51 +0,0 @@
|
||||
#! /bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
|
||||
# A script to generate coverage files for HDF5 using gcov.
|
||||
# Configures, builds, and runs tests in-place; the output files will be placed
|
||||
# in a directory called gcov_logs.
|
||||
# Must be invoked from the root hdf5 directory.
|
||||
# This script has been tested on kagiso.
|
||||
|
||||
CFLAGS="$CFLAGS -ftest-coverage -fprofile-arcs"
|
||||
export CFLAGS
|
||||
LDFLAGS="$LDFLAGS -lgcov"
|
||||
export LDFLAGS
|
||||
CC=gcc
|
||||
export CC
|
||||
./configure
|
||||
make
|
||||
make check
|
||||
mkdir gcov_logs
|
||||
cd src
|
||||
for j in *.h *.c
|
||||
do
|
||||
ln -s ../$j .libs/$j
|
||||
done
|
||||
cd .libs
|
||||
for j in *.gcda
|
||||
do
|
||||
gcov -b $j >> gcov.log 2>&1
|
||||
done
|
||||
for j in *.gcov
|
||||
do
|
||||
mv $j ../../gcov_logs
|
||||
done
|
||||
mv gcov.log ../../gcov_logs
|
||||
for j in *.c *.h
|
||||
do
|
||||
rm $j
|
||||
done
|
||||
cd ../..
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
# Copyright by The HDF Group.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
@@ -21,7 +21,7 @@
|
||||
#
|
||||
# There is NO dependency in either the autotools or CMake to regenerate
|
||||
# the parser code. If you modify H5LT analyze.l or H5LTparse.y, you
|
||||
# will need to run this scrpit manually on a system with a suitable
|
||||
# will need to run this script manually on a system with a suitable
|
||||
# lexer and parser generator.
|
||||
#
|
||||
# IMPORTANT OS X NOTE
|
||||
@@ -236,6 +236,7 @@ do
|
||||
echo '#pragma GCC diagnostic ignored "-Wsign-conversion" ' >> tmp.out
|
||||
echo '#pragma GCC diagnostic ignored "-Wstrict-overflow" ' >> tmp.out
|
||||
echo '#pragma GCC diagnostic ignored "-Wstrict-prototypes" ' >> tmp.out
|
||||
echo '#pragma GCC diagnostic ignored "-Wimplicit-fallthrough" ' >> tmp.out
|
||||
echo '#if !defined (__clang__) ' >> tmp.out
|
||||
echo '#pragma GCC diagnostic ignored "-Wlarger-than=" ' >> tmp.out
|
||||
echo '#pragma GCC diagnostic ignored "-Wsuggest-attribute=const" ' >> tmp.out
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
#! /bin/sh
|
||||
##
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
@@ -40,7 +39,7 @@ HL="@HL@"
|
||||
## $CLINKER $H5BLD_CPPFLAGS $CPPFLAGS $H5BLD_CFLAGS $CFLAGS $LDFLAGS ##
|
||||
## $LIBS $clibpath $link_objs $link_args $shared_link ##
|
||||
## ##
|
||||
## These settings can be overridden by setting HDF5_CFLAGS, ##
|
||||
## These settings can be overridden by setting HDF5_CFLAGS, ##
|
||||
## HDF5_CPPFLAGS, HDF5_LDFLAGS, or HDF5_LIBS in the environment. ##
|
||||
## ##
|
||||
############################################################################
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
#! /bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
17
bin/h5vers
17
bin/h5vers
@@ -7,7 +7,6 @@ require 5.003;
|
||||
use strict;
|
||||
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
@@ -66,7 +65,7 @@ use strict;
|
||||
# ./H5public.h or ./src/H5public.h.
|
||||
#
|
||||
# If the version number is changed (either `-s' or `-i' was used on
|
||||
# the command line) then the first line of the README.txt and RELEASE.txt files
|
||||
# the command line) then the version line of the README.md and RELEASE.txt files
|
||||
# one directory above the H5public.h file is also modified so it looks
|
||||
# something like: This is hdf5-1.2.3-pre1 currently under development.
|
||||
# The AC_INIT macro in configure.ac will also change in this case to be
|
||||
@@ -156,10 +155,10 @@ while ($_ = shift) {
|
||||
}
|
||||
die "mutually exclusive options given\n" if $set && $inc;
|
||||
|
||||
# Determine file to use as H5public.h, README.txt,
|
||||
# Determine file to use as H5public.h, README.md,
|
||||
# release_docs/RELEASE.txt, configure.ac, windows/src/H5pubconf.h
|
||||
# config/lt_vers.am and config/cmake/scripts/HDF5config.cmake.
|
||||
# The README.txt, release_docs/RELEASE.txt, configure.ac,
|
||||
# The README.md, release_docs/RELEASE.txt, configure.ac,
|
||||
# windows/src/H5pubconf.h, config/lt_vers.am and
|
||||
# config/cmake/scripts/HDF5config.cmake
|
||||
# files are always in the directory above H5public.h
|
||||
@@ -178,9 +177,9 @@ die "unable to read file: $LT_VERS\n" unless -r $file;
|
||||
my $HDF5CONFIGCMAKE = $file;
|
||||
$HDF5CONFIGCMAKE =~ s/[^\/]*$/..\/config\/cmake\/scripts\/HDF5config.cmake/;
|
||||
die "unable to read file: $HDF5CONFIGCMAKE\n" unless -r $file;
|
||||
# README.txt
|
||||
# README.md
|
||||
my $README = $file;
|
||||
$README =~ s/[^\/]*$/..\/README.txt/;
|
||||
$README =~ s/[^\/]*$/..\/README.md/;
|
||||
die "unable to read file: $README\n" unless -r $file;
|
||||
# release_docs/RELEASE.txt
|
||||
my $RELEASE = $file;
|
||||
@@ -303,7 +302,7 @@ if ($LT_VERS && $version_increased) {
|
||||
# close FILE;
|
||||
}
|
||||
|
||||
# Update the README.txt file
|
||||
# Update the README.md file
|
||||
if ($README) {
|
||||
open FILE, $README or die "$README: $!\n";
|
||||
my @contents = <FILE>;
|
||||
@@ -377,7 +376,7 @@ if ($H5_JAVA) {
|
||||
my $version_string2 = sprintf("%d, %d, %d", @newver[0,1,2]);
|
||||
|
||||
$data =~ s/\@version HDF5 .* <BR>/\@version HDF5 $version_string1 <BR>/;
|
||||
$data =~ s/ public final static int LIB_VERSION\[\] = \{ \d*, \d*, \d* \};/ public final static int LIB_VERSION[] = \{ $version_string2 \};/;
|
||||
$data =~ s/ public final static int LIB_VERSION\[\] = \{\d*,.\d*,.\d*\};/ public final static int LIB_VERSION[] = \{$version_string2\};/;
|
||||
|
||||
write_file($H5_JAVA, $data);
|
||||
}
|
||||
@@ -394,7 +393,7 @@ if ($TESTH5_JAVA) {
|
||||
my $version_string1 = sprintf("%d, %d, %d", @newver[0,1,2]);
|
||||
my $version_string2 = sprintf("int majnum = %d, minnum = %d, relnum = %d", @newver[0,1,2]);
|
||||
|
||||
$data =~ s/ int libversion\[\] = \{ .* \};/ int libversion\[\] = \{ $version_string1 \};/;
|
||||
$data =~ s/ int libversion\[\] = \{.*\};/ int libversion\[\] = \{$version_string1\};/;
|
||||
$data =~ s/ int majnum = \d*, minnum = \d*, relnum = \d*;/ $version_string2;/;
|
||||
|
||||
write_file($TESTH5_JAVA, $data);
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
#!/usr/bin/env perl
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
238
bin/locate_sw
238
bin/locate_sw
@@ -1,238 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
#
|
||||
|
||||
# Try to locate the software as named in argument.
|
||||
# This is a sequential search of all possible locations of the software.
|
||||
# Usage: locate_sw <SW-Name>
|
||||
# It prints a string showing the paths leading to the include, lib and bin
|
||||
# directory of the software, separated by colons. E.g., if the software is
|
||||
# located in /usr/sdt/*, it prints
|
||||
# /usr/sdt/include:/usr/sdt/lib:/usr/sdt/bin
|
||||
# Any component that is not found will be returned as an empty string. E.g.,
|
||||
# if somehow the header files of the software are not found, it prints
|
||||
# :/usr/sdt/lib;/usr/sdt/bin
|
||||
|
||||
# Function definitions
|
||||
USAGE()
|
||||
{
|
||||
echo "Usage: locate_sw <SW-Name>"
|
||||
echo " where <SW-Name> can be hdf4, hdf5, zlib"
|
||||
echo " It prints the paths leading the header files (include),"
|
||||
echo " library (lib), and tools (bin). E.g.,"
|
||||
echo " /usr/sdt/include:/usr/sdt/lib:/usr/sdt/bin"
|
||||
echo " Any component that is not found will be returned as an empty string. E.g.,"
|
||||
echo " if somehow the header files of the software are not found, it prints"
|
||||
echo " :/usr/sdt/lib;/usr/sdt/bin"
|
||||
echo "Exit code: 0 if software located; otherwise non-zero"
|
||||
}
|
||||
|
||||
# locate hdf4 software
|
||||
locate_hdf4()
|
||||
{
|
||||
# this default is the best guess of locating hdf4 software
|
||||
swpaths_defaults="/usr/ncsa /usr/sdt /usr/local"
|
||||
swpaths=
|
||||
|
||||
case "$OSname" in
|
||||
SunOS)
|
||||
case "$OSrelease" in
|
||||
5.7)
|
||||
swpaths="/afs/ncsa/packages/hdf/SunOS_5.7"
|
||||
;;
|
||||
*)
|
||||
# use default
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
HP-UX)
|
||||
case "$OSrelease" in
|
||||
B.11.00)
|
||||
swpaths="/afs/ncsa/packages/hdf/HPUX_11.00"
|
||||
;;
|
||||
*)
|
||||
# use default
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
Linux)
|
||||
swpaths="/afs/ncsa/packages/hdf/Linux"
|
||||
;;
|
||||
OSF1)
|
||||
swpaths="/afs/ncsa/packages/hdf/OSF1_V4.0"
|
||||
;;
|
||||
*)
|
||||
# just use the defaults
|
||||
;;
|
||||
esac
|
||||
|
||||
# Check if the hdf4 software is actually available.
|
||||
# Accept the directory only if needed .h, .a and tools are found
|
||||
# in the same place. That way, they are more likely to be of the
|
||||
# same version.
|
||||
#
|
||||
swpaths="$swpaths $swpaths_defaults"
|
||||
for sw in $swpaths; do
|
||||
if [ -r $sw/include/hdf.h -a -r $sw/lib/libdf.a -a -r $sw/bin/hdp ]; then
|
||||
SW_inc=$sw/include
|
||||
SW_lib=$sw/lib
|
||||
SW_bin=$sw/bin
|
||||
SW_Location=$sw
|
||||
break
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
# locate hdf5 software
|
||||
locate_hdf5()
|
||||
{
|
||||
# this default is the best guess of locating hdf5 software
|
||||
swpaths_defaults="/usr/ncsa /usr/sdt /usr/local"
|
||||
swpaths=
|
||||
|
||||
case "$OSname" in
|
||||
SunOS)
|
||||
case "$OSrelease" in
|
||||
5.7)
|
||||
swpaths="/afs/ncsa/packages/hdf5/SunOS_5.7"
|
||||
;;
|
||||
*)
|
||||
# use default
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
HP-UX)
|
||||
case "$OSrelease" in
|
||||
B.11.00)
|
||||
swpaths="/afs/ncsa/packages/hdf5/HPUX_11.00"
|
||||
;;
|
||||
*)
|
||||
# use default
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
Linux)
|
||||
swpaths="/afs/ncsa/packages/hdf5/Linux"
|
||||
;;
|
||||
FreeBSD)
|
||||
swpaths="/afs/ncsa/packages/hdf5/FreeBSD"
|
||||
;;
|
||||
OSF1)
|
||||
swpaths="/afs/ncsa/packages/hdf5/OSF1_V4.0"
|
||||
;;
|
||||
*)
|
||||
# just use the defaults
|
||||
;;
|
||||
esac
|
||||
|
||||
# Check if the hdf5 software is actually available.
|
||||
# Accept the directory only if needed .h, .a and tools are found
|
||||
# in the same place. That way, they are more likely to be of the
|
||||
# same version.
|
||||
#
|
||||
swpaths="$swpaths $swpaths_defaults"
|
||||
for sw in $swpaths; do
|
||||
if [ -r $sw/include/hdf5.h -a -r $sw/lib/libhdf5.a -a -r $sw/bin/h5dump ]; then
|
||||
SW_inc=$sw/include
|
||||
SW_lib=$sw/lib
|
||||
SW_bin=$sw/bin
|
||||
SW_Location=$sw
|
||||
break
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
# locate zlib software
|
||||
locate_zlib()
|
||||
{
|
||||
# this default is the best guess of locating zlib software
|
||||
swpaths_defaults="/usr /usr/local /usr/ncsa /usr/sdt"
|
||||
swpaths=
|
||||
|
||||
|
||||
# Check if the zlib software is actually available.
|
||||
# Accept the directory only if needed .h, .a and tools are found
|
||||
# in the same place. That way, they are more likely to be of the
|
||||
# same version.
|
||||
# Don't know something specific to check the bin directory. Maybe gzip?
|
||||
# Just make sure it exists.
|
||||
#
|
||||
swpaths="$swpaths $swpaths_defaults"
|
||||
for sw in $swpaths; do
|
||||
if [ -r $sw/include/zlib.h -a \
|
||||
\( -r $sw/lib/libz.a -o -r $sw/lib/libz.so \) -a -d $cw/bin ]; then
|
||||
SW_inc=$sw/include
|
||||
SW_lib=$sw/lib
|
||||
SW_bin=$sw/bin
|
||||
SW_Location=$sw
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
# if none found, try HDF4 software which contains a version of zlib.
|
||||
if [ x-$SW_Location = x- ]; then
|
||||
locate_hdf4
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
# Main
|
||||
#
|
||||
# Options
|
||||
#
|
||||
if [ $# -lt 1 ]; then
|
||||
USAGE
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$1" = -h ]; then
|
||||
USAGE
|
||||
exit 0
|
||||
fi
|
||||
|
||||
SW=$1
|
||||
shift
|
||||
|
||||
# locations of the software seeked.
|
||||
SW_inc= # include place
|
||||
SW_lib= # library place
|
||||
SW_bin= # binary place
|
||||
SW_Location= # parent directory of all the above
|
||||
|
||||
OSname=`uname -s`
|
||||
OSrelease=`uname -r`
|
||||
|
||||
case $SW in
|
||||
hdf4|hdf)
|
||||
locate_hdf4
|
||||
;;
|
||||
hdf5)
|
||||
locate_hdf5
|
||||
;;
|
||||
zlib)
|
||||
locate_zlib
|
||||
;;
|
||||
*)
|
||||
echo "unknown software ($SW)"
|
||||
USAGE
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# show the results located, separated by commas.
|
||||
if [ -n "${SW_inc}" -a -n "${SW_lib}" -a -n "${SW_bin}" ]; then
|
||||
echo ${SW_inc},${SW_lib},${SW_bin}
|
||||
exit 0
|
||||
else
|
||||
exit 1
|
||||
fi
|
||||
@@ -5,7 +5,6 @@ use warnings;
|
||||
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
@@ -32,7 +31,6 @@ sub print_copyright ($) {
|
||||
|
||||
print $fh "/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n";
|
||||
print $fh " * Copyright by The HDF Group. *\n";
|
||||
print $fh " * Copyright by the Board of Trustees of the University of Illinois. *\n";
|
||||
print $fh " * All rights reserved. *\n";
|
||||
print $fh " * *\n";
|
||||
print $fh " * This file is part of HDF5. The full HDF5 copyright notice, including *\n";
|
||||
|
||||
@@ -10,7 +10,6 @@ my @ctypes = ( () );
|
||||
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
@@ -61,7 +60,6 @@ sub print_copyright ($) {
|
||||
|
||||
print $fh "/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n";
|
||||
print $fh " * Copyright by The HDF Group. *\n";
|
||||
print $fh " * Copyright by the Board of Trustees of the University of Illinois. *\n";
|
||||
print $fh " * All rights reserved. *\n";
|
||||
print $fh " * *\n";
|
||||
print $fh " * This file is part of HDF5. The full HDF5 copyright notice, including *\n";
|
||||
|
||||
@@ -6,10 +6,10 @@ use warnings;
|
||||
# (The max_idx parameter is the only thing that needs to be changed when adding
|
||||
# support for a new major release. If support for a prior major release
|
||||
# is added (like support for 1.4, etc), the min_sup_idx parameter will
|
||||
# need to be decremented. - QAK)
|
||||
# need to be decremented.)
|
||||
|
||||
# Max. library "index" (0 = v1.0, 1 = 1.2, 2 = 1.4, 3 = 1.6, 4 = 1.8, 5 = 1.10, 6 = 1.12, 7 = 1.14, etc)
|
||||
$max_idx = 7;
|
||||
# Max. library "index" (0 = v1.0, 1 = 1.2, 2 = 1.4, 3 = 1.6, 4 = 1.8, 5 = 1.10, 6 = 1.12, 7 = 1.14, 8 = 1.16, etc)
|
||||
$max_idx = 8;
|
||||
|
||||
# Min. supported previous library version "index" (0 = v1.0, 1 = 1.2, etc)
|
||||
$min_sup_idx = 3;
|
||||
@@ -19,7 +19,6 @@ $indent = 2;
|
||||
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
@@ -46,7 +45,6 @@ sub print_copyright ($) {
|
||||
|
||||
print $fh "/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n";
|
||||
print $fh " * Copyright by The HDF Group. *\n";
|
||||
print $fh " * Copyright by the Board of Trustees of the University of Illinois. *\n";
|
||||
print $fh " * All rights reserved. *\n";
|
||||
print $fh " * *\n";
|
||||
print $fh " * This file is part of HDF5. The full HDF5 copyright notice, including *\n";
|
||||
|
||||
33
bin/mkdirs
33
bin/mkdirs
@@ -1,33 +0,0 @@
|
||||
#! /bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
#
|
||||
# This is a small program which will create directories n-levels deep.
|
||||
# You just call it with something like:
|
||||
#
|
||||
# mkdirs /tmp/foo/bar/baz
|
||||
#
|
||||
# and it will create all the directories from /tmp down to baz which
|
||||
# don't exist.
|
||||
#
|
||||
chmodprog="${CHMODPROG-chmod}"
|
||||
mkdirprog="${MKDIRPROG-mkdir}"
|
||||
|
||||
make_dir () {
|
||||
if test ! -d $1; then
|
||||
make_dir `echo $1 | sed -e 's#/[^/]*$##'`
|
||||
$mkdirprog $1
|
||||
$chmodprog 755 $1
|
||||
fi
|
||||
}
|
||||
|
||||
make_dir `echo $1 | sed -e 's#/$##'`
|
||||
43
bin/newer
43
bin/newer
@@ -1,43 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
#
|
||||
# Compare the modification time of file argument 1 against other file arguments.
|
||||
# Return true (0) if argument 1 is newer than all others, otherwise return
|
||||
# false (1). If any of the argument is not a file, return false (1).
|
||||
#
|
||||
# Programmer: Albert Cheng
|
||||
# Created Date: 2005/07/06
|
||||
# Modification:
|
||||
# Albert Cheng 2005/8/30
|
||||
# Changed from two arguments to multiple arguments.
|
||||
|
||||
if test $# -lt 2; then
|
||||
exit 1
|
||||
fi
|
||||
if test ! -f $1; then
|
||||
exit 1
|
||||
fi
|
||||
f1=$1
|
||||
shift
|
||||
|
||||
for f in $*; do
|
||||
if test ! -f $f; then
|
||||
exit 1
|
||||
fi
|
||||
if test X = X`find $f1 -newer $f -print`; then
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
# passed all tests. Must be a file newer than all others.
|
||||
exit 0
|
||||
@@ -61,26 +61,21 @@ STDOUT_FILTER() {
|
||||
# Remove them from the stderr result file.
|
||||
# $1 is the file name of the file to be filtered.
|
||||
# Cases of filter needed:
|
||||
# 1. MPE:
|
||||
# In parallel mode and if MPE library is used, it prints the following
|
||||
# two message lines whether the MPE tracing is used or not.
|
||||
# Writing logfile.
|
||||
# Finished writing logfile.
|
||||
# 2. LANL MPI:
|
||||
# * LANL MPI:
|
||||
# The LANL MPI will print some messages like the following,
|
||||
# LA-MPI: *** mpirun (1.5.10)
|
||||
# LA-MPI: *** 3 process(es) on 2 host(s): 2*fln21 1*fln22
|
||||
# LA-MPI: *** libmpi (1.5.10)
|
||||
# LA-MPI: *** Copyright 2001-2004, ACL, Los Alamos National Laboratory
|
||||
# 3. h5diff debug output:
|
||||
# * h5diff debug output:
|
||||
# Debug output all have prefix "h5diff debug: ".
|
||||
# 4. AIX system prints messages like these when it is aborting:
|
||||
# * AIX system prints messages like these when it is aborting:
|
||||
# ERROR: 0031-300 Forcing all remote tasks to exit due to exit code 1 in task 0
|
||||
# ERROR: 0031-250 task 4: Terminated
|
||||
# ERROR: 0031-250 task 3: Terminated
|
||||
# ERROR: 0031-250 task 2: Terminated
|
||||
# ERROR: 0031-250 task 1: Terminated
|
||||
# 5. LLNL Blue-Gene mpirun prints messages like there when it exit non-zero:
|
||||
# * LLNL Blue-Gene mpirun prints messages like there when it exit non-zero:
|
||||
# <Apr 12 15:01:49.075658> BE_MPI (ERROR): The error message in the job record is as follows:
|
||||
# <Apr 12 15:01:49.075736> BE_MPI (ERROR): "killed by exit(1) on node 0"
|
||||
STDERR_FILTER() {
|
||||
@@ -91,12 +86,6 @@ STDERR_FILTER() {
|
||||
cp $result_file $tmp_file
|
||||
sed -e '/ BE_MPI (ERROR): /d' \
|
||||
< $tmp_file > $result_file
|
||||
# Filter MPE messages
|
||||
if test -n "$pmode"; then
|
||||
cp $result_file $tmp_file
|
||||
sed -e '/^Writing logfile./d' -e '/^Finished writing logfile./d' \
|
||||
< $tmp_file > $result_file
|
||||
fi
|
||||
# Filter LANL MPI messages
|
||||
# and LLNL srun messages
|
||||
# and AIX error messages
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
#! /bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
# makeTarFiles.pl
|
||||
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
@@ -1,215 +0,0 @@
|
||||
#!/usr/bin/perl
|
||||
# makeInternalREADME.pl
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF4. The full HDF4 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the files COPYING and Copyright.html. COPYING can be found at the root
|
||||
# of the source code distribution tree; Copyright.html can be found at the
|
||||
# root level of an installed copy of the electronic HDF4 document set and
|
||||
# is linked from the top-level documents page. It can also be found at
|
||||
# http://hdfgroup.org/HDF4/doc/Copyright.html. If you do not have
|
||||
# access to either file, you may request a copy from help@hdfgroup.org.
|
||||
#
|
||||
|
||||
use warnings;
|
||||
use strict;
|
||||
|
||||
my $section2="For information on compilers and settings used to build these HDF5
|
||||
libraries, please refer to:
|
||||
|
||||
./lib/libhdf5.settings
|
||||
|
||||
The contents of this directory are:
|
||||
|
||||
COPYING - Copyright notice
|
||||
README - This file
|
||||
RELEASE.txt - Detailed information regarding this release
|
||||
bin/ - Directory containing HDF5 pre-compiled utilities
|
||||
include/ - Directory containing HDF5 include files
|
||||
lib/ - Directory containing HDF5 libraries and settings
|
||||
share/ - Directory containing example code in C, C++, and
|
||||
Fortran using HDF5 and HDF5 HL library APIs. The
|
||||
shell scripts provided with these examples will
|
||||
compile and run them, and will also test the
|
||||
h5cc, h5c++, and h5fc compile scripts found
|
||||
in the installed bin directory.
|
||||
|
||||
These binaries were built with the ZLIB and SZIP (version 2.1, Encoder
|
||||
ENABLED) external libraries which are included in the lib directory for
|
||||
convenience.
|
||||
|
||||
We also provide the ZLIB and SZIP source code on our ftp server at:
|
||||
|
||||
ftp://ftp.hdfgroup.org/lib-external/
|
||||
|
||||
The official ZLIB and SZIP pages are at:
|
||||
|
||||
ZLIB: http://www.zlib.net/
|
||||
SZIP: http://hdfgroup.org/doc_resource/SZIP/
|
||||
|
||||
";
|
||||
|
||||
my $section3 = "If using the shared libraries, you must add the HDF5 library path
|
||||
to the LD_LIBRARY_PATH variable.
|
||||
";
|
||||
|
||||
my $section4 = "We provide scripts for compiling applications with the HDF5 libraries:
|
||||
|
||||
bin/h5cc - for C
|
||||
bin/h5fc - for F90 (if Fortran 90 library is included with the binaries)
|
||||
bin/h5c++ - for C++ (if C++ library is included with the binaries)
|
||||
|
||||
After you have installed the binaries to their final destination, you can use
|
||||
these scripts (h5cc, h5fc, h5c++) to compile. However, you must first run
|
||||
./h5redeploy in the bin directory to change site specific paths in the scripts.
|
||||
|
||||
You may also need to change other variables in the scripts, depending
|
||||
on how things are set up on your system. Here are some of the variables
|
||||
to check:
|
||||
|
||||
prefix - Path to the HDF5 top level installation directory
|
||||
CCBASE - Name of the C compiler
|
||||
CLINKERBASE - Name of the linker
|
||||
LIBS - Libraries your application will link with
|
||||
|
||||
For further details refer to the INSTALL files in
|
||||
ftp://ftp.hdfgroup.org/HDF5/current/src/unpacked/release_docs/
|
||||
or in the ./release_docs/ directory of the HDF5 source code, which can be found
|
||||
on the HDF Group ftp server at ftp://ftp.hdfgroup.org/HDF5/current/src/.
|
||||
|
||||
Please send questions, comments, and suggestions to the appropriate
|
||||
contact address from http://www.hdfgroup.org/about/contact.html
|
||||
|
||||
|
||||
";
|
||||
|
||||
my $indirectory = ".";
|
||||
$indirectory = shift;
|
||||
my $linktype = "shared";
|
||||
if ($indirectory =~ /static/) {
|
||||
$linktype = "static";
|
||||
}
|
||||
my $modestring="";
|
||||
if ($indirectory =~ /32/) {
|
||||
$modestring = "in 32 bit mode ";
|
||||
}
|
||||
|
||||
my $version;
|
||||
|
||||
my $outfile = "$indirectory/README";
|
||||
open OUTFILE, ">$outfile" or die "$!Couldn't open $outfile - check permissions for $indirectory\n";
|
||||
my $hostname;
|
||||
my $cmd = "grep \"HDF5 Version\" $indirectory/lib/libhdf5.settings";
|
||||
$_ = `$cmd`;
|
||||
#print OUTFILE $_, "\n";
|
||||
s/HDF5 Version://;
|
||||
s/^\s+//;
|
||||
chomp;
|
||||
$version = $_;
|
||||
#print OUTFILE $_, "\n";
|
||||
|
||||
my $versionstring= "This directory contains the $linktype binary distribution of HDF5-".$version;
|
||||
|
||||
$cmd = "grep \"Uname information:\" $indirectory/lib/libhdf5.settings";
|
||||
$_ = `$cmd`;
|
||||
s/Uname information://;
|
||||
s/^\s+//;
|
||||
#print OUTFILE $_;
|
||||
chomp;
|
||||
#s/(^\w+)(\s)(\S+)/$1/;
|
||||
#s/(^.*)(-)(.*)(200[7-8])(.*)(\s)(\S+)/$1 $5/;
|
||||
#my $platformstring = "\nthat was compiled on:" . $_ . " ";
|
||||
my $platformstring = "";
|
||||
my $hostnamestring = $_;
|
||||
my @hostnamestring = split / /, $hostnamestring;
|
||||
#print OUTFILE "Size of hostnamestring is ", scalar @hostnamestring, "\n";
|
||||
#print OUTFILE $hostnamestring[0] . "\t" . $hostnamestring[2]."\t".$hostnamestring[19]."\n";
|
||||
$hostname = $hostnamestring[1];
|
||||
#my $size = scalar @hostnamestring;
|
||||
if ($hostname =~ /loyalty/) {
|
||||
$platformstring = "\nthat was compiled " . $modestring . "on: " . $hostnamestring[0]." " . $hostnamestring[2]." " . $hostnamestring[-1] . " ";
|
||||
}
|
||||
elsif ($hostname =~ /freedom/) {
|
||||
$platformstring = "\nthat was compiled " . $modestring . "on: " . $hostnamestring[0]." " . $hostnamestring[2]." " . $hostnamestring[-1] . " ";
|
||||
} elsif ($hostname =~ /emu/) {
|
||||
$platformstring = "\nthat was compiled " . $modestring . "on: " . $hostnamestring[0]." " . $hostnamestring[2] . " " . $hostnamestring[-2] . " ";
|
||||
} elsif ($hostname =~ /fred/) {
|
||||
$platformstring = "\nthat was compiled " . $modestring . "on: " . $hostnamestring[0]." " . $hostnamestring[2] . " " . $hostnamestring[-1] . " ";
|
||||
} else {
|
||||
$_ = $hostnamestring[2];
|
||||
my $pos = index $_, '-';
|
||||
my $os = substr $_, 0, $pos;
|
||||
$platformstring = "\nthat was compiled " . $modestring . "on: " . $hostnamestring[0] . " " . $os . " " . $hostnamestring[-2] . " ";
|
||||
}
|
||||
|
||||
|
||||
my $mappingstring = "";
|
||||
#no mappingstring for 1.6. Put it back for 1.8.
|
||||
#$cmd = "grep \"Default API Mapping:\" $indirectory/lib/libhdf5.settings";
|
||||
#$_ = `$cmd`;
|
||||
#s/Default API Mapping://;
|
||||
#s/^\s+//;
|
||||
#chomp;
|
||||
#if (/v\d+/) {
|
||||
# s/v//;
|
||||
# s/(\d)(\d)/$1\.$2/g;
|
||||
# $mappingstring = "using the default\nAPI mapping for VERSION ".$_.".";
|
||||
#
|
||||
#}
|
||||
print OUTFILE $versionstring;
|
||||
print OUTFILE $platformstring."\n\n";
|
||||
#print OUTFILE $mappingstring;
|
||||
|
||||
#if ($hostname eq "loyalty.hdfgroup.uiuc.edu" || $hostname eq "freedom.hdfgroup.uiuc.edu") {
|
||||
# print OUTFILE " It includes the C APIs,\nbuilt using the following ";
|
||||
# print OUTFILE "compilers:\n\n";
|
||||
#}
|
||||
#else {
|
||||
if ($linktype eq "shared" && !($hostname =~ /32/)) {
|
||||
print OUTFILE "\n\nIt includes the C, C++, F90 and Java APIs, built using the following\n";
|
||||
} else {
|
||||
print OUTFILE "\n\nIt includes the C, C++, and F90 APIs, built using the following\n";
|
||||
}
|
||||
print OUTFILE "compilers:\n\n";
|
||||
#}
|
||||
|
||||
# Only the gcc compiler version is in libhdf5.settings, so for now I looked
|
||||
# up the versions and hardcoded them here. We will put them in libhdf5.settings
|
||||
# for the next release.
|
||||
if ($indirectory =~ /gnu484/) {
|
||||
print OUTFILE "\tgcc, g++, and gfortran 4.8.4\n\n";
|
||||
} elsif ($hostname =~ /jam/ || $hostname =~ /koala/) {
|
||||
print OUTFILE "\tgcc, g++, and gfortran 4.1.2\n\n";
|
||||
} elsif ($hostname =~ /platypus/) {
|
||||
print OUTFILE "\tgcc, g++, and gfortran 4.4.7\n\n";
|
||||
if ($linktype eq "shared" && !($hostname =~ /32/)) {
|
||||
print OUTFILE "\tjava 1.8.0_51\n\n";
|
||||
}
|
||||
} elsif ($hostname =~ /moohan/) {
|
||||
print OUTFILE "\tgcc, g++, and gfortran 4.8.5\n\n";
|
||||
if ($linktype eq "shared" && !($hostname =~ /32/)) {
|
||||
print OUTFILE "\tjava 1.8.0_51\n\n";
|
||||
}
|
||||
} elsif ($hostname =~ /emu/) {
|
||||
print OUTFILE "\tSun C and C++ 5.12, Sun Fortran 95 8.6\n\n";
|
||||
} elsif ($hostname =~ /loyalty/ || $hostname =~ /freedom/) {
|
||||
print OUTFILE "\tgcc, g++, and gfortran 4.6.1\n\n";
|
||||
} elsif ($hostname =~ /duck/) {
|
||||
print OUTFILE "\tApple clang/clang++ 3.0 from Xcode 4.6.1 and gfortran 4.8.2\n\n";
|
||||
} elsif ($hostname =~ /kite/) {
|
||||
print OUTFILE "\tApple clang/clang++ 5.1 from Xcode 5.0.2 and gfortran 4.8.2\n\n";
|
||||
} elsif ($hostname =~ /quail/) {
|
||||
print OUTFILE "\tgcc, g++ 5.1 from Xcode 5.1 and gfortran 4.8.2\n\n";
|
||||
} elsif ($hostname =~ /osx1010test/) {
|
||||
print OUTFILE "\tgcc, g++ 5.1 from Xcode 5.1 and gfortran 4.8.2\n\n";
|
||||
}
|
||||
|
||||
print OUTFILE $section2;
|
||||
|
||||
print OUTFILE $section3;
|
||||
|
||||
print OUTFILE $section4;
|
||||
|
||||
@@ -1,182 +0,0 @@
|
||||
#!/usr/bin/perl
|
||||
# makeOuterREADME.pl
|
||||
|
||||
# Copyright by The HDF Group.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF4. The full HDF4 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the files COPYING and Copyright.html. COPYING can be found at the root
|
||||
# of the source code distribution tree; Copyright.html can be found at the
|
||||
# root level of an installed copy of the electronic HDF4 document set and
|
||||
# is linked from the top-level documents page. It can also be found at
|
||||
# http://hdfgroup.org/HDF4/doc/Copyright.html. If you do not have
|
||||
# access to either file, you may request a copy from help@hdfgroup.org.
|
||||
#
|
||||
|
||||
use warnings;
|
||||
use strict;
|
||||
|
||||
my $indirectory = ".";
|
||||
$indirectory = shift;
|
||||
|
||||
my $outdirectory = ".";
|
||||
$outdirectory = shift;
|
||||
|
||||
my $outsubdir = shift;
|
||||
|
||||
my $version;
|
||||
|
||||
my $outfile = "$outdirectory/$outsubdir/README";
|
||||
open OUTFILE, ">$outfile";
|
||||
my $hostname="";
|
||||
my $cmd = "grep \"HDF5 Version\" $indirectory-static/lib/libhdf5.settings";
|
||||
$_ = `$cmd`;
|
||||
print OUTFILE $_, "\n";
|
||||
s/HDF5 Version://;
|
||||
s/^\s+//;
|
||||
chomp;
|
||||
$version = $_;
|
||||
#print OUTFILE $_, "\n";
|
||||
my $versionstring= "This directory contains the precompiled HDF5 $version binary distribution\n(include files, libraries, utilities) for";
|
||||
|
||||
$cmd = "grep \"Uname information:\" $indirectory-static/lib/libhdf5.settings";
|
||||
$_ = `$cmd`;
|
||||
s/Uname information://;
|
||||
s/^\s+//;
|
||||
chomp;
|
||||
print "String to work with is $_\n";
|
||||
my $platformstring = "";
|
||||
my $hostnamestring = $_;
|
||||
my @hostnamestring = split / /, $hostnamestring;
|
||||
$platformstring = "$hostnamestring[0] ";
|
||||
if ($indirectory =~ /jam/ || $indirectory =~ /koala/) {
|
||||
$hostnamestring = $hostnamestring[2];
|
||||
my $pos = index $hostnamestring, "-";
|
||||
if ($pos > 0) {
|
||||
$platformstring .= substr $hostnamestring, 0, $pos;
|
||||
} else {
|
||||
$platformstring .= $hostnamestring[2];
|
||||
}
|
||||
$platformstring .= " ".$hostnamestring[-3];
|
||||
} elsif ($indirectory =~ /linew/) {
|
||||
$platformstring .= "$hostnamestring[2] $hostnamestring[-2]";
|
||||
} else {
|
||||
$platformstring .= "$hostnamestring[2] $hostnamestring[-1]";
|
||||
}
|
||||
|
||||
print OUTFILE $versionstring." ".$platformstring.":\n\n";
|
||||
my $line1;
|
||||
my $line3;
|
||||
my $line5;
|
||||
my $compilerstring="";
|
||||
my $compilerstring1="";
|
||||
my $compilerstring2="";
|
||||
|
||||
|
||||
print $indirectory, "\n";
|
||||
|
||||
if ($indirectory =~ /ostrich/) {
|
||||
# $line1 = " hdf5-$version-$outsubdir-16API.tar.gz - Includes C, C++, F90 APIs (using\n";
|
||||
$line3 = " hdf5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
|
||||
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 APIs (using";
|
||||
$compilerstring = " gcc, g++, and gfortran 4.4.7)";
|
||||
}
|
||||
elsif ($indirectory =~ /platypus/) {
|
||||
# $line1 = " hdf5-$version-$outsubdir-16API.tar.gz - Includes C, C++, F90 APIs (using\n";
|
||||
$line3 = " hdf5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
|
||||
$compilerstring1 = " gcc, g++, and gfortran 4.4.7)\n";
|
||||
if ($indirectory =~ /32/) {
|
||||
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 and Java APIs (using";
|
||||
$compilerstring2 = " gcc, g++, and gfortran 4.4.7)\n";
|
||||
} else {
|
||||
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 and Java APIs (using";
|
||||
$compilerstring2 = " gcc, g++, and gfortran 4.4.7 and java 1.8.0_51)\n";
|
||||
}
|
||||
}
|
||||
elsif ($indirectory =~ /moohan/) {
|
||||
# $line1 = " hdf5-$version-$outsubdir-16API.tar.gz - Includes C, C++, F90 APIs (using\n";
|
||||
$line3 = " hdf5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
|
||||
$compilerstring1 = " gcc, g++, and gfortran 4.8.5)\n";
|
||||
if ($indirectory =~ /32/) {
|
||||
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 and Java APIs (using";
|
||||
$compilerstring2 = " gcc, g++, and gfortran 4.4.7)\n";
|
||||
} else {
|
||||
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 and Java APIs (using";
|
||||
$compilerstring2 = " gcc, g++, and gfortran 4.8.5 and java 1.8.0_51)\n";
|
||||
}
|
||||
}
|
||||
elsif ($indirectory =~ /emu/) {
|
||||
# $line1 = " hdf5-$version-$outsubdir-16API.tar.gz - Includes C, C++, F90 APIs (using\n";
|
||||
$line3 = " hdf5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
|
||||
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 APIs (using";
|
||||
$compilerstring = " Sun C and C++ 5.12, Sun Fortran 95 8.6)\n";
|
||||
}
|
||||
elsif ($indirectory =~ /kite/) {
|
||||
$line3 = " hfd5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
|
||||
$line5 = " hfd5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 APIs (using";
|
||||
$compilerstring = " Apple clang/clang++ 5.1 from Xcode 5.0.2,
|
||||
gfortran 4.8.2)\n";
|
||||
}
|
||||
elsif ($indirectory =~ /quail/) {
|
||||
$line3 = " hfd5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
|
||||
$line5 = " hfd5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 APIs (using";
|
||||
$compilerstring = " Apple clang/clang++ 6.0 from Xcode 5.1,
|
||||
gfortran 4.9.2)\n";
|
||||
}
|
||||
elsif ($indirectory =~ /osx1010test/) {
|
||||
$line3 = " hdf5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
|
||||
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 APIs (using";
|
||||
$compilerstring = " Apple clang/clang++ 6.1 from Xcode 6.1,
|
||||
gfortran 4.9.2)\n";
|
||||
}
|
||||
elsif ($indirectory =~ /osx1011test/) {
|
||||
$line3 = " hdf5-$version-$outsubdir-static.tar.gz - Includes C, C++, F90 APIs (using";
|
||||
$line5 = " hdf5-$version-$outsubdir-shared.tar.gz - Includes C, C++, F90 APIs (using";
|
||||
$compilerstring = " Apple clang/clang++ 7.0.2 from Xcode 7.0,
|
||||
gfortran 5.2.0)\n";
|
||||
}
|
||||
|
||||
print OUTFILE $line3;
|
||||
print OUTFILE $compilerstring1."\n";
|
||||
if ($line5 ne "") {
|
||||
print OUTFILE $line5;
|
||||
print OUTFILE $compilerstring2."\n";
|
||||
}
|
||||
#elsif ($indirectory =~ /-16API/) {
|
||||
# print OUTFILE $line1;
|
||||
# print OUTFILE $compilerstring;
|
||||
# print OUTFILE " USES THE VERSION 1.6 DEFAULT API\n";
|
||||
# print OUTFILE " MAPPING.\n\n";
|
||||
#}
|
||||
|
||||
print OUTFILE " utilities/ - Directory containing the compiled HDF5 utilities.\n";
|
||||
print OUTFILE " These utilities are STATICALLY linked and will run as is.\n\n";
|
||||
#print OUTFILE " The tools can be downloaded separately, or ALL in one\n";
|
||||
#print OUTFILE " compressed file (5-$version-$outsubdir-static-util.tar.gz).\n\n";
|
||||
print OUTFILE "\n";
|
||||
print OUTFILE "To obtain the HDF5 distribution, 'unzip' and 'untar' the distribution\n";
|
||||
print OUTFILE "file:\n\n";
|
||||
print OUTFILE " gzip -cd <gz file from above> | tar xvf -\n";
|
||||
|
||||
|
||||
#$cmd = "grep \"Configured by:\" $indirectory/$key-static/lib/libhdf5.settings";
|
||||
#$_ = `$cmd`;
|
||||
#s/Configured by://;
|
||||
#s/^\s+//;
|
||||
#print OUTFILE $_;
|
||||
#chomp;
|
||||
#my $hostnamestring = $_;
|
||||
#s/(^\w+)(\s)(\S+)/$1/;
|
||||
#s/(^.*)(-)(.*)(200[7-8])(.*)(\s)(\S+)/$1 $5/;
|
||||
#my $platformstring = $_ . ":\n\n";
|
||||
#my @hostnamestring = split /@/, $hostnamestring;
|
||||
#print "Size of hostnamestring is ", scalar @hostnamestring, "\n";
|
||||
#print $hostnamestring[0] . "\t" . $hostnamestring[2]."\t".$hostnamestring[19]."\n";
|
||||
#my $platformstring = $hostnamestring[1].":\n\n";
|
||||
#$hostnamestring = $hostnamestring[1];
|
||||
#my $pos = index $hostnamestring, ".";
|
||||
#if ($pos > 0) {
|
||||
# @hostnamestring = split /\./, $hostnamestring;
|
||||
# $platformstring = $hostnamestring[0].":\n\n";
|
||||
#}
|
||||
117
bin/release
117
bin/release
@@ -1,7 +1,6 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
@@ -13,24 +12,6 @@
|
||||
#
|
||||
|
||||
# Make a release of hdf5.
|
||||
#
|
||||
# Programmer: Robb Matzke
|
||||
# Creation date: on or before 1998-01-29.
|
||||
#
|
||||
# Modifications
|
||||
# Robb Matzke, 1999-07-16
|
||||
# The SunOS 5.6 sed *must* have slashes as delimiters. I changed things like
|
||||
# `sed s+/CVS++' to `sed 's/\/CVS//'
|
||||
#
|
||||
# Albert Cheng, 1999-10-26
|
||||
# Moved the MANIFEST checking to a separate command file so that
|
||||
# it can be invoked individually.
|
||||
#
|
||||
# Albert Cheng, 2004-08-14
|
||||
# Added the --private option.
|
||||
#
|
||||
# James Laird, 2005-09-07
|
||||
# Added the md5 method.
|
||||
|
||||
# Function definitions
|
||||
#
|
||||
@@ -38,14 +19,13 @@
|
||||
USAGE()
|
||||
{
|
||||
cat << EOF
|
||||
Usage: $0 -d <dir> [--docver BRANCHNAME] [-h] [--nocheck] [--private] <methods> ...
|
||||
-d DIR The name of the directory where the release(es) should be
|
||||
Usage: $0 -d <dir> [--docver BRANCHNAME] [-h] [--private] <methods> ...
|
||||
-d DIR The name of the directory where the release(s) should be
|
||||
placed.
|
||||
--docver BRANCHNAME This is added for 1.8 and beyond to get the correct
|
||||
version of documentation files from the hdf5docs
|
||||
repository. BRANCHNAME for v1.8 should be hdf5_1_8.
|
||||
-h print the help page.
|
||||
--nocheck Ignore errors in MANIFEST file.
|
||||
--private Make a private release with today's date in version information.
|
||||
|
||||
This must be run at the top level of the source directory.
|
||||
@@ -72,23 +52,23 @@ for compressing the resulting tar archive (if none are given then
|
||||
information is available in the README_HPC file.
|
||||
doc -- produce the latest doc tree in addition to the archive.
|
||||
|
||||
An md5 checksum is produced for each archive created and stored in the md5 file.
|
||||
A sha256 checksum is produced for each archive created and stored in the sha256 file.
|
||||
|
||||
Examples:
|
||||
|
||||
$ bin/release -d /tmp
|
||||
/tmp/hdf5-1.8.13-RELEASE.txt
|
||||
/tmp/hdf5-1.8.13.md5
|
||||
/tmp/hdf5-1.8.13.sha256
|
||||
/tmp/hdf5-1.8.13.tar
|
||||
|
||||
$ bin/release -d /tmp gzip
|
||||
/tmp/hdf5-1.8.13-RELEASE.txt
|
||||
/tmp/hdf5-1.8.13.md5
|
||||
/tmp/hdf5-1.8.13.sha256
|
||||
/tmp/hdf5-1.8.13.tar.gz
|
||||
|
||||
$ bin/release -d /tmp tar gzip zip
|
||||
/tmp/hdf5-1.8.13-RELEASE.txt
|
||||
/tmp/hdf5-1.8.13.md5
|
||||
/tmp/hdf5-1.8.13.sha256
|
||||
/tmp/hdf5-1.8.13.tar
|
||||
/tmp/hdf5-1.8.13.tar.gz
|
||||
/tmp/hdf5-1.8.13.tar.zip
|
||||
@@ -100,11 +80,6 @@ EOF
|
||||
# Function name: tar2zip
|
||||
# Convert the release tarball to a Windows zipball.
|
||||
#
|
||||
# Programmer: Albert Cheng
|
||||
# Creation date: 2014-04-23
|
||||
#
|
||||
# Modifications
|
||||
#
|
||||
# Steps:
|
||||
# 1. untar the tarball in a temporary directory;
|
||||
# Note: do this in a temporary directory to avoid changing
|
||||
@@ -167,11 +142,6 @@ tar2zip()
|
||||
# Function name: tar2cmakezip
|
||||
# Convert the release tarball to a Windows zipball with files to run CMake build.
|
||||
#
|
||||
# Programmer: Larry Knox
|
||||
# Creation date: 2017-02-20
|
||||
#
|
||||
# Modifications
|
||||
#
|
||||
# Steps:
|
||||
# 1. untar the tarball in a temporary directory;
|
||||
# Note: do this in a temporary directory to avoid changing
|
||||
@@ -235,7 +205,7 @@ tar2cmakezip()
|
||||
# step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files
|
||||
cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmziptmpsubdir
|
||||
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmziptmpsubdir
|
||||
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.4-Source.zip $cmziptmpsubdir
|
||||
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.2-Source.zip $cmziptmpsubdir
|
||||
cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-master.zip $cmziptmpsubdir
|
||||
cp $cmziptmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmziptmpsubdir
|
||||
cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmziptmpsubdir
|
||||
@@ -269,10 +239,6 @@ tar2cmakezip()
|
||||
# Function name: tar2cmaketgz
|
||||
# Convert the release tarball to a gzipped tar file with files to run CMake build.
|
||||
#
|
||||
# Programmer: Larry Knox
|
||||
# Creation date: 2017-02-20
|
||||
#
|
||||
# Modifications
|
||||
#
|
||||
# Steps:
|
||||
# 1. untar the tarball in a temporary directory;
|
||||
@@ -331,7 +297,7 @@ tar2cmaketgz()
|
||||
# step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files
|
||||
cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmgztmpsubdir
|
||||
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir
|
||||
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.4-Source.tar.gz $cmgztmpsubdir
|
||||
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.2-Source.tar.gz $cmgztmpsubdir
|
||||
cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-master.tar.gz $cmgztmpsubdir
|
||||
cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir
|
||||
cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir
|
||||
@@ -347,11 +313,6 @@ tar2cmaketgz()
|
||||
# and HDF5options.cmake files for parallel or serial only builds where build
|
||||
# tests are run on compute nodes using batch scripts.
|
||||
#
|
||||
# Programmer: Larry Knox
|
||||
# Creation date: 2019-01-28
|
||||
#
|
||||
# Modifications
|
||||
#
|
||||
# Steps:
|
||||
# 1. untar the tarball in a temporary directory;
|
||||
# Note: do this in a temporary directory to avoid changing
|
||||
@@ -415,7 +376,7 @@ tar2hpccmaketgz()
|
||||
# step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files
|
||||
cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmgztmpsubdir
|
||||
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir
|
||||
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.4-Source.tar.gz $cmgztmpsubdir
|
||||
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.2-Source.tar.gz $cmgztmpsubdir
|
||||
cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-master.tar.gz $cmgztmpsubdir
|
||||
cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir
|
||||
cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir
|
||||
@@ -443,7 +404,6 @@ VERS=`perl bin/h5vers`
|
||||
VERS_OLD=
|
||||
test "$VERS" || exit 1
|
||||
verbose=yes
|
||||
check=yes
|
||||
release_date=`date +%F`
|
||||
today=`date +%Y%m%d`
|
||||
pmode='no'
|
||||
@@ -482,9 +442,6 @@ while [ -n "$1" ]; do
|
||||
DEST=$1
|
||||
shift
|
||||
;;
|
||||
--nocheck)
|
||||
check=no
|
||||
;;
|
||||
-h)
|
||||
USAGE
|
||||
exit 0
|
||||
@@ -546,35 +503,17 @@ if [ ! -d $DEST ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check the validity of the MANIFEST file.
|
||||
bin/chkmanifest || fail=yes
|
||||
if [ "X$fail" = "Xyes" ]; then
|
||||
if [ $check = yes ]; then
|
||||
echo ""
|
||||
echo "Note! If you are running bin/release in a development branch"
|
||||
echo "later than v 1.8 the MANIFEST check is expected to fail when"
|
||||
echo "autogen.sh has not been run successfully. Either run autogen.sh "
|
||||
echo "with /usr/hdf/bin/AUTOTOOLS at the beginning of PATH or add the"
|
||||
echo "--nocheck argument to the bin/release command."
|
||||
exit 1
|
||||
else
|
||||
echo "Continuing anyway..."
|
||||
fi
|
||||
fi
|
||||
|
||||
# Create a manifest that contains only files for distribution.
|
||||
MANIFEST=$tmpdir/H5_MANIFEST
|
||||
grep '^\.' MANIFEST | grep -v _DO_NOT_DISTRIBUTE_ >$MANIFEST
|
||||
|
||||
# Prepare the source tree for a release.
|
||||
# Create a symlink to the source so files in the tarball have the prefix
|
||||
# we want (gnu's --transform isn't portable)
|
||||
ln -s `pwd` $tmpdir/$HDF5_VERS || exit 1
|
||||
|
||||
# Save a backup copy of Makefile if exists.
|
||||
test -f Makefile && mv Makefile $tmpdir/Makefile.x
|
||||
cp -p Makefile.dist Makefile
|
||||
|
||||
# Update README.txt and release_docs/RELEASE.txt with release information in
|
||||
# Update README.md and release_docs/RELEASE.txt with release information in
|
||||
# line 1.
|
||||
for f in README.txt release_docs/RELEASE.txt; do
|
||||
for f in README.md release_docs/RELEASE.txt; do
|
||||
echo "HDF5 version $VERS released on $release_date" >$f.x
|
||||
sed -e 1d $f >>$f.x
|
||||
mv $f.x $f
|
||||
@@ -582,57 +521,53 @@ for f in README.txt release_docs/RELEASE.txt; do
|
||||
chmod 644 $f
|
||||
done
|
||||
|
||||
# trunk is different than branches.
|
||||
# develop is different than branches.
|
||||
if [ "${DOCVERSION}" ]; then
|
||||
DOC_URL="$DOC_URL -b ${DOCVERSION}"
|
||||
fi
|
||||
|
||||
# Create the tar file
|
||||
test "$verbose" && echo " Running tar..." 1>&2
|
||||
( \
|
||||
cd $tmpdir; \
|
||||
tar cf $HDF5_VERS.tar $HDF5_VERS/Makefile \
|
||||
`sed 's/^\.\//hdf5-'$VERS'\//' $MANIFEST` || exit 1 \
|
||||
)
|
||||
(cd "$tmpdir" && exec tar -ch --exclude-vcs -f "$HDF5_VERS.tar" "./$HDF5_VERS" || exit 1 )
|
||||
|
||||
# Compress
|
||||
MD5file=$HDF5_VERS.md5
|
||||
cp /dev/null $DEST/$MD5file
|
||||
SHA256=$HDF5_VERS.sha256
|
||||
cp /dev/null $DEST/$SHA256
|
||||
for comp in $methods; do
|
||||
case $comp in
|
||||
tar)
|
||||
cp -p $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.tar
|
||||
(cd $DEST; md5sum $HDF5_VERS.tar >> $MD5file)
|
||||
(cd $DEST; sha256sum $HDF5_VERS.tar >> $SHA256)
|
||||
;;
|
||||
gzip)
|
||||
test "$verbose" && echo " Running gzip..." 1>&2
|
||||
gzip -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.gz
|
||||
(cd $DEST; md5sum $HDF5_VERS.tar.gz >> $MD5file)
|
||||
(cd $DEST; sha256sum $HDF5_VERS.tar.gz >> $SHA256)
|
||||
;;
|
||||
cmake-tgz)
|
||||
test "$verbose" && echo " Creating CMake tar.gz file..." 1>&2
|
||||
tar2cmaketgz $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/CMake-$HDF5_VERS.tar.gz 1>&2
|
||||
(cd $DEST; md5sum CMake-$HDF5_VERS.tar.gz >> $MD5file)
|
||||
(cd $DEST; sha256sum CMake-$HDF5_VERS.tar.gz >> $SHA256)
|
||||
;;
|
||||
hpc-cmake-tgz)
|
||||
test "$verbose" && echo " Creating HPC-CMake tar.gz file..." 1>&2
|
||||
tar2hpccmaketgz $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/HPC-CMake-$HDF5_VERS.tar.gz 1>&2
|
||||
(cd $DEST; md5sum HPC-CMake-$HDF5_VERS.tar.gz >> $MD5file)
|
||||
(cd $DEST; sha256sum HPC-CMake-$HDF5_VERS.tar.gz >> $SHA256)
|
||||
;;
|
||||
bzip2)
|
||||
test "$verbose" && echo " Running bzip2..." 1>&2
|
||||
bzip2 -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.bz2
|
||||
(cd $DEST; md5sum $HDF5_VERS.tar.bz2 >> $MD5file)
|
||||
(cd $DEST; sha256sum $HDF5_VERS.tar.bz2 >> $SHA256)
|
||||
;;
|
||||
zip)
|
||||
test "$verbose" && echo " Creating zip ball..." 1>&2
|
||||
tar2zip $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2
|
||||
(cd $DEST; md5sum $HDF5_VERS.zip >> $MD5file)
|
||||
(cd $DEST; sha256sum $HDF5_VERS.zip >> $SHA256)
|
||||
;;
|
||||
cmake-zip)
|
||||
test "$verbose" && echo " Creating CMake-zip ball..." 1>&2
|
||||
tar2cmakezip $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/CMake-$HDF5_VERS.zip 1>&2
|
||||
(cd $DEST; md5sum CMake-$HDF5_VERS.zip >> $MD5file)
|
||||
(cd $DEST; sha256sum CMake-$HDF5_VERS.zip >> $SHA256)
|
||||
;;
|
||||
doc)
|
||||
if [ "${DOCVERSION}" = "" ]; then
|
||||
@@ -680,4 +615,6 @@ fi
|
||||
# Remove temporary things
|
||||
rm -rf $tmpdir
|
||||
|
||||
echo "DONE"
|
||||
|
||||
exit 0
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
# Copyright by The HDF Group.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
@@ -48,6 +48,9 @@ rm -f bin/missing
|
||||
rm -f bin/test-driver
|
||||
rm -f bin/depcomp
|
||||
|
||||
echo "Remove files generated by autoheader"
|
||||
rm -f src/H5config.h.in
|
||||
|
||||
echo "Remove files generated by bin/make_err"
|
||||
rm -f src/H5Epubgen.h
|
||||
rm -f src/H5Einit.h
|
||||
|
||||
@@ -5,7 +5,6 @@ $indent=4;
|
||||
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
966
bin/runtest
966
bin/runtest
@@ -1,966 +0,0 @@
|
||||
#! /bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
#
|
||||
|
||||
# run the hdf5/bin/snapshot
|
||||
# Usage:
|
||||
# runtest run the test for the local host
|
||||
# runtest <hostname> run the test for <hostname>
|
||||
# runtest -all run the test for all predefined hosts
|
||||
#
|
||||
# Assumptions in knowing where to find the right scripts to execute.
|
||||
# 1. assume we are at the top level of the hdf5 source. So, bin/* are
|
||||
# where the script files are.
|
||||
# 2. after the cvs update is completed, we can go to the snapshot area
|
||||
# hdf5 source tree and use the bin/* there.
|
||||
# 3. Cannot use the snapshot area scripts from the beginning because
|
||||
# for one, the current directory is renamed as previous right after
|
||||
# a snapshot release; and for another, some scripts may be changed
|
||||
# by the cvs update while it is being used.
|
||||
|
||||
# local setup
|
||||
DEBUGMODE=""
|
||||
test -n "$DEBUGMODE" && echo "******** DEBUGMODE is $DEBUGMODE ************"
|
||||
WHEREAMI='pwd'
|
||||
CMD=
|
||||
|
||||
# the name of this program
|
||||
PROGNAME="bin/runtest $DEBUGMODE"
|
||||
|
||||
# Setup
|
||||
HOSTNAME=`hostname | cut -f1 -d.` # no domain part
|
||||
TODAY=`date +%m%d%a`
|
||||
WEEKDAY=`date +%a`
|
||||
H5VER= # default to current CVS version
|
||||
H5VERSION= # default to current CVS version
|
||||
n_test=0 # Number of tests ran
|
||||
n_pass=0 # Number of tests passed
|
||||
n_fail=0 # Number of tests failed
|
||||
n_skip=0 # Number of tests skipped
|
||||
|
||||
# Default to do checkout (only once) and test, no release.
|
||||
# Will run test only if there is significant differences from previous version.
|
||||
# If srcdir is not used, don't launched multiple tests
|
||||
SNAPSHOT="${DEBUGMODE:+echo }bin/snapshot"
|
||||
SRCDIR="srcdir"
|
||||
# Default standard Snaptest commands
|
||||
SNAPCMD="$SRCDIR test clean"
|
||||
# Default Standard snaptest command options
|
||||
STANDARD_OPT=""
|
||||
ENABLE_PARALLEL="--enable-parallel"
|
||||
CONFIGNAME=$HOSTNAME # Name used in the SNAPTESTCFG file
|
||||
|
||||
# test host default as local host.
|
||||
TESTHOST=""
|
||||
|
||||
#################################
|
||||
# Function definitions
|
||||
#################################
|
||||
|
||||
# Print messages to stdout
|
||||
# Use this to show output heading to stdout
|
||||
PRINT()
|
||||
{
|
||||
echo "$*"
|
||||
}
|
||||
|
||||
# Show seconds since midnight.
|
||||
# This is used to calculate seconds elapsed
|
||||
SecOfDay()
|
||||
{
|
||||
set `date '+%H %M %S'`
|
||||
t_sec=`expr $1 \* 3600 + $2 \* 60 + $3`
|
||||
echo $t_sec
|
||||
}
|
||||
|
||||
# Calculated the elapsed time (in seconds) between the first
|
||||
# and second time. If second time is smaller than the first,
|
||||
# we assume the clock has passed midnight and calculate appropriately.
|
||||
ElapsedTime()
|
||||
{
|
||||
if [ $2 -lt $1 ]; then
|
||||
t_sec=`expr 3600 \* 24 - $1 + $2`
|
||||
else
|
||||
t_sec=`expr $2 - $1`
|
||||
fi
|
||||
echo `expr $t_sec / 60`m `expr $t_sec % 60`s
|
||||
}
|
||||
|
||||
# Report errors
|
||||
# $1--an error message to be printed
|
||||
REPORT_ERR()
|
||||
{
|
||||
ERRMSG=$1
|
||||
# print it with a banner shifted right a bit
|
||||
PRINT " *************************************"
|
||||
PRINT " `date`"
|
||||
PRINT " $ERRMSG"
|
||||
PRINT " *************************************"
|
||||
# report it in the FAILED-LOG file too
|
||||
PRINT "$ERRMSG" >> $FAILEDLOG
|
||||
}
|
||||
|
||||
#
|
||||
# Report results of the last test done
|
||||
REPORT_RESULT()
|
||||
{
|
||||
if [ $retcode -eq 0 ]; then
|
||||
if [ $skiptest = yes ]; then
|
||||
n_skip=`expr $n_skip + 1`
|
||||
PRINT "SKIPPED ${HOSTNAME}: $TEST_TYPE" | tee -a $SKIPPEDLOG
|
||||
else
|
||||
n_pass=`expr $n_pass + 1`
|
||||
PRINT "PASSED ${HOSTNAME}: $TEST_TYPE" | tee -a $PASSEDLOG
|
||||
fi
|
||||
else
|
||||
# test failed.
|
||||
n_fail=`expr $n_fail + 1`
|
||||
REPORT_ERR "****FAILED ${HOSTNAME}: $TEST_TYPE****"
|
||||
fi
|
||||
}
|
||||
|
||||
# Print a blank line
|
||||
PRINT_BLANK()
|
||||
{
|
||||
PRINT
|
||||
}
|
||||
|
||||
# Print test trailer
|
||||
PRINT_TEST_TRAILER()
|
||||
{
|
||||
PRINT "*** finished $TEST_TYPE tests for $HOSTNAME ***"
|
||||
date; EndTime=`SecOfDay`
|
||||
PRINT Total time = `ElapsedTime $StartTime $EndTime`
|
||||
PRINT_BLANK
|
||||
}
|
||||
|
||||
# Print trailer summary
|
||||
PRINT_TRAILER()
|
||||
{
|
||||
PRINT "*** finished tests in $HOSTNAME ***"
|
||||
date; TotalEndTime=`SecOfDay`
|
||||
PRINT "${HOSTNAME}: Ran $n_test($n_pass/$n_fail/$n_skip) $runtest_type, Grand total test time = " \
|
||||
"`ElapsedTime $TotalStartTime $TotalEndTime`" | tee -a $TIMELOG
|
||||
PRINT_BLANK
|
||||
}
|
||||
|
||||
# Figure out which remote command to use to reach a host.
|
||||
# Try ssh first, then rsh since fewer machines support rsh exec.
|
||||
# $1--hostname to reach.
|
||||
CHECK_RSH()
|
||||
{
|
||||
# Figure out how to use ping command in this host.
|
||||
# Some hosts use "ping host count", some use "ping -c count host".
|
||||
# Test "ping -c 3 -w 5" since it has timeout feature.
|
||||
# Test "ping -c ..." style before "ping host 3" because some machines
|
||||
# that recognize -c treat 'ping localhost 3' as to ping host '3'.
|
||||
if [ -z "$PING" ]; then
|
||||
if ping -c 3 -w 5 localhost >/dev/null 2>&1; then
|
||||
PING='ping -c 3 -w 5'
|
||||
PINGCOUNT=
|
||||
elif ping -c 3 localhost >/dev/null 2>&1; then
|
||||
PING='ping -c 3'
|
||||
PINGCOUNT=
|
||||
elif ping localhost 3 >/dev/null 2>&1; then
|
||||
PING=ping
|
||||
PINGCOUNT=3
|
||||
else # don't know how to use ping.
|
||||
PING=no_ping
|
||||
PINGCOUNT=
|
||||
fi
|
||||
fi
|
||||
#
|
||||
host=$1
|
||||
# Try remote command with host if it responds to ping.
|
||||
# Still try it if we don't know how to do ping.
|
||||
if [ no_ping = "$PING" ] || $PING $host $PINGCOUNT >/dev/null 2>&1; then
|
||||
if ssh $host -n hostname >/dev/null 2>&1; then
|
||||
RSH=ssh
|
||||
elif rsh $host -n hostname >/dev/null 2>&1; then
|
||||
RSH=rsh
|
||||
else
|
||||
PRINT cannot remote command with $host
|
||||
RSH="NoRemoteCommand"
|
||||
fi
|
||||
else
|
||||
RSH="NotReachable"
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
# Wait for a file for at most number of minutes
|
||||
# $1--the file
|
||||
# $2--number of minutes
|
||||
# WAIT_STATUS set to:
|
||||
# -1 if errors encountered
|
||||
# 0 if file found within time limit
|
||||
# 1 if file not found within time limit
|
||||
WAITFOR()
|
||||
{
|
||||
wait_file=$1
|
||||
nminutes=$2
|
||||
if [ -z "$wait_file" -o ! "$nminutes" -ge 0 ]
|
||||
then
|
||||
PRINT "errors in argument of WAITFOR(): wait_file($1) or nminutes($2)"
|
||||
WAIT_STATUS=-1
|
||||
return
|
||||
fi
|
||||
while [ ! -f $wait_file ]; do
|
||||
if [ $nminutes -gt 0 ]; then
|
||||
PRINT "Wait For $wait_file to appear"
|
||||
sleep 60 #sleep 1 minute
|
||||
else
|
||||
WAIT_STATUS=1
|
||||
return
|
||||
fi
|
||||
nminutes=`expr $nminutes - 1`
|
||||
done
|
||||
WAIT_STATUS=0
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
# Wait till a file disappears for at most number of minutes.
|
||||
# Useful to wait till a lock is removed by another process.
|
||||
# $1--the file
|
||||
# $2--number of minutes
|
||||
# WAIT_STATUS set to:
|
||||
# -1 if errors encountered
|
||||
# 0 if file disappears within time limit
|
||||
# 1 if file has not disappeared within time limit
|
||||
WAITTILL()
|
||||
{
|
||||
wait_file=$1
|
||||
nminutes=$2
|
||||
if [ -z "$wait_file" -o ! "$nminutes" -ge 0 ]
|
||||
then
|
||||
PRINT "errors in argument of WAITTILL(): wait_file($1) or nminutes($2)"
|
||||
WAIT_STATUS=-1
|
||||
return
|
||||
fi
|
||||
while [ -f $wait_file ]; do
|
||||
if [ $nminutes -gt 0 ]; then
|
||||
PRINT "Wait till $wait_file has disappeared"
|
||||
sleep 60 #sleep 1 minute
|
||||
else
|
||||
WAIT_STATUS=1
|
||||
return
|
||||
fi
|
||||
nminutes=`expr $nminutes - 1`
|
||||
done
|
||||
WAIT_STATUS=0
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
# Run one snapshot test
|
||||
# $*--Types of test being run
|
||||
RUNSNAPTEST()
|
||||
{
|
||||
SNAPCMD_OPT="$STANDARD_OPT" # snapshot test option
|
||||
SRCDIRNAME=${HOSTNAME}
|
||||
# restore CC, PATH in case they were changed in the last test.
|
||||
CC="$CC_SAVED"
|
||||
PATH=$PATH_SAVED
|
||||
export PATH # DEC OSF1 needs to export PATH explicitly
|
||||
TEST_TYPE=$*
|
||||
retcode=0
|
||||
skiptest=no
|
||||
date
|
||||
PRINT "*** starting $TEST_TYPE tests in $HOSTNAME ***"
|
||||
PRINT "Uname -a: `uname -a`"
|
||||
|
||||
# Parse the test type and set options accordingly.
|
||||
# See comments of SNAPTEST_CONFIG_PARSE().
|
||||
while [ $# -gt 0 ]; do
|
||||
case $1 in
|
||||
-n32) # want -n32 option
|
||||
SRCDIRNAME=${SRCDIRNAME}-n32
|
||||
CC="cc -n32"
|
||||
export CC
|
||||
;;
|
||||
-64) # want -64 option
|
||||
SRCDIRNAME=${SRCDIRNAME}-64
|
||||
CC="cc -64"
|
||||
export CC
|
||||
;;
|
||||
parallel) # want parallel test
|
||||
SNAPCMD_OPT="$SNAPCMD_OPT $ENABLE_PARALLEL"
|
||||
SRCDIRNAME=${SRCDIRNAME}-pp
|
||||
;;
|
||||
standard) # standard test
|
||||
;;
|
||||
--*)
|
||||
# option for configure
|
||||
SNAPCMD_OPT="$SNAPCMD_OPT $1"
|
||||
;;
|
||||
op-configure)
|
||||
# option for configure
|
||||
SNAPCMD_OPT="$SNAPCMD_OPT $1 $2"
|
||||
shift
|
||||
;;
|
||||
op-snapshot)
|
||||
# option for snapshot
|
||||
shift
|
||||
SNAPCMD_OPT="$SNAPCMD_OPT $1"
|
||||
;;
|
||||
setenv)
|
||||
# pass them along to snapshot set environment variable
|
||||
shift
|
||||
SNAPCMD_OPT="$SNAPCMD_OPT setenv $1 $2"
|
||||
shift
|
||||
;;
|
||||
setenvN)
|
||||
# set environment variable with $1 values
|
||||
# e.g., setenvN 3 x a b c is same as setenv x="a b c".
|
||||
# pass them along to snapshot set environment variable
|
||||
shift
|
||||
envN=$1
|
||||
shift
|
||||
envname=$1
|
||||
SNAPCMD_OPT="$SNAPCMD_OPT setenvN $envN $envname"
|
||||
envalue=
|
||||
while test $envN -gt 0; do
|
||||
shift
|
||||
envalue="$envalue $1"
|
||||
envN=`expr $envN - 1`
|
||||
done
|
||||
SNAPCMD_OPT="$SNAPCMD_OPT $envalue"
|
||||
;;
|
||||
skip)
|
||||
# skip this test
|
||||
skiptest=yes
|
||||
;;
|
||||
srcdirname)
|
||||
# Use this before using parallel and -n32 since this overrides
|
||||
# the others.
|
||||
shift
|
||||
SRCDIRNAME=$1
|
||||
;;
|
||||
deploy)
|
||||
# deploy the built binary.
|
||||
shift
|
||||
SNAPCMD_OPT="$SNAPCMD_OPT deploy $1"
|
||||
;;
|
||||
deploydir)
|
||||
# default directory for deployment.
|
||||
shift
|
||||
SNAPCMD_OPT="$SNAPCMD_OPT deploydir $1"
|
||||
;;
|
||||
*) # unknown test
|
||||
PRINT "$0: unknown type of test ($1)"
|
||||
retcode=1
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
if [ $retcode -ne 0 -o $skiptest = yes ]; then
|
||||
errcode=$retcode
|
||||
return $retcode
|
||||
fi
|
||||
|
||||
# Track down the zlib software
|
||||
ans=`$SNAPYARD/current/bin/locate_sw zlib`
|
||||
if [ $? = 0 ]; then
|
||||
Z_INC=`echo $ans | cut -f1 -d,`
|
||||
Z_LIB=`echo $ans | cut -f2 -d,`
|
||||
SNAPCMD_OPT="$SNAPCMD_OPT zlib $Z_INC,$Z_LIB"
|
||||
else
|
||||
# cannot locate zlib software.
|
||||
# continue the test, maybe configure can find it.
|
||||
:
|
||||
fi
|
||||
|
||||
if [ -n "${SRCDIRNAME}" ]; then
|
||||
SNAPCMD_OPT="$SNAPCMD_OPT srcdirname ${SRCDIRNAME}"
|
||||
fi
|
||||
|
||||
# Setup log file name to save test output
|
||||
THIS_MINUTE=`date +%H%M`
|
||||
LOGFILE=${LOGBASENAME}/${SRCDIRNAME}_${TODAY}_${THIS_MINUTE}
|
||||
PRINT "Running snapshot with output saved in"
|
||||
PRINT " $LOGFILE"
|
||||
(date; PRINT Hostname=$HOSTNAME) >> $LOGFILE
|
||||
|
||||
(
|
||||
cd $SNAPYARD/current
|
||||
$SNAPSHOT $SNAPCMD $SNAPCMD_OPT
|
||||
) >> $LOGFILE 2>&1
|
||||
retcode=$?
|
||||
[ $retcode -ne 0 ] && errcode=$retcode
|
||||
|
||||
date >> $LOGFILE
|
||||
if [ $retcode -ne 0 ]; then
|
||||
# Dump the first 10 lines and the last 30 lines of the LOGFILE.
|
||||
( ntail=30
|
||||
echo =========================
|
||||
echo "Dumping logfile of ${HOSTNAME}: $TEST_TYPE"
|
||||
echo "Last $ntail lines of $LOGFILE"
|
||||
echo =========================
|
||||
tail -$ntail $LOGFILE
|
||||
echo =========================
|
||||
echo Dumping done
|
||||
echo =========================
|
||||
echo ""
|
||||
) >> $FAILEDDETAIL
|
||||
fi
|
||||
}
|
||||
|
||||
TIMELIMIT_PARSE()
|
||||
{
|
||||
# Function returns timeparam for timekeeper via standard out -
|
||||
# any debug statements should be 'echo "Debug string" >&2' or timekeeper
|
||||
# will declare timeparam to be non-numeric and ignore it.
|
||||
while read x y ; do
|
||||
# Scan for entry for this weekday.
|
||||
xd=`echo $x | cut -f1 -d/`
|
||||
if [ "$xd" = ${WEEKDAY} ]; then
|
||||
# strip away the weekday/ part.
|
||||
timeparam=`echo $x | cut -f2 -d/`
|
||||
break
|
||||
fi
|
||||
case "$x" in
|
||||
'' | '#'*)
|
||||
# blank or comment lines. Continue.
|
||||
;;
|
||||
???/*)
|
||||
# Ignore any entry not of this weekday.
|
||||
;;
|
||||
*)
|
||||
timeparam="$x"
|
||||
;;
|
||||
esac
|
||||
done
|
||||
echo $timeparam
|
||||
return
|
||||
}
|
||||
|
||||
# configuration parsing.
|
||||
# Taking configuration from input.
|
||||
# This should be invoke with configure file as stdin.
|
||||
# Syntax of the configure file:
|
||||
# All lines started with the # are comment lines and are ignored.
|
||||
# Blank lines are ignored too.
|
||||
# Each config line starts with a "Scope" followed by test types.
|
||||
#
|
||||
# Scope can be:
|
||||
# standard ... # what the standard test types are.
|
||||
# <host>: <test> Do <test> for <host>
|
||||
# all: <test> Do <test> for all hosts.
|
||||
# <weekday>/... Use this scope if the <weekday> matches.
|
||||
# <weekday> can be {Mon,Tue,Wed,Thu,Fri,Sat,Sun}
|
||||
# If no <host>: input for a <host>, the standard test is used.
|
||||
#
|
||||
# Test types:
|
||||
# standard tests defined in standard scope.
|
||||
# -n32 -n32 mode. Apply to 64/32 bit OS such as IRIX64.
|
||||
# parallel parallel mode.
|
||||
# op-configure <option> configure option
|
||||
# op-snapshot <option> snapshot option
|
||||
# --* configure option
|
||||
# setenv <name> <value> set environment variable <name> to <value>
|
||||
# Pass along to snapshot
|
||||
# setenvN <N> <name> <value> ...
|
||||
# set environment variable with <N> values
|
||||
# e.g., setenvN 3 x a b c is same as setenv x="a b c".
|
||||
# Pass along to snapshot.
|
||||
# skip skip this test
|
||||
# srcdirname <name> use <name> as the build-directory.
|
||||
# deploy <name> deploy the built binary at directory <name>.
|
||||
# deploydir <name> use <name> as the default directory for deployment.
|
||||
SNAPTEST_CONFIG_PARSE()
|
||||
{
|
||||
while read x y ; do
|
||||
# Scan for entry for this weekday.
|
||||
xd=`echo $x | cut -f1 -d/`
|
||||
if [ "$xd" = ${WEEKDAY} ]; then
|
||||
# strip away the weekday/ part.
|
||||
x=`echo $x | cut -f2 -d/`
|
||||
fi
|
||||
case "$x" in
|
||||
'' | '#'*)
|
||||
# blank or comment lines. Continue.
|
||||
;;
|
||||
???/*)
|
||||
# Ignore any entry not of this weekday.
|
||||
;;
|
||||
standard)
|
||||
#standard configuration
|
||||
STANDARD_OPT="$y"
|
||||
;;
|
||||
all: | ${CONFIGNAME}:)
|
||||
# types of test for all hosts or this host
|
||||
if [ -n "$TEST_TYPES" ]; then
|
||||
TEST_TYPES="$TEST_TYPES ; $y"
|
||||
else
|
||||
TEST_TYPES="$y"
|
||||
fi
|
||||
;;
|
||||
*:) # ignore types of test for other hosts
|
||||
;;
|
||||
*) # unknown configuration option
|
||||
PRINT $x $y
|
||||
PRINT "***Unknown configuration option. Ignored.***"
|
||||
;;
|
||||
esac
|
||||
done
|
||||
}
|
||||
|
||||
# Snap Test configuration parsing.
|
||||
# If TEST_TYPES is not set, set it to do the "standard" test.
|
||||
SNAPTEST_CONFIG()
|
||||
{
|
||||
TEST_TYPES=
|
||||
STANDARD_OPT=
|
||||
if [ -f $SNAPTESTCFG ]; then
|
||||
SNAPTEST_CONFIG_PARSE < $SNAPTESTCFG
|
||||
fi
|
||||
TEST_TYPES=${TEST_TYPES:-'standard'}
|
||||
}
|
||||
|
||||
|
||||
# Show usage page
|
||||
USAGE()
|
||||
{
|
||||
cat <<EOF
|
||||
Usage: runtest [-h] [-debug] [-r<version>] [-all] [-nocvs] [-nodiff] [<host> ...]
|
||||
-h
|
||||
print this help page
|
||||
-debug
|
||||
turn on debug mode
|
||||
-r<version>
|
||||
do runtest for <version>
|
||||
-all
|
||||
launch tests for all pre-defined testing hosts
|
||||
-nocvs
|
||||
do not do cvs commands
|
||||
-nodiff
|
||||
do not do diff commands
|
||||
-setup
|
||||
setup the directory structure for snapshot test
|
||||
-configname <name>
|
||||
use <name> as hostname in the parsing of the snaptest configure file
|
||||
<host>
|
||||
launch tests for <host>
|
||||
|
||||
-all and <host> are contradictory and whichever is specified last, is
|
||||
the one to take effect. If neither are given, do the test for the
|
||||
local host.
|
||||
EOF
|
||||
}
|
||||
|
||||
|
||||
# Verify if directory ($1) exists. If not, create it.
|
||||
CHECK_DIR()
|
||||
{
|
||||
dir=$1
|
||||
if test ! -e $1; then
|
||||
echo mkdir $1
|
||||
mkdir $1
|
||||
errcode=$?
|
||||
elif test ! -d $1; then
|
||||
echo $1 is not a directory
|
||||
errcode=1
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
#################################
|
||||
# Main
|
||||
#################################
|
||||
#################################
|
||||
# Set up global variables
|
||||
#################################
|
||||
retcode=0 # error code of individula task
|
||||
errcode=0 # error code of the whole test
|
||||
skiptest=no # if test is skipped
|
||||
CC_SAVED="$CC" # CC & PATH maybe changed within a test.
|
||||
PATH_SAVED=$PATH # These save the original values.
|
||||
timelimit=300 # default time limit (minutes) for the timekeeper
|
||||
|
||||
#################################
|
||||
# Parse options
|
||||
#################################
|
||||
while [ $# -gt 0 ]; do
|
||||
case "$1" in
|
||||
-h) # help--show usage
|
||||
USAGE
|
||||
exit 0
|
||||
;;
|
||||
-debug*)
|
||||
# set debug mode
|
||||
DEBUGMODE="$1"
|
||||
SNAPSHOT="echo bin/snapshot"
|
||||
PROGNAME="$PROGNAME $DEBUGMODE"
|
||||
PRINT "******** DEBUGMODE is $DEBUGMODE ************"
|
||||
;;
|
||||
-r*)
|
||||
# version string
|
||||
H5VER="$1"
|
||||
;;
|
||||
-all)
|
||||
# Test all hosts.
|
||||
TESTHOST=-all
|
||||
;;
|
||||
-nocvs)
|
||||
# do not do cvs commands.
|
||||
NOCVS=nocvs
|
||||
;;
|
||||
-nodiff)
|
||||
# do not do diff commands.
|
||||
NODIFF=nodiff
|
||||
;;
|
||||
-configname)
|
||||
# use <name> as hostname in the parsing of the snaptest configure file.
|
||||
shift
|
||||
CONFIGNAME=$1
|
||||
;;
|
||||
-setup)
|
||||
# setup the directory structure for snapshot test.
|
||||
CMD=setup
|
||||
;;
|
||||
-*) # Unknown option
|
||||
PRINT "Unknown option ($1)"
|
||||
USAGE
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
TESTHOST=$*
|
||||
break
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
# setup H5VER if not set yet
|
||||
if [ -z "$H5VER" -a -f bin/snapshot_version ]
|
||||
then
|
||||
. bin/snapshot_version
|
||||
fi
|
||||
|
||||
if [ -n "$H5VER" ]
|
||||
then
|
||||
H5VERSION=hdf5_`echo $H5VER | sed -e s/-r// -e s/\\\./_/g`
|
||||
PROGNAME="$PROGNAME $H5VER"
|
||||
else
|
||||
H5VERSION=hdf5
|
||||
fi
|
||||
|
||||
#################################
|
||||
# Setup snapshot test directories
|
||||
#################################
|
||||
BASEDIR=${HOME}/snapshots-${H5VERSION}
|
||||
# initial processing of setup option if requested
|
||||
if test x-$CMD = x-setup; then
|
||||
CHECK_DIR $BASEDIR
|
||||
test $errcode -ne 0 && exit 1
|
||||
elif [ ! -d ${BASEDIR} ]; then
|
||||
echo "BASEDIR ($BASEDIR) does not exist"
|
||||
exit 1
|
||||
fi
|
||||
# Show the real physical path rather than the symbolic path
|
||||
SNAPYARD=`cd $BASEDIR && /bin/pwd`
|
||||
# Log file basename
|
||||
LOGDIR=${SNAPYARD}/log
|
||||
LOGBASENAME=${LOGDIR}
|
||||
PASSEDLOG=${LOGDIR}/PASSED_LOG_${TODAY}
|
||||
FAILEDLOG=${LOGDIR}/FAILED_LOG_${TODAY}
|
||||
FAILEDDETAIL=${LOGDIR}/FAILED_DETAIL_${TODAY}
|
||||
SKIPPEDLOG=${LOGDIR}/SKIPPED_LOG_${TODAY}
|
||||
TIMELOG=${LOGDIR}/TIME_LOG_${TODAY}
|
||||
TIMEKEEPERLOG=${LOGDIR}/TIMEKEEPER_LOG_${TODAY}
|
||||
CVSLOG=${LOGDIR}/CVS_LOG_${TODAY}
|
||||
CVSLOG_LOCK=${LOGDIR}/CVS_LOG_LOCK_${TODAY}
|
||||
DIFFLOG=${LOGDIR}/DIFF_LOG_${TODAY}
|
||||
COPYRIGHT_ERR=${LOGDIR}/COPYRIGHT_ERR_${TODAY}
|
||||
# Snap Test hosts and Configuration files
|
||||
ALLHOSTSFILE=${SNAPYARD}/allhostfile
|
||||
SNAPTESTCFG=${SNAPYARD}/snaptest.cfg
|
||||
TIMELIMIT=${SNAPYARD}/timelimit
|
||||
TMPFILE="${LOGDIR}/#runtest.${TODAY}.$$"
|
||||
|
||||
# more processing of setup option if requested
|
||||
if test x-$CMD = x-setup; then
|
||||
CHECK_DIR $LOGDIR
|
||||
test $errcode -ne 0 && exit 1
|
||||
CHECK_DIR $LOGDIR/OLD
|
||||
test $errcode -ne 0 && exit 1
|
||||
CHECK_DIR $SNAPYARD/TestDir
|
||||
test $errcode -ne 0 && exit 1
|
||||
# create empty test hosts or configure files if non-existing
|
||||
for f in $ALLHOSTSFILE $SNAPTESTCFG; do
|
||||
if test ! -f $f; then
|
||||
echo Creating $f
|
||||
touch $f
|
||||
fi
|
||||
done
|
||||
# create or update the current source.
|
||||
echo update current source
|
||||
$SNAPSHOT checkout
|
||||
# setup completed. Exit.
|
||||
exit 0
|
||||
fi
|
||||
|
||||
#################################
|
||||
# Show some host status numbers
|
||||
#################################
|
||||
# df sometimes hangs due to file system problems. Invoke it as background
|
||||
# process and give it 10 seconds to finish. If it hangs, just continue.
|
||||
uptime
|
||||
df &
|
||||
sleep 10
|
||||
|
||||
#################################
|
||||
# Setup test host(s)
|
||||
#################################
|
||||
if [ "$TESTHOST" = -all ]; then
|
||||
if [ -f $ALLHOSTSFILE ]; then
|
||||
TESTHOST=`sed -e '/^#/d;/^ *$/d' $ALLHOSTSFILE`
|
||||
else
|
||||
PRINT "could not access the all-hosts-file ($ALLHOSTSFILE)"
|
||||
USAGE
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
#################################
|
||||
# Setup to print a trailer summary when exiting not via
|
||||
# the normal end of the script.
|
||||
#################################
|
||||
trap PRINT_TRAILER 0
|
||||
|
||||
#
|
||||
TotalStartTime=`SecOfDay`
|
||||
|
||||
# Process the configuration
|
||||
SNAPTEST_CONFIG
|
||||
PRINT STANDARD_OPT=$STANDARD_OPT
|
||||
PRINT TEST_TYPES=$TEST_TYPES
|
||||
PRINT_BLANK
|
||||
|
||||
# Do a checkout if one has not been done today.
|
||||
# Then check MANIFEST file and copyrights noitces.
|
||||
if [ -z "$NOCVS" ]; then
|
||||
PRINT "Running CVS checkout with output saved in"
|
||||
PRINT " $CVSLOG"
|
||||
# Set CVS lock first
|
||||
touch $CVSLOG_LOCK
|
||||
($SNAPSHOT checkout ) >> $CVSLOG 2>&1
|
||||
# Save error code and remove the lock
|
||||
errcode=$?
|
||||
rm -f $CVSLOG_LOCK
|
||||
if [ $errcode -ne 0 ]; then
|
||||
# test failed.
|
||||
REPORT_ERR "****FAILED ${HOSTNAME}: CVS checkout****"
|
||||
exit $errcode
|
||||
fi
|
||||
# ===================
|
||||
# Check MANIFEST file
|
||||
# ===================
|
||||
PRINT Checking MAINFEST file ...
|
||||
(cd $SNAPYARD/current; bin/chkmanifest) > $TMPFILE 2>&1
|
||||
errcode=$?
|
||||
if [ $errcode -eq 0 ]; then
|
||||
# test passed.
|
||||
cat $TMPFILE
|
||||
else
|
||||
# test failed.
|
||||
REPORT_ERR "****FAILED ${HOSTNAME}: MANIFEST check****"
|
||||
( echo =========================
|
||||
echo "MANIFEST checking failed output"
|
||||
echo =========================
|
||||
cat $TMPFILE
|
||||
echo =========================
|
||||
echo "MANIFEST checking failed output done"
|
||||
echo =========================
|
||||
echo ""
|
||||
) >> $FAILEDDETAIL
|
||||
fi
|
||||
rm $TMPFILE
|
||||
PRINT_BLANK
|
||||
# No copyright checking until what need copyright is decided. 2006/4/7.
|
||||
if false; then
|
||||
# ======================
|
||||
# Check Copyright notice
|
||||
# ======================
|
||||
PRINT Checking Copyrights notices ...
|
||||
if (cd $SNAPYARD/current; bin/chkcopyright) > $TMPFILE 2>&1 ; then
|
||||
echo Passed.
|
||||
else
|
||||
# Save the output and report some of it.
|
||||
# Do not report it as failed for runtest yet.
|
||||
# Send a separate report mail via hardcoding.
|
||||
# Need fixes/cleanup later.
|
||||
echo "Failed. See detail in another report mail"
|
||||
cp $TMPFILE $COPYRIGHT_ERR
|
||||
nheadlines=300
|
||||
ntaillines=5 # Number of lines in report summary.
|
||||
(
|
||||
echo =========================
|
||||
echo "Copyright checking failed. Showing first $nheadlines lines of output."
|
||||
echo "Complete output is in file $COPYRIGHT_ERR"
|
||||
echo =========================
|
||||
nreportlines=`wc -l < $COPYRIGHT_ERR`
|
||||
if [ $nreportlines -le `expr $nheadlines + $ntaillines` ]; then
|
||||
# Just print the whole file.
|
||||
cat $COPYRIGHT_ERR
|
||||
else
|
||||
# Show the first $nheadlines plus report summary
|
||||
head -$nheadlines $COPYRIGHT_ERR
|
||||
echo ...
|
||||
tail -$ntaillines $COPYRIGHT_ERR
|
||||
fi
|
||||
) | Mail -s "${H5VERSION} Copyrights check Failed" hdf5lib
|
||||
fi
|
||||
rm $TMPFILE
|
||||
PRINT_BLANK
|
||||
fi
|
||||
else
|
||||
# make sure the cvs update, if done by another host, has completed.
|
||||
# First wait for the presence of $CVSLOG which signals some host
|
||||
# has started the cvs update. Then wait for the absence of $CVSLOG_LOCK
|
||||
# which signals the host has completed the cvs update.
|
||||
WAITFOR $CVSLOG 90
|
||||
if [ $WAIT_STATUS -ne 0 ]; then
|
||||
errcode=$WAIT_STATUS
|
||||
REPORT_ERR "****FAILED ${HOSTNAME}: Time expired waiting CVS update to start****"
|
||||
exit $errcode
|
||||
fi
|
||||
WAITTILL $CVSLOG_LOCK 10
|
||||
if [ $WAIT_STATUS -ne 0 ]; then
|
||||
errcode=$WAIT_STATUS
|
||||
REPORT_ERR "****FAILED ${HOSTNAME}: Time expired waiting CVS update to finish****"
|
||||
exit $errcode
|
||||
fi
|
||||
fi
|
||||
|
||||
# run a snapshot diff to see if any significant differences between
|
||||
# the current and previous versions
|
||||
if [ -z "$NODIFF" ]; then
|
||||
$SNAPSHOT diff >> $DIFFLOG 2>&1
|
||||
errcode=$?
|
||||
# check the errcode only if NOT in DEBUG MODE
|
||||
if [ -z "$DEBUGMODE" -a $errcode -eq 0 ]; then
|
||||
# no need to run test
|
||||
PRINT "NO TEST: no significant differences between current and previous versions" |
|
||||
tee -a $PASSEDLOG
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
|
||||
# we can use the version of script in SNAPYARD/current now.
|
||||
# Don't do the diff or cvs update any more.
|
||||
PROGNAME="$SNAPYARD/current/$PROGNAME -nodiff -nocvs"
|
||||
|
||||
# Decide to do test for the local host or for remote hosts
|
||||
if [ -n "$TESTHOST" -a $HOSTNAME != "$TESTHOST" ]; then
|
||||
date
|
||||
PRINT "*** launching tests from $HOSTNAME ***"
|
||||
PRINT_BLANK
|
||||
TEST_TYPE="launching"
|
||||
cd ${SNAPYARD}/log
|
||||
# Fork off timekeeper if concurrent tests will be used.
|
||||
if [ -n "$SRCDIR" ]; then
|
||||
timelimit=`TIMELIMIT_PARSE < $TIMELIMIT`
|
||||
($SNAPYARD/current/bin/timekeeper $timelimit > $TIMEKEEPERLOG 2>&1 &)
|
||||
PRINT " Fork off timekeeper $timelimit"
|
||||
fi
|
||||
runtest_type="hosts"
|
||||
for h in $TESTHOST; do
|
||||
# Must do CONFIGNAME before $h got changed by the second cut.
|
||||
# cut returns the whole string if there is no / in the string
|
||||
# at all. But that works okay for the CONFIGNAME too.
|
||||
CONFIGNAME=`echo $h | cut -f2 -d/`
|
||||
h=`echo $h | cut -f1 -d/`
|
||||
n_test=`expr $n_test + 1`
|
||||
TMP_OUTPUT="#${h}_${CONFIGNAME}.out"
|
||||
(PRINT "=============="
|
||||
PRINT "Testing $h"
|
||||
PRINT "==============") > $TMP_OUTPUT
|
||||
CHECK_RSH $h
|
||||
# run the remote shell command with output to $TMP_OUTPUT
|
||||
case "$RSH" in
|
||||
rsh|ssh)
|
||||
CMD="$RSH $h -n $PROGNAME -configname $CONFIGNAME"
|
||||
PRINT $CMD
|
||||
|
||||
# launch concurrent tests only if srcdir is used
|
||||
if [ -n "$SRCDIR" ]; then
|
||||
$CMD || REPORT_ERR "****FAILED ${h}: Abnormal exit from runtest****" && PRINT_BLANK &
|
||||
echo $! > PID.${h}_${CONFIGNAME}
|
||||
else
|
||||
$CMD || REPORT_ERR "****FAILED ${h}: Abnormal exit from runtest****" && PRINT_BLANK
|
||||
fi
|
||||
;;
|
||||
NoRemoteCommand)
|
||||
PRINT $h does not accept Remote Command "(`date`)"
|
||||
;;
|
||||
NotReachable)
|
||||
PRINT $h is not reachable "(`date`)"
|
||||
;;
|
||||
*)
|
||||
PRINT "CHECK_RSH for $h returned unknown result ($RSH)"
|
||||
;;
|
||||
esac >> $TMP_OUTPUT 2>&1
|
||||
done
|
||||
# wait for all launched tests to finish, then cat them back out.
|
||||
wait
|
||||
# Pause a moment in case the timekeeper is terminating processes.
|
||||
wait 30
|
||||
for h in $TESTHOST; do
|
||||
CONFIGNAME=`echo $h | cut -f2 -d/`
|
||||
h=`echo $h | cut -f1 -d/`
|
||||
TMP_OUTPUT="#${h}_${CONFIGNAME}.out"
|
||||
cat $TMP_OUTPUT
|
||||
# Verify test script did complete by checking the last lines
|
||||
(tail -5 $TMP_OUTPUT | grep -s 'Grand total' > /dev/null 2>&1) ||
|
||||
(REPORT_ERR "****FAILED ${h}: snaptest did not complete****" &&
|
||||
PRINT_BLANK)
|
||||
rm -f $TMP_OUTPUT PID.${h}_${CONFIGNAME}
|
||||
done
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# run the test(s)
|
||||
# Note that first field is cut without -s but all subsequent cut
|
||||
# must use -s. If -s is not used at all, a $TEST_TYPES that has
|
||||
# no ';' (only 1 test), will pass through intact in all cut. That
|
||||
# results in infinite looping.
|
||||
# If -s is used with the first field, it will suppress completely
|
||||
# a $TYPE_TYPES that has no ';' (only 1 tst ). That results in no
|
||||
# test at all.
|
||||
# Note that n_test must start as 1.
|
||||
#
|
||||
n_test=1
|
||||
runtest_type="tests"
|
||||
TEST="`echo $TEST_TYPES | cut -f$n_test -d';'`"
|
||||
while [ -n "$TEST" ]; do
|
||||
StartTime=`SecOfDay`
|
||||
RUNSNAPTEST $TEST
|
||||
REPORT_RESULT
|
||||
PRINT_TEST_TRAILER
|
||||
|
||||
n_test=`expr $n_test + 1`
|
||||
TEST="`echo $TEST_TYPES | cut -f$n_test -s -d';'`"
|
||||
done
|
||||
# dec n_test to show the actual number of tests ran.
|
||||
n_test=`expr $n_test - 1`
|
||||
|
||||
PRINT_TRAILER
|
||||
|
||||
# disable trailer summary printing since all trailers have been
|
||||
# printed and we are exiting normally.
|
||||
trap 0
|
||||
exit $errcode
|
||||
837
bin/snapshot
837
bin/snapshot
@@ -1,837 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
|
||||
# This script should be run nightly from cron. It checks out the source
|
||||
# from the source repository and compares it against the previous
|
||||
# snapshot. If anything significant changed then a new snapshot is
|
||||
# created, the minor version number is incremented, and the change is
|
||||
# checked back into the source repository.
|
||||
#
|
||||
|
||||
|
||||
# function definitions
|
||||
# Print a banner line of the form "===== <label>: <current date> =====".
TIMESTAMP()
{
    printf "===== %s: %s =====\n" "$1" "`date`"
}
|
||||
|
||||
# Installed via trap: stamps the script name and exit status on the way out.
EXIT_BANNER()
{
    TIMESTAMP "Exit $PROGNAME with status=$?"
}
|
||||
|
||||
# Show current total disk usage.
|
||||
# Show the current total disk usage of the working directory tree in KB.
DISKUSAGE()
{
    du -ks | ( read kbytes rest; echo "Disk Usage=$kbytes KB" )
}
|
||||
|
||||
# function provided for testing software downloaded as tar files. A version of
|
||||
# this function that properly extracts the downloaded files can be provided in
|
||||
# the snapshots-${sw}-overrides file.
|
||||
# Placeholder for software downloaded as tar files.  Sources checked out
# from the repository need no extraction; a real implementation can be
# supplied in the snapshots-${sw}-overrides file.
EXTRACT()
{
    echo "Error: ${SWVERSION} is in source repository - does not need extraction."
}
|
||||
|
||||
# Standard procedure for checking out or updating source code from an hdfgroup
|
||||
# git repository. Override the function for other repositories or procedures.
|
||||
# Check out (freshly clone) the source from an hdfgroup git repository.
# $AUTOGEN selects the clone target (current_src vs. current); $GIT_BRANCH,
# when set, selects the branch.  Override for other repositories/procedures.
SOURCE_CHECKOUT()
{
    # The variables must be quoted: an unquoted "test -n $VAR" degenerates
    # to "test -n" (always true) when $VAR is empty, inverting the logic.
    if test -n "$GIT_URL"; then
        if [ -n "$AUTOGEN" ]; then
            echo "Creating fresh clone of $GIT_URL in $BASEDIR/current_src"
            # Check out the current version from source repository.
            (cd $BASEDIR; rm -rf current_src
             if test -z "$GIT_BRANCH"; then
                 echo "Testing empty branch $GIT_BRANCH."
                 git clone $GIT_URL current_src
             else
                 echo "Testing branch $GIT_BRANCH."
                 git clone $GIT_URL -b $GIT_BRANCH current_src
             fi
            ) || exit 1
        else
            echo "Creating fresh clone of $GIT_URL in $BASEDIR/current"
            # Check out the current version from source repository.
            (cd $BASEDIR; rm -rf current
             if test -n "$GIT_BRANCH"; then
                 git clone $GIT_URL -b $GIT_BRANCH current
             else
                 git clone $GIT_URL current
             fi ) || exit 1
        fi
    else
        # The inner quotes were unescaped in the original, a POSIX-sh syntax
        # error; escape them so the warning prints as intended.
        echo "Warning! Source directory (\"current\") is not checked out from git."
    fi
}
|
||||
|
||||
# Standard procedure for running the configure command in a build (test)
|
||||
# directory
|
||||
# Run the configure command for a build (test) directory.
# With a separate build directory and CPSRC=yes the sources are first
# copied into ${TESTDIR} and configure runs there; otherwise configure is
# invoked from the checked-out tree (current_src when AUTOGEN is in
# effect, current otherwise).
RUNCONFIGURE()
{
    if [ "${CURRENT}" != "${TESTDIR}" -a "$CPSRC" = "yes" ]; then
        echo "Copying source files to ${TESTDIR}."
        cp -pr ${CURRENT}/* ${TESTDIR}
        cd ${TESTDIR}
        ./${CONFIGURE}
    elif [ -n "${AUTOGEN}" ]; then
        ${CURRENTSRC}/${CONFIGURE}
    else
        ${CURRENT}/${CONFIGURE}
    fi
}
|
||||
|
||||
# Sometimes "make distclean" doesn't adequately remove files from the previous
|
||||
# build. If a build (test) directory was used, its contents can be entirely
|
||||
# deleted to provide a clean start. If the test is building in the source
|
||||
# directory, the contents can't be deleted, so run "make distclean".
|
||||
# Guarantee a clean start for the next build.
# "make distclean" does not always remove every file from a previous build.
# A dedicated build (test) directory can simply be emptied; an in-source
# build cannot, so fall back to "make distclean" there.
DISTCLEAN()
{
    if [ "${srcdir}" = "yes" -a -n "${SRCDIRNAME}" -a -d ${BASEDIR}/TestDir/${SRCDIRNAME} ]; then
        # POSIX echo does not interpret "\n"; the original printed a literal
        # backslash-n, so the escape is dropped.
        echo "Remove contents of $SRCDIRNAME."
        rm -rf ${BASEDIR}/TestDir/${SRCDIRNAME}/*
    else
        echo "$MAKE distclean"
        (cd ${TESTDIR} && ${MAKE} distclean)
    fi
}
|
||||
|
||||
# Several of the software packages tested do not support make check-install.
|
||||
# Those that support it should have a version of this function in their
|
||||
# override with the following lines:
|
||||
# TIMESTAMP "check-install $1"
|
||||
# ${MAKE} check-install $1
|
||||
# Most packages tested do not support "make check-install".  Packages that
# do should override this function with:
#     TIMESTAMP "check-install $1"
#     ${MAKE} check-install $1
CHECKINSTALL()
{
    echo "check-install is not supported for ${SWVERSION}"
}
|
||||
|
||||
# Function for hdf4 and hdf5 to override to check in changes after snapshot.
|
||||
# Safety measure to avoid unintended checkins to other repositories.
|
||||
# Stub for the hdf4/hdf5 overrides that commit post-snapshot changes back
# to git.  Kept inert here as a safety measure against unintended checkins
# from other repositories.
COMMITSNAPSHOT()
{
    echo "original hdf5 script committed code changes back into git."
}
|
||||
|
||||
# Print the usage text and exit with the current $errcode.
# Fixes to the text: "[ftp <URL>" was missing its closing bracket, and the
# "exttest" entry used ';' where every other entry uses ':'.
DISPLAYUSAGE()
{
    set -
    cat <<EOF
Usage: $PROGNAME [all] [checkout] [ftp <URL>] [diff] [test] [srcdir] [release] [help]
        [clean] [distclean] [echo] [deploy <dir>] [deploydir <dir>]
        [zlib <zlib_path>] [releasedir <dir>] [srcdirname <dir>] [check-vfd]
        [check-passthrough-vol]
        [exec <command>] [module-load <module-list>] [op-configure <option>]
        [--<option>]
    all:      Run all commands (checkout, test & release)
              [Default is all]
    checkout: Run source checkout
    diff:     Run diff on current and previous versions.  Exit 0 if
              no significant differences are found.  Otherwise, non-zero.
    deploy:   deploy binary to directory <dir>
    deploydir: use <dir> as the default directory for deployment
    test:     Run test
    release:  Run release
    clean:    Run make clean
    distclean:Run make distclean
    echo:     Turn on echo mode (set -x)
    setenv <name> <value>:
              Set environment variable <name> to <value>.
    setenvN <N> <name> <value> ...:
              Set environment variable with <N> values.
              E.g., setenvN 3 x a b c is same as setenv x="a b c".
    srcdir:   Use srcdir option (does not imply other commands)
              "snapshot srcdir" is equivalent to "snapshot srcdir all"
              "snapshot srcdir checkout" is equivalent to "snapshot checkout"
    srcdirname <dir>:
              Use <dir> as the srcdir testing directory if srcdir is chosen.
              If <dir> starts with '-', it is append to the default name
              E.g., "snapshot srcdir srcdirname -xx" uses hostname-xx
              [Default is hostname]
    help:     Print this message
    echo:     Turn on shell echo
    zlib <zlib_path>:
              Use <zlib_path> as the ZLIB locations
              [Default is $ZLIB_default]
    releasedir <dir>:
              Use <dir> as the release directory
              [Default is $ReleaseDir_default]
    check-vfd:
              Run make check-vfd instead of just make check.
    check-passthrough-vol:
              Run make check-passthrough-vol instead of just make check.
              NOTE: Will only succeed with passthrough VOL connectors
                    that use the native VOL connector as the terminal
                    connector.
    exttest <testscript>:
              Run testscript;
    exec <command>:
              Run <command>;
    module-load <module-list>:
              Load modules in comma-separated <module-list>;
    op-configure <option>:
              Pass <option> to the configure command
              E.g., "snapshot op-configure --enable-parallel"
                  configures for parallel mode
    --<option>:
              Pass --<option> to the configure command
              E.g., "snapshot --enable-parallel"
                  configures for parallel mode
EOF
    exit $errcode
}
|
||||
|
||||
# MAIN
# SGI /bin/sh replaces $0 as function name if used in a function.
# Set the name here to avoid that ambiguity and better style too.
PROGNAME=$0
SNAPSHOTNAME=
HDFREPOS=
DOCVERSION=""
MODULELIST=""

# Pull in optional per-site/per-repository configuration when present.
if [ -f bin/snapshot_params ]; then
    . bin/snapshot_params
    echo "Added snapshot_params."
fi
if [ -z "$SWVER" -a -f bin/snapshot_version ]; then
    . bin/snapshot_version
    echo "Added snapshot_version."
fi
# ${HDFREPOS} must be quoted: unquoted, "[ -n ... ]" is always true when
# the variable is empty, defeating the emptiness check.
if [ -n "${HDFREPOS}" -a -f bin/snapshot-${HDFREPOS}-overrides ]; then
    . bin/snapshot-${HDFREPOS}-overrides
    echo "Added snapshot-${HDFREPOS}-overrides."
fi
|
||||
|
||||
echo "====================================="
echo "$PROGNAME $*"
echo "====================================="
TIMESTAMP MAIN
uname -a

# Print the exit banner on normal exit and on common signals.
# (NOTE(review): signal 9/KILL cannot actually be trapped; listed historically.)
trap EXIT_BANNER 0 1 2 9 15

# Dump environment variables before option parsing
echo ===Dumping environment variables before option parsing ===
printenv | sort
echo ===Done Dumping environment variables before option parsing ===

# Snapshots release directory.  Default is relative to $BASEDIR.
ReleaseDir_default=release_dir

# Where is the zlib library?
# At NCSA, half of the machines have it in /usr/lib, the other half at
# /usr/ncsa/lib.  Leave it unset.
ZLIB_default=
ZLIB=$ZLIB_default

# Compression methods to use (md5 does checksum).  "doc" was apparently
# added as a method to create a separate tarfile containing the
# documentation files for v1.8 and above.
if [ "${SWVERSION}" = "hdf5_1_6" ]; then
    METHODS="gzip bzip2 md5"
else
    METHODS="gzip bzip2 doc"
fi

# Use the user's $MAKE if set, else generic make.
MAKE=${MAKE:-make}

# Default check action.
CHECKVAL=check

# Command-option state.
cmd="all"
test_opt=""
errcode=0
AUTOGEN=""
EXTTEST=""
EXEC_CMD_ARG=""
|
||||
# Parse command-line options.  Any argument error sets errcode, routes to
# the help text, and stops parsing.
while [ $# -gt 0 ] ; do
    case "$1" in
        all)
            cmd="all"
            ;;
        checkout-autogen)
            cmdcheckout="checkout"; AUTOGEN="autogen"; cmd=""
            ;;
        checkout)
            cmdcheckout="checkout"; cmd=""
            ;;
        ftp)
            echo "Setting ftp flags in snapshot script"
            cmdcheckout="checkout"; cmdftp="ftp"; cmd=""
            shift
            if [ $# -lt 1 ]; then
                echo "URL missing"; errcode=1; cmd="help"; break
            fi
            ftp_url="$1"
            echo "ftp_url is $ftp_url"
            ;;
        diff)
            cmddiff="diff"; cmd=""
            ;;
        deploy)
            # Deploy the built binary to the named directory.
            shift
            if [ $# -lt 1 ]; then
                echo "deploy <dir> missing"; errcode=1; cmd="help"; break
            fi
            cmddeploy="deploy"
            DEPLOYDIRNAME="$1"
            ;;
        deploydir)
            # Default directory for deployment.
            shift
            if [ $# -lt 1 ]; then
                echo "deploydir <dir> missing"; errcode=1; cmd="help"; break
            fi
            deploydir="$1"
            ;;
        test)
            cmdtest="test"; cmd=""
            ;;
        setenv)
            # Set environment variable: setenv <name> <value>.
            shift
            eval $1="$2"
            export $1
            shift
            ;;
        setenvN)
            # Set an environment variable with <N> values:
            # setenvN 3 x a b c is the same as setenv x="a b c".
            # The extra single quotes are needed or eval complains.
            shift
            envN=$1
            shift
            envname=$1
            envalue=
            while test $envN -gt 0; do
                shift
                envalue="$envalue $1"
                envN=`expr $envN - 1`
            done
            eval $envname="'$envalue'"
            export $envname
            ;;
        srcdir)
            # Use the srcdir option for test.
            srcdir="yes"
            ;;
        srcdirname)
            shift
            if [ $# -lt 1 ]; then
                echo "srcdirname <dir> missing"; errcode=1; cmd="help"; break
            fi
            SRCDIRNAME="$1"
            ;;
        release)
            cmdrel="release"; cmd=""
            ;;
        autogen-release)
            cmdrel="autogen-release"; cmd=""
            ;;
        clean | distclean)
            cmdclean="$1"; cmd=""
            ;;
        help)
            cmd="help"; break
            ;;
        echo)
            set -x; break
            ;;
        zlib)
            shift
            if [ $# -lt 1 ]; then
                echo "ZLIB information missing"; errcode=1; cmd="help"; break
            fi
            ZLIB="$1"
            ;;
        releasedir)
            shift
            if [ $# -lt 1 ]; then
                echo "Release directory name missing"; errcode=1; cmd="help"; break
            fi
            ReleaseDir="$1"
            ;;
        exttest)
            shift
            if [ $# -lt 1 ]; then
                echo "exttest script name missing"; errcode=1; cmd="help"; break
            fi
            cmd=""
            EXTTEST="$1"
            ;;
        exec)
            shift
            if [ $# -lt 1 ]; then
                echo "exec command name missing"; errcode=1; cmd="help"; break
            fi
            cmd=""
            EXEC_CMD_ARG="$@"
            # All remaining arguments belong to the command; stop parsing.
            break
            ;;
        check-vfd)
            CHECKVAL=check-vfd
            ;;
        check-passthrough-vol)
            CHECKVAL=check-passthrough-vol
            ;;
        module-load)
            shift
            if [ $# -lt 1 ]; then
                echo "missing module list to load"; errcode=1; cmd="help"; break
            fi
            MODULELIST="$1"
            ;;
        --*)
            OP_CONFIGURE="$OP_CONFIGURE $1"
            ;;
        op-configure)
            shift
            if [ $# -lt 1 ]; then
                echo "op-configure option missing"; errcode=1; cmd="help"; break
            fi
            OP_CONFIGURE="$OP_CONFIGURE $1"
            ;;
        *)
            echo "Unknown option $1"; errcode=1; cmd="help"; break
            ;;
    esac
    shift
done
|
||||
|
||||
# Load any requested environment modules before doing real work.
if [ -n "$MODULELIST" ]; then
    . ~/.bashrc
    module use /opt/pkgs/modules/all
    # "module load" takes a space-separated list; convert ',' to ' '.
    MODULELIST="$( echo -e "$MODULELIST" | tr ',' ' ' )"
    module load $MODULELIST
fi

# Dump environment variables after option parsing
echo ===Dumping environment variables after option parsing ===
printenv | sort
echo ===Done Dumping environment variables after option parsing ===

if [ "$cmd" = help ]; then
    DISPLAYUSAGE
fi

# Turn a non-empty $ZLIB into the matching configure option (--with-zlib).
ZLIB=${ZLIB:+"--with-zlib="$ZLIB}
# Adding --prefix as a configure option puts the path to the deploy
# directory in the initial libhdf5*.la files.
if [ -n "$DEPLOYDIRNAME" ]; then
    OP_CONFIGURE="$OP_CONFIGURE --prefix=${deploydir}/${DEPLOYDIRNAME}"
fi
CONFIGURE="configure $OP_CONFIGURE"

# Execute the requests
snapshot=yes

BASEDIR=${HOME}/snapshots-${SNAPSHOTNAME}
if [ ! -d ${BASEDIR} ]; then
    echo "BASEDIR ($BASEDIR) does not exist"
    exit 1
fi

CURRENT=${BASEDIR}/current
PREVIOUS=${BASEDIR}/previous
ReleaseDir=${ReleaseDir:=${BASEDIR}/${ReleaseDir_default}}
HOSTNAME=`hostname | cut -f1 -d.`       # no domain part

# Prefer a diff on $PATH that supports the -I (ignore matching lines) option.
DIFF=diff
for d in `echo $PATH | sed -e 's/:/ /g'` ; do
    test -x $d/diff && $d/diff -I XYZ /dev/null /dev/null > /dev/null 2>&1 &&
        DIFF=$d/diff && break
done
|
||||
|
||||
#=============================
# Run source checkout
#=============================
if [ "$cmd" = "all" -o -n "$cmdcheckout" ]; then
    TIMESTAMP "checkout"
    # ${BASEDIR}/bin is now updated from git by EveningMaint or DailyMaint
    # to avoid updating the scripts in ${BASEDIR}/bin while they are running.

    if [ -z "$AUTOGEN" ]; then
        # A leftover Makefile in ${CURRENT} means the last test there was
        # not distclean'ed; its files would take precedence over a --srcdir
        # build's own files, so run "make distclean" here.  Continue the
        # checkout regardless of the distclean result.
        ( cd ${CURRENT}; test -f Makefile && ${MAKE} distclean)
    fi
    if [ -n "$cmdftp" ]; then
        echo "Get the NetCDF4 source from their ftp server."
        echo "Command executed is: 2>&1 wget -N $ftp_url"
        cd ${BASEDIR};
        WGET_OUTPUT="`2>&1 wget -N $ftp_url`"
        errcode=$?
        # Use POSIX [ ], not the bash-only [[ ]]: this script runs under
        # /bin/sh.  The original also had a dead "$?" check after this one;
        # its error echo is folded into the live branch here.
        if [ $errcode -ne 0 ]; then
            echo $0: "$WGET_OUTPUT" Exiting.
            exit $errcode
        fi

        # "not retrieving" in wget's output means the file was up to date.
        # ("&>" is a bashism; use explicit redirection.)
        if echo "$WGET_OUTPUT" | fgrep 'not retrieving' > /dev/null 2>&1
        then
            echo "Snapshot unchanged"
        else
            echo "New snapshot downloaded"
            EXTRACT
        fi
    else
        SOURCE_CHECKOUT
    fi
fi # Do source checkout
|
||||
|
||||
|
||||
#=============================
# Run Test the HDF5 library
#=============================
if [ "$cmd" = "all" -o -n "$cmdtest" -o -n "$cmddiff" ]; then
    TIMESTAMP "Run Tests"
    # Set up the build (test) directory, honoring the srcdir option.
    if [ -z "$srcdir" ]; then
        TESTDIR=${CURRENT}
    else
        # Create TESTDIR if it does not exist yet.
        case "$SRCDIRNAME" in
            "")
                SRCDIRNAME=$HOSTNAME
                ;;
            -*)
                SRCDIRNAME="$HOSTNAME$SRCDIRNAME"
                ;;
        esac
        TESTDIR=${BASEDIR}/TestDir/${SRCDIRNAME}
        test -d ${TESTDIR} || mkdir ${TESTDIR}
        # Resolve TESTDIR to the direct path to the local test directory
        # rather than the path through ${BASEDIR}.
        cd ${TESTDIR}
        TESTDIR=`pwd -P`
        cd ${CURRENT}
    fi
    # Make sure current version exists and is clean
    if [ -d ${TESTDIR} ]; then
        DISTCLEAN
    else
        errcode=$?
        snapshot=no
        exit $errcode
    fi

    # Compare it with the previous version.  Compare only files listed in
    # the MANIFEST plus the MANIFEST itself.
    if [ -d ${PREVIOUS} ]; then
        if [ -z "${AUTOGEN}" ]; then
            CURRENTSRC=${CURRENT}
        else
            CURRENTSRC=${BASEDIR}/current_src
        fi
        if (${DIFF} -c ${PREVIOUS}/MANIFEST ${CURRENTSRC}/MANIFEST); then
            snapshot=no
            for src in `grep '^\.' ${CURRENTSRC}/MANIFEST|expand|cut -f1 -d' '`; do
                if ${DIFF} -I H5_VERS_RELEASE -I " released on " \
                    -I " currently under development" \
                    ${PREVIOUS}/$src ${CURRENTSRC}/$src
                then
                    : #continue
                else
                    snapshot=yes
                    break
                fi
            done
        fi
    fi

    # If diff is chosen, exit 0 when no significant differences are found,
    # non-zero otherwise.  This includes cases of other failures.
    if [ -n "$cmddiff" ]; then
        if [ $snapshot = no ]; then
            exit 0
        else
            exit 1
        fi
    fi

    #=============================
    # Execute command if defined
    #=============================
    if [ -n "$EXEC_CMD_ARG" ]; then
        TIMESTAMP ${EXEC_CMD_ARG}
        TESTDIR=${BASEDIR}/TestDir/${SRCDIRNAME}
        test -d ${TESTDIR} || mkdir ${TESTDIR}
        if cd ${TESTDIR}; then
            # An EXEC_CMD_ARG starting with '/' is an absolute path;
            # otherwise it is relative to ${BASEDIR}.
            case "$EXEC_CMD_ARG" in
                /*)
                    ${EXEC_CMD_ARG}
                    ;;
                *)
                    ${BASEDIR}/${EXEC_CMD_ARG}
                    ;;
            esac
            errcode=$?
        else
            echo "${TESTDIR} not accessible"
            errcode=1
        fi
        # Exit snapshot since there is nothing else to do, for now.
        exit $errcode
    fi

    # Build, run tests and install procedures
    if [ "$snapshot" = "yes" ] && [ "$NOMAKE" != "yes" ]; then
        FAIL_SECTION=""
        if [ -f ${TESTDIR}/failsection ]; then
            rm ${TESTDIR}/failsection
        fi
        # BUGFIX: each DISKUSAGE was originally followed by a bare line
        # continuation instead of "&& \", which silently passed the next
        # TIMESTAMP line to DISKUSAGE as ignored arguments so the
        # timestamps never printed.  The "&&" operators are restored.
        if (cd ${TESTDIR} && \
            TIMESTAMP "configure" && echo "configure" > ${TESTDIR}/failsection && \
            RUNCONFIGURE && \
            sleep 2 && \
            TIMESTAMP "make" && echo "make" > ${TESTDIR}/failsection && \
            ${MAKE} && DISKUSAGE && \
            TIMESTAMP ${CHECKVAL} && echo "make check" > ${TESTDIR}/failsection && \
            ${MAKE} ${CHECKVAL} && DISKUSAGE && \
            TIMESTAMP "install" && echo "make install" > ${TESTDIR}/failsection && \
            ${MAKE} install && DISKUSAGE && \
            TIMESTAMP "check-install" && echo "make check-install" > ${TESTDIR}/failsection && \
            CHECKINSTALL && DISKUSAGE && \
            TIMESTAMP "uninstall" && echo "make uninstall" > ${TESTDIR}/failsection && \
            ${MAKE} uninstall && DISKUSAGE); then
            :
        else
            errcode=$?
            FAIL_SECTION=`cat ${TESTDIR}/failsection`
            echo "Failed running ${FAIL_SECTION}"
            snapshot=no
            exit $errcode
        fi
    elif [ $CPSRC ]; then
        cp -pr ${CURRENT}/* ${TESTDIR}
    else
        cmdclean=""
    fi
fi # Test the HDF5 library
|
||||
|
||||
# Run external test script if configured.
if [ "$EXTTEST" != "" ]; then
    TIMESTAMP ${EXTTEST}
    TESTDIR=${BASEDIR}/TestDir/${SRCDIRNAME}
    test -d ${TESTDIR} || mkdir ${TESTDIR}
    cd ${TESTDIR}
    sleep 1
    # Was "TIMESTAMP $pwd": $pwd is an unset variable in sh; the current
    # working directory was clearly intended.
    TIMESTAMP `pwd`
    ls
    ${BASEDIR}/${EXTTEST}
    errcode=$?
    exit $errcode
fi
|
||||
|
||||
#=============================
# Run deployment if requested.
#=============================
if [ -n "$DEPLOYDIRNAME" ]; then
    # The daily tests deploy to .../hdf5/... or .../hdf4/... except on
    # cobalt where the deploy directory is .../HDF5/...; lc() takes care of
    # case.  If neither hdf4 nor hdf5 (any case) is in the path,
    # RELEASE.txt won't be found unless it is in $CURRENT.
    POS4=`perl -e "print index(lc(\"${deploydir}/${DEPLOYDIRNAME}\"), 'hdf4')"`
    POS5=`perl -e "print index(lc(\"${deploydir}/${DEPLOYDIRNAME}\"), 'hdf5')"`
    if [ "${POS4}" -ge "0" ]; then
        RELEASE_TXT_LOC="release_notes"
    elif [ "${POS5}" -ge "0" ]; then
        RELEASE_TXT_LOC="release_docs"
    else
        RELEASE_TXT_LOC=""
    fi

    if [ "$snapshot" = "yes" ]; then
        TIMESTAMP "deploy"
        if (cd ${TESTDIR} &&
            ${BASEDIR}/bin/deploy ${deploydir}/${DEPLOYDIRNAME} && \
            TIMESTAMP "clean" && \
            ${MAKE} clean && \
            TIMESTAMP "check-install prefix=${deploydir}/${DEPLOYDIRNAME}" && \
            CHECKINSTALL prefix=${deploydir}/${DEPLOYDIRNAME}); then
            # Deployment succeeded: copy the release notes and license in.
            cd ${CURRENT}
            cp ${RELEASE_TXT_LOC}/RELEASE.txt ${deploydir}/${DEPLOYDIRNAME}
            cp COPYING ${deploydir}/${DEPLOYDIRNAME}
        else
            errcode=$?
            exit $errcode
        fi
    fi
fi # Deploy
|
||||
|
||||
|
||||
#=============================
# Run Release snapshot, update version, and commit to source repository
#=============================
if [ "$cmd" = "all" -o -n "$cmdrel" ]; then
    if [ "$snapshot" = "yes" ]; then
        TIMESTAMP "release"
        DISTCLEAN
        (
            # Exit-on-error inside the subshell so nothing is committed if
            # any release step fails.
            set -e
            if [ "$cmdrel" = "autogen-release" ]; then
                cd ${BASEDIR}/current_src
            else
                cd ${CURRENT}
            fi
            if [ "$HDFREPOS" = "hdf4" ]; then
                RELEASE_VERSION="`perl bin/h4vers -v`"
                echo "Making snapshot release ($RELEASE_VERSION) to ${ReleaseDir}..."
                bin/release -d $ReleaseDir $METHODS
                perl bin/h4vers -i
            elif [ "$HDFREPOS" = "hdf5" ]; then
                RELEASE_VERSION="`perl bin/h5vers -v`"
                echo "Making snapshot release ($RELEASE_VERSION) to ${ReleaseDir}..."
                if [ "${DOCVERSION}" ]; then
                    bin/release -d $ReleaseDir --docver ${DOCVERSION} $METHODS
                else
                    bin/release -d $ReleaseDir $METHODS
                fi
                perl bin/h5vers -i
            else
                echo "need real release steps. For now, only move current version to previous"
            fi
            COMMITSNAPSHOT
        )
        errcode=$?
    fi

    # Replace the previous version with the current version.
    # The release errcode is deliberately not checked here: failures after
    # the release itself (e.g. h5vers or git) should still allow the
    # replacement to occur.
    rm -rf ${PREVIOUS}
    mv ${CURRENT} ${PREVIOUS}
fi #Release snapshot
|
||||
|
||||
|
||||
#=============================
# Clean the test area.  Default is no clean.
#=============================
if [ -n "$cmdclean" ] && [ "$NOMAKE" != "yes" ]; then
    TIMESTAMP "clean"
    # Locate the test directory, honoring the srcdir option.
    if [ -z "$srcdir" ]; then
        TESTDIR=${CURRENT}
    else
        case "$SRCDIRNAME" in
            "")
                SRCDIRNAME=$HOSTNAME
                ;;
            -*)
                SRCDIRNAME="$HOSTNAME$SRCDIRNAME"
                ;;
        esac
        TESTDIR=${BASEDIR}/TestDir/${SRCDIRNAME}
    fi
    # Run the requested target ("clean" or "distclean").
    if (cd ${TESTDIR} && ${MAKE} $cmdclean ) then
        :
    else
        errcode=$?
        snapshot=no
        exit $errcode
    fi
fi # Clean the Test directory

exit $errcode
|
||||
@@ -1,19 +0,0 @@
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
#
|
||||
|
||||
# Default version for the snapshot test.
# H5VERSION matches a source-version symbolic name; tests use the latest
# revision of that branch.  "hdf5" selects the main development version.
# H5VER tells runtest which version to run.
H5VERSION=hdf5
|
||||
@@ -1,7 +1,6 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
129
bin/timekeeper
129
bin/timekeeper
@@ -1,129 +0,0 @@
|
||||
#!/bin/sh
|
||||
##
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
##
|
||||
# As a time keeper of the remote daily test process launched by runtest.
|
||||
# It sleeps for a certain time and then wakes up to hangup those processes
|
||||
# that are still around, assuming they have run too long.
|
||||
#
|
||||
# Programmer: Albert Cheng
|
||||
# Created Date: 2004/12/23
|
||||
|
||||
# Variable initialization.
waitminutes=300     # default wait: 5 hours == 300 minutes
debugtimelimit=
debugflag=          # empty means no debug
|
||||
|
||||
# Function definitions
|
||||
#
|
||||
# PRINTMSG
# Print a one-line message left justified in a field of 70 characters
# without a trailing newline; more output for this line comes later.
# NOTE(review): the exact length of the literal padding string was lost in
# transcription; any padding of at least 70 spaces behaves identically
# because the result is cut to 70 columns.
PRINTMSG() {
    SPACES="                                                                      "
    echo "$* $SPACES" | cut -c1-70 | tr -d '\012'
}
|
||||
|
||||
|
||||
# Print the timekeeper usage/help text.
USAGE()
{
    # Was "%0" — a typo for the script name $0.
    echo "Usage: $0 [-h] [-debug] [<time-limit>]"
    echo "    Run timekeeper with <time-limit> minutes, default is $waitminutes."
    echo "    If <time-limit> is in the form of HH:MM, it means wait till then."
    echo "    -h      print this help page"
    echo "    -debug  run debug mode"
}
|
||||
|
||||
|
||||
# Parse the command line: [-h] [-debug] [<time-limit>].
# A <time-limit> of the form HH:MM is converted into minutes from now;
# a bare number is taken as minutes directly.
ParseOption()
{
    if [ $# -gt 0 -a "$1" = -h ]; then
        shift
        USAGE
        exit 0
    fi
    if [ $# -gt 0 -a "$1" = -debug ]; then
        shift
        debugflag=yes
        waitminutes=1       # use a shorter time for debugging
    fi
    if [ $# -gt 0 ]; then
        targettime=$1
        shift

        # Decide whether this is minutes-to-wait or an HH:MM wake-up time.
        case $targettime in
            *:*)    # HH:MM
                currenttime=`date +%H:%M`
                currenthour=`echo $currenttime | cut -f1 -d:`
                currentminute=`echo $currenttime | cut -f2 -d:`
                targethour=`echo $targettime | cut -f1 -d:`
                targetminute=`echo $targettime | cut -f2 -d:`
                waitminutes=`expr \( $targethour - $currenthour \) \* 60 + $targetminute - $currentminute`
                if test $waitminutes -le 0; then
                    # The target time is tomorrow; add one day of minutes.
                    waitminutes=`expr 24 \* 60 + $waitminutes`
                fi
                ;;
            *)
                waitminutes=$targettime
                ;;
        esac
    fi
}
|
||||
|
||||
|
||||
# Main body
echo "Timekeeper started at `date`"
ParseOption $*
waitperiod=`expr $waitminutes \* 60`    # convert minutes to seconds

if [ -z "$debugflag" ]; then
    # Normal time-keeping mode: sleep first, then hang up whatever is
    # still running, assuming it has run too long.
    echo Timekeeper sleeping for $waitperiod seconds
    sleep $waitperiod
    echo "Timekeeper woke up at `date`, looking for processes to terminate..."
    for x in PID.* ; do
        if [ -f $x ]; then
            pid=`cat $x`
            # Only signal processes that are still alive.
            if test X$pid \!= X && ps -p $pid > /dev/null; then
                echo "terminating process $x ($pid)"
                kill -HUP $pid
                echo "Remote shell command ended. But some processes might still be"
                echo "running in the remote machine. Login there to verify."
            fi
        fi
    done
else
    # Debug mode: launch two rsh processes, one ending before and one
    # after waitperiod.  The timekeeper must be launched from a subshell,
    # else the debug run would wait for it too.
    myhostname=`hostname`
    ( $0 $waitminutes &)
    debugtimelimit=`expr $waitperiod - 10`
    echo rsh $myhostname sleep $debugtimelimit
    rsh $myhostname sleep $debugtimelimit &
    echo $! > PID.before
    debugtimelimit=`expr $waitperiod + 10`
    echo rsh $myhostname sleep $debugtimelimit
    rsh $myhostname sleep $debugtimelimit &
    echo $! > PID.after

    wait
    rm PID.before PID.after
fi

echo "Timekeeper ended at `date`"
|
||||
@@ -1,7 +1,6 @@
|
||||
#!/usr/bin/env perl
|
||||
##
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
@@ -77,7 +76,6 @@ $Source = "";
|
||||
"H5G_obj_t" => "Go",
|
||||
"H5G_stat_t" => "Gs",
|
||||
"hsize_t" => "h",
|
||||
"H5_alloc_stats_t" => "Ha",
|
||||
"H5_atclose_func_t" => "Hc",
|
||||
"hssize_t" => "Hs",
|
||||
"H5E_major_t" => "i", # H5E_major_t is typedef'd to hid_t
|
||||
@@ -88,6 +86,7 @@ $Source = "";
|
||||
"H5_index_t" => "Ii",
|
||||
"H5I_iterate_func_t" => "II",
|
||||
"H5_iter_order_t" => "Io",
|
||||
"H5FD_subfiling_ioc_select_t" => "IO",
|
||||
"H5I_future_realize_func_t" => "IR",
|
||||
"int" => "Is",
|
||||
"int32_t" => "Is",
|
||||
@@ -188,6 +187,7 @@ $Source = "";
|
||||
"H5Z_filter_t" => "Zf",
|
||||
"H5Z_filter_func_t" => "ZF",
|
||||
"ssize_t" => "Zs",
|
||||
|
||||
# Types below must be defined here, as they appear in function arguments,
|
||||
# but they are not yet supported in the H5_trace_args() routine yet. If
|
||||
# they are used as an actual parameter type (and not just as a pointer to
|
||||
@@ -198,6 +198,7 @@ $Source = "";
|
||||
"H5FD_t" => "#",
|
||||
"H5FD_hdfs_fapl_t" => "#",
|
||||
"H5FD_mirror_fapl_t" => "#",
|
||||
"H5FD_onion_fapl_t" => "#",
|
||||
"H5FD_ros3_fapl_t" => "#",
|
||||
"H5FD_splitter_vfd_config_t" => "#",
|
||||
"H5L_class_t" => "#",
|
||||
|
||||
27
bin/warnhist
27
bin/warnhist
@@ -8,12 +8,10 @@ use warnings;
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the files COPYING and Copyright.html. COPYING can be found at the root
|
||||
# of the source code distribution tree; Copyright.html can be found at the
|
||||
# root level of an installed copy of the electronic HDF5 document set and
|
||||
# is linked from the top-level documents page. It can also be found at
|
||||
# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
|
||||
# access to either file, you may request a copy from help@hdfgroup.org.
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
#
|
||||
# Quincey Koziol
|
||||
# 9 Aug 2013
|
||||
@@ -229,6 +227,9 @@ while (<>) {
|
||||
# Skip variables with the word 'warning' in them
|
||||
next if $_ =~ /_warning_/;
|
||||
|
||||
# Skip AMD Optimizing Compiler (aocc) lines "<#> warning(s) generated."
|
||||
next if $_ =~ / warnings? generated\./;
|
||||
|
||||
# "Hide" the C++ '::' symbol until we've parsed out the parts of the line
|
||||
while($_ =~ /\:\:/) {
|
||||
$_ =~ s/\:\:/@@@@/g;
|
||||
@@ -244,6 +245,18 @@ while (<>) {
|
||||
} elsif($_ =~ /^\s*[Ww]arning:.*/) {
|
||||
$name = $last_c_name;
|
||||
($toss, $warning, $extra, $extra2) = split /\:/, $_;
|
||||
# Check for file-scope gcc Fortran warning output
|
||||
} elsif($_ =~ /f\d\d\d: Warning:/) {
|
||||
# These are interspersed with the "compiling a file" output
|
||||
# when compiling with `make -j` and thus difficult to tie to
|
||||
# any particular file. They are due to things like inappropriate
|
||||
# build options and don't have a line number.
|
||||
#
|
||||
# They start with f, as in f951
|
||||
$name = "(generic)";
|
||||
$line = int(rand(1000000)); # Hack to avoid counting as duplictates
|
||||
|
||||
($warning) = $_ =~ /\[(.*)\]/x;
|
||||
# Check for FORTRAN warning output
|
||||
} elsif($_ =~ /^Warning:.*/) {
|
||||
$name = $last_fort_name;
|
||||
@@ -382,7 +395,7 @@ while (<>) {
|
||||
$warning =~ s/"[A-Za-z_0-9]*"/"-"/g;
|
||||
}
|
||||
|
||||
# Genericize [GCC?] C/C++ warning text about suggessted attribute
|
||||
# Genericize [GCC?] C/C++ warning text about suggested attribute
|
||||
if($warning =~ /attribute=[A-Za-z_0-9]*\]/) {
|
||||
$warning =~ s/=[A-Za-z_0-9]*\]/=-\]/g;
|
||||
}
|
||||
|
||||
@@ -1,76 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
# terms governing use, modification, and redistribution, is contained in
|
||||
# the COPYING file, which can be found at the root of the source code
|
||||
# distribution tree, or in https://www.hdfgroup.org/licenses.
|
||||
# If you do not have access to either file, you may request a copy from
|
||||
# help@hdfgroup.org.
|
||||
#
|
||||
# Fix configure file so that it can launch configure testing executable
|
||||
# via the proper launching command, e.g., yod. (Thus the name yodconfigure
|
||||
# is used.)
|
||||
#
|
||||
# Author: Albert Cheng
|
||||
|
||||
if [ "$1" = -f ]; then
|
||||
FORCEMODE=yes
|
||||
echo turn FORCEMODE to $FORCEMODE
|
||||
shift
|
||||
fi
|
||||
|
||||
if [ $# -ne 1 ]; then
|
||||
echo "Usage: $0 [-f] <configure file>"
|
||||
echo " -f apply the change even if it has been applied already."
|
||||
exit 1
|
||||
fi
|
||||
confile=$1
|
||||
|
||||
if [ ! -w $confile ]; then
|
||||
echo "$0: $confile is not writable"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ACTRY_NAME="ACTRY()"
|
||||
if grep ^"$ACTRY_NAME"$ $confile >/dev/null 2>&1 && [ "$FORCEMODE" != yes ]; then
|
||||
echo "$0: $confile is already yodconfigure ready. Use -f to force yodconfigure again."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Insert the ACTRY function after the 1st line which is the #!/bin/sh.
|
||||
# Change all "eval $ac_try" commands to call ACTRY.
|
||||
# auto-configure have changed the ac_try syntax from 'eval $ac_try' to
|
||||
# 'eval "$ac_try"'. Thus requiring two very similar global-substitute.
|
||||
# The single quotes around EOF tell shell NOT to expand or do substitution in
|
||||
# the body of ed input.
|
||||
#
|
||||
ed - $confile <<'EOF'
|
||||
1a
|
||||
# ===inserted by yodconfigure ====
|
||||
# ACTRY will figure out when it is approprirate to run the command by the
|
||||
# $RUNSERIAL launcher (e.g., yod -sz 1) and when to just run it as is.
|
||||
# So far, ./a.out and ./conftest are names of real executable that should
|
||||
# be run by $RUNSERIAL.
|
||||
#
|
||||
# (uncomment the echo line if you want to see what is going on.)
|
||||
ACTRY()
|
||||
{
|
||||
#echo ACTRY: args are: $* > /dev/tty
|
||||
if [ "$1" = ./a.out -o "$1" = ./conftest ]; then
|
||||
# echo $RUNSERIAL $* > /dev/tty
|
||||
$RUNSERIAL $*
|
||||
else
|
||||
$*
|
||||
fi
|
||||
}
|
||||
# === end of ACTRY inserted by yodconfigure ====
|
||||
.
|
||||
g/eval $ac_try/s/eval/eval ACTRY/
|
||||
g/eval "$ac_try"/s/eval/eval ACTRY/
|
||||
w
|
||||
q
|
||||
EOF
|
||||
@@ -1,4 +1,4 @@
|
||||
cmake_minimum_required (VERSION 3.12)
|
||||
cmake_minimum_required (VERSION 3.18)
|
||||
project (HDF5_CPP CXX)
|
||||
|
||||
add_subdirectory (src)
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
cmake_minimum_required (VERSION 3.12)
|
||||
cmake_minimum_required (VERSION 3.18)
|
||||
project (HDF5_CPP_EXAMPLES CXX)
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
@@ -34,7 +34,7 @@ set (tutr_examples
|
||||
|
||||
foreach (example ${examples})
|
||||
add_executable (cpp_ex_${example} ${HDF5_CPP_EXAMPLES_SOURCE_DIR}/${example}.cpp)
|
||||
target_include_directories (cpp_ex_${example} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
|
||||
target_include_directories (cpp_ex_${example} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
|
||||
if (NOT BUILD_SHARED_LIBS)
|
||||
TARGET_C_PROPERTIES (cpp_ex_${example} STATIC)
|
||||
target_link_libraries (cpp_ex_${example} PRIVATE ${HDF5_CPP_LIB_TARGET} ${HDF5_LIB_TARGET})
|
||||
@@ -59,7 +59,7 @@ endforeach ()
|
||||
|
||||
foreach (example ${tutr_examples})
|
||||
add_executable (cpp_ex_${example} ${HDF5_CPP_EXAMPLES_SOURCE_DIR}/${example}.cpp)
|
||||
target_include_directories (cpp_ex_${example} PRIVATE "${HDF5_SRC_DIR};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
|
||||
target_include_directories (cpp_ex_${example} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
|
||||
if (NOT BUILD_SHARED_LIBS)
|
||||
TARGET_C_PROPERTIES (cpp_ex_${example} STATIC)
|
||||
target_link_libraries (cpp_ex_${example} PRIVATE ${HDF5_CPP_LIB_TARGET} ${HDF5_LIB_TARGET})
|
||||
|
||||
@@ -16,17 +16,31 @@
|
||||
##############################################################################
|
||||
##############################################################################
|
||||
# Remove any output file left over from previous test run
|
||||
set (CPP_EX_CLEANFILES
|
||||
Group.h5
|
||||
SDS.h5
|
||||
SDScompound.h5
|
||||
SDSextendible.h5
|
||||
Select.h5
|
||||
)
|
||||
add_test (
|
||||
NAME CPP_ex-clear-objects
|
||||
COMMAND ${CMAKE_COMMAND}
|
||||
-E remove
|
||||
Group.h5
|
||||
SDS.h5
|
||||
SDScompound.h5
|
||||
SDSextendible.h5
|
||||
Select.h5
|
||||
-E remove ${CPP_EX_CLEANFILES}
|
||||
)
|
||||
set_tests_properties (CPP_ex-clear-objects PROPERTIES
|
||||
FIXTURES_SETUP clear_cppex
|
||||
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
|
||||
)
|
||||
add_test (
|
||||
NAME CPP_ex-clean-objects
|
||||
COMMAND ${CMAKE_COMMAND}
|
||||
-E remove ${CPP_EX_CLEANFILES}
|
||||
)
|
||||
set_tests_properties (CPP_ex-clean-objects PROPERTIES
|
||||
FIXTURES_CLEANUP clear_cppex
|
||||
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
|
||||
)
|
||||
set_tests_properties (CPP_ex-clear-objects PROPERTIES FIXTURES_SETUP clear_cppex)
|
||||
|
||||
foreach (example ${examples})
|
||||
if (HDF5_ENABLE_USING_MEMCHECKER)
|
||||
@@ -41,7 +55,7 @@ foreach (example ${examples})
|
||||
-D "TEST_OUTPUT=cpp_ex_${example}.txt"
|
||||
#-D "TEST_REFERENCE=cpp_ex_${example}.out"
|
||||
-D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
|
||||
-P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake"
|
||||
-P "${HDF_RESOURCES_DIR}/runTest.cmake"
|
||||
)
|
||||
endif ()
|
||||
set_tests_properties (CPP_ex_${example} PROPERTIES FIXTURES_REQUIRED clear_cppex)
|
||||
@@ -53,19 +67,32 @@ endforeach ()
|
||||
#the following dependencies are handled by the order of the files
|
||||
# SET_TESTS_PROPERTIES(CPP_ex_readdata PROPERTIES DEPENDS CPP_ex_create)
|
||||
# SET_TESTS_PROPERTIES(CPP_ex_chunks PROPERTIES DEPENDS CPP_ex_extend_ds)
|
||||
|
||||
set (CPP_EX_TUTR_CLEANFILES
|
||||
h5tutr_cmprss.h5
|
||||
h5tutr_dset.h5
|
||||
h5tutr_extend.h5
|
||||
h5tutr_group.h5
|
||||
h5tutr_groups.h5
|
||||
h5tutr_subset.h5
|
||||
)
|
||||
add_test (
|
||||
NAME CPP_ex_tutr-clear-objects
|
||||
COMMAND ${CMAKE_COMMAND}
|
||||
-E remove
|
||||
h5tutr_cmprss.h5
|
||||
h5tutr_dset.h5
|
||||
h5tutr_extend.h5
|
||||
h5tutr_group.h5
|
||||
h5tutr_groups.h5
|
||||
h5tutr_subset.h5
|
||||
-E remove ${CPP_EX_TUTR_CLEANFILES}
|
||||
)
|
||||
set_tests_properties (CPP_ex_tutr-clear-objects PROPERTIES
|
||||
FIXTURES_SETUP clear_cppex_tutr
|
||||
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
|
||||
)
|
||||
add_test (
|
||||
NAME CPP_ex_tutr-clean-objects
|
||||
COMMAND ${CMAKE_COMMAND}
|
||||
-E remove ${CPP_EX_TUTR_CLEANFILES}
|
||||
)
|
||||
set_tests_properties (CPP_ex_tutr-clean-objects PROPERTIES
|
||||
FIXTURES_CLEANUP clear_cppex_tutr
|
||||
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
|
||||
)
|
||||
set_tests_properties (CPP_ex_tutr-clear-objects PROPERTIES FIXTURES_SETUP clear_cppex_tutr)
|
||||
|
||||
foreach (example ${tutr_examples})
|
||||
if (HDF5_ENABLE_USING_MEMCHECKER)
|
||||
@@ -77,10 +104,10 @@ foreach (example ${tutr_examples})
|
||||
-D "TEST_ARGS:STRING="
|
||||
-D "TEST_EXPECT=0"
|
||||
-D "TEST_SKIP_COMPARE=TRUE"
|
||||
-D "TEST_OUTPUT=cpp_ex_${example}.txt"
|
||||
#-D "TEST_REFERENCE=cpp_ex_${example}.out"
|
||||
-D "TEST_OUTPUT=tutr_cpp_ex_${example}.txt"
|
||||
#-D "TEST_REFERENCE=cpp_ex_tutr_${example}.out"
|
||||
-D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
|
||||
-P "${HDF_RESOURCES_EXT_DIR}/runTest.cmake"
|
||||
-P "${HDF_RESOURCES_DIR}/runTest.cmake"
|
||||
)
|
||||
endif ()
|
||||
set_tests_properties (CPP_ex_${example} PROPERTIES FIXTURES_REQUIRED clear_cppex_tutr)
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
#
|
||||
# Copyright by The HDF Group.
|
||||
# Copyright by the Board of Trustees of the University of Illinois.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is part of HDF5. The full HDF5 copyright notice, including
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of HDF5. The full HDF5 copyright notice, including *
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of HDF5. The full HDF5 copyright notice, including *
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of HDF5. The full HDF5 copyright notice, including *
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of HDF5. The full HDF5 copyright notice, including *
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of HDF5. The full HDF5 copyright notice, including *
|
||||
@@ -70,7 +69,7 @@ main(void)
|
||||
dims[1] = 20;
|
||||
cdims[0] = 20;
|
||||
cdims[1] = 20;
|
||||
DataSpace * dataspace = new DataSpace(RANK, dims); // create new dspace
|
||||
DataSpace *dataspace = new DataSpace(RANK, dims); // create new dspace
|
||||
DSetCreatPropList ds_creatplist; // create dataset creation prop list
|
||||
ds_creatplist.setChunk(2, cdims); // then modify it for compression
|
||||
ds_creatplist.setDeflate(6);
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of HDF5. The full HDF5 copyright notice, including *
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of HDF5. The full HDF5 copyright notice, including *
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of HDF5. The full HDF5 copyright notice, including *
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of HDF5. The full HDF5 copyright notice, including *
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of HDF5. The full HDF5 copyright notice, including *
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of HDF5. The full HDF5 copyright notice, including *
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
|
||||
* Copyright by The HDF Group. *
|
||||
* Copyright by the Board of Trustees of the University of Illinois. *
|
||||
* All rights reserved. *
|
||||
* *
|
||||
* This file is part of HDF5. The full HDF5 copyright notice, including *
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user