Initial commit

main
lib 2 years ago
commit 42a6d127a4
  1. 17
      .autom4te.cfg
  2. 100
      .clang-format
  3. 879
      .gitattributes
  4. 11
      .github/CODEOWNERS
  5. 3
      .github/FUNDING.yml
  6. 25
      .github/ISSUE_TEMPLATE/bug_report.md
  7. 20
      .github/ISSUE_TEMPLATE/feature-request.md
  8. 8
      .github/PULL_REQUEST_TEMPLATE/pull_request_template.md
  9. 18
      .github/workflows/clang-format-check.yml
  10. 34
      .github/workflows/clang-format-fix.yml
  11. 217
      .github/workflows/cmake-ctest.yml
  12. 15
      .github/workflows/codespell.yml
  13. 21
      .github/workflows/daily-build.yml
  14. 47
      .github/workflows/hdfeos5.yml
  15. 530
      .github/workflows/main.yml
  16. 105
      .github/workflows/tarball.yml
  17. 53
      .h5chkright.ini
  18. 18
      ACKNOWLEDGMENTS
  19. 189
      CMakeFilters.cmake
  20. 670
      CMakeInstallation.cmake
  21. 1227
      CMakeLists.txt
  22. 56
      CMakePlugins.cmake
  23. 253
      CMakePresets.json
  24. 128
      CODE_OF_CONDUCT.md
  25. 136
      CONTRIBUTING.md
  26. 106
      COPYING
  27. 61
      COPYING_LBNL_HDF5
  28. 52
      CTestConfig.cmake
  29. 1
      DEPENDENCIES
  30. 1295
      Makefile
  31. 226
      Makefile.am
  32. 34
      Makefile.dist
  33. 1295
      Makefile.in
  34. 88
      README.md
  35. 14
      SETUP
  36. 32
      UserMacros.cmake
  37. 1193
      aclocal.m4
  38. 53
      acsite.m4
  39. 254
      autogen.sh
  40. 1496
      bin/Makefile
  41. 56
      bin/Makefile.am
  42. 1496
      bin/Makefile.in
  43. 31
      bin/README.md
  44. 23
      bin/batch/ctest.qsub.in.cmake
  45. 20
      bin/batch/ctestP.lsf.in.cmake
  46. 15
      bin/batch/ctestP.sl.in.cmake
  47. 17
      bin/batch/ctestS.lsf.in.cmake
  48. 15
      bin/batch/ctestS.sl.in.cmake
  49. 12
      bin/batch/ctest_parallel.cmake.in
  50. 12
      bin/batch/ctest_serial.cmake.in
  51. 20
      bin/batch/knl_H5detect.sl.in.cmake
  52. 16
      bin/batch/knl_ctestP.sl.in.cmake
  53. 16
      bin/batch/knl_ctestS.sl.in.cmake
  54. 22
      bin/batch/ray_ctestP.lsf.in.cmake
  55. 17
      bin/batch/ray_ctestS.lsf.in.cmake
  56. 7
      bin/batch/raybsub
  57. 339
      bin/bbrelease
  58. 308
      bin/buildhdf5
  59. 66
      bin/checkapi
  60. 263
      bin/checkposix
  61. 848
      bin/chkcopyright
  62. 380
      bin/cmakehdf5
  63. 348
      bin/compile
  64. 1667
      bin/config.guess
  65. 1793
      bin/config.sub
  66. 37
      bin/debug-ohdr
  67. 791
      bin/depcomp
  68. 26
      bin/format_source
  69. 265
      bin/genparser
  70. 391
      bin/h5cc
  71. 391
      bin/h5cc.in
  72. 215
      bin/h5redeploy.in
  73. 531
      bin/h5vers
  74. 529
      bin/install-sh
  75. 96
      bin/iostats
  76. 11147
      bin/ltmain.sh
  77. 397
      bin/make_err
  78. 214
      bin/make_overflow
  79. 530
      bin/make_vers
  80. 66
      bin/makehelp
  81. 215
      bin/missing
  82. 103
      bin/output_filter.sh
  83. 164
      bin/pkgscrpts/h5rmflags
  84. 365
      bin/pkgscrpts/makeHDF5BinaryTarfiles.pl
  85. 620
      bin/release
  86. 68
      bin/restore.sh
  87. 87
      bin/runbkgprog
  88. 80
      bin/switch_maint_mode
  89. 148
      bin/test-driver
  90. 548
      bin/trace
  91. 549
      bin/warnhist
  92. 18
      c++/CMakeLists.txt
  93. 1614
      c++/Makefile
  94. 49
      c++/Makefile.am
  95. 1614
      c++/Makefile.in
  96. 87
      c++/examples/CMakeLists.txt
  97. 122
      c++/examples/CMakeTests.cmake
  98. 1566
      c++/examples/Makefile
  99. 81
      c++/examples/Makefile.am
  100. 1566
      c++/examples/Makefile.in
  101. Some files were not shown because too many files have changed in this diff Show More

@@ -0,0 +1,17 @@
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
## ------------------ ##
## User Preferences. ##
## ------------------ ##
begin-language: "Autoconf"
args: --no-cache
end-language: "Autoconf"

@@ -0,0 +1,100 @@
---
BasedOnStyle: LLVM
AlignConsecutiveAssignments: true
BraceWrapping:
AfterFunction: true
#llvm10-11: AfterControlStatement: false - Never
BeforeCatch: true
BeforeElse: true
#llvm11: BeforeLambdaBody: false
#llvm11: BeforeWhile: false
BreakBeforeBraces: Stroustrup
BreakAfterJavaFieldAnnotations: true
BreakStringLiterals: true
ColumnLimit: 110 # Update $max_trace_macro_line_len in bin/trace also
IndentWidth: 4
---
Language: Cpp
#llvm11: AlignConsecutiveBitFields: false
AlignConsecutiveDeclarations: true
AlignConsecutiveMacros: true
#llvm10-11: AlignOperands: true - Align
#llvm11: AllowShortEnumsOnASingleLine: true
AllowShortFunctionsOnASingleLine: None
AlwaysBreakAfterReturnType: AllDefinitions
# Can enable the following section when llvm 12.x is out
#AttributeMacros:
# - H5_ATTR_FORMAT
# - H5_ATTR_UNUSED
# - H5_ATTR_DEPRECATED_USED
# - H5_ATTR_NDEBUG_UNUSED
# - H5_ATTR_DEBUG_API_USED
# - H5_ATTR_PARALLEL_UNUSED
# - H5_ATTR_PARALLEL_USED
# - H5_ATTR_NORETURN
# - H5_ATTR_CONST
# - H5_ATTR_PURE
# - H5_ATTR_FALLTHROUGH
ForEachMacros: ['ALL_MEMBERS', 'UNIQUE_MEMBERS']
IncludeCategories:
- Regex: '^"(llvm|llvm-c|clang|clang-c)/'
Priority: 3
SortPriority: 0
- Regex: '^(<|"(gtest|gmock|isl|json)/)'
Priority: 4
SortPriority: 0
- Regex: '.*'
Priority: 0
SortPriority: 0
- Regex: '^H5*.*'
Priority: 1
SortPriority: 0
- Regex: 'private.*'
Priority: 2
SortPriority: 0
IncludeIsMainRegex: '(public)?$'
IndentCaseLabels: true
#llvm11: IndentCaseBlocks: false
IndentGotoLabels: false
#llvm11: IndentExternBlock: AfterExternBlock
#llvm11: InsertTrailingCommas: None
MacroBlockBegin: "^BEGIN_FUNC"
MacroBlockEnd: "^END_FUNC"
ObjCBlockIndentWidth: 4
#llvm11: ObjCBreakBeforeNestedBlockParam: true
ReflowComments: true
SortIncludes: false
StatementMacros:
- FUNC_ENTER_API
- FUNC_LEAVE_API
- FUNC_ENTER_NOAPI_NOINIT_NOERR
- FUNC_LEAVE_NOAPI
- H5_BEGIN_TAG
- HGOTO_DONE_TAG
- H5_END_TAG
- HSYS_DONE_ERROR
- HSYS_GOTO_ERROR
- HDONE_ERROR
- HERROR
- H5_LEAVE
- H5E_PRINTF
- H5E_THROW
- HGOTO_DONE
- HGOTO_ERROR
- HMPI_ERROR
- HMPI_DONE_ERROR
- HMPI_GOTO_ERROR
- H5_GCC_DIAG_OFF
- H5_GCC_DIAG_ON
- CATCH
#llvm10: TypenameMacros:
#llvm10: - STACK_OF
#llvm10: - LIST
#llvm11: WhitespaceSensitiveMacros:
#llvm11: - STRINGIZE
#llvm11: - PP_STRINGIZE
---
Language: Java
BreakAfterJavaFieldAnnotations: true
JavaImportGroups: ['java', 'hdf', 'hdf.hdf5lib', 'org']
...

879
.gitattributes vendored

@@ -0,0 +1,879 @@
* text=auto !eol
bin/bbrelease -text
bin/genparser -text
bin/switch_maint_mode -text
c++/src/H5OcreatProp.cpp -text
c++/src/H5OcreatProp.h -text
c++/src/footer.html -text
c++/src/header_files/hdf_logo.jpg -text
c++/src/header_files/help.jpg -text
c++/src/header_files/image001.jpg -text
c++/src/header_files/image002.jpg -text
c++/test/tarray.cpp -text
c++/test/th5s.h5 -text
config/cce-fflags -text
config/cce-flags -text
config/cmake/CMakeFindJavaCommon.cmake -text
config/cmake/CPack.Info.plist.in -text svneol=unset#application/xml
config/cmake/FindHDFJAVA.cmake.in -text
config/cmake/FindJNI.cmake -text
config/cmake/HDF5UseFortran.cmake -text
config/cmake/HDF5_Examples.cmake.in -text
config/cmake/HDF5_Process_Flex_Files.cmake -text
config/cmake/HDFCompilerFlags.cmake -text
config/cmake/UseJava.cmake -text
config/cmake/UseJavaClassFilelist.cmake -text
config/cmake/UseJavaSymlinks.cmake -text
config/cmake/jrunTest.cmake -text
config/cmake/patch.xml -text
config/cmake/scripts/CTestScript.cmake -text
config/cmake/scripts/HDF5config.cmake -text
config/cmake_ext_mod/hdf.bmp -text
config/cmake_ext_mod/hdf.icns -text
config/cmake_ext_mod/hdf.ico -text
config/conclude_fc.am -text
config/gnu-cxxflags -text
examples/h5_vds-eiger.c -text
examples/h5_vds-exc.c -text
examples/h5_vds-exclim.c -text
examples/h5_vds-percival-unlim-maxmin.c -text
examples/h5_vds-percival-unlim.c -text
examples/h5_vds-percival.c -text
examples/h5_vds-simpleIO.c -text
examples/h5_vds.c -text
fortran/src/H5_buildiface.F90 -text
fortran/src/H5config_f.inc.cmake -text
fortran/src/H5config_f.inc.in -text
fortran/src/H5fort_type_defines.h.in -text
fortran/src/H5fortkit.F90 -text
fortran/test/H5_test_buildiface.F90 -text
hl/fortran/src/H5HL_buildiface.F90 -text
hl/fortran/src/hdf5_hl_fortrandll.def.in -text
hl/src/H5LD.c -text
hl/src/H5LDprivate.h -text
hl/src/H5LDpublic.h -text
hl/src/H5LTanalyze.c -text
hl/src/H5LTparse.c -text
hl/src/H5LTparse.h -text
hl/test/gen_test_ds.c svneol=native#text/plain
hl/test/gen_test_ld.c -text
hl/test/test_ds_be.h5 -text
hl/test/test_ds_le.h5 -text
hl/test/test_dset_append.c -text
hl/test/test_ld.c -text
hl/test/test_ld.h5 -text
hl/test/test_packet_vlen.c -text
hl/test/test_table_be.h5 -text
hl/test/test_table_cray.h5 -text
hl/test/test_table_le.h5 -text
hl/tools/gif2h5/CMakeLists.txt -text
hl/tools/gif2h5/CMakeTests.cmake -text
hl/tools/gif2h5/testfiles/README -text
hl/tools/gif2h5/testfiles/ex_image2.h5 -text
hl/tools/gif2h5/testfiles/h52giftst.h5 -text
hl/tools/gif2h5/testfiles/image1.gif -text
java/CMakeLists.txt -text
java/COPYING -text
java/Makefile.am -text
java/examples/CMakeLists.txt -text
java/examples/Makefile.am -text
java/examples/datasets/CMakeLists.txt -text
java/examples/datasets/H5Ex_D_Alloc.java -text
java/examples/datasets/H5Ex_D_Checksum.java -text
java/examples/datasets/H5Ex_D_Chunk.java -text
java/examples/datasets/H5Ex_D_Compact.java -text
java/examples/datasets/H5Ex_D_External.java -text
java/examples/datasets/H5Ex_D_FillValue.java -text
java/examples/datasets/H5Ex_D_Gzip.java -text
java/examples/datasets/H5Ex_D_Hyperslab.java -text
java/examples/datasets/H5Ex_D_Nbit.java -text
java/examples/datasets/H5Ex_D_ReadWrite.java -text
java/examples/datasets/H5Ex_D_Shuffle.java -text
java/examples/datasets/H5Ex_D_Sofloat.java -text
java/examples/datasets/H5Ex_D_Soint.java -text
java/examples/datasets/H5Ex_D_Szip.java -text
java/examples/datasets/H5Ex_D_Transform.java -text
java/examples/datasets/H5Ex_D_UnlimitedAdd.java -text
java/examples/datasets/H5Ex_D_UnlimitedGzip.java -text
java/examples/datasets/H5Ex_D_UnlimitedMod.java -text
java/examples/datasets/Makefile.am -text
java/examples/datasets/runExample.sh.in -text
java/examples/datatypes/CMakeLists.txt -text
java/examples/datatypes/H5Ex_T_Array.java -text
java/examples/datatypes/H5Ex_T_ArrayAttribute.java -text
java/examples/datatypes/H5Ex_T_Bit.java -text
java/examples/datatypes/H5Ex_T_BitAttribute.java -text
java/examples/datatypes/H5Ex_T_Commit.java -text
java/examples/datatypes/H5Ex_T_Compound.java -text
java/examples/datatypes/H5Ex_T_CompoundAttribute.java -text
java/examples/datatypes/H5Ex_T_Float.java -text
java/examples/datatypes/H5Ex_T_FloatAttribute.java -text
java/examples/datatypes/H5Ex_T_Integer.java -text
java/examples/datatypes/H5Ex_T_IntegerAttribute.java -text
java/examples/datatypes/H5Ex_T_ObjectReference.java -text
java/examples/datatypes/H5Ex_T_ObjectReferenceAttribute.java -text
java/examples/datatypes/H5Ex_T_Opaque.java -text
java/examples/datatypes/H5Ex_T_OpaqueAttribute.java -text
java/examples/datatypes/H5Ex_T_String.java -text
java/examples/datatypes/H5Ex_T_StringAttribute.java -text
java/examples/datatypes/H5Ex_T_VLString.java -text
java/examples/datatypes/Makefile.am -text
java/examples/datatypes/runExample.sh.in -text
java/examples/groups/CMakeLists.txt -text
java/examples/groups/H5Ex_G_Compact.java -text
java/examples/groups/H5Ex_G_Corder.java -text
java/examples/groups/H5Ex_G_Create.java -text
java/examples/groups/H5Ex_G_Intermediate.java -text
java/examples/groups/H5Ex_G_Iterate.java -text
java/examples/groups/H5Ex_G_Phase.java -text
java/examples/groups/H5Ex_G_Traverse.java -text
java/examples/groups/H5Ex_G_Visit.java -text
java/examples/groups/Makefile.am -text
java/examples/groups/h5ex_g_iterate.h5 -text svneol=unset#application/x-hdf
java/examples/groups/h5ex_g_visit.h5 -text svneol=unset#application/x-hdf
java/examples/groups/runExample.sh.in -text
java/examples/intro/CMakeLists.txt -text
java/examples/intro/H5_CreateAttribute.java -text
java/examples/intro/H5_CreateDataset.java -text
java/examples/intro/H5_CreateFile.java -text
java/examples/intro/H5_CreateGroup.java -text
java/examples/intro/H5_CreateGroupAbsoluteRelative.java -text
java/examples/intro/H5_CreateGroupDataset.java -text
java/examples/intro/H5_ReadWrite.java -text
java/examples/intro/Makefile.am -text
java/examples/intro/runExample.sh.in -text
java/examples/testfiles/examples.datasets.H5Ex_D_Alloc.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_Checksum.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_Chunk.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_Compact.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_External.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_FillValue.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_Gzip.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_Hyperslab.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_Nbit.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_ReadWrite.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_Shuffle.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_Sofloat.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_Soint.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_Szip.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_Transform.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt -text
java/examples/testfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_Array.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_ArrayAttribute.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_Bit.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_BitAttribute.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_Commit.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_Compound.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_CompoundAttribute.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_Float.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_FloatAttribute.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_Integer.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_IntegerAttribute.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReference.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_Opaque.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_OpaqueAttribute.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_String.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_StringAttribute.txt -text
java/examples/testfiles/examples.datatypes.H5Ex_T_VLString.txt -text
java/examples/testfiles/examples.groups.H5Ex_G_Compact.txt -text
java/examples/testfiles/examples.groups.H5Ex_G_Corder.txt -text
java/examples/testfiles/examples.groups.H5Ex_G_Create.txt -text
java/examples/testfiles/examples.groups.H5Ex_G_Intermediate.txt -text
java/examples/testfiles/examples.groups.H5Ex_G_Iterate.txt -text
java/examples/testfiles/examples.groups.H5Ex_G_Phase.txt -text
java/examples/testfiles/examples.groups.H5Ex_G_Visit.txt -text
java/examples/testfiles/examples.intro.H5_CreateAttribute.txt -text
java/examples/testfiles/examples.intro.H5_CreateDataset.txt -text
java/examples/testfiles/examples.intro.H5_CreateFile.txt -text
java/examples/testfiles/examples.intro.H5_CreateGroup.txt -text
java/examples/testfiles/examples.intro.H5_CreateGroupAbsoluteRelative.txt -text
java/examples/testfiles/examples.intro.H5_CreateGroupDataset.txt -text
java/examples/testfiles/examples.intro.H5_ReadWrite.txt -text
java/lib/ext/slf4j-nop-2.0.6.jar -text svneol=unset#application/zip
java/lib/ext/slf4j-simple-2.0.6.jar -text svneol=unset#application/zip
java/lib/hamcrest-core.jar -text svneol=unset#application/java-archive
java/lib/junit.jar -text svneol=unset#application/java-archive
java/lib/simplelogger.properties -text
java/lib/slf4j-api-2.0.6.jar -text svneol=unset#application/zip
java/src/CMakeLists.txt -text
java/src/Makefile.am -text
java/src/hdf/CMakeLists.txt -text
java/src/hdf/hdf5lib/CMakeLists.txt -text
java/src/hdf/hdf5lib/H5.java -text
java/src/hdf/hdf5lib/HDF5Constants.java -text
java/src/hdf/hdf5lib/HDF5GroupInfo.java -text
java/src/hdf/hdf5lib/HDFArray.java -text
java/src/hdf/hdf5lib/HDFNativeData.java -text
java/src/hdf/hdf5lib/callbacks/Callbacks.java -text
java/src/hdf/hdf5lib/callbacks/H5A_iterate_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5A_iterate_t.java -text
java/src/hdf/hdf5lib/callbacks/H5D_append_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5D_append_t.java -text
java/src/hdf/hdf5lib/callbacks/H5D_iterate_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5D_iterate_t.java -text
java/src/hdf/hdf5lib/callbacks/H5E_walk_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5E_walk_t.java -text
java/src/hdf/hdf5lib/callbacks/H5L_iterate_t.java -text
java/src/hdf/hdf5lib/callbacks/H5L_iterate_opdata_t.java -text
java/src/hdf/hdf5lib/callbacks/H5O_iterate_t.java -text
java/src/hdf/hdf5lib/callbacks/H5O_iterate_opdata_t.java -text
java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5P_cls_close_func_t.java -text
java/src/hdf/hdf5lib/callbacks/H5P_cls_copy_func_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5P_cls_copy_func_t.java -text
java/src/hdf/hdf5lib/callbacks/H5P_cls_create_func_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5P_cls_create_func_t.java -text
java/src/hdf/hdf5lib/callbacks/H5P_iterate_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5P_iterate_t.java -text
java/src/hdf/hdf5lib/callbacks/H5P_prp_close_func_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5P_prp_compare_func_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5P_prp_copy_func_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5P_prp_create_func_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5P_prp_delete_func_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5P_prp_get_func_cb.java -text
java/src/hdf/hdf5lib/callbacks/H5P_prp_set_func_cb.java -text
java/src/hdf/hdf5lib/exceptions/HDF5AtomException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5AttributeException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5BtreeException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5DataFiltersException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5DataStorageException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5DatasetInterfaceException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5DataspaceInterfaceException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5DatatypeInterfaceException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5Exception.java -text
java/src/hdf/hdf5lib/exceptions/HDF5ExternalFileListException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5FileInterfaceException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5FunctionArgumentException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5FunctionEntryExitException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5HeapException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5InternalErrorException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5JavaException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5LibraryException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5LowLevelIOException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5MetaDataCacheException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5ObjectHeaderException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5PropertyListInterfaceException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5ReferenceException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5ResourceUnavailableException.java -text
java/src/hdf/hdf5lib/exceptions/HDF5SymbolTableException.java -text
java/src/hdf/hdf5lib/structs/H5AC_cache_config_t.java -text
java/src/hdf/hdf5lib/structs/H5A_info_t.java -text
java/src/hdf/hdf5lib/structs/H5E_error2_t.java -text
java/src/hdf/hdf5lib/structs/H5F_info2_t.java -text
java/src/hdf/hdf5lib/structs/H5G_info_t.java -text
java/src/hdf/hdf5lib/structs/H5L_info_t.java -text
java/src/hdf/hdf5lib/structs/H5O_hdr_info_t.java -text
java/src/hdf/hdf5lib/structs/H5O_info_t.java -text
java/src/hdf/hdf5lib/structs/H5O_native_info_t.java -text
java/src/hdf/hdf5lib/structs/H5O_token_t.java -text
java/src/hdf/hdf5lib/structs/H5_ih_info_t.java -text
java/src/hdf/overview.html -text
java/src/jni/CMakeLists.txt -text
java/src/jni/Makefile.am -text
java/src/jni/exceptionImp.c -text
java/src/jni/exceptionImp.h -text
java/src/jni/h5Constants.c -text
java/src/jni/h5Imp.c -text
java/src/jni/h5Imp.h -text
java/src/jni/h5aImp.c -text
java/src/jni/h5aImp.h -text
java/src/jni/h5dImp.c -text
java/src/jni/h5dImp.h -text
java/src/jni/h5eImp.c -text
java/src/jni/h5eImp.h -text
java/src/jni/h5fImp.c -text
java/src/jni/h5fImp.h -text
java/src/jni/h5gImp.c -text
java/src/jni/h5gImp.h -text
java/src/jni/h5iImp.c -text
java/src/jni/h5iImp.h -text
java/src/jni/h5jni.h -text
java/src/jni/h5lImp.c -text
java/src/jni/h5lImp.h -text
java/src/jni/h5oImp.c -text
java/src/jni/h5oImp.h -text
java/src/jni/h5pImp.c -text
java/src/jni/h5pImp.h -text
java/src/jni/h5plImp.c -text
java/src/jni/h5plImp.h -text
java/src/jni/h5rImp.c -text
java/src/jni/h5rImp.h -text
java/src/jni/h5sImp.c -text
java/src/jni/h5sImp.h -text
java/src/jni/h5tImp.c -text
java/src/jni/h5tImp.h -text
java/src/jni/h5util.c -text
java/src/jni/h5util.h -text
java/src/jni/h5zImp.c -text
java/src/jni/h5zImp.h -text
java/src/jni/nativeData.c -text
java/src/jni/nativeData.h -text
java/test/CMakeLists.txt -text
java/test/JUnit-interface.ert -text
java/test/JUnit-interface.txt -text
java/test/Makefile.am -text
java/test/TestAll.java -text
java/test/TestH5.java -text
java/test/TestH5A.java -text
java/test/TestH5D.java -text
java/test/TestH5Dparams.java -text
java/test/TestH5Dplist.java -text
java/test/TestH5E.java -text
java/test/TestH5Edefault.java -text
java/test/TestH5Eparams.java -text
java/test/TestH5Eregister.java -text
java/test/TestH5F.java -text
java/test/TestH5Fbasic.java -text
java/test/TestH5Fparams.java -text
java/test/TestH5G.java -text
java/test/TestH5Gbasic.java -text
java/test/TestH5Giterate.java -text
java/test/TestH5Lbasic.java -text
java/test/TestH5Lcreate.java -text
java/test/TestH5Lparams.java -text
java/test/TestH5Obasic.java -text
java/test/TestH5Ocopy.java -text
java/test/TestH5Ocreate.java -text
java/test/TestH5Oparams.java -text
java/test/TestH5P.java -text
java/test/TestH5PData.java -text
java/test/TestH5PL.java -text
java/test/TestH5Pfapl.java -text
java/test/TestH5Plist.java -text
java/test/TestH5Pvirtual.java -text
java/test/TestH5R.java -text
java/test/TestH5S.java -text
java/test/TestH5Sbasic.java -text
java/test/TestH5T.java -text
java/test/TestH5Tbasic.java -text
java/test/TestH5Tparams.java -text
java/test/TestUnit.java -text
java/test/TestH5Z.java -text
java/test/h5ex_g_iterate.orig -text svneol=unset#application/x-hdf
java/test/junit.sh.in -text
m4/aclocal_cxx.m4 -text
m4/aclocal_fc.f90 -text
m4/aclocal_fc.m4 -text
m4/ax_check_class.m4 -text
m4/ax_check_classpath.m4 -text
m4/ax_check_java_home.m4 -text
m4/ax_check_junit.m4 -text
m4/ax_check_rqrd_class.m4 -text
m4/ax_java_check_class.m4 -text
m4/ax_java_options.m4 -text
m4/ax_jni_include_dir.m4 -text
m4/ax_prog_doxygen.m4 -text
m4/ax_prog_jar.m4 -text
m4/ax_prog_java.m4 -text
m4/ax_prog_java_cc.m4 -text
m4/ax_prog_java_works.m4 -text
m4/ax_prog_javac.m4 -text
m4/ax_prog_javac_works.m4 -text
m4/ax_prog_javadoc.m4 -text
m4/ax_prog_javah.m4 -text
m4/ax_try_compile_java.m4 -text
m4/ax_try_run_java.m4 -text
src/H5Cepoch.c -text
src/H5Cquery.c -text
src/H5Ctag.c -text
src/H5Ctest.c -text
src/H5Dnone.c -text
src/H5Dsingle.c -text
src/H5Oflush.c -text
test/bad_compound.h5 -text
test/be_data.h5 -text
test/be_extlink1.h5 -text
test/be_extlink2.h5 -text
test/btree_idx_1_6.h5 -text
test/btree_idx_1_8.h5 -text
test/cork.c -text
test/corrupt_stab_msg.h5 -text
test/deflate.h5 -text
test/dynlib4.c -text
test/family_v16-000000.h5 -text
test/family_v16-000001.h5 -text
test/family_v16-000002.h5 -text
test/family_v16-000003.h5 -text
test/file_image_core_test.h5 -text
test/filespace_1_6.h5 -text
test/filespace_1_8.h5 -text
test/fill_old.h5 -text
test/filter_error.h5 -text
test/group_old.h5 -text
test/le_data.h5 -text
test/le_extlink1.h5 -text
test/le_extlink2.h5 -text
test/mergemsg.h5 -text svneol=unset#application/hdf5
test/multi_file_v16-r.h5 -text
test/multi_file_v16-s.h5 -text
test/noencoder.h5 -text
test/specmetaread.h5 -text
test/tarrold.h5 -text
test/tbad_msg_count.h5 -text
test/tbogus.h5 -text
test/test_filters_be.h5 -text
test/test_filters_le.h5 -text
test/testfiles/plist_files/acpl_32be -text
test/testfiles/plist_files/acpl_32le -text
test/testfiles/plist_files/acpl_64be -text
test/testfiles/plist_files/acpl_64le -text
test/testfiles/plist_files/dapl_32be -text
test/testfiles/plist_files/dapl_32le -text
test/testfiles/plist_files/dapl_64be -text
test/testfiles/plist_files/dapl_64le -text
test/testfiles/plist_files/dcpl_32be -text
test/testfiles/plist_files/dcpl_32le -text
test/testfiles/plist_files/dcpl_64be -text
test/testfiles/plist_files/dcpl_64le -text
test/testfiles/plist_files/def_acpl_32be -text
test/testfiles/plist_files/def_acpl_32le -text
test/testfiles/plist_files/def_acpl_64be -text
test/testfiles/plist_files/def_acpl_64le -text
test/testfiles/plist_files/def_dapl_32be -text
test/testfiles/plist_files/def_dapl_32le -text
test/testfiles/plist_files/def_dapl_64be -text
test/testfiles/plist_files/def_dapl_64le -text
test/testfiles/plist_files/def_dcpl_32be -text
test/testfiles/plist_files/def_dcpl_32le -text
test/testfiles/plist_files/def_dcpl_64be -text
test/testfiles/plist_files/def_dcpl_64le -text
test/testfiles/plist_files/def_dxpl_32be -text
test/testfiles/plist_files/def_dxpl_32le -text
test/testfiles/plist_files/def_dxpl_64be -text
test/testfiles/plist_files/def_dxpl_64le -text
test/testfiles/plist_files/def_fapl_32be -text
test/testfiles/plist_files/def_fapl_32le -text
test/testfiles/plist_files/def_fapl_64be -text
test/testfiles/plist_files/def_fapl_64le -text
test/testfiles/plist_files/def_fcpl_32be -text
test/testfiles/plist_files/def_fcpl_32le -text
test/testfiles/plist_files/def_fcpl_64be -text
test/testfiles/plist_files/def_fcpl_64le -text
test/testfiles/plist_files/def_gcpl_32be -text
test/testfiles/plist_files/def_gcpl_32le -text
test/testfiles/plist_files/def_gcpl_64be -text
test/testfiles/plist_files/def_gcpl_64le -text
test/testfiles/plist_files/def_lapl_32be -text
test/testfiles/plist_files/def_lapl_32le -text
test/testfiles/plist_files/def_lapl_64be -text
test/testfiles/plist_files/def_lapl_64le -text
test/testfiles/plist_files/def_lcpl_32be -text
test/testfiles/plist_files/def_lcpl_32le -text
test/testfiles/plist_files/def_lcpl_64be -text
test/testfiles/plist_files/def_lcpl_64le -text
test/testfiles/plist_files/def_ocpl_32be -text
test/testfiles/plist_files/def_ocpl_32le -text
test/testfiles/plist_files/def_ocpl_64be -text
test/testfiles/plist_files/def_ocpl_64le -text
test/testfiles/plist_files/def_ocpypl_32be -text
test/testfiles/plist_files/def_ocpypl_32le -text
test/testfiles/plist_files/def_ocpypl_64be -text
test/testfiles/plist_files/def_ocpypl_64le -text
test/testfiles/plist_files/def_strcpl_32be -text
test/testfiles/plist_files/def_strcpl_32le -text
test/testfiles/plist_files/def_strcpl_64be -text
test/testfiles/plist_files/def_strcpl_64le -text
test/testfiles/plist_files/dxpl_32be -text
test/testfiles/plist_files/dxpl_32le -text
test/testfiles/plist_files/dxpl_64be -text
test/testfiles/plist_files/dxpl_64le -text
test/testfiles/plist_files/fapl_32be -text
test/testfiles/plist_files/fapl_32le -text
test/testfiles/plist_files/fapl_64be -text
test/testfiles/plist_files/fapl_64le -text
test/testfiles/plist_files/fcpl_32be -text
test/testfiles/plist_files/fcpl_32le -text
test/testfiles/plist_files/fcpl_64be -text
test/testfiles/plist_files/fcpl_64le -text
test/testfiles/plist_files/gcpl_32be -text
test/testfiles/plist_files/gcpl_32le -text
test/testfiles/plist_files/gcpl_64be -text
test/testfiles/plist_files/gcpl_64le -text
test/testfiles/plist_files/lapl_32be -text
test/testfiles/plist_files/lapl_32le -text
test/testfiles/plist_files/lapl_64be -text
test/testfiles/plist_files/lapl_64le -text
test/testfiles/plist_files/lcpl_32be -text
test/testfiles/plist_files/lcpl_32le -text
test/testfiles/plist_files/lcpl_64be -text
test/testfiles/plist_files/lcpl_64le -text
test/testfiles/plist_files/ocpl_32be -text
test/testfiles/plist_files/ocpl_32le -text
test/testfiles/plist_files/ocpl_64be -text
test/testfiles/plist_files/ocpl_64le -text
test/testfiles/plist_files/ocpypl_32be -text
test/testfiles/plist_files/ocpypl_32le -text
test/testfiles/plist_files/ocpypl_64be -text
test/testfiles/plist_files/ocpypl_64le -text
test/testfiles/plist_files/strcpl_32be -text
test/testfiles/plist_files/strcpl_32le -text
test/testfiles/plist_files/strcpl_64be -text
test/testfiles/plist_files/strcpl_64le -text
test/th5s.h5 -text
test/tlayouto.h5 -text
test/tmtimen.h5 -text
test/tmtimeo.h5 -text
test/tsizeslheap.h5 -text
testpar/t_init_term.c -text
testpar/t_prestart.c -text
testpar/t_pshutdown.c -text
tools/h5copy/testfiles/h5copy_extlinks_src.h5 -text
tools/h5copy/testfiles/h5copy_extlinks_trg.h5 -text
tools/h5copy/testfiles/h5copy_ref.h5 -text
tools/h5copy/testfiles/h5copytst.h5 -text
tools/h5copy/testfiles/h5copytst_new.h5 -text
tools/h5copy/testfiles/h5copytst_new.out.ls -text
tools/h5diff/testfiles/compounds_array_vlen1.h5 -text
tools/h5diff/testfiles/compounds_array_vlen2.h5 -text
tools/h5diff/testfiles/h5diff_attr1.h5 -text
tools/h5diff/testfiles/h5diff_attr2.h5 -text
tools/h5diff/testfiles/h5diff_attr_v_level1.h5 -text
tools/h5diff/testfiles/h5diff_attr_v_level2.h5 -text
tools/h5diff/testfiles/h5diff_basic1.h5 -text
tools/h5diff/testfiles/h5diff_basic2.h5 -text
tools/h5diff/testfiles/h5diff_comp_vl_strs.h5 -text
tools/h5diff/testfiles/h5diff_danglelinks1.h5 -text
tools/h5diff/testfiles/h5diff_danglelinks2.h5 -text
tools/h5diff/testfiles/h5diff_dset1.h5 -text
tools/h5diff/testfiles/h5diff_dset2.h5 -text
tools/h5diff/testfiles/h5diff_dset_zero_dim_size1.h5 -text
tools/h5diff/testfiles/h5diff_dset_zero_dim_size2.h5 -text
tools/h5diff/testfiles/h5diff_dtypes.h5 -text
tools/h5diff/testfiles/h5diff_empty.h5 -text
tools/h5diff/testfiles/h5diff_enum_invalid_values.h5 -text
tools/h5diff/testfiles/h5diff_exclude1-1.h5 -text
tools/h5diff/testfiles/h5diff_exclude1-2.h5 -text
tools/h5diff/testfiles/h5diff_exclude2-1.h5 -text
tools/h5diff/testfiles/h5diff_exclude2-2.h5 -text
tools/h5diff/testfiles/h5diff_exclude3-1.h5 -text
tools/h5diff/testfiles/h5diff_exclude3-2.h5 -text
tools/h5diff/testfiles/h5diff_ext2softlink_src.h5 -text
tools/h5diff/testfiles/h5diff_ext2softlink_trg.h5 -text
tools/h5diff/testfiles/h5diff_extlink_src.h5 -text
tools/h5diff/testfiles/h5diff_extlink_trg.h5 -text
tools/h5diff/testfiles/h5diff_grp_recurse1.h5 -text
tools/h5diff/testfiles/h5diff_grp_recurse2.h5 -text
tools/h5diff/testfiles/h5diff_grp_recurse_ext1.h5 -text
tools/h5diff/testfiles/h5diff_grp_recurse_ext2-1.h5 -text
tools/h5diff/testfiles/h5diff_grp_recurse_ext2-2.h5 -text
tools/h5diff/testfiles/h5diff_grp_recurse_ext2-3.h5 -text
tools/h5diff/testfiles/h5diff_hyper1.h5 -text
tools/h5diff/testfiles/h5diff_hyper2.h5 -text
tools/h5diff/testfiles/h5diff_linked_softlink.h5 -text
tools/h5diff/testfiles/h5diff_links.h5 -text
tools/h5diff/testfiles/h5diff_softlinks.h5 -text
tools/h5diff/testfiles/h5diff_types.h5 -text
tools/h5diff/testfiles/h5diff_v1.txt -text
tools/h5diff/testfiles/h5diff_v2.txt -text
tools/h5diff/testfiles/h5diff_v3.txt -text
tools/h5diff/testfiles/non_comparables1.h5 -text
tools/h5diff/testfiles/non_comparables2.h5 -text
tools/h5diff/testfiles/tmpSingleSiteBethe.output.h5 -text
tools/h5diff/testfiles/tmpSingleSiteBethe.reference.h5 -text
tools/h5diff/testfiles/tmptest.he5 -text
tools/h5diff/testfiles/tmptest2.he5 -text
tools/h5dump/errfiles/non_existing.err -text
tools/h5format_convert/CMakeLists.txt -text
tools/h5format_convert/CMakeTests.cmake -text
tools/h5format_convert/Makefile.am -text
tools/h5format_convert/h5fc_chk_idx.c -text
tools/h5format_convert/h5fc_gentest.c -text
tools/h5format_convert/h5format_convert.c -text
tools/h5format_convert/testfiles/h5fc_d_file.ddl -text
tools/h5format_convert/testfiles/h5fc_dname.ddl -text
tools/h5format_convert/testfiles/h5fc_edge_v3.h5 -text
tools/h5format_convert/testfiles/h5fc_err_level.h5 -text
tools/h5format_convert/testfiles/h5fc_ext1_f.ddl -text
tools/h5format_convert/testfiles/h5fc_ext1_f.h5 -text
tools/h5format_convert/testfiles/h5fc_ext1_i.ddl -text
tools/h5format_convert/testfiles/h5fc_ext1_i.h5 -text
tools/h5format_convert/testfiles/h5fc_ext1_s.ddl -text
tools/h5format_convert/testfiles/h5fc_ext1_s.h5 -text
tools/h5format_convert/testfiles/h5fc_ext2_if.ddl -text
tools/h5format_convert/testfiles/h5fc_ext2_if.h5 -text
tools/h5format_convert/testfiles/h5fc_ext2_is.ddl -text
tools/h5format_convert/testfiles/h5fc_ext2_is.h5 -text
tools/h5format_convert/testfiles/h5fc_ext2_sf.ddl -text
tools/h5format_convert/testfiles/h5fc_ext2_sf.h5 -text
tools/h5format_convert/testfiles/h5fc_ext3_isf.ddl -text
tools/h5format_convert/testfiles/h5fc_ext3_isf.h5 -text
tools/h5format_convert/testfiles/h5fc_ext_none.h5 -text
tools/h5format_convert/testfiles/h5fc_help.ddl -text
tools/h5format_convert/testfiles/h5fc_non_v3.h5 -text
tools/h5format_convert/testfiles/h5fc_nonexistdset_file.ddl -text
tools/h5format_convert/testfiles/h5fc_nonexistfile.ddl -text
tools/h5format_convert/testfiles/h5fc_nooption.ddl -text
tools/h5format_convert/testfiles/h5fc_v_all.ddl -text
tools/h5format_convert/testfiles/h5fc_v_bt1.ddl -text
tools/h5format_convert/testfiles/h5fc_v_err.ddl -text
tools/h5format_convert/testfiles/h5fc_v_n_1d.ddl -text
tools/h5format_convert/testfiles/h5fc_v_n_all.ddl -text
tools/h5format_convert/testfiles/h5fc_v_ndata_bt1.ddl -text
tools/h5format_convert/testfiles/h5fc_v_non_chunked.ddl -text
tools/h5format_convert/testfiles/old_h5fc_ext1_f.ddl -text
tools/h5format_convert/testfiles/old_h5fc_ext1_f.h5 -text
tools/h5format_convert/testfiles/old_h5fc_ext1_i.ddl -text
tools/h5format_convert/testfiles/old_h5fc_ext1_i.h5 -text
tools/h5format_convert/testfiles/old_h5fc_ext1_s.ddl -text
tools/h5format_convert/testfiles/old_h5fc_ext1_s.h5 -text
tools/h5format_convert/testfiles/old_h5fc_ext2_if.ddl -text
tools/h5format_convert/testfiles/old_h5fc_ext2_if.h5 -text
tools/h5format_convert/testfiles/old_h5fc_ext2_is.ddl -text
tools/h5format_convert/testfiles/old_h5fc_ext2_is.h5 -text
tools/h5format_convert/testfiles/old_h5fc_ext2_sf.ddl -text
tools/h5format_convert/testfiles/old_h5fc_ext2_sf.h5 -text
tools/h5format_convert/testfiles/old_h5fc_ext3_isf.ddl -text
tools/h5format_convert/testfiles/old_h5fc_ext3_isf.h5 -text
tools/h5format_convert/testfiles/old_h5fc_ext_none.h5 -text
tools/h5format_convert/testh5fc.sh.in -text
tools/h5import/testfiles/binfp64.h5 -text
tools/h5import/testfiles/binin16.h5 -text
tools/h5import/testfiles/binin32.h5 -text
tools/h5import/testfiles/binin8.h5 -text
tools/h5import/testfiles/binin8w.h5 -text
tools/h5import/testfiles/binuin16.h5 -text
tools/h5import/testfiles/binuin32.h5 -text
tools/h5import/testfiles/textpfe.h5 -text
tools/h5import/testfiles/txtfp32.h5 -text
tools/h5import/testfiles/txtfp64.h5 -text
tools/h5import/testfiles/txtin16.h5 -text
tools/h5import/testfiles/txtin32.h5 -text
tools/h5import/testfiles/txtin8.h5 -text
tools/h5import/testfiles/txtstr.h5 -text
tools/h5import/testfiles/txtuin16.h5 -text
tools/h5import/testfiles/txtuin32.h5 -text
tools/h5jam/testfiles/tall.h5 -text
tools/h5jam/testfiles/twithub.h5 -text
tools/h5jam/testfiles/twithub513.h5 -text
tools/h5repack/testfiles/1_vds.h5-vds_dset_chunk20x10x5-v.ddl -text
tools/h5repack/testfiles/2_vds.h5-vds_chunk3x6x9-v.ddl -text
tools/h5repack/testfiles/3_1_vds.h5-vds_chunk2x5x8-v.ddl -text
tools/h5repack/testfiles/4_vds.h5-vds_compa-v.ddl -text
tools/h5repack/testfiles/4_vds.h5-vds_conti-v.ddl -text
tools/h5repack/testfiles/README -text
tools/h5repack/testfiles/h5repack_attr.h5 -text
tools/h5repack/testfiles/h5repack_attr_refs.h5 -text
tools/h5repack/testfiles/h5repack_deflate.h5 -text
tools/h5repack/testfiles/h5repack_early.h5 -text
tools/h5repack/testfiles/h5repack_ext.bin -text
tools/h5repack/testfiles/h5repack_ext.h5 -text
tools/h5repack/testfiles/h5repack_fill.h5 -text
tools/h5repack/testfiles/h5repack_filters.h5 -text
tools/h5repack/testfiles/h5repack_filters.h5-gzip_verbose_filters.tst -text
tools/h5repack/testfiles/h5repack_fletcher.h5 -text
tools/h5repack/testfiles/h5repack_hlink.h5 -text
tools/h5repack/testfiles/h5repack_layout.UD.h5 -text svneol=unset#application/x-hdf
tools/h5repack/testfiles/h5repack_layout.h5 -text
tools/h5repack/testfiles/h5repack_layout.h5-dset2_chunk_20x10-errstk.tst -text
tools/h5repack/testfiles/h5repack_layout.h5-plugin_zero.tst -text
tools/h5repack/testfiles/h5repack_layout2.h5 -text
tools/h5repack/testfiles/h5repack_layout3.h5 -text
tools/h5repack/testfiles/h5repack_layouto.h5 -text
tools/h5repack/testfiles/h5repack_named_dtypes.h5 -text
tools/h5repack/testfiles/h5repack_nbit.h5 -text
tools/h5repack/testfiles/h5repack_nested_8bit_enum.h5 -text
tools/h5repack/testfiles/h5repack_nested_8bit_enum_deflated.h5 -text
tools/h5repack/testfiles/h5repack_objs.h5 -text
tools/h5repack/testfiles/h5repack_refs.h5 -text
tools/h5repack/testfiles/h5repack_shuffle.h5 -text
tools/h5repack/testfiles/h5repack_soffset.h5 -text
tools/h5repack/testfiles/h5repack_szip.h5 -text
tools/h5repack/testfiles/ublock.bin -text
tools/h5stat/testfiles/h5stat_filters.h5 -text
tools/h5stat/testfiles/h5stat_idx.ddl -text
tools/h5stat/testfiles/h5stat_idx.h5 -text
tools/h5stat/testfiles/h5stat_newgrat.h5 -text
tools/h5stat/testfiles/h5stat_threshold.h5 -text
tools/h5stat/testfiles/h5stat_tsohm.h5 -text
tools/lib/h5tools_error.h svneol=native#text/plain
tools/testfiles/charsets.h5 -text
tools/testfiles/family_file00000.h5 -text
tools/testfiles/family_file00001.h5 -text
tools/testfiles/family_file00002.h5 -text
tools/testfiles/family_file00003.h5 -text
tools/testfiles/family_file00004.h5 -text
tools/testfiles/family_file00005.h5 -text
tools/testfiles/family_file00006.h5 -text
tools/testfiles/family_file00007.h5 -text
tools/testfiles/family_file00008.h5 -text
tools/testfiles/family_file00009.h5 -text
tools/testfiles/family_file00010.h5 -text
tools/testfiles/family_file00011.h5 -text
tools/testfiles/family_file00012.h5 -text
tools/testfiles/family_file00013.h5 -text
tools/testfiles/family_file00014.h5 -text
tools/testfiles/family_file00015.h5 -text
tools/testfiles/family_file00016.h5 -text
tools/testfiles/family_file00017.h5 -text
tools/testfiles/file_space.h5 -text
tools/testfiles/filter_fail.h5 -text
tools/testfiles/non_existing.ddl -text
tools/testfiles/packedbits.h5 -text
tools/testfiles/taindices.h5 -text
tools/testfiles/tall.h5 -text
tools/testfiles/tarray1.h5 -text
tools/testfiles/tarray1_big.h5 -text
tools/testfiles/tarray2.h5 -text
tools/testfiles/tarray3.h5 -text
tools/testfiles/tarray4.h5 -text
tools/testfiles/tarray5.h5 -text
tools/testfiles/tarray6.h5 -text
tools/testfiles/tarray7.h5 -text
tools/testfiles/tarray8.h5 -text
tools/testfiles/tattr.h5 -text
tools/testfiles/tattr2.h5 -text
tools/testfiles/tattr4_be.h5 -text
tools/testfiles/tattrintsize.h5 -text
tools/testfiles/tattrreg.h5 -text
tools/testfiles/tbigdims.h5 -text
tools/testfiles/tbinary.h5 -text
tools/testfiles/tbitfields.h5 -text
tools/testfiles/tbitnopaque_be.ddl -text
tools/testfiles/tbitnopaque_le.ddl -text
tools/testfiles/tbitnopaque.h5 -text svneol=unset#application/x-hdf
tools/testfiles/tchar.h5 -text
tools/testfiles/tcmpdattrintsize.h5 -text
tools/testfiles/tcmpdintarray.h5 -text svneol=unset#application/x-hdf
tools/testfiles/tcmpdints.h5 -text svneol=unset#application/x-hdf
tools/testfiles/tcmpdintsize.h5 -text
tools/testfiles/tcompound.h5 -text
tools/testfiles/tcompound2.h5 -text
tools/testfiles/tcompound_complex.h5 -text
tools/testfiles/tcompound_complex2.ddl -text
tools/testfiles/tcompound_complex2.h5 -text svneol=unset#application/x-hdf
tools/testfiles/tdatareg.h5 -text
tools/testfiles/tdset.h5 -text
tools/testfiles/tdset2.h5 -text
tools/testfiles/tdset_idx.h5 -text
tools/testfiles/tdset_idx.ls -text
tools/testfiles/tempty.h5 -text
tools/testfiles/tenum.h5 -text
tools/testfiles/textlink.h5 -text
tools/testfiles/textlinkfar.h5 -text
tools/testfiles/textlinksrc.h5 -text
tools/testfiles/textlinktar.h5 -text
tools/testfiles/tfamily00000.h5 -text
tools/testfiles/tfamily00001.h5 -text
tools/testfiles/tfamily00002.h5 -text
tools/testfiles/tfamily00003.h5 -text
tools/testfiles/tfamily00004.h5 -text
tools/testfiles/tfamily00005.h5 -text
tools/testfiles/tfamily00006.h5 -text
tools/testfiles/tfamily00007.h5 -text
tools/testfiles/tfamily00008.h5 -text
tools/testfiles/tfamily00009.h5 -text
tools/testfiles/tfamily00010.h5 -text
tools/testfiles/tfcontents1.h5 -text
tools/testfiles/tfcontents2.h5 -text
tools/testfiles/tfilters.h5 -text
tools/testfiles/tfpformat.h5 -text
tools/testfiles/tfvalues.h5 -text
tools/testfiles/tgroup.h5 -text
tools/testfiles/tgrp_comments.h5 -text
tools/testfiles/thlink.h5 -text
tools/testfiles/thyperslab.h5 -text
tools/testfiles/tints4dims.ddl -text
tools/testfiles/tints4dims.h5 -text svneol=unset#application/x-hdf
tools/testfiles/tints4dimsBlock2.ddl -text
tools/testfiles/tints4dimsBlockEq.ddl -text
tools/testfiles/tints4dimsCount2.ddl -text
tools/testfiles/tints4dimsCountEq.ddl -text
tools/testfiles/tints4dimsStride2.ddl -text
tools/testfiles/tintsattrs.h5 -text svneol=unset#application/x-hdf
tools/testfiles/tlarge_objname.h5 -text
tools/testfiles/tldouble.h5 -text
tools/testfiles/tldouble_scalar.h5 -text
tools/testfiles/tlonglinks.h5 -text
tools/testfiles/tloop.h5 -text
tools/testfiles/tloop2.h5 -text
tools/testfiles/tmany.h5 -text
tools/testfiles/tmulti-b.h5 -text
tools/testfiles/tmulti-g.h5 -text svneol=unset#application/octet-stream
tools/testfiles/tmulti-l.h5 -text
tools/testfiles/tmulti-o.h5 -text
tools/testfiles/tmulti-r.h5 -text
tools/testfiles/tmulti-s.h5 -text
tools/testfiles/tname-amp.h5 -text
tools/testfiles/tname-apos.h5 -text
tools/testfiles/tname-gt.h5 -text
tools/testfiles/tname-lt.h5 -text
tools/testfiles/tname-quot.h5 -text
tools/testfiles/tname-sp.h5 -text
tools/testfiles/tnamed_dtype_attr.h5 -text
tools/testfiles/tnestedcmpddt.h5 -text
tools/testfiles/tnestedcomp.h5 -text
tools/testfiles/tno-subset.h5 -text
tools/testfiles/tnodata.h5 -text
tools/testfiles/tnullspace.h5 -text
tools/testfiles/tobjref.h5 -text
tools/testfiles/topaque.h5 -text
tools/testfiles/torderattr.h5 -text
tools/testfiles/tordergr.h5 -text
tools/testfiles/tref-escapes-at.h5 -text
tools/testfiles/tref-escapes.h5 -text
tools/testfiles/tref.h5 -text
tools/testfiles/tsaf.h5 -text
tools/testfiles/tscalarattrintsize.h5 -text
tools/testfiles/tscalarintattrsize.h5 -text svneol=unset#application/x-hdf
tools/testfiles/tscalarintsize.h5 -text
tools/testfiles/tscalarstring.h5 -text
tools/testfiles/tslink.h5 -text
tools/testfiles/tsoftlinks.h5 -text
tools/testfiles/tsplit_file-m.h5 -text
tools/testfiles/tsplit_file-r.h5 -text
tools/testfiles/tstr.h5 -text
tools/testfiles/tstr2.h5 -text
tools/testfiles/tstr3.h5 -text
tools/testfiles/tstring-at.h5 -text
tools/testfiles/tstring.ddl -text svneol=unset#application/octet-stream
tools/testfiles/tstring.h5 -text
tools/testfiles/tudlink.h5 -text
tools/testfiles/tvldtypes1.h5 -text
tools/testfiles/tvldtypes2.h5 -text
tools/testfiles/tvldtypes3.h5 -text
tools/testfiles/tvldtypes4.h5 -text
tools/testfiles/tvldtypes5.h5 -text
tools/testfiles/tvlenstr_array.ddl -text
tools/testfiles/tvlenstr_array.h5 -text svneol=unset#application/x-hdf
tools/testfiles/tvlstr.h5 -text
tools/testfiles/tvms.h5 -text
tools/testfiles/vds/1_a.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/1_b.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/1_c.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/1_d.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/1_e.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/1_f.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/1_vds.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/2_a.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/2_b.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/2_c.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/2_d.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/2_e.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/2_vds.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/3_1_vds.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/3_2_vds.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/4_0.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/4_1.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/4_2.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/4_vds.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/5_a.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/5_b.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/5_c.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/5_vds.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/a.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/b.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/c.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/d.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/f-0.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/f-3.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/vds-eiger.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/vds-first.ddl -text
tools/testfiles/vds/vds-gap1.ddl -text
tools/testfiles/vds/vds-gap2.ddl -text
tools/testfiles/vds/vds-percival-unlim-maxmin.h5 -text svneol=unset#application/x-hdf
tools/testfiles/vds/vds_layout-eiger.ddl -text
tools/testfiles/vds/vds_layout-maxmin.ddl -text
tools/testfiles/zerodim.h5 -text

@ -0,0 +1,11 @@
# Lines starting with '#' are comments.
# Each line is a file pattern followed by one or more owners.
# These owners will be the default owners for everything in the repo.
* @lrknox @derobins @byrnHDF @fortnern @jhendersonHDF @qkoziol @vchoi-hdfgroup @bmribler @glennsong09 @mattjala @brtnfld
# Order is important. The last matching pattern has the most precedence.
# So if a pull request only touches Fortran files, only the /fortran/ owners
# will be requested to review.
/fortran/ @brtnfld @derobins @epourmal
/java/ @jhendersonHDF @byrnHDF @derobins

@ -0,0 +1,3 @@
# These are supported funding model platforms
custom: "https://hdfgroup.org/about-us/donate-to-the-hdf-group/"

@ -0,0 +1,25 @@
---
name: Bug report
about: Report a problem with HDF5
title: "[BUG]"
labels: bug
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**Expected behavior**
A clear and concise description of what you expected to happen.
**Platform (please complete the following information)**
- HDF5 version (if building from a maintenance branch, please include the commit hash)
- OS and version
- Compiler and version
- Build system (e.g. CMake, Autotools) and version
- Any configure options you specified
- MPI library and version (parallel HDF5)
**Additional context**
Add any other context about the problem here.

@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an improvement to HDF5
title: "[Feature Request]"
labels: enhancement
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

@ -0,0 +1,8 @@
## Describe your changes
## Issue ticket number (GitHub or JIRA)
## Checklist before requesting a review
- [ ] My code conforms to the guidelines in CONTRIBUTING.md
- [ ] I made an entry in release_docs/RELEASE.txt (bug fixes, new features)
- [ ] I added a test (bug fixes, new features)

@ -0,0 +1,18 @@
name: clang-format Check
on:
pull_request:
jobs:
formatting-check:
name: Formatting Check
runs-on: ubuntu-latest
if: "!contains(github.event.head_commit.message, 'skip-ci')"
steps:
- uses: actions/checkout@v3
- name: Run clang-format style check for C, C++, and Java code
uses: DoozyX/clang-format-lint-action@v0.13
with:
source: '.'
extensions: 'c,h,cpp,hpp,java'
clangFormatVersion: 13
style: file
exclude: './config ./hl/src/H5LTanalyze.c ./hl/src/H5LTparse.c ./hl/src/H5LTparse.h ./src/H5Epubgen.h ./src/H5Einit.h ./src/H5Eterm.h ./src/H5Edefin.h ./src/H5version.h ./src/H5overflow.h'

@ -0,0 +1,34 @@
# NOTE: This action requires write permissions to be set in your GitHub
# repo/fork for it to be able to commit changes.
#
# This is currently enabled via:
#
# settings > Actions > General > Workflow permissions
#
# which you will need to set to "Read and write permissions"
#
name: clang-format Commit Changes
on:
workflow_dispatch:
push:
jobs:
formatting-check:
name: Commit Format Changes
runs-on: ubuntu-latest
if: "!contains(github.event.head_commit.message, 'skip-ci')"
steps:
- uses: actions/checkout@v3
- name: Fix C, C++, and Java formatting issues detected by clang-format
uses: DoozyX/clang-format-lint-action@v0.13
with:
source: '.'
extensions: 'c,h,cpp,hpp,java'
clangFormatVersion: 13
inplace: True
style: file
exclude: './config ./hl/src/H5LTanalyze.c ./hl/src/H5LTparse.c ./hl/src/H5LTparse.h ./src/H5Epubgen.h ./src/H5Einit.h ./src/H5Eterm.h ./src/H5Edefin.h ./src/H5version.h ./src/H5overflow.h'
- uses: EndBug/add-and-commit@v7
with:
author_name: github-actions
author_email: 41898282+github-actions[bot]@users.noreply.github.com
message: 'Committing clang-format changes'

@ -0,0 +1,217 @@
name: hdf5 dev ctest runs
# Controls when the action will run. Triggers the workflow when it is
# called from another workflow (workflow_call), e.g. the daily build.
on:
workflow_call:
inputs:
file_base:
description: "The common base name of the source tarballs"
required: true
type: string
# A workflow run is made up of one or more jobs that can run sequentially or
# in parallel
jobs:
build_and_test_win:
# Windows w/ MSVC + CMake
#
name: "Windows MSVC CTest"
runs-on: windows-latest
steps:
- name: Install Dependencies (Windows)
run: choco install ninja
- name: Enable Developer Command Prompt
uses: ilammy/msvc-dev-cmd@v1.12.1
- name: Set file base name (Windows)
id: set-file-base
run: |
FILE_NAME_BASE=$(echo "${{ inputs.file_base }}")
echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT
shell: bash
# Get files created by release script
- name: Get zip-tarball (Windows)
uses: actions/download-artifact@v3
with:
name: zip-tarball
path: ${{ github.workspace }}
- name: using powershell
shell: pwsh
run: Get-Location
- name: List files for the space (Windows)
run: |
Get-ChildItem -Path ${{ github.workspace }}
Get-ChildItem -Path ${{ runner.workspace }}
shell: pwsh
- name: Uncompress source (Windows)
working-directory: ${{ github.workspace }}
run: 7z x ${{ steps.set-file-base.outputs.FILE_BASE }}.zip
shell: bash
- name: Run ctest (Windows)
run: |
cd "${{ runner.workspace }}/hdf5/hdfsrc"
cmake --workflow --preset=ci-StdShar-MSVC --fresh
shell: bash
- name: Publish binary (Windows)
id: publish-ctest-binary
run: |
mkdir "${{ runner.workspace }}/build"
mkdir "${{ runner.workspace }}/build/hdf5"
Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING -Destination ${{ runner.workspace }}/build/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/build/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/README.md -Destination ${{ runner.workspace }}/build/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/build/ci-StdShar-MSVC/* -Destination ${{ runner.workspace }}/build/hdf5/ -Include *.zip
cd "${{ runner.workspace }}/build"
7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win_vs2022.zip hdf5
shell: pwsh
- name: List files in the space (Windows)
run: |
Get-ChildItem -Path ${{ github.workspace }}
Get-ChildItem -Path ${{ runner.workspace }}
shell: pwsh
# Save files created by ctest script
- name: Save published binary (Windows)
uses: actions/upload-artifact@v3
with:
name: zip-vs2022-binary
path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-win_vs2022.zip
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
build_and_test_linux:
# Linux (Ubuntu) w/ gcc + CMake
#
name: "Ubuntu gcc CMake"
runs-on: ubuntu-latest
steps:
- name: Install CMake Dependencies (Linux)
run: sudo apt-get install ninja-build
- name: Set file base name (Linux)
id: set-file-base
run: |
FILE_NAME_BASE=$(echo "${{ inputs.file_base }}")
echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT
# Get files created by release script
- name: Get tgz-tarball (Linux)
uses: actions/download-artifact@v3
with:
name: tgz-tarball
path: ${{ github.workspace }}
- name: List files for the space (Linux)
run: |
ls ${{ github.workspace }}
ls ${{ runner.workspace }}
- name: Uncompress source (Linux)
run: tar -zxvf ${{ github.workspace }}/${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz
- name: Run ctest (Linux)
run: |
cd "${{ runner.workspace }}/hdf5/hdfsrc"
cmake --workflow --preset=ci-StdShar-GNUC --fresh
shell: bash
- name: Publish binary (Linux)
id: publish-ctest-binary
run: |
mkdir "${{ runner.workspace }}/build"
mkdir "${{ runner.workspace }}/build/hdf5"
cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build/hdf5
cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build/hdf5
cp ${{ runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build/hdf5
cp ${{ runner.workspace }}/hdf5/build/ci-StdShar-GNUC/*.tar.gz ${{ runner.workspace }}/build/hdf5
cd "${{ runner.workspace }}/build"
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204.tar.gz hdf5
shell: bash
- name: List files in the space (Linux)
run: |
ls ${{ github.workspace }}
ls ${{ runner.workspace }}
# Save files created by ctest script
- name: Save published binary (Linux)
uses: actions/upload-artifact@v3
with:
name: tgz-ubuntu-2204-binary
path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204.tar.gz
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
build_and_test_mac:
# MacOS w/ Clang + CMake
#
name: "MacOS Clang CMake"
runs-on: macos-11
steps:
- name: Install Dependencies (MacOS)
run: brew install ninja
- name: Set file base name (MacOS)
id: set-file-base
run: |
FILE_NAME_BASE=$(echo "${{ inputs.file_base }}")
echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT
# Get files created by release script
- name: Get tgz-tarball (MacOS)
uses: actions/download-artifact@v3
with:
name: tgz-tarball
path: ${{ github.workspace }}
- name: List files for the space (MacOS)
run: |
ls ${{ github.workspace }}
ls ${{ runner.workspace }}
- name: Uncompress source (MacOS)
run: tar -zxvf ${{ github.workspace }}/${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz
# symlinks the compiler executables to a common location
- name: Setup GNU Fortran
uses: modflowpy/install-gfortran-action@v1
- name: Run ctest (MacOS)
id: run-ctest
run: |
cd "${{ runner.workspace }}/hdf5/hdfsrc"
cmake --workflow --preset=ci-StdShar-Clang --fresh
shell: bash
- name: Publish binary (MacOS)
id: publish-ctest-binary
run: |
mkdir "${{ runner.workspace }}/build"
mkdir "${{ runner.workspace }}/build/hdf5"
cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build/hdf5
cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build/hdf5
cp ${{ runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build/hdf5
cp ${{ runner.workspace }}/hdf5/build/ci-StdShar-Clang/*.tar.gz ${{ runner.workspace }}/build/hdf5
cd "${{ runner.workspace }}/build"
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-osx12.tar.gz hdf5
shell: bash
- name: List files in the space (MacOS)
run: |
ls ${{ github.workspace }}
ls ${{ runner.workspace }}
# Save files created by ctest script
- name: Save published binary (MacOS)
uses: actions/upload-artifact@v3
with:
name: tgz-osx12-binary
path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-osx12.tar.gz
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`

@ -0,0 +1,15 @@
# GitHub Action to automate the identification of common misspellings in text files
# https://github.com/codespell-project/codespell
# https://github.com/codespell-project/actions-codespell
name: codespell
on: [push, pull_request]
jobs:
codespell:
name: Check for spelling errors
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: codespell-project/actions-codespell@master
with:
skip: ./.github/workflows/codespell.yml,./bin/trace,./hl/tools/h5watch/h5watch.c,./tools/test/h5jam/tellub.c,./config/sanitizer/LICENSE,./config/sanitizer/sanitizers.cmake,./tools/test/h5repack/testfiles/*.dat,./test/API/driver,./configure,./bin/ltmain.sh,./bin/depcomp,./bin/config.guess,./bin/config.sub,./autom4te.cache,./m4/libtool.m4,./c++/src/*.html
ignore_words_list: ot,isnt,inout,nd,parms,parm,ba,offsetP,ser,ois,had,fiter,fo,clude,refere,minnum,offsetp,creat,ans:,eiter,lastr,ans,isn't,ifset,sur,trun,dne,tthe,hda,filname,te,htmp,ake,gord,numer,ro,oce,msdos

@ -0,0 +1,21 @@
name: hdf5 dev daily build
# Controls when the action will run. Triggers the workflow on a schedule
on:
workflow_dispatch:
schedule:
- cron: "6 0 * * *"
# A workflow run is made up of one or more jobs that can run sequentially or
# in parallel.
jobs:
call-workflow-tarball:
uses: ./.github/workflows/tarball.yml
call-workflow-ctest:
needs: call-workflow-tarball
uses: ./.github/workflows/cmake-ctest.yml
with:
file_base: ${{ needs.call-workflow-tarball.outputs.file_base }}
if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }}

@ -0,0 +1,47 @@
name: hdfeos5
on:
workflow_dispatch:
push:
pull_request:
branches: [ develop ]
paths-ignore:
- '.github/CODEOWNERS'
- '.github/FUNDING.yml'
- 'doc/**'
- 'release_docs/**'
- 'ACKNOWLEDGMENTS'
- 'COPYING**'
- '**.md'
# Using concurrency to cancel any in-progress job or run
concurrency:
group: ${{ github.workflow }}-${{ github.sha || github.event.pull_request.number }}
cancel-in-progress: true
jobs:
build:
name: Build hdfeos5
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Install Autotools Dependencies (Linux)
run: |
sudo apt update
sudo apt install automake autoconf libtool libtool-bin
- name: Install HDF5
run: |
./autogen.sh
./configure --prefix=/usr/local --with-default-api-version=v16
make
sudo make install
- name: Install HDF-EOS5
run: |
wget -O HDF-EOS5.2.0.tar.gz "https://git.earthdata.nasa.gov/projects/DAS/repos/hdfeos5/raw/hdf-eos5-2.0-src.tar.gz?at=refs%2Fheads%2FHDFEOS5_2.0"
tar zxvf HDF-EOS5.2.0.tar.gz
cd hdf-eos5-2.0
./configure CC=/usr/local/bin/h5cc --prefix=/usr/local/ --enable-install-include
make
make check
sudo make install

@ -0,0 +1,530 @@
name: hdf5 dev CI
# Controls when the action will run. Triggers the workflow on push or pull request
on:
workflow_dispatch:
push:
pull_request:
branches: [ hdf5_1_14 ]
paths-ignore:
- '.github/CODEOWNERS'
- '.github/FUNDING.yml'
- 'doc/**'
- 'release_docs/**'
- 'ACKNOWLEDGMENTS'
- 'COPYING**'
- '**.md'
# Using concurrency to cancel any in-progress job or run
concurrency:
group: ${{ github.workflow }}-${{ github.sha || github.event.pull_request.number }}
cancel-in-progress: true
# A workflow run is made up of one or more jobs that can run sequentially or
# in parallel. We just have one job, but the matrix items defined below will
# run in parallel.
jobs:
# A workflow that builds the library and runs all the tests
build_and_test:
strategy:
# The current matrix has three dimensions:
#
# * config name
# * thread-safety on/off
# * release vs. debug build
#
# Most configuration information is added via the 'include' mechanism,
# which will append the key-value pairs in the configuration where the
# names match.
matrix:
name:
- "Windows MSVC CMake"
- "Ubuntu gcc CMake"
- "Ubuntu gcc Autotools"
- "Ubuntu gcc Autotools parallel (build only)"
- "MacOS Clang CMake"
thread_safety:
- enabled: true
text: " TS"
- enabled: false
text: ""
build_mode:
- text: " REL"
cmake: "Release"
autotools: "production"
- text: " DBG"
cmake: "Debug"
autotools: "debug"
# This is where we list the bulk of the options for each configuration.
# The key-value pair values are usually appropriate for being CMake or
# Autotools configure values, so be aware of that.
include:
# Windows w/ MSVC + CMake
#
# No Fortran, parallel, or VFDs that rely on POSIX things
- name: "Windows MSVC CMake"
os: windows-2022
toolchain: ""
cpp: ON
fortran: OFF
java: ON
libaecfc: ON
zlibfc: ON
parallel: OFF
mirror_vfd: OFF
direct_vfd: OFF
generator: "-G \"Visual Studio 17 2022\" -A x64"
run_tests: true
# Linux (Ubuntu) w/ gcc + CMake
#
# We might think about adding Clang, but MacOS already tests that
# so it's not critical
- name: "Ubuntu gcc CMake"
os: ubuntu-latest
cpp: ON
fortran: ON
java: ON
libaecfc: ON
zlibfc: ON
parallel: OFF
mirror_vfd: ON
direct_vfd: ON
toolchain: "config/toolchain/gcc.cmake"
generator: "-G Ninja"
run_tests: true
# Linux (Ubuntu) w/ gcc + Autotools
#
# Keep this identical to the CMake configs. Note the difference in
# the values.
- name: "Ubuntu gcc Autotools"
os: ubuntu-latest
cpp: enable
fortran: enable
java: enable
parallel: disable
mirror_vfd: enable
direct_vfd: enable
deprec_sym: enable
default_api: v114
szip: yes
toolchain: ""
generator: "autogen"
flags: ""
run_tests: true
# Parallel Linux (Ubuntu) w/ gcc + Autotools
#
# The GitHub runners are inadequate for running parallel HDF5 tests,
# so we catch most issues in daily testing. What we have here is just
# a compile check to make sure nothing obvious is broken.
- name: "Ubuntu gcc Autotools parallel (build only)"
os: ubuntu-latest
cpp: disable
fortran: enable
java: disable
parallel: enable
mirror_vfd: disable
direct_vfd: disable
deprec_sym: enable
default_api: v114
szip: yes
toolchain: ""
generator: "autogen"
flags: "CC=mpicc"
run_tests: false
# MacOS w/ Clang + CMake
#
# We could also build with the Autotools via brew installing them,
# but that seems unnecessary
- name: "MacOS Clang CMake"
os: macos-11
cpp: ON
fortran: OFF
java: ON
libaecfc: ON
zlibfc: ON
parallel: OFF
mirror_vfd: ON
direct_vfd: OFF
toolchain: "config/toolchain/clang.cmake"
generator: "-G Ninja"
run_tests: true
#
# SPECIAL AUTOTOOLS BUILDS
#
# These do not run tests and are not built into the matrix and instead
# become NEW configs as their name would clobber one of the matrix
# names (so make sure the names are UNIQUE).
#
- name: "Ubuntu gcc Autotools v1.6 default API (build only)"
os: ubuntu-latest
cpp: enable
fortran: enable
java: enable
parallel: disable
mirror_vfd: enable
direct_vfd: enable
deprec_sym: enable
default_api: v16
szip: yes
toolchain: ""
generator: "autogen"
flags: ""
run_tests: false
thread_safety:
enabled: false
text: ""
build_mode:
text: " DBG"
cmake: "Debug"
autotools: "debug"
- name: "Ubuntu gcc Autotools v1.8 default API (build only)"
os: ubuntu-latest
cpp: enable
fortran: enable
java: enable
parallel: disable
mirror_vfd: enable
direct_vfd: enable
deprec_sym: enable
default_api: v18
szip: yes
toolchain: ""
generator: "autogen"
flags: ""
run_tests: false
thread_safety:
enabled: false
text: ""
build_mode:
text: " DBG"
cmake: "Debug"
autotools: "debug"
- name: "Ubuntu gcc Autotools v1.10 default API (build only)"
os: ubuntu-latest
cpp: enable
fortran: enable
java: enable
parallel: disable
mirror_vfd: enable
direct_vfd: enable
deprec_sym: enable
default_api: v110
szip: yes
toolchain: ""
generator: "autogen"
flags: ""
run_tests: false
thread_safety:
enabled: false
text: ""
build_mode:
text: " DBG"
cmake: "Debug"
autotools: "debug"
- name: "Ubuntu gcc Autotools v1.12 default API (build only)"
os: ubuntu-latest
cpp: enable
fortran: enable
java: enable
parallel: disable
mirror_vfd: enable
direct_vfd: enable
deprec_sym: enable
default_api: v112
szip: yes
toolchain: ""
generator: "autogen"
flags: ""
run_tests: false
thread_safety:
enabled: false
text: ""
build_mode:
text: " DBG"
cmake: "Debug"
autotools: "debug"
- name: "Ubuntu gcc Autotools no deprecated symbols (build only)"
os: ubuntu-latest
cpp: enable
fortran: enable
java: enable
parallel: disable
mirror_vfd: enable
direct_vfd: enable
deprec_sym: disable
default_api: default
szip: yes
toolchain: ""
generator: "autogen"
flags: ""
run_tests: false
thread_safety:
enabled: false
text: ""
build_mode:
text: " DBG"
cmake: "Debug"
autotools: "debug"
- name: "Ubuntu gcc Autotools -Werror (build only)"
os: ubuntu-latest
cpp: enable
fortran: disable
java: disable
parallel: disable
mirror_vfd: disable
direct_vfd: enable
deprec_sym: enable
default_api: v114
szip: yes
toolchain: ""
generator: "autogen"
flags: "CFLAGS=-Werror"
run_tests: false
thread_safety:
enabled: false
text: ""
build_mode:
text: " DBG"
cmake: "Debug"
autotools: "debug"
# NOTE(review): this name duplicates the DBG -Werror entry above; the comment
# at the start of this section says special-build names must be UNIQUE —
# confirm these are treated as separate configs or rename (e.g. append "REL").
- name: "Ubuntu gcc Autotools -Werror (build only)"
os: ubuntu-latest
cpp: enable
fortran: disable
java: disable
parallel: disable
mirror_vfd: disable
direct_vfd: enable
deprec_sym: enable
default_api: v114
szip: yes
toolchain: ""
generator: "autogen"
flags: "CFLAGS=-Werror"
run_tests: false
thread_safety:
enabled: false
text: ""
build_mode:
text: " REL"
cmake: "Release"
autotools: "production"
# Parallel Debug -Werror
- name: "Ubuntu gcc Autotools parallel -Werror (build only)"
os: ubuntu-latest
cpp: disable
fortran: disable
java: disable
parallel: enable
mirror_vfd: disable
direct_vfd: enable
deprec_sym: enable
default_api: v114
toolchain: ""
generator: "autogen"
flags: "CFLAGS=-Werror"
run_tests: false
thread_safety:
enabled: false
text: ""
build_mode:
text: " DBG"
cmake: "Debug"
autotools: "debug"
# Parallel production/release -Werror
- name: "Ubuntu gcc Autotools parallel -Werror (build only)"
os: ubuntu-latest
cpp: disable
fortran: disable
java: disable
parallel: enable
mirror_vfd: disable
direct_vfd: enable
deprec_sym: enable
default_api: v114
toolchain: ""
generator: "autogen"
flags: "CFLAGS=-Werror"
run_tests: false
thread_safety:
enabled: false
text: ""
build_mode:
text: " REL"
cmake: "Release"
autotools: "production"
# Sets the job's name from the properties
name: "${{ matrix.name }}${{ matrix.build_mode.text }}${{ matrix.thread_safety.text }}"
# Don't run the action if the commit message says to skip CI
if: "!contains(github.event.head_commit.message, 'skip-ci')"
# The type of runner that the job will run on
runs-on: ${{ matrix.os }}
# Steps represent a sequence of tasks that will be executed as part of the job
steps:
#
# SETUP
#
#Useful for debugging
- name: Dump matrix context
run: echo '${{ toJSON(matrix) }}'
# ninja is only needed by the CMake generators, but it is cheap to install
- name: Install CMake Dependencies (Linux)
run: sudo apt-get install ninja-build
if: matrix.os == 'ubuntu-latest'
# Serial Autotools builds pin gcc-11/g++-11/gfortran-11 and export the
# compiler choices via GITHUB_ENV so later configure steps pick them up.
- name: Install Autotools Dependencies (Linux, serial)
run: |
sudo apt update
sudo apt install automake autoconf libtool libtool-bin
sudo apt install gcc-11 g++-11 gfortran-11
echo "CC=gcc-11" >> $GITHUB_ENV
echo "CXX=g++-11" >> $GITHUB_ENV
echo "FC=gfortran-11" >> $GITHUB_ENV
sudo apt install libaec0 libaec-dev
if: (matrix.generator == 'autogen') && (matrix.parallel != 'enable')
# Parallel builds use the OpenMPI compiler wrappers instead of plain gcc.
- name: Install Autotools Dependencies (Linux, parallel)
run: |
sudo apt update
sudo apt install automake autoconf libtool libtool-bin
sudo apt install openmpi-bin openmpi-common mpi-default-dev
echo "CC=mpicc" >> $GITHUB_ENV
echo "FC=mpif90" >> $GITHUB_ENV
sudo apt install libaec0 libaec-dev
if: (matrix.generator == 'autogen') && (matrix.parallel == 'enable')
- name: Install Dependencies (Windows)
run: choco install ninja
if: matrix.os == 'windows-latest'
- name: Install Dependencies (macOS)
run: brew install ninja
if: matrix.os == 'macos-11'
- name: Set environment for MSVC (Windows)
run: |
# Set these environment variables so CMake picks the correct compiler
echo "CXX=cl.exe" >> $GITHUB_ENV
echo "CC=cl.exe" >> $GITHUB_ENV
if: matrix.os == 'windows-latest'
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- name: Get Sources
uses: actions/checkout@v3
#
# AUTOTOOLS CONFIGURE
#
# Out-of-source configure in ${{ runner.workspace }}/build; matrix.flags
# (e.g. CFLAGS=-Werror) is prepended to the configure invocation.
- name: Autotools Configure
run: |
sh ./autogen.sh
mkdir "${{ runner.workspace }}/build"
cd "${{ runner.workspace }}/build"
${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --${{ matrix.deprec_sym }}-deprecated-symbols --with-default-api-version=${{ matrix.default_api }} --enable-shared --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --with-szlib=${{ matrix.szip }}
shell: bash
# FIX: the matrix key is 'thread_safety', not 'thread_safe'. The old
# condition referenced an undefined property, so the thread-safe variant
# of this step could never run and the plain variant always ran.
if: (matrix.generator == 'autogen') && (! matrix.thread_safety.enabled)
# Thread-safe builds disable the high-level library and deprecated options.
- name: Autotools Configure (Thread-Safe)
run: |
sh ./autogen.sh
mkdir "${{ runner.workspace }}/build"
cd "${{ runner.workspace }}/build"
${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --enable-shared --enable-threadsafe --disable-hl --${{ matrix.parallel }}-parallel --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --with-szlib=${{ matrix.szip }}
shell: bash
if: (matrix.generator == 'autogen') && (matrix.thread_safety.enabled)
#
# CMAKE CONFIGURE
#
- name: CMake Configure
run: |
mkdir "${{ runner.workspace }}/build"
cd "${{ runner.workspace }}/build"
cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE
shell: bash
# FIX: the matrix key is 'thread_safety', not 'thread_safe'. The old
# condition referenced an undefined property, so the thread-safe variant
# of this step could never run and the plain variant always ran.
if: (matrix.generator != 'autogen') && (! matrix.thread_safety.enabled)
- name: CMake Configure (Thread-Safe)
run: |
mkdir "${{ runner.workspace }}/build"
cd "${{ runner.workspace }}/build"
cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_HL_LIB:BOOL=OFF -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE
shell: bash
if: (matrix.generator != 'autogen') && (matrix.thread_safety.enabled)
#
# BUILD
#
# Both build steps run in the out-of-source build directory created above.
- name: Autotools Build
run: make -j3
working-directory: ${{ runner.workspace }}/build
if: matrix.generator == 'autogen'
- name: CMake Build
run: cmake --build . --parallel 3 --config ${{ matrix.build_mode.cmake }}
working-directory: ${{ runner.workspace }}/build
if: matrix.generator != 'autogen'
#
# RUN TESTS
#
- name: Autotools Run Tests
run: make check -j2
working-directory: ${{ runner.workspace }}/build
if: (matrix.generator == 'autogen') && (matrix.run_tests)
- name: CMake Run Tests
# FIX: 'ctest --build .' is not a valid ctest invocation -- '--build' is not
# a standalone ctest option (the --build-* flags belong to the
# --build-and-test mode). Pass the build directory positionally instead.
run: ctest . --parallel 2 -C ${{ matrix.build_mode.cmake }} -V
working-directory: ${{ runner.workspace }}/build
# Skip Debug MSVC while we investigate H5L Java test timeouts
if: (matrix.generator != 'autogen') && (matrix.run_tests) && ! ((matrix.name == 'Windows MSVC CMake') && (matrix.build_mode.cmake == 'Debug'))
#
# INSTALL (note that this runs even when we don't run the tests)
#
- name: Autotools Install
run: make install
working-directory: ${{ runner.workspace }}/build
if: (matrix.generator == 'autogen')
# 'make check-install' exercises the installed tree rather than the build tree.
- name: Autotools Verify Install
run: make check-install
working-directory: ${{ runner.workspace }}/build
if: (matrix.generator == 'autogen')

@ -0,0 +1,105 @@
name: hdf5 dev tarball
# Controls when the action will run. Triggers the workflow on a schedule
# Reusable workflow: exposes whether anything changed in the last day and the
# common base name of the produced tarballs to the calling workflow.
on:
workflow_call:
outputs:
has_changes:
description: "Whether there were changes the previous day"
value: ${{ jobs.check_commits.outputs.has_changes }}
file_base:
description: "The common base name of the source tarballs"
value: ${{ jobs.create_tarball.outputs.file_base }}
# A workflow run is made up of one or more jobs that can run sequentially or
# in parallel
jobs:
check_commits:
name: Check for recent commits
runs-on: ubuntu-latest
outputs:
has_changes: ${{ steps.check-new-commits.outputs.has-new-commits }}
branch_ref: ${{ steps.get-branch-name.outputs.BRANCH_REF }}
branch_sha: ${{ steps.get-branch-sha.outputs.BRANCH_SHA }}
steps:
# For pull requests github.head_ref is set; for pushes it is empty, so the
# '||' expression falls back to github.ref_name.
- name: Get branch name
id: get-branch-name
env:
GITHUB_REF: ${{ github.ref }}
GITHUB_REF_NAME: ${{ github.ref_name }}
GITHUB_HEAD_REF: ${{ github.head_ref }}
#run: echo "${{ env.GITHUB_REF_NAME }} | grep -P '[0-9]+/merge' &> /dev/null && BRANCH_REF=${{ env.GITHUB_HEAD_REF }} || BRANCH_REF=${{ env.GITHUB_REF_NAME }}" >> $GITHUB_OUTPUT
run: echo "BRANCH_REF=${{ env.GITHUB_HEAD_REF || env.GITHUB_REF_NAME }}" >> $GITHUB_OUTPUT
# Short (7-character) sha that becomes part of the tarball base name below.
- name: Get branch sha
id: get-branch-sha
env:
GITHUB_SHA: ${{ github.sha }}
GITHUB_WF_SHA: ${{ github.workflow_sha }}
run: |
SHORT_SHA=$(echo "${{ env.GITHUB_WF_SHA }}" | cut -c1-7)
echo "BRANCH_SHA=$SHORT_SHA" >> $GITHUB_OUTPUT
- name: Check for changed source
id: check-new-commits
uses: adriangl/check-new-commits-action@v1
with:
seconds: 86400 # One day in seconds
branch: '${{ steps.get-branch-name.outputs.branch_ref }}'
- run: echo "You have ${{ steps.check-new-commits.outputs.new-commits-number }} new commit(s) in ${{ steps.get-branch-name.outputs.BRANCH_REF }} ✅!"
if: ${{ steps.check-new-commits.outputs.has-new-commits == 'true' }}
- run: echo "Short commit sha is ${{ steps.get-branch-sha.outputs.BRANCH_SHA }}!"
create_tarball:
name: Create a source tarball
runs-on: ubuntu-latest
needs: check_commits
# Only package when the previous job saw commits in the last day.
if: ${{ needs.check_commits.outputs.has_changes == 'true' }}
outputs:
file_base: ${{ steps.set-file-base.outputs.FILE_BASE }}
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- name: Get Sources
uses: actions/checkout@v3
with:
path: hdfsrc
- name: Install Autotools Dependencies (Linux, serial)
run: |
sudo apt update
sudo apt install automake autoconf libtool libtool-bin gzip dos2unix
# Tarball base name: hdf5-<branch>-<short sha>; also exported as a job output.
- name: Set file base name
id: set-file-base
run: |
FILE_NAME_BASE=$(echo "hdf5-${{ needs.check_commits.outputs.branch_ref }}-${{ needs.check_commits.outputs.branch_sha }}")
echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT
# bbrelease writes the .tar.gz and .zip archives into $GITHUB_WORKSPACE.
- name: Run release script
id: run-release-script
run: |
cd "$GITHUB_WORKSPACE/hdfsrc"
bin/bbrelease -d $GITHUB_WORKSPACE --branch ${{ needs.check_commits.outputs.branch_ref }} --revision gzip zip
shell: bash
- name: List files in the repository
run: |
ls ${{ github.workspace }}
ls $GITHUB_WORKSPACE
# Save files created by release script
- name: Save tgz-tarball
uses: actions/upload-artifact@v3
with:
name: tgz-tarball
path: ${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
- name: Save zip-tarball
uses: actions/upload-artifact@v3
with:
name: zip-tarball
path: ${{ steps.set-file-base.outputs.FILE_BASE }}.zip
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`

@ -0,0 +1,53 @@
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
# Initialization file for the Copyright Checker, chkcopyright.
# Each line is a keyword for action and the rest are values.
# Keywords:
# '#' Comments
# skip Files to be skipped
# prune Directories to be skipped. Notice this prunes all directories
# with the same name. E.g.,
# "prune test" skips test, fortran/test, c++/test, ...
# Skip COPYING since it is the detailed Copyright notice.
skip COPYING
# Sort of strange to have a copyright notice in README
skip README
# Non-UI copyrighted files in top-level
skip aclocal.m4
# Non-UI copyrighted files in bin.
skip config.guess
skip config.sub
skip depcomp
skip install-sh
skip ltmain.sh
skip missing
skip mkinstalldirs
# Generated files in top-level
skip configure
# Generated files in src.
skip H5config.h.in
# Generated files in fortran/src.
skip H5match_types.c
skip H5test_kind.f90
# Ignore this expected output file in windows/examples.
skip testExamples_exp_output.txt
# Skip all testfiles/* since if we insert a copyright notice in the expected
# data files, we would have to spend extra effort to filter them out.
prune testfiles

@ -0,0 +1,18 @@
Acknowledgments - November 2010
-------------------------------
We would like to thank the following people who have contributed directly
or indirectly to HDF5:
Werner Benger, for contributing code used to add support for the Windows
Threading library included in the 1.8.6 release.
John A. Biddiscombe, Mike Jackson, and Sean McBride for contributing and
testing CMake code included in the HDF5 1.8.5 distribution.
The HDF5 community for helping shape the development of HDF5 by contributing
bug reports and patches, joining in on forum discussions, and continually
supporting our software.
Finally, we'd like to thank all organizations whose funding has made HDF5
possible.

@ -0,0 +1,189 @@
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
option (USE_LIBAEC_STATIC "Use static AEC library " OFF)
option (ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" 0)
option (SZIP_USE_EXTERNAL "Use External Library Building for SZIP" 0)
option (BUILD_ZLIB_WITH_FETCHCONTENT "Use FetchContent to use original source files" OFF)
if (BUILD_ZLIB_WITH_FETCHCONTENT)
# FIX: plain set() takes no docstring argument; the previous call
#   set (ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" 1)
# made the variable a two-element list instead of the value 1.
set (ZLIB_USE_EXTERNAL 1)
# Choose between the upstream download URL and a local tgz directory.
if (NOT ZLIB_USE_LOCALCONTENT)
set (ZLIB_URL ${ZLIB_TGZ_ORIGPATH}/${ZLIB_TGZ_ORIGNAME})
else ()
set (ZLIB_URL ${TGZPATH}/${ZLIB_TGZ_ORIGNAME})
endif ()
# message(VERBOSE ...) is only a distinct log level on CMake >= 3.15.
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
message (VERBOSE "Filter ZLIB file is ${ZLIB_URL}")
endif ()
endif ()
option (BUILD_SZIP_WITH_FETCHCONTENT "Use FetchContent to use original source files" OFF)
if (BUILD_SZIP_WITH_FETCHCONTENT)
# FIX: same malformed set() as above -- assign the value only.
set (SZIP_USE_EXTERNAL 1)
if (NOT LIBAEC_USE_LOCALCONTENT)
set (SZIP_URL ${LIBAEC_TGZ_ORIGPATH}/${LIBAEC_TGZ_ORIGNAME})
else ()
set (SZIP_URL ${TGZPATH}/${LIBAEC_TGZ_ORIGNAME})
endif ()
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0")
message (VERBOSE "Filter SZIP file is ${SZIP_URL}")
endif ()
endif ()
include (ExternalProject)
#option (HDF5_ALLOW_EXTERNAL_SUPPORT "Allow External Library Building (NO GIT TGZ)" "NO")
set (HDF5_ALLOW_EXTERNAL_SUPPORT "NO" CACHE STRING "Allow External Library Building (NO GIT TGZ)")
set_property (CACHE HDF5_ALLOW_EXTERNAL_SUPPORT PROPERTY STRINGS NO GIT TGZ)
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
# FIX: plain set() takes no docstring argument; the previous calls made these
# variables two-element lists ("Use External Library Building for ...;1")
# instead of the value 1 (they were only truthy by accident).
set (ZLIB_USE_EXTERNAL 1)
set (SZIP_USE_EXTERNAL 1)
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT")
set (ZLIB_URL ${ZLIB_GIT_URL} CACHE STRING "Path to zlib git repository")
set (ZLIB_BRANCH ${ZLIB_GIT_BRANCH})
set (SZIP_URL ${SZIP_GIT_URL} CACHE STRING "Path to szip git repository")
set (SZIP_BRANCH ${SZIP_GIT_BRANCH})
elseif (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
# Default tgz search location is the source tree root.
if (NOT TGZPATH)
set (TGZPATH ${HDF5_SOURCE_DIR})
endif ()
if (NOT BUILD_ZLIB_WITH_FETCHCONTENT)
set (ZLIB_URL ${TGZPATH}/${ZLIB_TGZ_NAME})
endif ()
# A missing archive disables the corresponding filter rather than failing.
if (NOT EXISTS "${ZLIB_URL}")
set (HDF5_ENABLE_Z_LIB_SUPPORT OFF CACHE BOOL "" FORCE)
message (VERBOSE "Filter ZLIB file ${ZLIB_URL} not found")
endif ()
if (NOT BUILD_SZIP_WITH_FETCHCONTENT)
# NOTE(review): SZAEC_TGZ_NAME differs from the LIBAEC_TGZ_ORIGNAME used
# above -- presumably defined by the top-level CMakeLists; confirm.
set (SZIP_URL ${TGZPATH}/${SZAEC_TGZ_NAME})
endif ()
if (NOT EXISTS "${SZIP_URL}")
set (HDF5_ENABLE_SZIP_SUPPORT OFF CACHE BOOL "" FORCE)
message (VERBOSE "Filter SZIP file ${SZIP_URL} not found")
endif ()
else ()
set (ZLIB_USE_EXTERNAL 0)
set (SZIP_USE_EXTERNAL 0)
endif ()
endif ()
#-----------------------------------------------------------------------------
# Option for ZLib support
#-----------------------------------------------------------------------------
option (HDF5_ENABLE_Z_LIB_SUPPORT "Enable Zlib Filters" OFF)
if (HDF5_ENABLE_Z_LIB_SUPPORT)
# H5_ZLIB_HEADER is pre-set when a parent project has already configured zlib
# (see the else() branch near the end of this section).
if (NOT H5_ZLIB_HEADER)
if (NOT ZLIB_USE_EXTERNAL)
# Prefer a packaged config-mode zlib, then fall back to CMake's FindZLIB.
find_package (ZLIB NAMES ${ZLIB_PACKAGE_NAME}${HDF_PACKAGE_EXT} COMPONENTS static shared)
if (NOT ZLIB_FOUND)
find_package (ZLIB) # Legacy find
endif ()
if (ZLIB_FOUND)
set (H5_HAVE_FILTER_DEFLATE 1)
set (H5_HAVE_ZLIB_H 1)
set (H5_HAVE_LIBZ 1)
set (H5_ZLIB_HEADER "zlib.h")
set (ZLIB_INCLUDE_DIR_GEN ${ZLIB_INCLUDE_DIR})
set (ZLIB_INCLUDE_DIRS ${ZLIB_INCLUDE_DIRS} ${ZLIB_INCLUDE_DIR})
set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${ZLIB_LIBRARIES})
endif ()
else ()
if (BUILD_ZLIB_WITH_FETCHCONTENT)
# Only tgz files available
# ORIGINAL_ZLIB_LIBRARY is a macro defined elsewhere in the build
# system; presumably it populates ZLIB_STATIC_LIBRARY -- confirm there.
ORIGINAL_ZLIB_LIBRARY ("TGZ")
set (H5_HAVE_FILTER_DEFLATE 1)
set (H5_HAVE_ZLIB_H 1)
set (H5_HAVE_LIBZ 1)
message (VERBOSE "HDF5_ZLIB is built from fetch content")
set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${ZLIB_STATIC_LIBRARY})
elseif (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
# Build zlib as an ExternalProject from the GIT/TGZ source chosen above.
EXTERNAL_ZLIB_LIBRARY (${HDF5_ALLOW_EXTERNAL_SUPPORT})
set (H5_HAVE_FILTER_DEFLATE 1)
set (H5_HAVE_ZLIB_H 1)
set (H5_HAVE_LIBZ 1)
message (VERBOSE "Filter HDF5_ZLIB is built")
set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${ZLIB_STATIC_LIBRARY})
else ()
message (FATAL_ERROR " ZLib is Required for ZLib support in HDF5")
endif ()
endif ()
else ()
# This project is being called from within another and ZLib is already configured
set (H5_HAVE_FILTER_DEFLATE 1)
set (H5_HAVE_ZLIB_H 1)
set (H5_HAVE_LIBZ 1)
endif ()
# EXTERNAL_FILTERS is a space-separated summary string used in reporting.
if (H5_HAVE_FILTER_DEFLATE)
set (EXTERNAL_FILTERS "${EXTERNAL_FILTERS} DEFLATE")
endif ()
INCLUDE_DIRECTORIES (${ZLIB_INCLUDE_DIRS})
message (VERBOSE "Filter HDF5_ZLIB is ON")
endif ()
#-----------------------------------------------------------------------------
# Option for SzLib support
#-----------------------------------------------------------------------------
option (HDF5_ENABLE_SZIP_SUPPORT "Use SZip Filter" OFF)
if (HDF5_ENABLE_SZIP_SUPPORT)
option (HDF5_ENABLE_SZIP_ENCODING "Use SZip Encoding" OFF)
if (NOT SZIP_USE_EXTERNAL)
# Try the libaec config package first (the modern szip provider);
# presumably its config file sets SZIP_FOUND on success -- confirm.
set(SZIP_FOUND FALSE)
set(libaec_USE_STATIC_LIBS ${USE_LIBAEC_STATIC})
find_package (libaec 1.0.5 CONFIG)
if (SZIP_FOUND)
set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_LIBRARIES})
endif ()
# Fall back to a packaged SZIP, then to the legacy find module.
if (NOT SZIP_FOUND)
find_package (SZIP NAMES ${LIBAEC_PACKAGE_NAME}${HDF_PACKAGE_EXT} COMPONENTS static shared)
if (NOT SZIP_FOUND)
find_package (SZIP) # Legacy find
endif ()
if (SZIP_FOUND)
set (H5_HAVE_FILTER_SZIP 1)
set (H5_HAVE_SZLIB_H 1)
set (H5_HAVE_LIBSZ 1)
set (SZIP_INCLUDE_DIR_GEN ${SZIP_INCLUDE_DIR})
set (SZIP_INCLUDE_DIRS ${SZIP_INCLUDE_DIRS} ${SZIP_INCLUDE_DIR})
set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_LIBRARIES})
endif ()
endif ()
else ()
if (BUILD_SZIP_WITH_FETCHCONTENT)
# Only tgz files available
# ORIGINAL_SZIP_LIBRARY / EXTERNAL_SZIP_LIBRARY are macros defined
# elsewhere in the build system; the second argument enables encoding.
ORIGINAL_SZIP_LIBRARY ("TGZ" ${HDF5_ENABLE_SZIP_ENCODING})
set (H5_HAVE_FILTER_SZIP 1)
set (H5_HAVE_SZLIB_H 1)
set (H5_HAVE_LIBSZ 1)
message (VERBOSE "SZIP is built from fetch content")
message (VERBOSE "... with library AEC")
set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_STATIC_LIBRARY})
elseif (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
EXTERNAL_SZIP_LIBRARY (${HDF5_ALLOW_EXTERNAL_SUPPORT} ${HDF5_ENABLE_SZIP_ENCODING})
set (H5_HAVE_FILTER_SZIP 1)
set (H5_HAVE_SZLIB_H 1)
set (H5_HAVE_LIBSZ 1)
message (VERBOSE "Filter SZIP is built")
message (VERBOSE "... with library AEC")
set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_STATIC_LIBRARY})
else ()
message (FATAL_ERROR "SZIP is Required for SZIP support in HDF5")
endif ()
endif ()
INCLUDE_DIRECTORIES (${SZIP_INCLUDE_DIRS})
message (VERBOSE "Filter SZIP is ON")
# Decoding is always available with the filter; encoding is opt-in.
if (H5_HAVE_FILTER_SZIP)
set (EXTERNAL_FILTERS "${EXTERNAL_FILTERS} DECODE")
endif ()
if (HDF5_ENABLE_SZIP_ENCODING)
set (H5_HAVE_SZIP_ENCODER 1)
set (EXTERNAL_FILTERS "${EXTERNAL_FILTERS} ENCODE")
endif ()
endif ()

@ -0,0 +1,670 @@
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
include (CMakePackageConfigHelpers)
#-----------------------------------------------------------------------------
# Check for Installation Utilities
#-----------------------------------------------------------------------------
if (WIN32)
set (PF_ENV_EXT "(x86)")
# Look for NSIS in both the 64-bit and 32-bit Program Files directories.
find_program (NSIS_EXECUTABLE NSIS.exe PATHS "$ENV{ProgramFiles}\\NSIS" "$ENV{ProgramFiles${PF_ENV_EXT}}\\NSIS")
# The WIX environment variable points at the WiX toolset install root.
if(NOT CPACK_WIX_ROOT)
file(TO_CMAKE_PATH "$ENV{WIX}" CPACK_WIX_ROOT)
endif ()
find_program (WIX_EXECUTABLE candle PATHS "${CPACK_WIX_ROOT}/bin")
endif ()
#-----------------------------------------------------------------------------
# Add Target(s) to CMake Install for import into other projects
#-----------------------------------------------------------------------------
if (NOT HDF5_EXTERNALLY_CONFIGURED)
if (HDF5_EXPORTED_TARGETS)
install (
EXPORT ${HDF5_EXPORTED_TARGETS}
DESTINATION ${HDF5_INSTALL_CMAKE_DIR}
FILE ${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-targets.cmake
NAMESPACE ${HDF_PACKAGE_NAMESPACE}
COMPONENT configinstall
)
endif ()
#-----------------------------------------------------------------------------
# Export all exported targets to the build tree for use by parent project
#-----------------------------------------------------------------------------
export (
TARGETS ${HDF5_LIBRARIES_TO_EXPORT} ${HDF5_LIB_DEPENDENCIES} ${HDF5_UTILS_TO_EXPORT}
FILE ${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-targets.cmake
NAMESPACE ${HDF_PACKAGE_NAMESPACE}
)
endif ()
#-----------------------------------------------------------------------------
# Set includes needed for build
#-----------------------------------------------------------------------------
set (HDF5_INCLUDES_BUILD_TIME
${HDF5_SRC_INCLUDE_DIRS} ${HDF5_CPP_SRC_DIR} ${HDF5_HL_SRC_DIR}
${HDF5_TOOLS_SRC_DIR} ${HDF5_SRC_BINARY_DIR}
)
#-----------------------------------------------------------------------------
# Set variables needed for installation
#-----------------------------------------------------------------------------
set (HDF5_VERSION_STRING ${HDF5_PACKAGE_VERSION})
set (HDF5_VERSION_MAJOR ${HDF5_PACKAGE_VERSION_MAJOR})
set (HDF5_VERSION_MINOR ${HDF5_PACKAGE_VERSION_MINOR})
#-----------------------------------------------------------------------------
# Configure the hdf5-config.cmake file for the build directory
#-----------------------------------------------------------------------------
# The same template is configured twice: once with build-tree paths (here)
# and once with install-tree paths (below).
set (INCLUDE_INSTALL_DIR ${HDF5_INSTALL_INCLUDE_DIR})
set (SHARE_INSTALL_DIR "${CMAKE_CURRENT_BINARY_DIR}/${HDF5_INSTALL_CMAKE_DIR}" )
set (CURRENT_BUILD_DIR "${CMAKE_CURRENT_BINARY_DIR}" )
configure_package_config_file (
${HDF_RESOURCES_DIR}/hdf5-config.cmake.in
"${HDF5_BINARY_DIR}/${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-config.cmake"
INSTALL_DESTINATION "${HDF5_INSTALL_CMAKE_DIR}"
PATH_VARS INCLUDE_INSTALL_DIR SHARE_INSTALL_DIR CURRENT_BUILD_DIR
INSTALL_PREFIX "${CMAKE_CURRENT_BINARY_DIR}"
)
#-----------------------------------------------------------------------------
# Configure the hdf5-config.cmake file for the install directory
#-----------------------------------------------------------------------------
set (INCLUDE_INSTALL_DIR ${HDF5_INSTALL_INCLUDE_DIR})
set (SHARE_INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/${HDF5_INSTALL_CMAKE_DIR}" )
set (CURRENT_BUILD_DIR "${CMAKE_INSTALL_PREFIX}" )
# The install-tree variant is staged under CMAKE_FILES_DIRECTORY so it does
# not clash with the build-tree config generated above.
configure_package_config_file (
${HDF_RESOURCES_DIR}/hdf5-config.cmake.in
"${HDF5_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-config.cmake"
INSTALL_DESTINATION "${HDF5_INSTALL_CMAKE_DIR}"
PATH_VARS INCLUDE_INSTALL_DIR SHARE_INSTALL_DIR CURRENT_BUILD_DIR
)
if (NOT HDF5_EXTERNALLY_CONFIGURED)
install (
FILES ${HDF5_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-config.cmake
DESTINATION ${HDF5_INSTALL_CMAKE_DIR}
COMPONENT configinstall
)
endif ()
#-----------------------------------------------------------------------------
# Configure the hdf5-config-version.cmake file for the install directory
#-----------------------------------------------------------------------------
if (NOT HDF5_EXTERNALLY_CONFIGURED)
write_basic_package_version_file (
"${HDF5_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-config-version.cmake"
VERSION ${HDF5_PACKAGE_VERSION}
COMPATIBILITY SameMinorVersion
)
#configure_file (
#  ${HDF_RESOURCES_DIR}/hdf5-config-version.cmake.in
#  ${HDF5_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-config-version.cmake @ONLY
#)
install (
FILES ${HDF5_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/${HDF5_PACKAGE}${HDF_PACKAGE_EXT}-config-version.cmake
DESTINATION ${HDF5_INSTALL_CMAKE_DIR}
COMPONENT configinstall
)
endif ()
#-----------------------------------------------------------------------------
# Configure the libhdf5.settings file for the lib info
#-----------------------------------------------------------------------------
if (H5_WORDS_BIGENDIAN)
set (BYTESEX big-endian)
else ()
set (BYTESEX little-endian)
endif ()
configure_file (
${HDF_RESOURCES_DIR}/libhdf5.settings.cmake.in
${HDF5_SRC_BINARY_DIR}/libhdf5.settings ESCAPE_QUOTES @ONLY
)
install (
FILES ${HDF5_SRC_BINARY_DIR}/libhdf5.settings
DESTINATION ${HDF5_INSTALL_LIB_DIR}
COMPONENT libraries
)
#-----------------------------------------------------------------------------
# Configure the HDF5_Examples.cmake file and the examples
#-----------------------------------------------------------------------------
option (HDF5_PACK_EXAMPLES "Package the HDF5 Library Examples Compressed File" OFF)
if (HDF5_PACK_EXAMPLES)
configure_file (
${HDF_RESOURCES_DIR}/examples/HDF5_Examples.cmake.in
${HDF5_BINARY_DIR}/HDF5_Examples.cmake @ONLY
)
install (
FILES ${HDF5_BINARY_DIR}/HDF5_Examples.cmake
DESTINATION ${HDF5_INSTALL_DATA_DIR}
COMPONENT hdfdocuments
)
option (EXAMPLES_USE_RELEASE_NAME "Use the released examples artifact name" OFF)
option (EXAMPLES_DOWNLOAD "Download to use released examples files" OFF)
if (EXAMPLES_DOWNLOAD)
# Download the examples archive (from upstream or a local mirror) and
# extract it into the build tree.
if (NOT EXAMPLES_USE_LOCALCONTENT)
set (EXAMPLES_URL ${EXAMPLES_TGZ_ORIGPATH}/${EXAMPLES_TGZ_ORIGNAME})
else ()
set (EXAMPLES_URL ${TGZPATH}/${EXAMPLES_TGZ_ORIGNAME})
endif ()
message (VERBOSE "Examples file is ${EXAMPLES_URL}")
file (DOWNLOAD ${EXAMPLES_URL} ${HDF5_BINARY_DIR}/${HDF5_EXAMPLES_COMPRESSED})
if (EXISTS "${HDF5_BINARY_DIR}/${HDF5_EXAMPLES_COMPRESSED}")
execute_process(
COMMAND ${CMAKE_COMMAND} -E tar xzf ${HDF5_EXAMPLES_COMPRESSED}
WORKING_DIRECTORY ${HDF5_BINARY_DIR}
COMMAND_ECHO STDOUT
)
endif ()
set (EXAMPLES_USE_RELEASE_NAME ON CACHE BOOL "" FORCE)
else ()
# Use a pre-staged archive from HDF5_EXAMPLES_COMPRESSED_DIR instead.
if (EXISTS "${HDF5_EXAMPLES_COMPRESSED_DIR}/${HDF5_EXAMPLES_COMPRESSED}")
execute_process(
COMMAND ${CMAKE_COMMAND} -E tar xzf ${HDF5_EXAMPLES_COMPRESSED_DIR}/${HDF5_EXAMPLES_COMPRESSED}
WORKING_DIRECTORY ${HDF5_BINARY_DIR}
COMMAND_ECHO STDOUT
)
endif ()
endif ()
if (EXAMPLES_USE_RELEASE_NAME)
# Derive the extracted directory name from the archive name:
# .zip strips one extension, .tar.gz needs two NAME_WLE passes.
get_filename_component (EX_LAST_EXT ${HDF5_EXAMPLES_COMPRESSED} LAST_EXT)
if (${EX_LAST_EXT} STREQUAL ".zip")
get_filename_component (EX_DIR_NAME ${HDF5_EXAMPLES_COMPRESSED} NAME_WLE)
else ()
get_filename_component (EX_DIR_NAME ${HDF5_EXAMPLES_COMPRESSED} NAME_WLE)
get_filename_component (EX_DIR_NAME ${EX_DIR_NAME} NAME_WLE)
endif ()
execute_process(
COMMAND ${CMAKE_COMMAND} -E rename ${EX_DIR_NAME} HDF5Examples
WORKING_DIRECTORY ${HDF5_BINARY_DIR}
COMMAND_ECHO STDOUT
)
endif ()
install (
DIRECTORY ${HDF5_BINARY_DIR}/HDF5Examples
DESTINATION ${HDF5_INSTALL_DATA_DIR}
USE_SOURCE_PERMISSIONS
COMPONENT hdfdocuments
)
install (
FILES
${HDF5_SOURCE_DIR}/release_docs/USING_CMake_Examples.txt
DESTINATION ${HDF5_INSTALL_DATA_DIR}
COMPONENT hdfdocuments
)
install (
FILES
${HDF_RESOURCES_DIR}/examples/CTestScript.cmake
DESTINATION ${HDF5_INSTALL_DATA_DIR}
COMPONENT hdfdocuments
)
install (
FILES
${HDF_RESOURCES_DIR}/examples/HDF5_Examples_options.cmake
DESTINATION ${HDF5_INSTALL_DATA_DIR}
COMPONENT hdfdocuments
)
endif ()
#-----------------------------------------------------------------------------
# Configure the README.md file for the binary package
#-----------------------------------------------------------------------------
# HDF_README_PROPERTIES is a macro defined elsewhere in the build system.
HDF_README_PROPERTIES(HDF5_BUILD_FORTRAN)
#-----------------------------------------------------------------------------
# Configure the COPYING.txt file for the windows binary package
#-----------------------------------------------------------------------------
if (WIN32)
configure_file (${HDF5_SOURCE_DIR}/COPYING ${HDF5_BINARY_DIR}/COPYING.txt @ONLY)
endif ()
#-----------------------------------------------------------------------------
# Add Document File(s) to CMake Install
#-----------------------------------------------------------------------------
if (NOT HDF5_EXTERNALLY_CONFIGURED)
install (
FILES ${HDF5_SOURCE_DIR}/COPYING
DESTINATION ${HDF5_INSTALL_DATA_DIR}
COMPONENT hdfdocuments
)
if (EXISTS "${HDF5_SOURCE_DIR}/release_docs" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/release_docs")
# Base set of release documents, extended per-platform/per-option below.
set (release_files
${HDF5_SOURCE_DIR}/release_docs/USING_HDF5_CMake.txt
${HDF5_SOURCE_DIR}/release_docs/RELEASE.txt
)
if (WIN32)
set (release_files
${release_files}
${HDF5_SOURCE_DIR}/release_docs/USING_HDF5_VS.txt
)
endif ()
if (HDF5_PACK_INSTALL_DOCS)
set (release_files
${release_files}
${HDF5_SOURCE_DIR}/release_docs/INSTALL_Warnings.txt
${HDF5_SOURCE_DIR}/release_docs/INSTALL_CMake.txt
${HDF5_SOURCE_DIR}/release_docs/HISTORY-1_8.txt
${HDF5_SOURCE_DIR}/release_docs/INSTALL
)
if (WIN32)
set (release_files
${release_files}
${HDF5_SOURCE_DIR}/release_docs/INSTALL_Windows.txt
)
endif ()
if (CYGWIN)
set (release_files
${release_files}
${HDF5_SOURCE_DIR}/release_docs/INSTALL_Cygwin.txt
)
endif ()
if (HDF5_ENABLE_PARALLEL)
set (release_files
${release_files}
${HDF5_SOURCE_DIR}/release_docs/INSTALL_parallel
)
endif ()
endif ()
install (
FILES ${release_files}
DESTINATION ${HDF5_INSTALL_DOC_DIR}
COMPONENT hdfdocuments
)
endif ()
endif ()
#-----------------------------------------------------------------------------
# Set the cpack variables
#-----------------------------------------------------------------------------
if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES)
set (CPACK_PACKAGE_VENDOR "HDF_Group")
set (CPACK_PACKAGE_NAME "${HDF5_PACKAGE_NAME}")
if (CDASH_LOCAL)
set (CPACK_PACKAGE_VERSION "${HDF5_PACKAGE_VERSION}")
else ()
set (CPACK_PACKAGE_VERSION "${HDF5_PACKAGE_VERSION_STRING}")
endif ()
set (CPACK_PACKAGE_VERSION_MAJOR "${HDF5_PACKAGE_VERSION_MAJOR}")
set (CPACK_PACKAGE_VERSION_MINOR "${HDF5_PACKAGE_VERSION_MINOR}")
set (CPACK_PACKAGE_VERSION_PATCH "")
set (CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/COPYING")
if (EXISTS "${HDF5_SOURCE_DIR}/release_docs")
set (CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_CURRENT_SOURCE_DIR}/release_docs/RELEASE.txt")
set (CPACK_RESOURCE_FILE_README "${CMAKE_CURRENT_SOURCE_DIR}/release_docs/RELEASE.txt")
endif ()
set (CPACK_PACKAGE_RELOCATABLE TRUE)
if (OVERRIDE_INSTALL_VERSION)
set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}/${CPACK_PACKAGE_NAME}/${OVERRIDE_INSTALL_VERSION}")
else ()
set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}/${CPACK_PACKAGE_NAME}/${CPACK_PACKAGE_VERSION}")
endif ()
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_DIR}/hdf.bmp")
set (CPACK_GENERATOR "TGZ")
# Platform-specific generator selection and per-generator settings.
if (WIN32)
# Windows: replace the base TGZ generator with ZIP; add NSIS and/or WiX
# installers only when their tools were found on the build machine.
set (CPACK_GENERATOR "ZIP")
if (NSIS_EXECUTABLE)
list (APPEND CPACK_GENERATOR "NSIS")
endif ()
# Installers for 32- vs. 64-bit CMake:
# - Root install directory (displayed to end user at installer-run time)
# - "NSIS package/display name" (text used in the installer GUI)
# - Registry key used to store info about the installation
set (CPACK_NSIS_PACKAGE_NAME "${HDF5_PACKAGE_STRING}")
if (CMAKE_CL_64)
set (CPACK_NSIS_INSTALL_ROOT "$PROGRAMFILES64")
set (CPACK_PACKAGE_INSTALL_REGISTRY_KEY "${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION} (Win64)")
else ()
set (CPACK_NSIS_INSTALL_ROOT "$PROGRAMFILES")
set (CPACK_PACKAGE_INSTALL_REGISTRY_KEY "${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION}")
endif ()
# set the install/uninstall icon used for the installer itself
# There is a bug in NSIS that does not handle full unix paths properly,
# hence the escaped-backslash (\\\\) path separators below.
set (CPACK_NSIS_MUI_ICON "${HDF_RESOURCES_DIR}\\\\hdf.ico")
set (CPACK_NSIS_MUI_UNIICON "${HDF_RESOURCES_DIR}\\\\hdf.ico")
# set the package header icon for MUI (overrides the default icon set above
# with a backslash-separated path for NSIS)
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_DIR}\\\\hdf.bmp")
set (CPACK_NSIS_DISPLAY_NAME "${CPACK_NSIS_PACKAGE_NAME}")
# Re-set the install directory with backslash separators for the NSIS GUI.
if (OVERRIDE_INSTALL_VERSION)
set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}\\\\${CPACK_PACKAGE_NAME}\\\\${OVERRIDE_INSTALL_VERSION}")
else ()
set (CPACK_PACKAGE_INSTALL_DIRECTORY "${CPACK_PACKAGE_VENDOR}\\\\${CPACK_PACKAGE_NAME}\\\\${CPACK_PACKAGE_VERSION}")
endif ()
set (CPACK_NSIS_CONTACT "${HDF5_PACKAGE_BUGREPORT}")
# Offer to add the installed bin directory to the user's PATH.
set (CPACK_NSIS_MODIFY_PATH ON)
if (WIX_EXECUTABLE)
list (APPEND CPACK_GENERATOR "WIX")
endif ()
#WiX variables
set (CPACK_WIX_UNINSTALL "1")
# .. variable:: CPACK_WIX_LICENSE_RTF
# RTF License File
#
# If CPACK_RESOURCE_FILE_LICENSE has an .rtf extension it is used as-is.
#
# If CPACK_RESOURCE_FILE_LICENSE has an .txt extension it is implicitly
# converted to RTF by the WiX Generator.
# The expected encoding of the .txt file is UTF-8.
#
# With CPACK_WIX_LICENSE_RTF you can override the license file used by the
# WiX Generator in case CPACK_RESOURCE_FILE_LICENSE is in an unsupported
# format or the .txt -> .rtf conversion does not work as expected.
set (CPACK_RESOURCE_FILE_LICENSE "${HDF5_BINARY_DIR}/COPYING.txt")
# .. variable:: CPACK_WIX_PRODUCT_ICON
# The Icon shown next to the program name in Add/Remove programs.
set(CPACK_WIX_PRODUCT_ICON "${HDF_RESOURCES_DIR}\\\\hdf.ico")
#
# .. variable:: CPACK_WIX_UI_BANNER
#
# The bitmap will appear at the top of all installer pages other than the
# welcome and completion dialogs.
#
# If set, this image will replace the default banner image.
#
# This image must be 493 by 58 pixels.
#
# .. variable:: CPACK_WIX_UI_DIALOG
#
# Background bitmap used on the welcome and completion dialogs.
#
# If this variable is set, the installer will replace the default dialog
# image.
#
# This image must be 493 by 312 pixels.
#
# Text shown in the Windows "Add/Remove Programs" entry.
set(CPACK_WIX_PROPERTY_ARPCOMMENTS "HDF5 (Hierarchical Data Format 5) Software Library and Utilities")
set(CPACK_WIX_PROPERTY_ARPURLINFOABOUT "${HDF5_PACKAGE_URL}")
set(CPACK_WIX_PROPERTY_ARPHELPLINK "${HDF5_PACKAGE_BUGREPORT}")
if (BUILD_SHARED_LIBS)
set(CPACK_WIX_PATCH_FILE "${HDF_RESOURCES_DIR}/patch.xml")
endif ()
elseif (APPLE)
# macOS: self-extracting tarball plus a drag-and-drop disk image, packaged
# as a single (non-component) archive.
list (APPEND CPACK_GENERATOR "STGZ")
list (APPEND CPACK_GENERATOR "DragNDrop")
set (CPACK_COMPONENTS_ALL_IN_ONE_PACKAGE ON)
set (CPACK_PACKAGING_INSTALL_PREFIX "/${CPACK_PACKAGE_INSTALL_DIRECTORY}")
set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_DIR}/hdf.icns")
# Optional macOS Framework-style bundle layout (requires both the pack
# option below and HDF5_BUILD_FRAMEWORKS to be enabled).
option (HDF5_PACK_MACOSX_FRAMEWORK "Package the HDF5 Library in a Frameworks" OFF)
if (HDF5_PACK_MACOSX_FRAMEWORK AND HDF5_BUILD_FRAMEWORKS)
set (CPACK_BUNDLE_NAME "${HDF5_PACKAGE_STRING}")
set (CPACK_BUNDLE_LOCATION "/") # make sure CMAKE_INSTALL_PREFIX ends in /
set (CMAKE_INSTALL_PREFIX "/${CPACK_BUNDLE_NAME}.framework/Versions/${CPACK_PACKAGE_VERSION}/${CPACK_PACKAGE_NAME}/")
set (CPACK_BUNDLE_ICON "${HDF_RESOURCES_DIR}/hdf.icns")
set (CPACK_BUNDLE_PLIST "${HDF5_BINARY_DIR}/CMakeFiles/Info.plist")
set (CPACK_SHORT_VERSION_STRING "${CPACK_PACKAGE_VERSION}")
#-----------------------------------------------------------------------------
# Configure the Info.plist file for the install bundle
#-----------------------------------------------------------------------------
configure_file (
${HDF_RESOURCES_DIR}/CPack.Info.plist.in
${HDF5_BINARY_DIR}/CMakeFiles/Info.plist @ONLY
)
configure_file (
${HDF_RESOURCES_DIR}/PkgInfo.in
${HDF5_BINARY_DIR}/CMakeFiles/PkgInfo @ONLY
)
configure_file (
${HDF_RESOURCES_DIR}/version.plist.in
${HDF5_BINARY_DIR}/CMakeFiles/version.plist @ONLY
)
install (
FILES ${HDF5_BINARY_DIR}/CMakeFiles/PkgInfo
DESTINATION ..
)
endif ()
else ()
# Other Unix (Linux, etc.): self-extracting tarball always; DEB and RPM
# packages only when the corresponding packaging tools are found.
list (APPEND CPACK_GENERATOR "STGZ")
set (CPACK_PACKAGING_INSTALL_PREFIX "/${CPACK_PACKAGE_INSTALL_DIRECTORY}")
set (CPACK_COMPONENTS_ALL_IN_ONE_PACKAGE ON)
find_program (DPKGSHLIB_EXE dpkg-shlibdeps)
if (DPKGSHLIB_EXE)
list (APPEND CPACK_GENERATOR "DEB")
set (CPACK_DEBIAN_PACKAGE_SECTION "Libraries")
set (CPACK_DEBIAN_PACKAGE_MAINTAINER "${HDF5_PACKAGE_BUGREPORT}")
endif ()
find_program (RPMBUILD_EXE rpmbuild)
if (RPMBUILD_EXE)
list (APPEND CPACK_GENERATOR "RPM")
set (CPACK_RPM_PACKAGE_RELEASE "1")
set (CPACK_RPM_PACKAGE_RELEASE_DIST ON)
set (CPACK_RPM_COMPONENT_INSTALL ON)
set (CPACK_RPM_PACKAGE_RELOCATABLE ON)
# Let rpmbuild compute the canonical RPM file name.
set (CPACK_RPM_FILE_NAME "RPM-DEFAULT")
set (CPACK_RPM_PACKAGE_NAME "${CPACK_PACKAGE_NAME}")
set (CPACK_RPM_PACKAGE_VERSION "${CPACK_PACKAGE_VERSION}")
set (CPACK_RPM_PACKAGE_VENDOR "${CPACK_PACKAGE_VENDOR}")
set (CPACK_RPM_PACKAGE_LICENSE "BSD-style")
set (CPACK_RPM_PACKAGE_GROUP "Development/Libraries")
set (CPACK_RPM_PACKAGE_URL "${HDF5_PACKAGE_URL}")
set (CPACK_RPM_PACKAGE_SUMMARY "HDF5 is a unique technology suite that makes possible the management of extremely large and complex data collections.")
set (CPACK_RPM_PACKAGE_DESCRIPTION
"The HDF5 technology suite includes:
* A versatile data model that can represent very complex data objects and a wide variety of metadata.
* A completely portable file format with no limit on the number or size of data objects in the collection.
* A software library that runs on a range of computational platforms, from laptops to massively parallel systems, and implements a high-level API with C, C++, Fortran 90, and Java interfaces.
* A rich set of integrated performance features that allow for access time and storage space optimizations.
* Tools and applications for managing, manipulating, viewing, and analyzing the data in the collection.
The HDF5 data model, file format, API, library, and tools are open and distributed without charge.
"
)
#-----------------------------------------------------------------------------
# Configure the spec file for the install RPM
#-----------------------------------------------------------------------------
# NOTE(review): the disabled lines below use HDF5_RESOURCES_DIR while the
# rest of this file uses HDF_RESOURCES_DIR — verify the variable name before
# re-enabling them.
# configure_file ("${HDF5_RESOURCES_DIR}/hdf5.spec.in" "${CMAKE_CURRENT_BINARY_DIR}/${HDF5_PACKAGE_NAME}.spec" @ONLY IMMEDIATE)
# set (CPACK_RPM_USER_BINARY_SPECFILE "${CMAKE_CURRENT_BINARY_DIR}/${HDF5_PACKAGE_NAME}.spec")
endif ()
endif ()
# By default, do not warn when built on machines using only VS Express:
if (NOT DEFINED CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_NO_WARNINGS)
set (CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_NO_WARNINGS ON)
endif ()
# Bundle the required compiler/system runtime libraries into the package.
include (InstallRequiredSystemLibraries)
# CPACK_INSTALL_CMAKE_PROJECTS is a flat list of quadruples:
#   <build directory>;<project name>;<component or ALL>;<install subdirectory>
# Start with the HDF5 build itself.
set (CPACK_INSTALL_CMAKE_PROJECTS "${HDF5_BINARY_DIR};HDF5;ALL;/")
# When packaging externally built compression libraries, append each external
# project's build tree so its files are included in the package. On Windows
# everything is taken in one ALL component; elsewhere the individual
# libraries/headers/configinstall components are listed explicitly.
if (HDF5_PACKAGE_EXTLIBS)
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
if (ZLIB_FOUND AND ZLIB_USE_EXTERNAL)
if (WIN32)
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;ALL;/")
else ()
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;libraries;/")
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;headers;/")
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;configinstall;/")
endif ()
endif ()
if (SZIP_FOUND AND SZIP_USE_EXTERNAL)
if (WIN32)
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${SZIP_INCLUDE_DIR_GEN};SZIP;ALL;/")
else ()
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${SZIP_INCLUDE_DIR_GEN};SZIP;libraries;/")
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${SZIP_INCLUDE_DIR_GEN};SZIP;headers;/")
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${SZIP_INCLUDE_DIR_GEN};SZIP;configinstall;/")
endif ()
endif ()
if (PLUGIN_FOUND AND PLUGIN_USE_EXTERNAL)
if (WIN32)
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${PLUGIN_BINARY_DIR};PLUGIN;ALL;/")
else ()
set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${PLUGIN_BINARY_DIR};PLUGIN;libraries;/")
endif ()
endif ()
endif ()
endif ()
# Pull in CPack with all of the variables configured above.
include (CPack)
# Installer "install types": named selections of components offered by the
# component-aware installers (e.g. NSIS).
# NOTE(review): several cpack_add_component calls below also list an install
# type named "User" which is never declared with cpack_add_install_type —
# confirm whether a User install type should be declared here.
cpack_add_install_type(Full DISPLAY_NAME "Everything")
cpack_add_install_type(Developer)
# Component groups used to organize components in the installer GUI.
cpack_add_component_group(Runtime)
cpack_add_component_group(Documents
EXPANDED
DESCRIPTION "Release notes for developing HDF5 applications"
)
cpack_add_component_group(Development
EXPANDED
DESCRIPTION "All of the tools you'll need to develop HDF5 applications"
)
cpack_add_component_group(Applications
EXPANDED
DESCRIPTION "Tools for HDF5 files"
)
#---------------------------------------------------------------------------
# Now list the cpack commands
#---------------------------------------------------------------------------
# Each component names an install() COMPONENT defined elsewhere in the build;
# DEPENDS expresses component prerequisites and GROUP places it in the
# installer GUI groups declared above.
# Core C library binaries.
cpack_add_component (libraries
DISPLAY_NAME "HDF5 Libraries"
GROUP Runtime
INSTALL_TYPES Full Developer User
)
# Public C headers (require the libraries).
cpack_add_component (headers
DISPLAY_NAME "HDF5 Headers"
DEPENDS libraries
GROUP Development
INSTALL_TYPES Full Developer
)
# Release notes and related documents.
cpack_add_component (hdfdocuments
DISPLAY_NAME "HDF5 Documents"
GROUP Documents
INSTALL_TYPES Full Developer
)
# CMake config-mode package files; hidden from the installer GUI.
cpack_add_component (configinstall
DISPLAY_NAME "HDF5 CMake files"
HIDDEN
DEPENDS libraries
GROUP Development
INSTALL_TYPES Full Developer User
)
# Optional Fortran bindings.
if (HDF5_BUILD_FORTRAN)
cpack_add_component (fortlibraries
DISPLAY_NAME "HDF5 Fortran Libraries"
DEPENDS libraries
GROUP Runtime
INSTALL_TYPES Full Developer User
)
cpack_add_component (fortheaders
DISPLAY_NAME "HDF5 Fortran Headers"
DEPENDS fortlibraries
GROUP Development
INSTALL_TYPES Full Developer
)
endif ()
# Optional C++ bindings.
if (HDF5_BUILD_CPP_LIB)
cpack_add_component (cpplibraries
DISPLAY_NAME "HDF5 C++ Libraries"
DEPENDS libraries
GROUP Runtime
INSTALL_TYPES Full Developer User
)
cpack_add_component (cppheaders
DISPLAY_NAME "HDF5 C++ Headers"
DEPENDS cpplibraries
GROUP Development
INSTALL_TYPES Full Developer
)
endif ()
# Always-built utility programs.
cpack_add_component (utilsapplications
DISPLAY_NAME "HDF5 Utility Applications"
DEPENDS libraries
GROUP Applications
INSTALL_TYPES Full Developer User
)
# Optional command-line tools (h5dump etc.). Note: toolsapplications depends
# on toolslibraries, declared just below — declaration order is irrelevant to
# CPack's dependency resolution.
if (HDF5_BUILD_TOOLS)
cpack_add_component (toolsapplications
DISPLAY_NAME "HDF5 Tools Applications"
DEPENDS toolslibraries
GROUP Applications
INSTALL_TYPES Full Developer User
)
cpack_add_component (toolslibraries
DISPLAY_NAME "HDF5 Tools Libraries"
DEPENDS libraries
GROUP Runtime
INSTALL_TYPES Full Developer User
)
cpack_add_component (toolsheaders
DISPLAY_NAME "HDF5 Tools Headers"
DEPENDS toolslibraries
GROUP Development
INSTALL_TYPES Full Developer
)
endif ()
# Optional High-Level (HL) library, with its own C++/Fortran sub-components.
if (HDF5_BUILD_HL_LIB)
cpack_add_component (hllibraries
DISPLAY_NAME "HDF5 HL Libraries"
DEPENDS libraries
GROUP Runtime
INSTALL_TYPES Full Developer User
)
cpack_add_component (hlheaders
DISPLAY_NAME "HDF5 HL Headers"
DEPENDS hllibraries
GROUP Development
INSTALL_TYPES Full Developer
)
cpack_add_component (hltoolsapplications
DISPLAY_NAME "HDF5 HL Tools Applications"
DEPENDS hllibraries
GROUP Applications
INSTALL_TYPES Full Developer User
)
if (HDF5_BUILD_CPP_LIB)
cpack_add_component (hlcpplibraries
DISPLAY_NAME "HDF5 HL C++ Libraries"
DEPENDS hllibraries
GROUP Runtime
INSTALL_TYPES Full Developer User
)
cpack_add_component (hlcppheaders
DISPLAY_NAME "HDF5 HL C++ Headers"
DEPENDS hlcpplibraries
GROUP Development
INSTALL_TYPES Full Developer
)
endif ()
if (HDF5_BUILD_FORTRAN)
cpack_add_component (hlfortlibraries
DISPLAY_NAME "HDF5 HL Fortran Libraries"
DEPENDS fortlibraries
GROUP Runtime
INSTALL_TYPES Full Developer User
)
endif ()
endif ()

File diff suppressed because it is too large Load Diff

@ -0,0 +1,56 @@
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
include (ExternalProject)
#option (HDF5_ALLOW_EXTERNAL_SUPPORT "Allow External Library Building (NO GIT TGZ)" "NO")
# HDF5_ALLOW_EXTERNAL_SUPPORT selects how the external filter-plugin library
# may be obtained: NO (disabled), GIT (clone a repository) or TGZ (use a
# local source tarball).
set (HDF5_ALLOW_EXTERNAL_SUPPORT "NO" CACHE STRING "Allow External Library Building (NO GIT TGZ)")
set_property (CACHE HDF5_ALLOW_EXTERNAL_SUPPORT PROPERTY STRINGS NO GIT TGZ)
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
# Default to building the PLUGIN filter library externally when external
# support is enabled at all.
option (PLUGIN_USE_EXTERNAL "Use External Library Building for filter PLUGIN" 1)
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT")
set (PLUGIN_URL ${PLUGIN_GIT_URL} CACHE STRING "Path to PLUGIN git repository")
set (PLUGIN_BRANCH ${PLUGIN_GIT_BRANCH})
elseif (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
# TGZPATH is the directory holding the source tarballs; default to the
# HDF5 source directory when the user did not set it.
if (NOT TGZPATH)
set (TGZPATH ${HDF5_SOURCE_DIR})
endif ()
set (PLUGIN_URL ${TGZPATH}/${PLUGIN_TGZ_NAME})
# If the tarball is missing, force plugin support off now instead of
# failing later during the external build.
if (NOT EXISTS "${PLUGIN_URL}")
set (HDF5_ENABLE_PLUGIN_SUPPORT OFF CACHE BOOL "" FORCE)
message (STATUS "Filter PLUGIN file ${PLUGIN_URL} not found")
endif ()
else ()
# NOTE(review): this branch is unreachable — the enclosing condition
# already restricts the value to GIT or TGZ.
set (PLUGIN_USE_EXTERNAL 0)
endif ()
endif ()
#-----------------------------------------------------------------------------
# Option for PLUGIN support
#-----------------------------------------------------------------------------
option (HDF5_ENABLE_PLUGIN_SUPPORT "Enable PLUGIN Filters" OFF)
if (HDF5_ENABLE_PLUGIN_SUPPORT)
# First try to locate a pre-built plugin package on the system (config-mode
# find_package with the packaged name, then a legacy module-mode fallback),
# unless the user asked for an external build.
if (NOT PLUGIN_USE_EXTERNAL)
find_package (PLUGIN NAMES ${PLUGIN_PACKAGE_NAME}${HDF_PACKAGE_EXT})
if (NOT PLUGIN_FOUND)
find_package (PLUGIN) # Legacy find
endif ()
endif ()
# Fall back to building the plugin library as an external project (GIT or
# TGZ mode); otherwise plugin support cannot be satisfied.
if (NOT PLUGIN_FOUND)
if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ")
EXTERNAL_PLUGIN_LIBRARY (${HDF5_ALLOW_EXTERNAL_SUPPORT})
message (STATUS "Filter PLUGIN is built")
else ()
message (FATAL_ERROR " PLUGIN is Required for PLUGIN support in HDF5")
endif ()
endif ()
message (STATUS "Filter PLUGIN is ON")
endif ()

@ -0,0 +1,253 @@
{
"version": 6,
"include": [
"config/cmake-presets/hidden-presets.json"
],
"configurePresets": [
{
"name": "ci-base-tgz",
"hidden": true,
"inherits": "ci-base",
"cacheVariables": {
"HDF5_ALLOW_EXTERNAL_SUPPORT": "NO",
"TGZPATH": {"type": "STRING", "value": "${sourceParentDir}/temp"}
}
},
{
"name": "ci-StdCompression",
"hidden": true,
"inherits": "ci-base-tgz",
"cacheVariables": {
"HDF5_ENABLE_Z_LIB_SUPPORT": "ON",
"HDF5_ENABLE_SZIP_SUPPORT": "ON",
"HDF5_ENABLE_SZIP_ENCODING": "ON",
"BUILD_ZLIB_WITH_FETCHCONTENT": "ON",
"ZLIB_PACKAGE_NAME": {"type": "STRING", "value": "zlib"},
"ZLIB_TGZ_ORIGPATH": {"type": "STRING", "value": "https://github.com/madler/zlib/releases/download/v1.2.13"},
"ZLIB_TGZ_ORIGNAME": {"type": "STRING", "value": "zlib-1.2.13.tar.gz"},
"ZLIB_USE_LOCALCONTENT": "OFF",
"BUILD_SZIP_WITH_FETCHCONTENT": "ON",
"LIBAEC_PACKAGE_NAME": {"type": "STRING", "value": "libaec"},
"LIBAEC_TGZ_ORIGPATH": {"type": "STRING", "value": "https://github.com/MathisRosenhauer/libaec/releases/download/v1.0.6"},
"LIBAEC_TGZ_ORIGNAME": {"type": "STRING", "value": "libaec-1.0.6.tar.gz"},
"LIBAEC_USE_LOCALCONTENT": "OFF"
}
},
{
"name": "ci-base-plugins",
"hidden": true,
"inherits": "ci-base-tgz",
"cacheVariables": {
"PLUGIN_TGZ_NAME": {"type": "STRING", "value": "hdf5_plugins-1.14.0.tar.gz"},
"PLUGIN_PACKAGE_NAME": {"type": "STRING", "value": "pl"},
"BSHUF_TGZ_NAME": {"type": "STRING", "value": "bitshuffle.tar.gz"},
"BSHUF_PACKAGE_NAME": {"type": "STRING", "value": "bshuf"},
"BLOSC_TGZ_NAME": {"type": "STRING", "value": "c-blosc.tar.gz"},
"BLOSC_PACKAGE_NAME": {"type": "STRING", "value": "blosc"},
"BLOSC_ZLIB_TGZ_NAME": {"type": "STRING", "value": "ZLib.tar.gz"},
"BLOSC_ZLIB_PACKAGE_NAME": {"type": "STRING", "value": "zlib"},
"BZ2_TGZ_NAME": {"type": "STRING", "value": "BZ2.tar.gz"},
"BZ2_PACKAGE_NAME": {"type": "STRING", "value": "bz2"},
"FPZIP_TGZ_NAME": {"type": "STRING", "value": "fpzip.tar.gz"},
"FPZIP_PACKAGE_NAME": {"type": "STRING", "value": "fpzip"},
"JPEG_TGZ_NAME": {"type": "STRING", "value": "JPEG.tar.gz"},
"JPEG_PACKAGE_NAME": {"type": "STRING", "value": "jpeg"},
"BUILD_LZ4_LIBRARY_SOURCE": "ON",
"LZ4_TGZ_NAME": {"type": "STRING", "value": "lz4.tar.gz"},
"LZ4_PACKAGE_NAME": {"type": "STRING", "value": "lz4"},
"LZF_TGZ_NAME": {"type": "STRING", "value": "lzf.tar.gz"},
"LZF_PACKAGE_NAME": {"type": "STRING", "value": "lzf"},
"SZ_TGZ_NAME": {"type": "STRING", "value": "szf.tar.gz"},
"SZ_PACKAGE_NAME": {"type": "STRING", "value": "SZ"},
"ZFP_TGZ_NAME": {"type": "STRING", "value": "zfp.tar.gz"},
"ZFP_PACKAGE_NAME": {"type": "STRING", "value": "zfp"},
"ZSTD_TGZ_NAME": {"type": "STRING", "value": "zstd.tar.gz"},
"ZSTD_PACKAGE_NAME": {"type": "STRING", "value": "zstd"}
}
},
{
"name": "ci-StdPlugins",
"hidden": true,
"inherits": ["ci-base-plugins", "ci-base-tgz"],
"cacheVariables": {
"HDF5_ENABLE_PLUGIN_SUPPORT": "ON",
"PLUGIN_TGZ_ORIGPATH": {"type": "STRING", "value": "https://github.com/HDFGroup/hdf5_plugins/archive/refs/tags"},
"PLUGIN_TGZ_ORIGNAME": {"type": "STRING", "value": "hdf5_plugins-1.14.0.tar.gz"}
}
},
{
"name": "ci-StdExamples",
"hidden": true,
"inherits": "ci-base",
"cacheVariables": {
"HDF5_PACK_EXAMPLES": "ON",
"HDF5_EXAMPLES_COMPRESSED": {"type": "STRING", "value": "hdf5-examples-2.0.3.tar.gz"},
"HDF5_EXAMPLES_COMPRESSED_DIR": {"type": "STRING", "value": "${sourceParentDir}/temp"},
"EXAMPLES_TGZ_ORIGPATH": {"type": "STRING", "value": "https://github.com/HDFGroup/hdf5-examples/archive/refs/tags/"},
"EXAMPLES_TGZ_ORIGNAME": {"type": "STRING", "value": "2.0.3.tar.gz"},
"EXAMPLES_DOWNLOAD": "ON"
}
},
{
"name": "ci-StdShar",
"hidden": true,
"inherits": "ci-StdCompression",
"cacheVariables": {
"HDF_PACKAGE_NAMESPACE": {"type": "STRING", "value": "hdf5::"},
"HDF5_INSTALL_MOD_FORTRAN": "NO",
"HDF5_BUILD_GENERATORS": "ON",
"HDF5_ENABLE_ALL_WARNINGS": "ON",
"HDF5_MINGW_STATIC_GCC_LIBS": "ON",
"HDF_TEST_EXPRESS": "2"
}
},
{
"name": "ci-StdShar-MSVC",
"description": "MSVC Standard Config for x64 (Release)",
"inherits": [
"ci-x64-Release-MSVC",
"ci-CPP",
"ci-Java",
"ci-StdShar",
"ci-StdExamples"
]
},
{
"name": "ci-StdShar-MSVC-Fortran",
"description": "MSVC Standard Config for x64 (Release)",
"inherits": [
"ci-x64-Release-MSVC",
"ci-CPP",
"ci-Fortran",
"ci-Java",
"ci-StdShar",
"ci-StdExamples"
]
},
{
"name": "ci-StdShar-Clang",
"description": "Clang Standard Config for x64 (Release)",
"inherits": [
"ci-x64-Release-Clang",
"ci-CPP",
"ci-Fortran",
"ci-Java",
"ci-StdShar",
"ci-StdExamples"
]
},
{
"name": "ci-StdShar-GNUC",
"description": "GNUC Standard Config for x64 (Release)",
"inherits": [
"ci-x64-Release-GNUC",
"ci-CPP",
"ci-Fortran",
"ci-Java",
"ci-StdShar",
"ci-StdExamples"
]
}
],
"buildPresets": [
{
"name": "ci-StdShar-MSVC",
"description": "MSVC Standard Build for x64 (Release)",
"configurePreset": "ci-StdShar-MSVC",
"inherits": [
"ci-x64-Release-MSVC"
]
},
{
"name": "ci-StdShar-Clang",
"description": "Clang Standard Build for x64 (Release)",
"configurePreset": "ci-StdShar-Clang",
"inherits": [
"ci-x64-Release-Clang"
]
},
{
"name": "ci-StdShar-GNUC",
"description": "GNUC Standard Build for x64 (Release)",
"configurePreset": "ci-StdShar-GNUC",
"verbose": false,
"inherits": [
"ci-x64-Release-GNUC"
]
}
],
"testPresets": [
{
"name": "ci-StdShar-MSVC",
"configurePreset": "ci-StdShar-MSVC",
"inherits": [
"ci-x64-Release-MSVC"
],
"filter": {
"exclude": {
"name": "H5DUMP-tfloatsattrs"
}
}
},
{
"name": "ci-StdShar-Clang",
"configurePreset": "ci-StdShar-Clang",
"inherits": [
"ci-x64-Release-Clang"
]
},
{
"name": "ci-StdShar-GNUC",
"configurePreset": "ci-StdShar-GNUC",
"inherits": [
"ci-x64-Release-GNUC"
]
}
],
"packagePresets": [
{
"name": "ci-StdShar-MSVC",
"configurePreset": "ci-StdShar-MSVC",
"inherits": "ci-x64-Release-MSVC"
},
{
"name": "ci-StdShar-Clang",
"configurePreset": "ci-StdShar-Clang",
"inherits": "ci-x64-Release-Clang"
},
{
"name": "ci-StdShar-GNUC",
"configurePreset": "ci-StdShar-GNUC",
"inherits": "ci-x64-Release-GNUC"
}
],
"workflowPresets": [
{
"name": "ci-StdShar-MSVC",
"steps": [
{"type": "configure", "name": "ci-StdShar-MSVC"},
{"type": "build", "name": "ci-StdShar-MSVC"},
{"type": "test", "name": "ci-StdShar-MSVC"},
{"type": "package", "name": "ci-StdShar-MSVC"}
]
},
{
"name": "ci-StdShar-Clang",
"steps": [
{"type": "configure", "name": "ci-StdShar-Clang"},
{"type": "build", "name": "ci-StdShar-Clang"},
{"type": "test", "name": "ci-StdShar-Clang"},
{"type": "package", "name": "ci-StdShar-Clang"}
]
},
{
"name": "ci-StdShar-GNUC",
"steps": [
{"type": "configure", "name": "ci-StdShar-GNUC"},
{"type": "build", "name": "ci-StdShar-GNUC"},
{"type": "test", "name": "ci-StdShar-GNUC"},
{"type": "package", "name": "ci-StdShar-GNUC"}
]
}
]
}

@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
help@hdfgroup.org.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

@ -0,0 +1,136 @@
# How to contribute to HDF5
The HDF Group encourages community members to contribute to the HDF5 project. We accept and are very grateful for any contributions,
from minor typos and bug fixes to new features. The HDF Group is committed to working with code contributors and to making the
contribution process enjoyable and straightforward.
This document describes guiding principles for HDF5 code contributors and does not attempt to address every possible
contribution. If in doubt, please do not hesitate to ask us for guidance.
***Note that no contribution may be accepted unless the donor agrees with the HDF Group software license terms
found in the COPYING file in every branch's top source directory.***
> We will assume that you are familiar with `git` and `GitHub`. If not, you may go through the GitHub tutorial found at
[https://guides.github.com/activities/hello-world/](https://guides.github.com/activities/hello-world/). This tutorial should only take
around 10 minutes.
## Table of Contents
* [Workflow](#workflow)
* [Acceptance criteria for a pull request](#criteria)
* [Release Note](#releasenote)
* [Check List](#checklist)
# Workflow <A NAME="workflow"></A>
The process for contributing code to HDF5 is as follows:
* Open an issue on [HDF5 GitHub](https://github.com/HDFGroup/hdf5/issues).
> This step is ***required*** unless the change is minor (e.g., typo fix).
* Fork the [HDF5](https://github.com/HDFGroup/hdf5) repository.
* Make the desired changes to the HDF5 software.
* New features should always go to _develop_ branch first and later should be merged to the appropriate maintenance branches.
* Bug fixes should go to all appropriate branches (_develop_ and maintenance).
* Build and test your changes. Detailed instructions on building and testing HDF5 can be found in the `INSTALL*` files in the `release_docs` directory.
* Push your changes to GitHub.
* Issue a pull request and address any code formatting and testing issues reported.
Once a pull request is correctly formatted and passes **ALL** CI tests, it will be reviewed and evaluated by The HDF Group developers and HDF5
community members who can approve pull requests. The HDF Group developers will work with you to ensure that the pull request satisfies the acceptance
criteria described in the next section.
# Acceptance criteria for a pull request <A NAME="criteria"></A>
We appreciate every contribution we receive, but we may not accept them all. Those that we *do* accept satisfy the following criteria:
* **The pull request has a clear purpose** - What does the pull request address? How does it benefit the HDF5 community?
If the pull request does not have a clear purpose and benefits, it will not be accepted.
* **The pull request is documented** - The HDF5 developers must understand not only *what* a change is doing, but *how* it is doing it.
Documenting the code makes it easier for us to understand your patch and maintain the code in the future.
* **The pull request passes HDF5 regression testing** - Any issue fixed or functionality added should be accompanied by the corresponding
tests and pass HDF5 regression testing run by The HDF Group. We do not expect you to perform comprehensive testing across multiple platforms
before we accept the pull request. If the pull request does not pass regression testing after the merge, The HDF Group developers will work
with you on the fixes.
* **The pull request does not compromise the principles behind HDF5** - HDF5 has a 100% commitment to backward compatibility.
* Any file ever created with HDF5 must be readable by any future version of HDF5.
If your patch's purpose is to modify the HDF5 data model or file format,
**please** discuss this with us first. File format changes and features required by those changes can be introduced only in a new major release.
* HDF5 has a commitment to remaining *machine-independent*; data created on one platform/environment/architecture **must** remain readable by HDF5 on any other.
* For binary compatibility, no changes are allowed to public APIs and data structures in the maintenance releases; new APIs can be added.
* **New features are documented** - Any new features should have proper documentation; talk to us if you have any questions.
* **When to Write a Release Note** - Generally, a release note must be written for every change that is made to the code for which
users might see a change in the way the software works. In other words, if a user might see a difference in the way the software works,
a note should be written. By code we mean the text that will be compiled into one of the company's software products. The code includes
configuration changes and changes to tools users might work with to configure and build our software.
* Notes should be added for known problems. Known problems are issues that we know about and have not yet been able to fix.
* Any change made to address a user-reported problem should be described in a release note.
* A release note does not need to be written for changes to the code that users will not see. Here are some examples. If you add a
comment, you do not need to write a release note describing the comment you added. If you rewrite some code to make it read more
clearly and if there is no change in functionality or performance, then you do not need to write a release note. If you change the
process by which user software is made, you may not need to write a release note since the change was not made to the code.
* Users. We have different kinds of users. A release note may be written to be helpful to
application developers and not system administrators. Users who may find the RELEASE.txt file helpful include the following:
application developers, library developers, and system administrators.
# Release Note <A NAME="releasenote"></A>
* **Entry Syntax**
The release note entry syntax is shown below.
```
- Title/Problem
Problem/Solution
```
* **Entry Elements** - The elements of the entry - title, problem, solution, and signature - are described in more detail in the table
below. Descriptions of the problem and the solution should be clear without any ambiguities and should be short without losing clarity or specifics.
* **Title** - The title or tag should identify one or more categories that will help readers decide if the entry is something they need to study. Can be combined with the `Problem` element
* **Problem** - Describe the problem and how users might see the problem in a paragraph.
You might also consider the following as you describe the problem:
* Under what specific conditions does this issue arise?
* Under what specific conditions are we sure this issue will not arise?
* For a performance issue, instead of saying something is a performance issue, describe what the performance impact of the issue is.
* **Solution** - Describe the solution in another paragraph.
You might also consider the following as you describe the solution:
* What was done to resolve the issue?
* What is the functional impact?
* Is there a workaround – a way for users to design their software so as not to encounter the issue? If so, what is the workaround?
* For a performance fix, how has the performance improved? Links to published documentation would be good.
# Checklist <A NAME="checklist"></A>
Please make sure that you check the items applicable to your pull request:
* Code
* [ ] Does the pull request have a corresponding GitHub issue and clear purpose?
* [ ] Does the pull request follow HDF5 best practices (naming conventions, code portability, code structure, etc.)? <<TODO: link to the document>>
* [ ] If changes were done to Autotools build, were they added to CMake and vice versa?
* [ ] Is the pull request applicable to any other branches? If yes, which ones? Please document it in the GitHub issue.
* [ ] Is the new code sufficiently documented for future maintenance?
* [ ] Does the new feature require a change to an existing API? See "API Compatibility Macros" document (https://portal.hdfgroup.org/display/HDF5/API+Compatibility+Macros)
* Documentation
* [ ] Was the change described in the release_docs/RELEASE.txt file?
* [ ] Was the new function documented in the corresponding public header file using [Doxygen](https://docs.hdfgroup.org/hdf5/develop/_r_m_t.html)?
* [ ] Was new functionality documented for the HDF5 community (the level of documentation depends on the feature; ask us what would be appropriate)
* Testing
* [ ] Does the pull request have tests?
* [ ] Does the pull request affect HDF5 library performance?
We want as many contributions as we can get, and we are here to help. Feel free to reach out to us if you have any questions.
Thank you for your contribution!

@ -0,0 +1,106 @@
Copyright Notice and License Terms for
HDF5 (Hierarchical Data Format 5) Software Library and Utilities
-----------------------------------------------------------------------------
HDF5 (Hierarchical Data Format 5) Software Library and Utilities
Copyright 2006 by The HDF Group.
NCSA HDF5 (Hierarchical Data Format 5) Software Library and Utilities
Copyright 1998-2006 by The Board of Trustees of the University of Illinois.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted for any purpose (including commercial purposes)
provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions, and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions, and the following disclaimer in the documentation
and/or materials provided with the distribution.
3. Neither the name of The HDF Group, the name of the University, nor the
name of any Contributor may be used to endorse or promote products derived
from this software without specific prior written permission from
The HDF Group, the University, or the Contributor, respectively.
DISCLAIMER:
THIS SOFTWARE IS PROVIDED BY THE HDF GROUP AND THE CONTRIBUTORS
"AS IS" WITH NO WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED. IN NO
EVENT SHALL THE HDF GROUP OR THE CONTRIBUTORS BE LIABLE FOR ANY DAMAGES
SUFFERED BY THE USERS ARISING OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You are under no obligation whatsoever to provide any bug fixes, patches, or
upgrades to the features, functionality or performance of the source code
("Enhancements") to anyone; however, if you choose to make your Enhancements
available either publicly, or directly to The HDF Group, without imposing a
separate written license agreement for such Enhancements, then you hereby
grant the following license: a non-exclusive, royalty-free perpetual license
to install, use, modify, prepare derivative works, incorporate into other
computer software, distribute, and sublicense such enhancements or derivative
works thereof, in binary and source code form.
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Limited portions of HDF5 were developed by Lawrence Berkeley National
Laboratory (LBNL). LBNL's Copyright Notice and Licensing Terms can be
found here: COPYING_LBNL_HDF5 file in this directory or at
http://support.hdfgroup.org/ftp/HDF5/releases/COPYING_LBNL_HDF5.
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Contributors: National Center for Supercomputing Applications (NCSA) at
the University of Illinois, Fortner Software, Unidata Program Center
(netCDF), The Independent JPEG Group (JPEG), Jean-loup Gailly and Mark Adler
(gzip), and Digital Equipment Corporation (DEC).
-----------------------------------------------------------------------------
Portions of HDF5 were developed with support from the Lawrence Berkeley
National Laboratory (LBNL) and the United States Department of Energy
under Prime Contract No. DE-AC02-05CH11231.
-----------------------------------------------------------------------------
Portions of HDF5 were developed with support from Lawrence Livermore
National Laboratory and the United States Department of Energy under
Prime Contract No. DE-AC52-07NA27344.
-----------------------------------------------------------------------------
Portions of HDF5 were developed with support from the University of
California, Lawrence Livermore National Laboratory (UC LLNL).
The following statement applies to those portions of the product and must
be retained in any redistribution of source code, binaries, documentation,
and/or accompanying materials:
This work was partially produced at the University of California,
Lawrence Livermore National Laboratory (UC LLNL) under contract
no. W-7405-ENG-48 (Contract 48) between the U.S. Department of Energy
(DOE) and The Regents of the University of California (University)
for the operation of UC LLNL.
DISCLAIMER:
THIS WORK WAS PREPARED AS AN ACCOUNT OF WORK SPONSORED BY AN AGENCY OF
THE UNITED STATES GOVERNMENT. NEITHER THE UNITED STATES GOVERNMENT NOR
THE UNIVERSITY OF CALIFORNIA NOR ANY OF THEIR EMPLOYEES, MAKES ANY
WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY OR RESPONSIBILITY
FOR THE ACCURACY, COMPLETENESS, OR USEFULNESS OF ANY INFORMATION,
APPARATUS, PRODUCT, OR PROCESS DISCLOSED, OR REPRESENTS THAT ITS USE
WOULD NOT INFRINGE PRIVATELY- OWNED RIGHTS. REFERENCE HEREIN TO ANY
SPECIFIC COMMERCIAL PRODUCTS, PROCESS, OR SERVICE BY TRADE NAME,
TRADEMARK, MANUFACTURER, OR OTHERWISE, DOES NOT NECESSARILY CONSTITUTE
OR IMPLY ITS ENDORSEMENT, RECOMMENDATION, OR FAVORING BY THE UNITED
STATES GOVERNMENT OR THE UNIVERSITY OF CALIFORNIA. THE VIEWS AND
OPINIONS OF AUTHORS EXPRESSED HEREIN DO NOT NECESSARILY STATE OR REFLECT
THOSE OF THE UNITED STATES GOVERNMENT OR THE UNIVERSITY OF CALIFORNIA,
AND SHALL NOT BE USED FOR ADVERTISING OR PRODUCT ENDORSEMENT PURPOSES.
-----------------------------------------------------------------------------

@ -0,0 +1,61 @@
****************************
*** Copyright Notice ***
Hierarchical Data Format 5 (HDF5) v1.12.0 Copyright (c) 2020, HDF Group and The
Regents of the University of California, through Lawrence Berkeley National
Laboratory (subject to receipt of any required approvals from the U.S. Dept. of
Energy). All rights reserved.
If you have questions about your rights to use or distribute this software,
please contact Berkeley Lab's Intellectual Property Office at IPO@lbl.gov.
NOTICE. This Software was partially developed under funding from the U.S.
Department of Energy and the U.S. Government consequently retains certain
rights. As such, the U.S. Government has been granted for itself and others
acting on its behalf a paid-up, nonexclusive, irrevocable, worldwide license in
the Software to reproduce, distribute copies to the public, prepare derivative
works, and perform publicly and display publicly, and to permit others to do so.
****************************
*** License Agreement ***
Hierarchical Data Format 5 (HDF5) v1.12.0 Copyright (c) 2020, HDF Group and The
Regents of the University of California, through Lawrence Berkeley National
Laboratory (subject to receipt of any required approvals from the U.S. Dept. of
Energy). All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
(1) Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
(2) Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
(3) Neither the name of the HDF Group, University of California, Lawrence
Berkeley National Laboratory, U.S. Dept. of Energy, nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You are under no obligation whatsoever to provide any bug fixes, patches, or
upgrades to the features, functionality or performance of the source code
("Enhancements") to anyone; however, if you choose to make your Enhancements
available either publicly, or directly to Lawrence Berkeley National Laboratory,
without imposing a separate written license agreement for such Enhancements,
then you hereby grant the following license: a non-exclusive, royalty-free
perpetual license to install, use, modify, prepare derivative works, incorporate
into other computer software, distribute, and sublicense such enhancements or
derivative works thereof, in binary and source code form.

@ -0,0 +1,52 @@
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
## This file should be placed in the root directory of your project.
## Then modify the CMakeLists.txt file in the root directory of your
## project to incorporate the testing dashboard.
## # The following are required to use Dart and the CDash dashboard
## ENABLE_TESTING()
## INCLUDE(CTest)
# Project identity and the local time at which the CDash "Nightly"
# testing window starts.
set (CTEST_PROJECT_NAME "HDF5")
set (CTEST_NIGHTLY_START_TIME "18:00:00 CST")
# Test results are submitted ("dropped") to a CDash server over HTTPS.
# The drop site and drop location may be pre-seeded by the caller via
# CTEST_DROP_SITE_INIT / CTEST_DROP_LOCATION_INIT; otherwise they fall
# back to the internal server when CDASH_LOCAL is set, or to the public
# server when it is not.
set (CTEST_DROP_METHOD "https")
if (CTEST_DROP_SITE_INIT)
set (CTEST_DROP_SITE "${CTEST_DROP_SITE_INIT}")
else ()
if (CDASH_LOCAL)
set (CTEST_DROP_SITE "cdash-internal.hdfgroup.org")
else ()
set (CTEST_DROP_SITE "cdash.hdfgroup.org")
endif ()
endif ()
if (CTEST_DROP_LOCATION_INIT)
set (CTEST_DROP_LOCATION "${CTEST_DROP_LOCATION_INIT}")
else ()
if (CDASH_LOCAL)
set (CTEST_DROP_LOCATION "/submit.php?project=HDF5114")
else ()
set (CTEST_DROP_LOCATION "/submit.php?project=HDF5")
endif ()
endif ()
set (CTEST_DROP_SITE_CDASH TRUE)
# Sources are updated from git during dashboard runs.
set (UPDATE_TYPE git)
# Configuration used by ctest memory-check (MemCheck) runs.
set (VALGRIND_COMMAND "/usr/bin/valgrind")
set (VALGRIND_COMMAND_OPTIONS "-v --tool=memcheck --leak-check=full --track-fds=yes --num-callers=50 --show-reachable=yes --track-origins=yes --malloc-fill=0xff --free-fill=0xfe")
# Per-test timeout (seconds) and the delay between retried submissions.
set (CTEST_TEST_TIMEOUT 1200 CACHE STRING
"Maximum time allowed before CTest will kill the test.")
set (DART_TESTING_TIMEOUT 1200 CACHE STRING
"Maximum time allowed before CTest will kill the test." FORCE)
set (CTEST_SUBMIT_RETRY_DELAY 20 CACHE STRING
"How long to wait between timed-out CTest submissions.")

@ -0,0 +1 @@
zlib >= 1.1.2

1295
Makefile

File diff suppressed because it is too large Load Diff

@ -0,0 +1,226 @@
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
##
## Makefile.am
## Run automake to generate a Makefile.in from this file.
##
#
# This makefile mostly just reinvokes make in the various subdirectories
# but does so in the correct order. You can alternatively invoke make from
# each subdirectory manually.
#
# Top-level HDF5 Makefile(.in)
# pmake has issues if variables are undefined. Solve this problem in
# top-level Makefile by defining .MAKEFLAGS target to -V before pmake can
# encounter any undefined variables.
# Automake resists putting anything but variable definitions first in
# a Makefile.in, so we'll put a placebo comment here and use sed in
# bin/reconfigure to turn it into the .MAKEFLAGS target. Sigh. -JL 2005
# Configure should set AM_MAKEFLAGS to -V to solve this problem in
# subdirectories.
# NOTE: This means that invoking pmake in a subdirectory will not work.
#xxx.MAKEFLAGS:@AM_MAKEFLAGS@
#xxx $(MAKE) all
#xxx
include $(top_srcdir)/config/commence.am
# include Doxygen rules (requires autoconf-archive >2016-03-20)
@DX_RULES@
# Define subdirectories to build.
## Automake understands that `make distclean' should recurse into
## conditional subdirectories even if `make all' does not.
## We need to list the examples directory in the DIST_SUBDIRS variable
## so that it will be visited by `make distclean'
# Add this directory to SUBDIRS so that examples get built after tools
# but before examples in extra interfaces (c++ and fortran).
# Since we're explicitly listing DIST_SUBDIRS, we also need to list
# directories that are only conditionally built (so that their Makefiles
# are cleaned as well).
# Note that `make clean' will not affect the examples or doc directories.
# Conditionals. These conditionals are defined during configure
# Define each variable to empty if it is not used to placate pmake
# Directory holding the C++ interface; empty when that interface is disabled.
if BUILD_CXX_CONDITIONAL
CXX_DIR =c++
else
CXX_DIR=
endif
# Directory holding the Fortran interface; empty when disabled.
if BUILD_FORTRAN_CONDITIONAL
FORTRAN_DIR =fortran
else
FORTRAN_DIR=
endif
# Directory holding the Java interface; empty when disabled.
if BUILD_JAVA_CONDITIONAL
JAVA_DIR=java
else
JAVA_DIR=
endif
# Directory holding the high-level library; empty when disabled.
if BUILD_HDF5_HL_CONDITIONAL
HDF5_HL_DIR =hl
else
HDF5_HL_DIR=
endif
# Directory holding the serial tests; empty when tests are disabled.
if BUILD_TESTS_CONDITIONAL
TESTSERIAL_DIR =test
else
TESTSERIAL_DIR=
endif
# Directory holding the parallel tests; empty when disabled.
if BUILD_TESTS_PARALLEL_CONDITIONAL
TESTPARALLEL_DIR =testpar
else
TESTPARALLEL_DIR=
endif
# Directory holding the command-line tools; empty when disabled.
if BUILD_TOOLS_CONDITIONAL
TOOLS_DIR =tools
else
TOOLS_DIR=
endif
# Build order: core library first, then tests/tools, then `.' (examples),
# then the optional language interfaces.
SUBDIRS = src $(TESTSERIAL_DIR) $(TESTPARALLEL_DIR) bin $(TOOLS_DIR) utils . \
$(CXX_DIR) $(FORTRAN_DIR) $(JAVA_DIR) $(HDF5_HL_DIR)
DIST_SUBDIRS = src test testpar bin tools utils . c++ fortran hl examples java
# Some files generated during configure that should be cleaned
DISTCLEANFILES=config/stamp1 config/stamp2
# Some files/directories generated during check that should be cleaned
CHECK_CLEANFILES+=*-tmp
# Define rules for lib, progs, check, and tests.
# These simply involve recursing into subdirectories.
test _test: check
# Recursive targets: re-run the same target in each subdirectory except
# `.', stopping at the first failure.
lib progs check-p check-s:
for d in $(SUBDIRS); do \
if test $$d != .; then \
(cd $$d && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1; \
fi; \
done
# Make all, tests, and (un)install
tests:
for d in $(SUBDIRS); do \
if test $$d != .; then \
(cd $$d && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1; \
fi; \
done
# Check-clean also recurses into examples directory
check-clean:
for d in $(SUBDIRS) examples; do \
if test $$d != .; then \
(cd $$d && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1; \
fi; \
done
$(RM) -rf prefix-tmp destdir-tmp
# Some C++ compilers/linkers will create a directory named ii_files in
# the root directory, which should be cleaned.
mostlyclean-local:
if test -d ii_files; then \
$(RM) -rf ii_files; \
fi
# 'make install' will now install examples, the same as 'make install-all'.
# 'make-install-all' will be redundant but will still work.
install: install-recursive install-examples
uninstall: uninstall-recursive uninstall-examples
# 'make install-all' also installs examples
install-all:
@$(MAKE) $(AM_MAKEFLAGS) install
uninstall-all:
@$(MAKE) $(AM_MAKEFLAGS) uninstall
# Install examples in this directory and recursively
install-examples uninstall-examples:
for d in examples $(HDF5_INTERFACES) $(HL); do \
(cd $$d && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1; \
done
# Placeholder to remind users that docs are now in a separate repository.
install-doc:
@echo "docs no longer live in this tree. Use install-examples to install examples."
uninstall-doc:
@echo "docs no longer live in this tree. Use install-examples to install examples."
# `make check-install' or `make installcheck' checks that examples can
# be successfully built
# NOTE(review): when installing into a DESTDIR staging area, h5redeploy
# -force is run first — presumably to fix up paths embedded in the
# installed wrapper scripts; confirm against bin/h5redeploy.
installcheck-local:
if test -n "${DESTDIR}"; then \
(cd ${DESTDIR}$(bindir) && pwd && ./h5redeploy -force); \
fi
@(cd examples && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1;
# check-install is just a synonym for installcheck
check-install: installcheck
# check-all-install tests all installation methods.
# Install via different mechanism and then compare against the default.
# Fine if only libXXX.a files are different since they may have been ranlib'ed.
check-all-install:
@echo Installing to default location
$(MAKE) install
@echo Installing to different prefix location
$(MAKE) prefix=${ROOT}/prefix-tmp install
@echo Compare against the default installation.
@echo Fine if only libXXX.a files are different.
-diff -r prefix-tmp ${prefix}
@echo Installing to different $$\DESTDIR location
env DESTDIR=${ROOT}/destdir-tmp $(MAKE) install
@echo Compare against the default installation.
@echo Fine if only libXXX.a files are different.
-diff -r destdir-tmp${prefix} ${prefix}
# Only source files in the src directory include tracing information,
# so 'make trace' only needs to recurse into that directory.
trace:
@(cd src && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1;
# doxygen support
if BUILD_DOXYGEN_CONDITIONAL
doxygen: doxygen-doc
endif
# Run tests with different Virtual File Drivers.
# Currently, only invoke check-vfd in the test directory.
check-vfd:
for d in src utils test; do \
if test $$d != .; then \
(cd $$d && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1; \
fi; \
done
# Run tests with different passthrough Virtual Object Layer Connectors.
# NOTE: Will only succeed with passthrough VOL connectors that use
# the native VOL connector as the terminal connector.
check-passthrough-vol:
for d in $(SUBDIRS); do \
if test $$d != .; then \
(cd $$d && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1; \
fi; \
done
# Automake wants to make config.status depend on configure. This
# makes sense, but config.status can't always be regenerated
# properly, which can cause builds to fail.
# This is a problem for our Daily Tests, which need to be able to
# 'make distclean' reliably before running configure.
# The simple solution is to override the dependency Automake supplies
# for config.status so that it will never be regenerated.
$(top_builddir)/config.status:
# Don't include conclude.am in root Makefile; tests target needs to
# recurse into regular subdirs.

@ -0,0 +1,34 @@
# Top-level distributed Makefile -*- makefile -*-
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
# This Makefile is a stub (copied from Makefile.dist) which will run
# configure and then invoke the same target in the new Makefile created
# by configure.
# Uncomment this variable if your make(1) doesn't set it automatically.
#
#MAKE=make
SHELL=/bin/sh
# Every user-visible target first ensures configure has been run
# (via the _config prerequisite), then re-invokes $(MAKE) so the
# requested target runs against the configure-generated Makefile.
all lib progs check test _test install uninstall dep depend: _config
$(MAKE) $@
clean mostlyclean distclean maintainer-clean TAGS: _config
$(MAKE) $@
# Run configure to produce the real Makefile that replaces this stub.
_config:
sh configure
.PHONY: all lib progs test install uninstall dep depend clean mostlyclean \
distclean maintainer-clean _config

File diff suppressed because it is too large Load Diff

@ -0,0 +1,88 @@
HDF5 version 1.14.1-2 released on 2023-05-11
![HDF5 Logo](doxygen/img/HDF5.png)
[![1.14 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_14&label=1.14)](https://github.com/HDFGroup/hdf5/actions?query=branch%3Ahdf5_1_14)
[![BSD](https://img.shields.io/badge/License-BSD-blue.svg)](https://github.com/HDFGroup/hdf5/blob/develop/COPYING)
*Please refer to the release_docs/INSTALL file for installation instructions.*
This repository contains a high-performance library's source code and a file format
specification that implement the HDF5® data model. The model has been adopted across
many industries and this implementation has become a de facto data management standard
in science, engineering, and research communities worldwide.
The HDF Group is the developer, maintainer, and steward of HDF5 software. Find more
information about The HDF Group, the HDF5 Community, and other HDF5 software projects,
tools, and services at The HDF Group's website.
https://www.hdfgroup.org/
DOCUMENTATION
-------------
This release is fully functional for the API described in the documentation.
https://portal.hdfgroup.org/display/HDF5/The+HDF5+API
Full Documentation and Programming Resources for this release can be found at
https://portal.hdfgroup.org/display/HDF5
See the RELEASE.txt file in the release_docs/ directory for information specific
to the features and updates included in this release of the library.
Several more files are located within the release_docs/ directory with specific
details for several common platforms and configurations.
INSTALL - Start Here. General instructions for compiling and installing the library
INSTALL_CMAKE - instructions for building with CMake (Kitware.com)
INSTALL_parallel - instructions for building and configuring Parallel HDF5
INSTALL_Windows and INSTALL_Cygwin - MS Windows installations.
HELP AND SUPPORT
----------------
Information regarding Help Desk and Support services is available at
https://portal.hdfgroup.org/display/support/The+HDF+Help+Desk
FORUM and NEWS
--------------
The following public forums are provided for public announcements and discussions
of interest to the general HDF5 Community.
- Homepage of the Forum
https://forum.hdfgroup.org
- News and Announcements
https://forum.hdfgroup.org/c/news-and-announcements-from-the-hdf-group
- HDF5 and HDF4 Topics
https://forum.hdfgroup.org/c/hdf5
These forums are provided as an open and public service for searching and reading.
Posting requires completing a simple registration and allows one to join in the
conversation. Please read the following instructions pertaining to the Forum's
use and configuration
https://forum.hdfgroup.org/t/quickstart-guide-welcome-to-the-new-hdf-forum
SNAPSHOTS, PREVIOUS RELEASES AND SOURCE CODE
--------------------------------------------
Periodically development code snapshots are provided at the following URL:
https://gamma.hdfgroup.org/ftp/pub/outgoing/hdf5/snapshots/
Source packages for current and previous releases are located at:
https://portal.hdfgroup.org/display/support/Downloads
Development code is available at our Github location:
https://github.com/HDFGroup/hdf5.git

14
SETUP

@ -0,0 +1,14 @@
#!/usr/bin/env bash
# Configure, build, test, and install this library.
#
# Usage: setup.sh install_directory [zlib_directory]
#   install_directory  passed to configure as --prefix (required)
#   zlib_directory     optional; passed to configure as --with-zlib
#
# Exit immediately if any step (configure, make, check, install) fails,
# so a broken build never gets installed.
set -e

if [ -z "$1" ]; then
    # Usage errors go to stderr and return a non-zero status
    # (the original 'exit' returned 0, hiding the failure from callers).
    printf "Usage: setup.sh install_directory [dependencies].\n" >&2
    exit 1
fi

# Run from the directory containing this script so ./configure is found
# regardless of the caller's working directory. Quoted to survive paths
# with spaces.
cd "$(dirname "$0")"

# Only pass --with-zlib when a zlib directory was actually supplied;
# otherwise configure would receive an empty --with-zlib="" argument.
if [ -n "$2" ]; then
    ./configure --prefix="$1" --with-zlib="$2"
else
    ./configure --prefix="$1"
fi

make
make check -j8
make install

@ -0,0 +1,32 @@
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
########################################################
# Include file for user options
########################################################
#-----------------------------------------------------------------------------
#------------------- E X A M P L E B E G I N--------------------------------
#-----------------------------------------------------------------------------
# Option to Build with User Defined Values
#-----------------------------------------------------------------------------
# Example macro: replace the body with your own settings. As shipped it
# only sets a placeholder variable.
macro (MACRO_USER_DEFINED_LIBS)
set (USER_DEFINED_VALUE "FALSE")
endmacro ()
#-------------------------------------------------------------------------------
# Gate the example macro behind an OFF-by-default cache option, so user
# customizations only take effect when explicitly requested.
option (BUILD_USER_DEFINED_LIBS "Build With User Defined Values" OFF)
if (BUILD_USER_DEFINED_LIBS)
MACRO_USER_DEFINED_LIBS ()
endif ()
#-----------------------------------------------------------------------------
#------------------- E X A M P L E E N D -----------------------------------
#-----------------------------------------------------------------------------

1193
aclocal.m4 vendored

File diff suppressed because it is too large Load Diff

@ -0,0 +1,53 @@
dnl -------------------------------------------------------------------------
dnl -------------------------------------------------------------------------
dnl
dnl Copyright by The HDF Group.
dnl All rights reserved.
dnl
dnl This file is part of HDF5. The full HDF5 copyright notice, including
dnl terms governing use, modification, and redistribution, is contained in
dnl the COPYING file, which can be found at the root of the source code
dnl distribution tree, or in https://www.hdfgroup.org/licenses.
dnl If you do not have access to either file, you may request a copy from
dnl help@hdfgroup.org.
dnl
dnl Macros for HDF5 Fortran
dnl
dnl -------------------------------------------------------------------------
dnl -------------------------------------------------------------------------
dnl -------------------------------------------------------------------------
dnl _AC_SYS_LARGEFILE_MACRO_VALUE
dnl
dnl The following macro overrides the autoconf macro of the same name
dnl with this custom definition. This macro performs the same checks as
dnl autoconf's native _AC_SYS_LARGEFILE_MACRO_VALUE, but will also set
dnl AM_CPPFLAGS with the appropriate -D defines so additional configure
dnl sizeof checks do not fail.
dnl
dnl The cached check first tries the probe program without defining
dnl C-MACRO (result `no'), then with C-MACRO defined to VALUE (result
dnl VALUE); if neither compiles/links the result is `unknown'. When a
dnl value is needed, it is both AC_DEFINEd and prepended to AM_CPPFLAGS.
dnl
# _AC_SYS_LARGEFILE_MACRO_VALUE(C-MACRO, VALUE,
# CACHE-VAR,
# DESCRIPTION,
# PROLOGUE, [FUNCTION-BODY])
# ----------------------------------------------------------
m4_define([_AC_SYS_LARGEFILE_MACRO_VALUE],
[AC_CACHE_CHECK([for $1 value needed for large files], [$3],
[while :; do
m4_ifval([$6], [AC_LINK_IFELSE], [AC_COMPILE_IFELSE])(
[AC_LANG_PROGRAM([$5], [$6])],
[$3=no; break])
m4_ifval([$6], [AC_LINK_IFELSE], [AC_COMPILE_IFELSE])(
[AC_LANG_PROGRAM([@%:@define $1 $2
$5], [$6])],
[$3=$2; break])
$3=unknown
break
done])
case $$3 in #(
no | unknown) ;;
*) AC_DEFINE_UNQUOTED([$1], [$$3], [$4])
AM_CPPFLAGS="-D$1=$$3 $AM_CPPFLAGS";;
esac
rm -rf conftest*[]dnl
])# _AC_SYS_LARGEFILE_MACRO_VALUE

@ -0,0 +1,254 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# A script to reconfigure autotools for HDF5, and to recreate other
# generated files specific to HDF5.
#
# IMPORTANT OS X NOTE
#
# If you are using OS X, you will probably not have the autotools
# installed, even if you have the Xcode command-line tools.
#
# The easiest way to fix this is to install everything via Homebrew:
#
# http://brew.sh/
#
# After you install the base packages, install autoconf, automake,
# and libtool.
#
# brew install autoconf
# brew install automake
# brew install libtool
#
# This only takes a few minutes. Note that libtool and libtoolize will
# be glibtool and glibtoolize so as not to conflict with Apple's non-gnu
# tools. This autogen.sh script handles this for you.
#
# END IMPORTANT OS X NOTE
#
# If you want to use a particular version of the autotools, the paths
# to each tool can be overridden using the following environment
# variables:
#
# HDF5_ACLOCAL
# HDF5_AUTOHEADER
# HDF5_AUTOMAKE
# HDF5_AUTOCONF
# HDF5_LIBTOOL
# HDF5_M4
#
# Note that aclocal will attempt to include libtool's share/aclocal
# directory.
#
# Aside from -h for help, this script takes one potential option:
#
# -v
#
# This emits some extra information, mainly tool versions.
# Print a banner so build logs clearly show autogen.sh was run.
echo
echo "**************************"
echo "* HDF5 autogen.sh script *"
echo "**************************"
echo
# Default is not verbose output
verbose=false
optspec=":hpv-"
# Parse command-line options: -h prints help, -v enables verbose output.
while getopts "$optspec" optchar; do
case "${optchar}" in
h)
echo "usage: $0 [OPTIONS]"
echo
echo " -h Print this help message."
echo
echo " -v Show more verbose output."
echo
echo " NOTE: Each tool can be set via an environment variable."
echo " These are documented inside this autogen.sh script."
echo
exit 0
;;
v)
echo "Setting verbosity: high"
echo
verbose=true
;;
*)
# Any other flag is an error. Because optspec begins with ':',
# getopts itself stays silent and we report the problem here.
if [ "$OPTERR" != 1 ] || case $optspec in :*) ;; *) false; esac; then
echo "ERROR: non-option argument: '-${OPTARG}'" >&2
echo "Quitting"
exit 1
fi
;;
esac
done
# If paths to autotools are not specified, use whatever the system
# has installed as the default. We use 'command -v <tool>' to
# show exactly what's being used (shellcheck complains that 'which'
# is non-standard and deprecated).
# Each HDF5_* variable may be pre-set by the caller to pin a tool version.
if test -z "${HDF5_AUTOCONF}"; then
HDF5_AUTOCONF="$(command -v autoconf)"
fi
if test -z "${HDF5_AUTOMAKE}"; then
HDF5_AUTOMAKE="$(command -v automake)"
fi
if test -z "${HDF5_AUTOHEADER}"; then
HDF5_AUTOHEADER="$(command -v autoheader)"
fi
if test -z "${HDF5_ACLOCAL}"; then
HDF5_ACLOCAL="$(command -v aclocal)"
fi
if test -z "${HDF5_LIBTOOL}"; then
case "$(uname)" in
Darwin*)
# libtool on OS-X is non-gnu
HDF5_LIBTOOL="$(command -v glibtool)"
;;
*)
HDF5_LIBTOOL="$(command -v libtool)"
;;
esac
fi
if test -z "${HDF5_M4}"; then
HDF5_M4="$(command -v m4)"
fi
# Make sure that these versions of the autotools are in the path
# (prepended so the chosen tools win over anything else installed).
AUTOCONF_DIR=$(dirname "${HDF5_AUTOCONF}")
LIBTOOL_DIR=$(dirname "${HDF5_LIBTOOL}")
M4_DIR=$(dirname "${HDF5_M4}")
PATH=${AUTOCONF_DIR}:${LIBTOOL_DIR}:${M4_DIR}:$PATH
# Make libtoolize match the specified libtool
case "$(uname)" in
Darwin*)
# On OS X, libtoolize could be named glibtoolize or
# libtoolize. Try the former first, then fall back
# to the latter if it's not found.
HDF5_LIBTOOLIZE="${LIBTOOL_DIR}/glibtoolize"
if [ ! -f "$HDF5_LIBTOOLIZE" ] ; then
HDF5_LIBTOOLIZE="${LIBTOOL_DIR}/libtoolize"
fi
;;
*)
HDF5_LIBTOOLIZE="${LIBTOOL_DIR}/libtoolize"
;;
esac
# Run scripts that process source.
#
# These should be run before the autotools so that failures here block
# compilation.
# Run trace script
# The trace script adds H5TRACE macros to library source files. It should
# have no effect on files that don't have HDF5 API macros in them.
echo "Running trace script:"
bin/trace src/H5*.c || exit 1
echo
# Run make_err
# make_err automatically generates the H5E headers that create error message
# types for HDF5.
echo "Running error generation script:"
bin/make_err src/H5err.txt || exit 1
echo
# Run make_vers
# make_vers automatically generates the public headers that define the API version
# macros for HDF5.
echo "Running API version generation script:"
bin/make_vers src/H5vers.txt || exit 1
echo
# Run make_overflow
# make_overflow automatically generates macros for detecting overflows for type
# conversion.
echo "Running overflow macro generation script:"
bin/make_overflow src/H5overflow.txt || exit 1
echo
# Run autotools in order
#
# When available, we use the --force option to ensure all files are
# updated. This prevents the autotools from re-running to fix dependencies
# during the 'make' step, which can be a problem if environment variables
# were set on the command line during autogen invocation.
# Some versions of libtoolize will suggest that we add ACLOCAL_AMFLAGS
# = '-I m4'. This is already done in commence.am, which is included
# in Makefile.am. You can ignore this suggestion.
# LIBTOOLIZE
# Each tool below follows the same pattern: echo the command line,
# optionally print the tool version (-v mode), run it, and abort on failure.
libtoolize_cmd="${HDF5_LIBTOOLIZE} --copy --force"
echo "${libtoolize_cmd}"
if [ "$verbose" = true ] ; then
${HDF5_LIBTOOLIZE} --version
fi
${libtoolize_cmd} || exit 1
echo
echo "NOTE: You can ignore the warning about adding -I m4."
echo "      We already do this in an included file."
echo
# ACLOCAL
# Pick up libtool's aclocal macros when they are installed alongside it.
if test -e "${LIBTOOL_DIR}/../share/aclocal" ; then
aclocal_include="-I ${LIBTOOL_DIR}/../share/aclocal"
fi
aclocal_cmd="${HDF5_ACLOCAL} --force -I m4 ${aclocal_include}"
echo "${aclocal_cmd}"
if [ "$verbose" = true ] ; then
${HDF5_ACLOCAL} --version
fi
${aclocal_cmd} || exit 1
echo
# AUTOHEADER
autoheader_cmd="${HDF5_AUTOHEADER} --force"
echo "${autoheader_cmd}"
if [ "$verbose" = true ] ; then
${HDF5_AUTOHEADER} --version
fi
${autoheader_cmd} || exit 1
echo
# AUTOMAKE
automake_cmd="${HDF5_AUTOMAKE} --copy --add-missing --force-missing"
echo "${automake_cmd}"
if [ "$verbose" = true ] ; then
${HDF5_AUTOMAKE} --version
fi
${automake_cmd} || exit 1
echo
# AUTOCONF
# The "obsolete" warnings category flags our Java macros as obsolete.
# Since there is no clear way to upgrade them (Java support in the Autotools
# is not great) and they work well enough for now, we suppress those warnings.
autoconf_cmd="${HDF5_AUTOCONF} --force --warnings=no-obsolete"
echo "${autoconf_cmd}"
if [ "$verbose" = true ] ; then
${HDF5_AUTOCONF} --version
fi
${autoconf_cmd} || exit 1
echo
echo "*** SUCCESS ***"
echo
exit 0

File diff suppressed because it is too large Load Diff

@ -0,0 +1,56 @@
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
##
## Makefile.am
## Run automake to generate a Makefile.in from this file.
#
# HDF5 Library Makefile(.in)
#
include $(top_srcdir)/config/commence.am
# Include src directory
AM_CPPFLAGS+=-I$(top_srcdir)/src -I$(top_srcdir)/tools/lib
# These are our main targets
bin_SCRIPTS=h5redeploy
# Tell automake to clean h5redeploy script
CLEANFILES=h5redeploy
# These were generated by configure. Remove them only when distclean.
DISTCLEANFILES=h5cc
# All programs rely on hdf5 library and h5tools library
LDADD=$(LIBH5TOOLS) $(LIBHDF5)
# How to build h5redeploy script
# (The recipe is a plain copy: the script needs no configure-time
# substitution, so the .in file from the source tree is used verbatim.)
h5redeploy: h5redeploy.in
	@cp $(srcdir)/$@.in $@
# h5cc needs custom install and uninstall rules, since it may be
# named h5pcc if hdf5 is being built in parallel mode.
# BUILD_PARALLEL_CONDITIONAL is an automake conditional defined by configure.
if BUILD_PARALLEL_CONDITIONAL
H5CC_NAME=h5pcc
else
H5CC_NAME=h5cc
endif
# Make sure the install bindir exists before install-exec-local runs.
$(DESTDIR)$(bindir):
	echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \
	$(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1;
# Install/uninstall the configure-generated h5cc under its serial or
# parallel name (automake's *-local hooks run in addition to the
# standard install/uninstall rules).
install-exec-local: $(DESTDIR)$(bindir)
	@$(INSTALL) h5cc $(DESTDIR)$(bindir)/$(H5CC_NAME)
uninstall-local:
	@$(RM) $(DESTDIR)$(bindir)/$(H5CC_NAME)
include $(top_srcdir)/config/conclude.am

File diff suppressed because it is too large Load Diff

@ -0,0 +1,31 @@
# Scripts in `bin` and their purpose
Programs that are run by `autogen.sh` (or by its CMake equivalent) are noted as such below.
|Program|Purpose|
|-------|-------|
|`buildhdf5`|Convenience script to build HDF5 using the Autotools|
|`checkapi`|Checks if public API calls are used in internal functions|
|`checkposix`|Checks if C/POSIX calls are prefixed with `HD`|
|`chkcopyright`|Checks if files have appropriate copyright statements|
|`cmakehdf5`|Convenience script to build HDF5 using CMake|
|`debug-ohdr`|Examines debug output from `H5O_open/close` to look for open objects|
|`format_source`|Runs `clang-format` over the source files, applying our rules|
|`genparser`|Creates the flex/bison-based parser files in the high-level library|
|`h5cc.in`|Input file from which h5cc is created|
|`h5redeploy.in`|Input file from which h5redeploy is created|
|`h5vers`|Updates the library version number|
|`make_err`|Generates the H5E header files (called in `autogen.sh`)|
|`make_vers`|Generates H5version.h (called in `autogen.sh`)|
|`make_overflow`|Generates H5overflow.h (called in `autogen.sh`)|
|`output_filter`|Used in the tools test code to strip extraneous output before we diff files|
|`restore.sh`|Removes files generated by `autogen.sh`|
|`runbkprog`|Used by CMake to run test programs in the background|
|`switch_maint_mode`|Switches maintainer mode on/off in `configure.ac`|
|`trace`|Adds `TRACE` macros to HDF5 C library source files (run by `autogen.sh`)|
|`warnhist`|Generates compiler warning statistics for gcc/clang when fed output of make|
## TODO
* chkcopyright is currently semi-broken as it doesn't handle the full variety of copyright headers we need. We're leaving it in place, though, in the hopes that someone will update it in the future.
* Extending warnhist to better understand the output of additional compilers/languages would be nice.

@ -0,0 +1,23 @@
#!/bin/bash -l
# Batch driver that submits the HDF5 CTest suite through qsub.
# Optional first argument names the summary/output file; the name is also
# used below to choose between the serial and parallel suites.
if [ $# -gt 0 ]; then
SUMMARY_FILE=$1
fi
# @ACCOUNT_ID@ is substituted by CMake when this template is configured.
ACCOUNT_ID=@ACCOUNT_ID@
echo "Run parallel test command. Test output will be in build/${SUMMARY_FILE}"
CTEST_CMD=`which ctest`
#SKIPTESTS <<KEYWORD:script inserts list of skips tests here -- don't remove>>
# NOTE(review): the #SKIPTESTS line above is a substitution marker consumed
# by other tooling; it must be kept byte-for-byte intact.
cd @HDF5_BINARY_DIR@
# A summary file name containing "ctestS" selects the serial suite;
# anything else runs the parallel suite.  The .done file signals
# completion to the calling driver.
if [[ $SUMMARY_FILE == *"ctestS"* ]]; then
CMD="${CTEST_CMD} -S ctest_serial.cmake"
qsub -t 60 -n 1 -q debug-flat-quad -A ${ACCOUNT_ID} ${CMD} >& ${SUMMARY_FILE}
echo "Done running ctest serial command."
touch ctestS.done
else
CMD="${CTEST_CMD} -S ctest_parallel.cmake"
qsub -t 60 -n 1 -q debug-flat-quad -A ${ACCOUNT_ID} ${CMD} >& ${SUMMARY_FILE}
echo "Done running ctest parallel command."
touch ctestP.done
fi

@ -0,0 +1,20 @@
#!/bin/tcsh
# LSF batch job: run the parallel HDF5 CTest suite in the configured build
# directory.  The #BSUB lines below are LSF scheduler directives.
### LSF syntax
#BSUB -nnodes 1 #number of nodes
#BSUB -W 30 #walltime in minutes
#BSUB -G guests #account
#BSUB -e ctestPerrors.txt #stderr
#BSUB -o ctestPoutput.txt #stdout
#BSUB -J hdf5_ctestP #job
##BSUB -q pbatch #queue to use
#BSUB -q pdebug
##date; hostname
##echo -n 'JobID is '; echo $LSB_JOBID
# @HDF5_BINARY_DIR@ is substituted by CMake when this template is configured.
cd @HDF5_BINARY_DIR@
echo "Run parallel test command. Test output will be in build/ctestP.out"
ctest -S ctest_parallel.cmake >& ctestP.out
echo "Done running ctest parallel command."
# The .done file signals job completion to the calling test driver.
touch ctestP.done

@ -0,0 +1,15 @@
#!/bin/bash
# SLURM batch job: run the parallel HDF5 CTest suite in the configured
# build directory.  The #SBATCH lines below are SLURM scheduler directives.
#SBATCH --nodes=1
#SBATCH -t 00:30:00
#SBATCH --mail-type=BEGIN,END,FAIL
##SBATCH --mail-user=<username>@sandia.gov
#SBATCH --export=ALL
#SBATCH --job-name=h5_ctestP
# @HDF5_BINARY_DIR@ is substituted by CMake when this template is configured.
cd @HDF5_BINARY_DIR@
echo "Run parallel test command. Test output will be in build/ctestP.out"
ctest -S ctest_parallel.cmake >& ctestP.out
echo "Done running ctest parallel command."
# The .done file signals job completion to the calling test driver.
touch ctestP.done

@ -0,0 +1,17 @@
#!/bin/tcsh
# LSF batch job: run the serial HDF5 CTest suite in the configured build
# directory.  The #BSUB lines below are LSF scheduler directives.
### LSF syntax
#BSUB -nnodes 1 #number of nodes
#BSUB -W 29 #walltime in minutes
#BSUB -G guests #account
#BSUB -e ctestSerrors.txt #stderr
#BSUB -o ctestSoutput.txt #stdout
#BSUB -J hdf5_ctestS #job
##BSUB -q pbatch #queue to use
#BSUB -q pdebug
# @HDF5_BINARY_DIR@ is substituted by CMake when this template is configured.
cd @HDF5_BINARY_DIR@
echo "Run command. Test output will be in build/ctestS.out"
ctest -S ctest_serial.cmake >& ctestS.out
echo "Done running command."
# The .done file signals job completion to the calling test driver.
touch ctestS.done

@ -0,0 +1,15 @@
#!/bin/bash
# SLURM batch job: run the serial HDF5 CTest suite in the configured build
# directory.  The #SBATCH lines below are SLURM scheduler directives.
#SBATCH --nodes=1
#SBATCH -t 00:30:00
#SBATCH --mail-type=BEGIN,END,FAIL
##SBATCH --mail-user=<username>@sandia.gov
#SBATCH --export=ALL
#SBATCH --job-name=h5_ctestS
# @HDF5_BINARY_DIR@ is substituted by CMake when this template is configured.
cd @HDF5_BINARY_DIR@
echo "Run command. Test output will be in build/ctestS.out"
ctest -S ctest_serial.cmake >& ctestS.out
echo "Done running command."
# The .done file signals job completion to the calling test driver.
touch ctestS.done

@ -0,0 +1,12 @@
# CTest driver for the parallel (MPI) HDF5 test suite.
# CI_SITE_NAME / CI_BUILD_NAME environment variables, when set, override
# the site and build name reported by CTest.
if(NOT "$ENV{CI_SITE_NAME}" STREQUAL "")
  set(CTEST_SITE "$ENV{CI_SITE_NAME}")
endif()
if(NOT "$ENV{CI_BUILD_NAME}" STREQUAL "")
  set(CTEST_BUILD_NAME "$ENV{CI_BUILD_NAME}")
endif()
ctest_start ("$ENV{CI_MODEL}" "@HDF5_SOURCE_DIR@" "@HDF5_BINARY_DIR@" APPEND)
# Run only the MPI tests (test names beginning with MPI_TEST_).
ctest_test (BUILD "@HDF5_BINARY_DIR@" APPEND INCLUDE MPI_TEST_ RETURN_VALUE res)
# ctest_test() stores a non-zero value in res when any test fails.
# (Replaces the fragile "LESS 0 OR GREATER 0" form with the idiomatic
# non-zero check.)
if (NOT res EQUAL 0)
  file (APPEND ${CTEST_SCRIPT_DIRECTORY}/FailedCTest.txt "Failed Tests: ${res}\n")
endif ()

@ -0,0 +1,12 @@
# CTest driver for the serial HDF5 test suite.
# CI_SITE_NAME / CI_BUILD_NAME environment variables, when set, override
# the site and build name reported by CTest.
if(NOT "$ENV{CI_SITE_NAME}" STREQUAL "")
  set(CTEST_SITE "$ENV{CI_SITE_NAME}")
endif()
if(NOT "$ENV{CI_BUILD_NAME}" STREQUAL "")
  set(CTEST_BUILD_NAME "$ENV{CI_BUILD_NAME}")
endif()
ctest_start ("$ENV{CI_MODEL}" "@HDF5_SOURCE_DIR@" "@HDF5_BINARY_DIR@" APPEND)
# Exclude the MPI tests and run the remaining (serial) tests with up to
# 32 concurrent test processes.
ctest_test (BUILD "@HDF5_BINARY_DIR@" APPEND EXCLUDE MPI_TEST_ PARALLEL_LEVEL 32 RETURN_VALUE res)
# ctest_test() stores a non-zero value in res when any test fails.
# (Replaces the fragile "LESS 0 OR GREATER 0" form with the idiomatic
# non-zero check.)
if (NOT res EQUAL 0)
  file (APPEND ${CTEST_SCRIPT_DIRECTORY}/FailedCTest.txt "Failed Tests: ${res}\n")
endif ()

@ -0,0 +1,20 @@
#!/bin/bash
# SLURM batch job: run the H5detect type-detection program on a KNL node,
# writing the generated H5Tinit.c into the generated-source directory.
# The #SBATCH lines below are SLURM scheduler directives.
#SBATCH -p knl -C quad
#SBATCH --nodes=1
#SBATCH -t 00:10:00
#SBATCH --mail-type=BEGIN,END,FAIL
#SBATCH --mail-user=<username>@sandia.gov
#SBATCH --export=ALL
#SBATCH --job-name=knl_h5detect
# Inputs: Build directory, output file name, executable file name (username/email if available).
PROGNAME=H5detect
OUTPUT=H5Tinit.c
# @HDF5_BINARY_DIR@ and @HDF5_GENERATED_SOURCE_DIR@ are substituted by CMake.
CMD="@HDF5_BINARY_DIR@/bin/${PROGNAME} @HDF5_GENERATED_SOURCE_DIR@/${OUTPUT}"
echo "Run $CMD"
# Launch as a single task so it executes on the allocated compute node.
srun -n 1 $CMD
echo "Done running $CMD"

@ -0,0 +1,16 @@
#!/bin/bash
# SLURM batch job: run the parallel HDF5 CTest suite on a KNL (quad,cache)
# node.  The #SBATCH lines below are SLURM scheduler directives.
#SBATCH -p knl -C quad,cache
#SBATCH --nodes=1
#SBATCH -t 00:30:00
#SBATCH --mail-type=BEGIN,END,FAIL
##SBATCH --mail-user=<username>@sandia.gov
#SBATCH --export=ALL
#SBATCH --job-name=h5_ctestP
# @HDF5_BINARY_DIR@ is substituted by CMake when this template is configured.
cd @HDF5_BINARY_DIR@
echo "Run parallel test command. Test output will be in build/ctestP.out"
ctest -S ctest_parallel.cmake >& ctestP.out
echo "Done running ctest parallel command."
# The .done file signals job completion to the calling test driver.
touch ctestP.done

@ -0,0 +1,16 @@
#!/bin/bash
# SLURM batch job: run the serial HDF5 CTest suite on a KNL (quad,cache)
# node.  The #SBATCH lines below are SLURM scheduler directives.
#SBATCH -p knl -C quad,cache
#SBATCH --nodes=1
#SBATCH -t 00:30:00
#SBATCH --mail-type=BEGIN,END,FAIL
##SBATCH --mail-user=<username>@sandia.gov
#SBATCH --export=ALL
#SBATCH --job-name=h5_ctestS
# @HDF5_BINARY_DIR@ is substituted by CMake when this template is configured.
cd @HDF5_BINARY_DIR@
echo "Run command. Test output will be in build/ctestS.out"
ctest -S ctest_serial.cmake >& ctestS.out
echo "Done running command."
# The .done file signals job completion to the calling test driver.
touch ctestS.done

@ -0,0 +1,22 @@
#!/bin/tcsh
# LSF batch job: run the parallel HDF5 CTest suite across 6 tasks on one
# node.  The #BSUB lines below are LSF scheduler directives.
### LSF syntax
#BSUB -n 6 #number of nodes
#BSUB -R "span[ptile=6]"
#BSUB -W 30 #walltime in minutes
#BSUB -G guests #account
#BSUB -e ctestPerrors.txt #stderr
#BSUB -o ctestPoutput.txt #stdout
#BSUB -J hdf5_ctestP #job
##BSUB -q pbatch #queue to use
#BSUB -q pdebug
##date; hostname
##echo -n 'JobID is '; echo $LSB_JOBID
# @HDF5_BINARY_DIR@ is substituted by CMake when this template is configured.
cd @HDF5_BINARY_DIR@
echo "Run parallel test command. Test output will be in build/ctestP.out"
ctest -S ctest_parallel.cmake >& ctestP.out
echo "Done running ctest parallel command."
# The .done file signals job completion to the calling test driver.
touch ctestP.done
# BUGFIX: removed a stray '~' editor artifact that followed this line;
# tcsh would have attempted to execute it as a command at job end.

@ -0,0 +1,17 @@
#!/bin/tcsh
# LSF batch job: run the serial HDF5 CTest suite as a single task.
# The #BSUB lines below are LSF scheduler directives.
### LSF syntax
#BSUB -n 1 #number of nodes
#BSUB -W 29 #walltime in minutes
#BSUB -G guests #account
#BSUB -e ctestSerrors.txt #stderr
#BSUB -o ctestSoutput.txt #stdout
#BSUB -J hdf5_ctestS #job
##BSUB -q pbatch #queue to use
#BSUB -q pdebug
# @HDF5_BINARY_DIR@ is substituted by CMake when this template is configured.
cd @HDF5_BINARY_DIR@
echo "Run command. Test output will be in build/ctestS.out"
ctest -S ctest_serial.cmake >& ctestS.out
echo "Done running command."
# The .done file signals job completion to the calling test driver.
touch ctestS.done

@ -0,0 +1,7 @@
#!/bin/tcsh
# ray.llnl.gov requires a '<' with bsub for submitting .lsf batch jobs.
# CMake is reluctant to pass the '<', so we put it in this script and use
# the script to submit the bsub command on ray.
#
# Usage: runbkprog <jobscript.lsf>
# BUGFIX: quote $1 so a job-script path containing spaces survives tcsh
# word splitting.
bsub < "$1"

@ -0,0 +1,339 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Make a release of hdf5.
#
# NOTE:
# This script differs from bin/release in that this has an added
# --revision option to create private releases with the code revision
# hash in the version strings.
#
# This script can probably be merged into the original release script in
# the future.
# Function definitions
#
# Print Usage page
USAGE()
{
# Emit the help text verbatim to stdout.  Everything between the 'cat'
# and the terminating EOF is user-facing output -- do not edit casually.
cat << EOF
Usage: $0 -d <dir> [-h] [--private] [--revision [--branch BRANCHNAME]] <methods> ...
-d DIR The name of the directory where the release(s) should be
placed.
--branch BRANCHNAME This is to get the correct version of the branch name from the
repository. BRANCHNAME for v1.8 should be hdf5_1_8.
-h print the help page.
--private Make a private release with today's date in version information.
--revision Make a private release with the code revision number in version information.
This allows --branch to be used for the file name.
--branch BRANCHNAME This is to get the correct version of the branch name from the
repository. BRANCHNAME for v1.8 should be hdf5_1_8.
This must be run at the top level of the source directory.
The other command-line options are the names of the programs to use
for compressing the resulting tar archive (if none are given then
"tar" is assumed):
tar -- use tar and don't do any compressing.
gzip -- use gzip with "-9" and append ".gz" to the output name.
bzip2 -- use bzip2 with "-9" and append ".bz2" to the output name.
zip -- convert all text files to DOS style and form a zip file for Windows use.
An md5 checksum is produced for each archive created and stored in the md5 file.
Examples:
$ bin/release -d /tmp
/tmp/hdf5-1.8.13-RELEASE.txt
/tmp/hdf5-1.8.13.md5
/tmp/hdf5-1.8.13.tar
$ bin/release -d /tmp gzip
/tmp/hdf5-1.8.13-RELEASE.txt
/tmp/hdf5-1.8.13.md5
/tmp/hdf5-1.8.13.tar.gz
$ bin/release -d /tmp tar gzip zip
/tmp/hdf5-1.8.13-RELEASE.txt
/tmp/hdf5-1.8.13.md5
/tmp/hdf5-1.8.13.tar
/tmp/hdf5-1.8.13.tar.gz
/tmp/hdf5-1.8.13.tar.zip
EOF
}
# Function name: tar2zip
# Convert the release tarball to a Windows zipball.
#
# Steps:
# 1. untar the tarball in a temporary directory;
# Note: do this in a temporary directory to avoid changing
# the original source directory which may be around.
# 2. convert all its text files to DOS (LF-CR) style;
# 3. form a zip file which is usable by Windows users.
#
# Parameters:
# $1 version
# $2 release tarball
# $3 output zipball file name
#
# Returns 0 if successful; 1 otherwise
#
tar2zip()
{
    # Convert the release tarball to a Windows zipball.
    #
    # Parameters:
    #   $1 version  (also the top-level directory name inside the tarball)
    #   $2 release tarball
    #   $3 output zipball file name
    #
    # Returns 0 if successful; 1 otherwise
    if [ $# -ne 3 ]; then
        # BUGFIX: the usage message previously omitted the required
        # <version> argument even though three arguments are checked for.
        echo "usage: tar2zip <version> <tarfilename> <zipfilename>"
        return 1
    fi
    ztmpdir=/tmp/tmpdir$$
    mkdir -p $ztmpdir
    version=$1
    tarfile=$2
    zipfile=$3
    # step 1: untar tarball in ztmpdir.
    # Use a temporary directory to avoid changing the original source
    # directory, which may still be around.
    (cd $ztmpdir; tar xf -) < $tarfile
    # sanity check: the expected top-level directory must exist
    if [ ! -d $ztmpdir/$version ]; then
        echo "untar did not create $ztmpdir/$version source dir"
        # cleanup
        rm -rf $ztmpdir
        return 1
    fi
    # step 2: convert all text files to DOS (CR-LF) line endings
    # There maybe a simpler way to do this.
    # options used in unix2dos:
    #   -k  Keep the date stamp
    #   -q  quiet mode
    # grep redirect output to /dev/null because -q or -s are not portable.
    find $ztmpdir/$version | \
        while read inf; do \
            if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \
                unix2dos -q -k $inf; \
            fi\
        done
    # step 3: make zipball usable by Windows users
    #   -9  maximum compression
    #   -y  Store symbolic links as such in the zip archive
    #   -r  recursive
    #   -q  quiet
    (cd $ztmpdir; zip -9 -y -r -q $version.zip $version)
    mv $ztmpdir/$version.zip $zipfile
    # cleanup
    rm -rf $ztmpdir
    return 0
}
# This command must be run at the top level of the hdf5 source directory.
# Verify this requirement.
if [ ! \( -f configure.ac -a -f bin/bbrelease \) ]; then
echo "$0 must be run at the top level of the hdf5 source directory"
exit 1
fi
# Defaults
DEST=releases
# Current version string as reported by bin/h5vers; abort if unreadable.
VERS=`perl bin/h5vers`
VERS_OLD=
test "$VERS" || exit 1
verbose=yes
release_date=`date +%F`
today=`date +%Y%m%d`
pmode='no'
revmode='no'
tmpdir="../#release_tmp.$$" # tmp work directory
CPPLUS_RM_NAME=cpplus_RM
# Restore previous Version information
# VERS_OLD is set by the --private/--revision paths below; when non-empty,
# put back the saved config/lt_vers.am and reset the version via h5vers.
# No-op when VERS_OLD is empty.
RESTORE_VERSION()
{
if [ X-${VERS_OLD} != X- ]; then
echo restoring version information back to $VERS_OLD
rm -f config/lt_vers.am
cp $tmpdir/lt_vers.am config/lt_vers.am
bin/h5vers -s $VERS_OLD
VERS_OLD=
fi
}
# Command-line arguments
# Options are consumed here; any bare word is collected into $methods
# (the list of compression methods to apply later).
while [ -n "$1" ]; do
arg=$1
shift
case "$arg" in
-d)
DEST=$1
shift
;;
-h)
USAGE
exit 0
;;
--private)
pmode=yes
;;
--revision)
revmode=yes
;;
--branch)
BRANCHNAME=$1
shift
;;
-*)
echo "Unknown switch: $arg" 1>&2
USAGE
exit 1
;;
*)
methods="$methods $arg"
;;
esac
done
# Default method is tar
if [ "X$methods" = "X" ]; then
methods="tar"
fi
# Create the temporary work directory.
if mkdir $tmpdir; then
echo "temporary work directory for release. "\
"Can be deleted after release completes." > $tmpdir/README
else
echo "Failed to mkdir tmpdir($tmpdir)"
exit 1
fi
# setup restoration in case of abort.
# (trap on 0 = on script exit; cleared near the end once the version has
# been restored explicitly.)
trap RESTORE_VERSION 0
if [ X$pmode = Xyes ]; then
VERS_OLD=$VERS
# Copy old version of config/lt_vers.am, since it's hard to
# "undo" changes to it.
cp config/lt_vers.am $tmpdir
# Set version information to m.n.r-of$today.
# (h5vers does not correctly handle just m.n.r-$today.)
VERS=`echo $VERS | sed -e s/-.*//`-of$today
echo Private release of $VERS
bin/h5vers -s $VERS
fi
if [ X$revmode = Xyes ]; then
VERS_OLD=$VERS
echo "Save old version $VERS_OLD for restoration later."
# Copy old version of config/lt_vers.am, since it's hard to
# "undo" changes to it.
cp config/lt_vers.am $tmpdir
# Default the branch name to the currently checked-out branch.
if [ "${BRANCHNAME}" = "" ]; then
BRANCHNAME=`git symbolic-ref -q --short HEAD`
fi
revision=`git rev-parse --short HEAD`
# Set version information to m.n.r-r$revision.
# (h5vers does not correctly handle just m.n.r-$today.)
VERS=`echo $VERS | sed -e s/-.*//`-$revision
echo Private release of $VERS
HDF5_VERS=hdf5-$BRANCHNAME-$revision
echo file base of $HDF5_VERS
bin/h5vers -s $VERS
# use a generic directory name for revision releases
HDF5_IN_VERS=hdfsrc
else
# Store hdf5-$VERS ("hdf5-1.7.51", e.g.) to a variable to avoid typos
HDF5_VERS=hdf5-$VERS
# directory name matches tar file name for non-revision releases
HDF5_IN_VERS=$HDF5_VERS
fi
test "$verbose" && echo "Releasing $HDF5_VERS to $DEST" 1>&2
if [ ! -d $DEST ]; then
echo " Destination directory $DEST does not exist" 1>&2
exit 1
fi
# Create a symlink to the source so files in the tarball have the prefix
# we want (gnu's --transform isn't portable)
ln -s `pwd` $tmpdir/$HDF5_IN_VERS || exit 1
# Save a backup copy of Makefile if exists.
test -f Makefile && mv Makefile $tmpdir/Makefile.x
cp -p Makefile.dist Makefile
# Update README.md and release_docs/RELEASE.txt with release information in
# line 1.
for f in README.md release_docs/RELEASE.txt; do
echo "HDF5 version $VERS released on $release_date" >$f.x
sed -e 1d $f >>$f.x
mv $f.x $f
# Make sure new files are of the right access mode
chmod 644 $f
done
# Create the tar file
test "$verbose" && echo " Running tar..." 1>&2
# BUGFIX: the '|| exit 1' used to sit inside the subshell after 'exec tar',
# where it was unreachable (exec replaces the subshell) and could never
# abort this script.  Check the subshell's exit status out here instead so
# a tar failure stops the release.
(cd "$tmpdir" && exec tar -ch --exclude-vcs -f "$HDF5_VERS.tar" "./$HDF5_IN_VERS") || exit 1
# Compress
# Produce one archive per requested method; each archive's md5 is appended
# to $MD5file (truncated first via the /dev/null copy).
MD5file=$HDF5_VERS.md5
cp /dev/null $DEST/$MD5file
for comp in $methods; do
case $comp in
tar)
cp -p $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.tar
(cd $DEST; md5sum $HDF5_VERS.tar >> $MD5file)
;;
gzip)
test "$verbose" && echo " Running gzip..." 1>&2
gzip -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.gz
(cd $DEST; md5sum $HDF5_VERS.tar.gz >> $MD5file)
;;
bzip2)
test "$verbose" && echo " Running bzip2..." 1>&2
bzip2 -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.bz2
(cd $DEST; md5sum $HDF5_VERS.tar.bz2 >> $MD5file)
;;
zip)
test "$verbose" && echo " Creating zip ball..." 1>&2
# tar2zip (defined above) converts the tarball into a Windows zipball.
tar2zip $HDF5_IN_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2
(cd $DEST; md5sum $HDF5_VERS.zip >> $MD5file)
;;
*)
echo "***Error*** Unknown method $comp"
exit 1
;;
esac
done
# Copy the RELEASE.txt to the release area.
cp release_docs/RELEASE.txt $DEST/$HDF5_VERS-RELEASE.txt
# Remove distributed Makefile and restore previous Makefile if existed.
rm -f Makefile
test -f $tmpdir/Makefile.x && mv $tmpdir/Makefile.x Makefile
# Restore OLD version information, then no need for trap.
if [ X$pmode = Xyes ] || [ X$revmode = Xyes ]; then
echo "Restore the original version $VERS_OLD"
RESTORE_VERSION
trap 0
fi
# Remove temporary things
rm -rf $tmpdir
echo "DONE"
exit 0

@ -0,0 +1,308 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Build HDF5 library by doing configure, make, and tests.
# Usage: See USAGE()
# Programmer: Albert Cheng
# Creation date: Jul 9, 2003
# Some handy definitions
USAGE()
{
# Emit the help text verbatim to stdout.  Everything between the 'cat'
# and the terminating EOF is user-facing output -- do not edit casually.
cat <<EOF
Buildhdf5 builds the HDF5 library by running configure, make and make check.
It skips the configure part if one has been done already. In effect, one
can continue from a previous build.
Command Syntax
==============
buildhdf5 [-config] [-szlib] [-help] [-n] [-srcdir dir] [-fortran] [-cxx] [-pp] config-arguments ...
-config: run configure only. [default to do build too]
-szlib: configure in the szlib option
-help: show this help page
-n: no execution, just show commands
-srcdir: use dir as the source directory
[Note: this is different from --srcdir
which will be passed to configure]
-fortran: add --enable-fortran
-cxx: add --enable-cxx
-pp: add --enable-parallel
all other arguments are passed to configure
Configure in place or by srcdir
===============================
By default, the command looks for the configure command in
'.' and then '../hdf5'. When it finds it, it uses it to do
the configure part. In effect, if ./configure is found, it
does the build in place. If it finds ../hdf5/configure, it
does the --srcdir (that is separated source) build. Therefore,
if you have the following structure setup, you can run multiple
hosts building simultantously using a common source code.
hdf5_v1.7/hdf5 # holds the source
.../sunbox # for SunOS
.../linux # for Linux
.../linuxpp # for Linux parallel
EOF
}
# Emit a timestamp line of the form "===== <date output> =====".
TIMESTAMP()
{
echo "===== $(date) ====="
}
# Installed as the exit trap ('trap QUIT 0' below) so every run -- normal
# or aborted -- ends with a closing timestamp.
QUIT()
{
# print the closing time
TIMESTAMP
}
# Do one step bracketed with time stamps
# The '< /dev/null' is needed to prevent some applications like MPI
# jobs blocked for reading when they read stdin unnecessary.
#
#   $1 banner      short description echoed to the terminal
#   $2 command     shell text executed via eval inside a subshell
#   $3 resultfile  file receiving the step's stdout/stderr (appended)
# Aborts the whole script (exit 1) if the command fails; the subshell's
# exit status carries the failure out past the timestamps.
STEP()
{
banner="$1"
command="$2"
resultfile="$3"
echo "$banner"
(TIMESTAMP; nerror=0 ;
echo "eval $command"
eval $command || nerror=1 ;
TIMESTAMP; exit $nerror) < /dev/null >> "$resultfile" 2>&1
if [ $? -ne 0 ]; then
echo "error in '$banner'. buildhdf5 aborted."
exit 1
fi
}
# Try locate the SZLIB.
# This is a hack because there is no consistent szlib pathname.
# Probes a list of site-specific candidate directories (chosen by guessed
# domain and OS) and sets WITH_SZLIB to "--with-szlib=<dir>" for the first
# one containing include/szlib.h and lib/libsz.a; otherwise WITH_SZLIB is
# left empty.
LOCATE_SZLIB()
{
# Try to guess what domain I am in.
if [ -d /usr/hdf/bin ]; then
# I am in an THG host.
mydomain=thg
elif [ -d /afs/ncsa/projects/hdf/packages ]; then
# I am in an NCSA host that has AFS.
mydomain=ncsa
else
mydomain=unknown
fi
case $mydomain in
thg)
# THG hosts
OS=`uname -s`
echo OS=$OS
case "$OS" in
Linux)
case `uname -m` in
i686) # 32 bits
szlibpaths="/home/packages/szip/static/encoder/Linux2.6-gcc"
;;
x86_64) # 64 bits
szlibpaths="/home/packages/szip/static/encoder/Linux2.6-x86_64-gcc"
;;
*)
# Don't know. Give a shot at standard places.
szlibpaths="/usr/hdf /usr/local"
;;
esac
;;
SunOS)
szlibpaths="/home/packages/szip/static/encoder/SunOS-5.10"
;;
FreeBSD)
case `uname -m` in
i386) # 32 bits
szlibpaths="/home/packages/szip/static/encoder/FreeBSD"
;;
amd64) # 64 bits
szlibpaths="/home/packages/szip/static/encoder/FreeBSD-64"
;;
*)
# Don't know. Give a shot at standard places.
szlibpaths="/usr/hdf /usr/local"
;;
esac
;;
*)
# Don't know. Give a shot at standard places.
szlibpaths="/usr/hdf /usr/local"
;;
esac
;; # end of case thg
ncsa)
# ncsa hosts
OS=`uname -s`
echo OS=$OS
case "$OS" in
HP-UX)
szlibpaths="/afs/ncsa/projects/hdf/packages/szip_new/HPUX-11.00"
;;
Linux)
case `uname -m` in
i686)
szlibpaths="/afs/ncsa/projects/hdf/packages/szip_new/Linux2.4"
;;
*)
# Don't know. Give a shot at standard places.
szlibpaths="/usr/ncsa /usr/sdt"
;;
esac
;;
SunOS)
szlibpaths="/afs/ncsa/projects/hdf/packages/szip_new/SunOS_5.8"
;;
*)
# Don't know. Give a shot at standard places.
szlibpaths="/usr/ncsa /usr/sdt"
;;
esac
;; # end of case ncsa
unknown)
# Unknown domain. Give a shot at the some standard places.
szlibpaths="/usr/local"
;;
esac # end of case $mydomain
echo szlibpaths=$szlibpaths
# Take the first candidate that actually has the header and library.
for x in $szlibpaths dummy; do
if [ $x != dummy -a -f $x/include/szlib.h -a -f $x/lib/libsz.a ]; then
WITH_SZLIB="--with-szlib=$x"
break
fi
done
echo WITH_SZLIB="$WITH_SZLIB"
}
# Configure. Default to do --srcdir.
# Echo the configure invocation, then execute it unless no-execute (-n)
# mode was requested, in which case succeed without running anything.
CONFIG()
{
cfgcmd="$SRCDIR/configure $*"
echo $cfgcmd
if [ "$NOEXEC" = 'noexec' ]; then
true # set exit code as 0
else
$cfgcmd
fi
}
# Main body
TIMESTAMP
# Print the closing timestamp no matter how the script exits.
trap QUIT 0
#
# setup
#
MAKE=${MAKE:-'gmake'}
export MAKE
CONFIG_CMD="CONFIG"
CONFIG_OP= # configure options
CONFIG_ONLY=no # default is configure and build
NOEXEC= # default to execute commands
SRCDIRLIST=". ../hdf5" # places to look for configure
nerror=0
# parse some options
# Unrecognized words are passed straight through to configure.
while [ $# -gt 0 ]; do
case "$1" in
-config)
# do configure only
CONFIG_ONLY=yes
;;
-szlib)
LOCATE_SZLIB
CONFIG_OP="$CONFIG_OP $WITH_SZLIB"
;;
-help)
USAGE
exit 0
;;
-n)
NOEXEC='noexec'
;;
-srcdir)
shift
SRCDIRLIST="$1"
;;
-cxx)
CONFIG_OP="$CONFIG_OP --enable-cxx"
;;
-fortran)
CONFIG_OP="$CONFIG_OP --enable-fortran"
;;
-pp)
CONFIG_OP="$CONFIG_OP --enable-parallel"
;;
*) # pass it as a configure option
CONFIG_OP="$CONFIG_OP $1"
;;
esac
shift
done
# Figure out if srcdir is wished.
# Make sure we are at the library root level
# by checking couple typical files. Not bullet-proof.
# The trailing "dummy" sentinel marks search exhaustion.
for SRCDIR in $SRCDIRLIST dummy; do
if [ x-$SRCDIR = x-dummy ]; then
break
fi
if [ -d $SRCDIR/src -a -d $SRCDIR/config -a -f $SRCDIR/configure ]
then
break
fi
done
if [ x-$SRCDIR = x-dummy ]; then
echo "Could not find the source dir or configure script. Abort."
exit 1
fi
# Configure
# no configure if already done.
if [ ! -f config.status ]; then
STEP "Configure HDF5..." "$CONFIG_CMD $CONFIG_OP" "#config"
else
STEP "Configure Skipped" "echo Configure Skipped" "#config"
fi
if [ x-$CONFIG_ONLY = x-yes ]; then
exit 0
fi
# Compile
STEP "Make HDF5..." "$MAKE" "#make"
# Serial tests
STEP "Testing HDF5 serial parts..." "$MAKE check-s" "#test-s"
# Parallel tests
STEP "Testing HDF5 parallel parts..." "$MAKE check-p" "#test-p"
# all done
echo "No Errors encountered"

@ -0,0 +1,66 @@
#!/usr/bin/env perl
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
require 5.003;
use warnings;
# Purpose: insures that API functions aren't called internally.
# Usage: checkapi H5*.c
# Prints "<file>:<line>: <name>" for each suspect public-API call found.
my $filename = "";
my $lastname = "";
# NOTE(review): this 'if(<>)' reads AND DISCARDS the first input line, so
# the loop below starts at line 2 of the first file -- confirm whether
# skipping that line is intentional.
if(<>) {
while (<>) {
# Derive the bare file name (sans directory and .c suffix) from $ARGV.
if($ARGV =~ /\//) {
($filename) = ($ARGV =~ /^.*\/([A-Za-z0-9_]*)\.c$/);
} else {
($filename) = ($ARGV =~ /([A-Za-z0-9_]*)\.c$/);
}
# The multi/stdio VFDs are "external" sources and are not checked;
# announce the exemption once per file.
if($filename =~ /H5FDmulti|H5FDstdio/) {
if($filename ne $lastname) {
print "$ARGV is exempt from checking\n";
$lastname = $filename;
}
} else {
# Get rid of comments by removing the inside part.
# NOTE(review): $in_comment is used without initialization; it is
# undef (false) until the first multi-line comment is entered.
s|/\*.*?\*/||g;
if ($in_comment) {
if (/\*\//) {
s|.*?\*/||;
$in_comment = 0;
} else {
$_="\n";
}
} elsif (m|/\*|) {
s|/\*.*||;
$in_comment = 1;
}
# Remove character strings
s/\\.//g; # remove escaped characters
s/\".*?\"//g; # remove string constants
# Disregard the following hits
next if /^H5/;
next if /^\#/;
next if /FUNC_ENTER(_NOINIT)*/;
# Anything left matching H5<XX><lower>... is a public API call.
next unless /(H5[A-Z]{1,2}[a-z]\w*)/;
print "$ARGV:$.: $1\n";
}
} continue {
close ARGV if eof; # reset line number
}
}

@ -0,0 +1,263 @@
#!/usr/bin/env perl
require 5.003;
use warnings;
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Dana Robinson
# Spring 2019
# (Original by Robb Matzke)
#
# Purpose: Given the names of C source files this script will print the
# file name, line number, and function name of any function that
# doesn't begin with the letter 'h' or 'H' as stipulated by the
# HDF5 programming style guide.
#
# Emacs users can run this script as the compile command and
# use 'next-error' (usually bound to M-`) to find each name
# violation.
use File::Basename;
# Loop over all files passed to the function
# For each file, print "<file>:<line>: <name>" for every call whose name
# does not start with 'h'/'H' and is not on one of the ignore lists below.
foreach $arg (@ARGV) {
# Get the filename from the path
$filename = fileparse($arg);
# Skip files that don't include H5private.h
# H5system. has to be inspected by hand since it wraps POSIX files
#
# H5detect and H5make_libsettings are created before the library exists
# so calls that link to function replacements won't work. We'll ignore
# it here.
#
# If a user specifies one file, process it no matter what so people
# can inspect files we normally skip (like H5system.c).
$ignore = 0;
# Ignored files in src/
# BUGFIX (throughout): numeric comparisons of $#ARGV now use '>' instead
# of the string operator 'gt'.
if($#ARGV > 0 and $filename =~ /H5FDmulti|H5FDstdio|H5VLpassthru|H5system|H5detect|H5make_libsettings/) {
$ignore = 1;
}
# Ignored atomic test files in test/
if($#ARGV > 0 and $filename =~ /atomic_reader|atomic_writer/) {
$ignore = 1;
}
# Ignored filter plugins in test/
if($#ARGV > 0 and $filename =~ /^filter_plugin\d_/) {
$ignore = 1;
}
# Ignored generators in test/
if($#ARGV > 0 and $filename =~ /^gen_/) {
$ignore = 1;
}
if($ignore) {
print "$filename is exempt from using Standard library macro wrappers\n";
next;
}
# Open the file
# BUGFIX: the warning used to interpolate '!$\' (negated output record
# separator) instead of the intended $! errno message.
open(my $fh, "<", $arg) or do {
warn "NOTE: Unable to open $arg: $!\n";
next;
};
# Loop over all lines in the file to find undecorated functions
while (<$fh>) {
# Get rid of comments by removing the inside part.
s|/\*.*?\*/||g;
if ($in_comment) {
if (/\*\//) {
s|.*?\*/||;
$in_comment = 0;
} else {
$_="\n";
}
} elsif (m|/\*|) {
s|/\*.*||;
$in_comment = 1;
}
# Get rid of string constants if they begin and end on this line.
# NOTE(review): [^\1] does not backreference inside a character class;
# kept as-is since changing it would alter which strings are stripped.
s/([\'\"])([^\1]|\\\1)*?\1/$1$1/g;
# Get rid of preprocessor directives
s/^\#.*//;
# Skip callbacks invoked as methods in a struct
next if $_ =~ /\b(\)?]?->|\.)\(?([a-z_A-Z]\w*)\s*\(/;
# Now find all function calls on this line which don't start with 'H'
while (($name)=/\b([a-z_A-GI-Z]\w*)\s*\(/) {
$_ = $';
# Ignore C statements that look sort of like function
# calls.
next if $name =~ /^(if|for|offsetof|return|sizeof|switch|while|void)$/;
# Ignore things that get misdetected because of the simplified
# parsing that takes place here.
next if $name =~ /^(int|herr_t|_term_interface|_term_package)$/;
# These are really HDF5 functions/macros even though they don't
# start with `h' or `H'.
next if $name =~ /^FUNC_(ENTER|LEAVE)(_(NO)?API|_PACKAGE|_STATIC)?(_NAMECHECK_ONLY|_NOFS|_NOCLEAR|_NOINIT|_NOPUSH)?(_NOFUNC|_TAG)?$/;
next if $name =~ /^(BEGIN|END)_FUNC$/;
next if $name =~ /^U?INT(8|16|32|64)(ENCODE|DECODE)(_VAR)?$/;
next if $name =~ /^CI_(PRINT_STATS|INC_SRC|INC_DST)$/;
next if $name =~ /^(ABS|ADDR_OVERFLOW|ALL_MEMBERS|BOUND|CONSTR|DETECT_[I|F|M]|DOWN)$/;
next if $name =~ /^(MIN3?|MAX3?|NELMTS|POWER_OF_TWO|REGION_OVERFLOW)$/;
next if $name =~ /^(SIZE_OVERFLOW|UNIQUE_MEMBERS|S_ISDIR)$/;
next if $name =~ /^addr_defined$/;
next if $name =~ /^TERMINATOR$/;
# Ignore callback invocation
next if $name =~ /^(op|cb|OP|iter_op|func)$/;
# Ignore main
next if $name =~ /^main$/;
# This often appears in preprocessor lines that span multiple lines
next if $name =~ /^(defined)$/;
# These are Windows system calls. Ignore them.
next if $name =~ /^(_get_osfhandle|GetFileInformationByHandle|SetFilePointer|GetLastError|SetEndOfFile)$/;
next if $name =~ /^(FindNextFile|FindClose|_tzset|Wgettimeofday|GetSystemTimeAsFileTime|GetUserName)$/;
next if $name =~ /^(DeleteCriticalSection|TlsFree|TlsGetValue|CreateThread)$/;
next if $name =~ /^(ExpandEnvironmentStringsA|LockFileEx|UnlockFileEx)$/;
next if $name =~ /^(DllMain|LocalAlloc|LocalFree)$/;
next if $name =~ /^(FindFirstFileA|FindNextFileA)$/;
next if $name =~ /^(_beginthread|(Initialize|Enter|Leave)CriticalSection|TlsAlloc)$/;
# These are MPI function calls. Ignore them.
next if $name =~ /^(MPI_)/;
# These are POSIX threads function calls. Ignore them.
next if $name =~ /^pthread_/;
# These are zlib & szlib function calls. Ignore them.
next if $name =~ /^(inflate|SZ_)/;
next if $name =~ /^compress2$/;
# These is an H5Dfill function. Ignore it in this file.
if($filename =~ /H5Dfill/) {
next if $name =~ /^(alloc_func)$/;
}
# These are H5Zscaleoffset functions. Ignore them in this file.
if($filename =~ /H5Zscaleoffset/) {
next if $name =~ /^(pow_fun|round_fun|abs_fun|lround_fun|llround_fun)$/;
}
# This is a macro parameter in H5Rint.c. Ignore it in this file.
if($filename =~ /H5Rint/) {
next if $name =~ /^(func)$/;
}
# Internal calls in the HDFS VFD (H5FDhdfs.c). Ignore it in this file.
if($filename =~ /H5FDhdfs/) {
next if $name =~ /^(hdfs)/;
}
# Macros, etc. from the mirror VFD (H5FDmirror.c). Ignore in this file.
if($filename =~ /H5FDmirror/) {
next if $name =~ /^(LOG)/;
next if $name =~ /^(BSWAP_64|is_host_little_endian)$/;
}
# These are things in H5FDs3comms.c and H5FDros3.c. Ignore them in these files.
if($filename =~ /H5FDs3comms|H5FDros3/) {
next if $name =~ /^(curl_|curlwritecallback|gmnow)/;
next if $name =~ /^(ros3_|ROS3_|S3COMMS_)/;
next if $name =~ /^(EVP_sha256|SHA256|ISO8601NOW)$/;
}
# TESTING (not comprehensive - just noise reduction)
# Test macros and functions (testhdf5.h)
next if $name =~ /^(AddTest|TestErrPrintf|TestSummary|TestCleanup|TestShutdown)$/;
next if $name =~ /^(CHECK|CHECK_PTR|CHECK_PTR_NULL|CHECK_PTR_EQ|CHECK_I)$/;
next if $name =~ /^(VERIFY|VERIFY_STR|VERIFY_TYPE|MESSAGE|ERROR)$/;
# Test macros and functions (h5test.h)
next if $name =~ /^(TESTING|PASSED|SKIPPED|PUTS_ERROR|FAIL_PUTS_ERROR|FAIL_STACK_ERROR|TEST_ERROR|AT)$/;
next if $name =~ /^(GetTestExpress)$/;
# Ignore functions that start with test_ or check_
next if $name =~ /^test_/;
next if $name =~ /^check_/;
# Ignore functions that start with h5_
next if $name =~ /^h5_/;
# Ignore process completed status
next if $name =~ /(WIFEXITED|WEXITSTATUS|WIFSIGNALED|WTERMSIG|WCOREDUMP|WIFSTOPPED|WSTOPSIG)/;
# Ignore usage functions
next if $name =~ /^usage$/;
# Ignore callbacks
next if $name =~ /(_cb\d?)$/;
# Specific tests (not even remotely comprehensive)
# accum test code
if($filename =~ /accum/) {
next if $name =~ /^(accum_)/;
}
# cache test code
if($filename =~ /cache/) {
next if $name =~ /(_entry|_entries|_cache|_check|_dependency|_status|_op)$/;
next if $name =~ /^(verify_|smoke_check_|row_major_|col_major_)/;
next if $name =~ /^(resize_configs_are_equal|CACHE_ERROR)$/
}
# Splitter VFD test code. Ignore in vfd.c.
if($filename =~ /vfd/) {
next if $name =~ /^(SPLITTER_|splitter_)/;
next if $name =~ /(_splitter_)/;
next if $name =~ /^(file_exists)$/;
}
# S3 VFD test code. Ignore in ros3.c and s3comms.c.
# HDFS VFD test code. Ignore in hdfs.c.
if($filename =~ /ros3|s3comms|hdfs/) {
next if $name =~ /^(JSVERIFY|JSFAILED_|JSERR_|jserr_|FAIL_)/;
next if $name =~ /^(curl_)/;
next if $name =~ /^(S3COMMS_FORMAT_CREDENTIAL|ISO8601NOW|gmnow)$/;
}
# VDS test code. Ignore in vds.c.
if($filename =~ /vds/) {
next if $name =~ /^(vds_)/;
}
print "$filename:$.: $name\n";
}
}
# Close the file
close($fh);
}
# When more than one file was given, remind the user how to force-check
# exempt files.  BUGFIX: use numeric '>' rather than the string operator
# 'gt' to compare $#ARGV.
if($#ARGV > 0) {
print "\n";
print "NOTE:\n";
print "If any files were skipped due to being exempt, you can inspect them manually\n";
print "by using this script on them one at a time, which will always process the file.\n";
}

@ -0,0 +1,848 @@
#! /bin/sh
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
# Check Copyright notice.
# Check that all the files have the proper copyright notice.
# It goes down directories recursively.
#
# Programmer: Albert Cheng
# Created Data: 2003/07/22
# Modification:
# Rewrote most of it. Albert Cheng, 2005/10/10.
# Setup
#
PROGNAME=$0
DIFF="diff"
# Control/input files.
INITFILE=.h5chkright.ini
EXCEPTIONS=/tmp/h5chkright.except.$$
tmpfile=/tmp/h5chkright_tmp.$$
EXCEPTIONDIRS="-name .git" # at least skip .git directories.
EXTRACTEDFILE=/tmp/h5chkright.extracted.$$
# Option defaults; set by PARSE_OPTION.
VERBOSE= # default no
FIXIT= # default no
DIRS=. # default current directory
# Result counters for the final report.
NFAILEDFILES=0 # Number of failed files found.
NPASSEDFILES=0 # Number of passed files found.
NFIXEDFILES=0 # Number of files fixed.
NFIXFAILEDFILES=0 # Number of files fix failed.
NUMBEGINLINES=60 # Copyright notice should be located within the
# this number of lines at the beginning of the file.
# Marker strings: the current THG notice, and the old UIUC notice that
# FIX_COPYRIGHT knows how to replace.
THGCOPYRIGHTSTR="Copyright by The HDF Group."
UICOPYRIGHTSTR="Copyright by the Board of Trustees of the University of Illinois"
# Per-category log files (one file name per line; counted at the end).
PASSEDLOG=/tmp/h5chkright_passed.$$
SKIPPEDLOG=/tmp/h5chkright_skipped.$$
FAILEDLOG=/tmp/h5chkright_failed.$$
FIXEDLOG=/tmp/h5chkright_fixed.$$
FIXFAILEDLOG=/tmp/h5chkright_fixfailed.$$
# Reference copyright notices, one per comment style; generated by
# BUILDCOPYRIGHT.
C_COPYRIGHT=/tmp/h5chkright_C.$$ # C style copyright
FTN_COPYRIGHT=/tmp/h5chkright_FTN.$$ # Fortran style copyright
HTM_COPYRIGHT=/tmp/h5chkright_HTM.$$ # HTML style copyright
SH_COPYRIGHT=/tmp/h5chkright_SH.$$ # SHELL style copyright
SH_COPYRIGHT2=/tmp/h5chkright_SH2.$$ # SHELL style copyright, 2nd style.
WINBAT_COPYRIGHT=/tmp/h5chkright_WINBAT.$$ # Windows Batch file Copyright notice
CONFIGURE_AC_COPYRIGHT=/tmp/h5chkright_CONFIGURE_AC.$$ # configure.ac file Copyright notice
tmpfixfile=/tmp/h5chkright_fix.$$ # Temporary fixed copy of file
# Caution message of the fix feature.
# Printed by USAGE and at the end of a run that fixed any file.
FIXCAUTIONMSG()
{
    echo "**CAUTION**"
    echo "The fix is a best attempt. Check the changes before committing them."
}
# Help page
# Print the usage summary (plus the fix-feature caution) to stdout.
# Bug fix: the original help text said "name-patter" and closed the
# directory list with '}' instead of ']'.
#
USAGE()
{
    cat <<EOF
Usage: $PROGNAME [-h | -help] [-fname name-pattern] [-v | -v9] [dir1 dir2 ...]
Check copyright notices of files in [dir1 dir2 ...].
Default is to check files in current directory.
-h | -help
show this page.
-fname name-pattern
limit to files of name-pattern
-v
verbose mode
-v9
highly verbose
-fix
fix failed files if possible.
EOF
    FIXCAUTIONMSG
}
# Print Debug output
#
# Echo the arguments only when verbose mode is on ($VERBOSE non-empty);
# otherwise produce no output.  Always returns success.
PRINTDEBUG()
{
    case "$VERBOSE" in
        '') : ;;           # quiet mode: nothing to do
        *)  echo $* ;;     # unquoted on purpose, matching caller expectations
    esac
}
# Generate various styles of Copyright notices
#
# Writes one reference copy of the HDF5 copyright notice per comment style
# into the /tmp files named by the *_COPYRIGHT variables.  The reference
# copies are later diffed against the notice extracted from each checked
# file (MATCH_COPYRIGHT) and used verbatim as replacement text by
# FIX_COPYRIGHT.  The here-documents use a quoted delimiter (<< \EOF) so
# the notice text is taken literally, with no expansion.
BUILDCOPYRIGHT()
{
    # C and C++ source Copyright notice
    cat > ${C_COPYRIGHT} << \EOF
* Copyright by The HDF Group. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
* distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
EOF
    # Fortran9X source Copyright notice
    cat > ${FTN_COPYRIGHT} << \EOF
! Copyright by The HDF Group. *
! All rights reserved. *
! *
! This file is part of HDF5. The full HDF5 copyright notice, including *
! terms governing use, modification, and redistribution, is contained in *
! the COPYING file, which can be found at the root of the source code *
! distribution tree, or in https://www.hdfgroup.org/licenses. *
! If you do not have access to either file, you may request a copy from *
! help@hdfgroup.org. *
EOF
    # HTML file Copyright notice
    cat > ${HTM_COPYRIGHT} << \EOF
* Copyright by The HDF Group. *
* All rights reserved. *
* *
* This file is part of HDF5. The full HDF5 copyright notice, including *
* terms governing use, modification, and redistribution, is contained in *
* the COPYING file, which can be found at the root of the source code *
* distribution tree, or in https://www.hdfgroup.org/licenses. *
* If you do not have access to either file, you may request a copy from *
* help@hdfgroup.org. *
EOF
    # Shell style Copyright notice
    cat > ${SH_COPYRIGHT} << \EOF
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
EOF
    # Shell style Copyright notice (2nd type)
    cat > ${SH_COPYRIGHT2} << \EOF
## Copyright by The HDF Group.
## All rights reserved.
##
## This file is part of HDF5. The full HDF5 copyright notice, including
## terms governing use, modification, and redistribution, is contained in
## the COPYING file, which can be found at the root of the source code
## distribution tree, or in https://www.hdfgroup.org/licenses.
## If you do not have access to either file, you may request a copy from
## help@hdfgroup.org.
EOF
    # Windows Batch file Copyright notice
    cat > ${WINBAT_COPYRIGHT} << \EOF
@REM Copyright by The HDF Group.
@REM All rights reserved.
@REM
@REM This file is part of HDF5. The full HDF5 copyright notice, including
@REM terms governing use, modification, and redistribution, is contained in
@REM the COPYING file, which can be found at the root of the source code
@REM distribution tree, or in https://www.hdfgroup.org/licenses.
@REM If you do not have access to either file, you may request a copy from
@REM help@hdfgroup.org.
EOF
    # configure.ac file Copyright notice
    cat > ${CONFIGURE_AC_COPYRIGHT} << \EOF
dnl Copyright by The HDF Group.
dnl All rights reserved.
dnl
dnl This file is part of HDF5. The full HDF5 copyright notice, including
dnl terms governing use, modification, and redistribution, is contained in
dnl the COPYING file, which can be found at the root of the source code
dnl distribution tree, or in https://www.hdfgroup.org/licenses.
dnl If you do not have access to either file, you may request a copy from
dnl help@hdfgroup.org.
EOF
}
# Initialization
#
# Initialize file format:
# Each line is a keyword for action and the rest are values.
# Keywords:
# '#' Comments
# skip Files to be skipped
# prune Directories to be skipped. Notice this prunes all directories
# with the same name. E.g.,
# "prune test" skips test, fortran/test, c++/test, ...
# (See -name option in the find command.)
# prunepath Directory or file to be skipped. Notice this is different from
# prunes since it matches the exact pathname. E.g.,
# "prunepath ./tools/testfiles" skips the directory/file matching
# exactly that path but NOT tools/h5dump/testfiles nor
# tools/h5dump/testfiles.
# (See -path option in the find command.)
#
# Reads $INITFILE (if readable), populating $EXCEPTIONS (egrep patterns of
# files to skip) and $EXCEPTIONDIRS (find(1) predicates to prune), then
# wraps $EXCEPTIONDIRS in the grouping needed by the main find command.
INITIALIZATION()
{
    # clean up log files
    rm -f $PASSEDLOG $SKIPPEDLOG $FAILEDLOG $FIXEDLOG $FIXFAILEDLOG
    # Generate various styles of copyright notice.
    BUILDCOPYRIGHT
    echo Initialization...
    # setup exceptions.
    cp /dev/null $EXCEPTIONS
    # Process Initial setting file if exists
    if [ -r $INITFILE ]; then
        while read key value; do
            case "$key" in
                \#* | '') # Comment or blank line, skip it
                    continue
                    ;;
                skip)
                    # Pattern of file names to skip; collected for egrep -f.
                    echo $key $value
                    echo $value >> $EXCEPTIONS
                    ;;
                prune)
                    # Prune by directory basename anywhere in the tree.
                    echo $key $value
                    EXCEPTIONDIRS="$EXCEPTIONDIRS -o -name $value"
                    ;;
                prunepath)
                    # Prune by exact path match.
                    echo $key $value
                    EXCEPTIONDIRS="$EXCEPTIONDIRS -o -path $value"
                    ;;
                *)
                    echo unknown setting input in file $INITFILE
                    echo $key $value
                    ;;
            esac
        done < $INITFILE
    fi
    # Change EXCEPTIONDIRS to be compatible with find command.
    EXCEPTIONDIRS="( $EXCEPTIONDIRS ) -prune -o"
    echo Initialization done
}
# Parse Options
#
# Parse the command-line arguments.  Option flags set the globals FNAME,
# FIXIT and VERBOSE; the first non-option argument and everything after it
# become the list of directories to scan (DIRS).
PARSE_OPTION()
{
    while test $# -gt 0 ; do
        case "$1" in
            -h | -help )
                USAGE
                exit 0
                ;;
            -fname )
                # The name pattern follows in the next argument.
                shift
                FNAME="$1"
                ;;
            -fix )
                FIXIT=yes
                ;;
            -v* )
                VERBOSE=yes
                # -v9 additionally turns on shell tracing.
                if test X$1 = X-v9; then
                    set -x
                fi
                ;;
            -* )
                echo "***Unknown option ($1)"
                USAGE
                exit 1
                ;;
            * )
                # First non-option argument: it and the rest are the
                # directories to scan.
                DIRS=$*
                break
                ;;
        esac
        shift
    done
}
# Rinse the file by,
# removing all \r which is often present in Windows files;
# replace tabs with equivalent spaces;
# removing all trailing spaces.
# $1 is the file to be rinsed.
RINSE()
{
    rinsetarget=$1
    # Work from a scratch copy so the pipeline can safely write back to
    # the original path.
    cp $rinsetarget $tmpfile
    dos2unix < $tmpfile | expand | sed -e 's/ *$//' > $rinsetarget
}
# Locate a line in the file and print the line number.
# Print 0 if not found; -1 if error.
# $1 The line.
# $2 The file.
#
FindLineInFile()
{
    # Argument-count sanity check: anything but exactly two is an error.
    if [ $# -ne 2 ]; then
        echo -1
        return
    fi
    needle=$1
    haystack=$2
    # grep -n prefixes each matching line with "NUM:"; keep only the first
    # match and strip everything after the line number.
    firsthit=`grep -n "${needle}" $haystack | head -1`
    if [ -n "$firsthit" ]; then
        echo "$firsthit" | cut -f1 -d:
    else
        # Pattern not present in the file.
        echo 0
    fi
}
# Match Copyright notice.
# $1 file which contains the expected copyright notice.
# $2 file in which to look for the copyright notice.
# Copyright notice must be found within the beginning $NUMBEGINLINES of lines.
# Hunt for the particular string $THGCOPYRIGHTSTR which signifies the beginning
# of the copyright notice.
# Prints PASSED or FAILED on stdout.  Side effect: leaves the extracted
# candidate notice (rinsed via RINSE) in $EXTRACTEDFILE so the caller can
# display the diff on failure.
#
MATCH_COPYRIGHT()
{
    if [ $# -ne 2 ]; then
        # expect two arguments
        echo FAILED
        return
    fi
    COPYRIGHTFILE=$1
    f=$2
    nlines=`wc -l ${COPYRIGHTFILE} | cut -f1 -d' '`
    # Find a line that contains the copyright string and its line number in
    # the file.
    begin=`FindLineInFile "${THGCOPYRIGHTSTR}" $f`
    if [ "$begin" -le 0 ] ; then
        # Not found, generate an empty dummy file
        cp /dev/null ${EXTRACTEDFILE}
        # 'false' seeds $? so the shared PASSED/FAILED test below fails.
        false
    else
        # Back up one line before the matched string -- presumably to
        # include the comment-opening line of the notice; TODO confirm.
        if [ $begin -gt 1 ]; then
            begin=`expr $begin - 1`
        fi
        end=`expr $begin + $nlines - 1`
        sed -n -e "${begin},${end}p" < $f > ${EXTRACTEDFILE}
        RINSE ${EXTRACTEDFILE}
        $DIFF ${EXTRACTEDFILE} ${COPYRIGHTFILE} >/dev/null 2>&1
    fi
    # $? here is either the 'false' above or the diff result; do not insert
    # any command between the fi and this test.
    if test $? -eq 0; then
        echo PASSED
    else
        echo FAILED
    fi
}
# Fix Copyright notice.
# $1 file which contains the expected copyright notice.
# $2 file in which to look for the copyright notice.
# Copyright notice must be found within the beginning $NUMBEGINLINES of lines.
# Hunt for the particular string $THGCOPYRIGHTSTR which signifies the beginning
# of the copyright notice.  If only the old UIUC notice ($UICOPYRIGHTSTR)
# is present, replace starting there instead.  If neither is found, record
# the file as fix-failed.
#
FIX_COPYRIGHT()
{
    if [ $# -ne 2 ]; then
        # expect two arguments
        echo FAILED
        return
    fi
    COPYRIGHTFILE=$1
    f=$2
    nlines=`wc -l ${COPYRIGHTFILE} | cut -f1 -d' '`
    # If the file has UICOPYRIGHTSTR but not THGCOPYRIGHTSTR, then replace the lines
    # starting at UICOPYRIGHTSTR and down.
    # If the file has THGCOPYRIGHTSTR, then replace the lines starting at the
    # THGCOPYRIGHTSTR and down.
    # If neither found, abort it.
    # Find a line that contains the THG copyright string and its line number in
    # the file.
    insertbegin=`FindLineInFile "${THGCOPYRIGHTSTR}" $f`
    if [ $insertbegin -gt 0 ]; then
        insertUIbegin=`FindLineInFile "${UICOPYRIGHTSTR}" $f`
        if [ $insertUIbegin -gt 0 ]; then
            insertend=`expr $insertbegin + $nlines + 1`
        else
            insertend=`expr $insertbegin + $nlines`
        fi
    else
        insertbegin=`FindLineInFile "${UICOPYRIGHTSTR}" $f`
        if [ $insertbegin -gt 0 ]; then
            insertend=`expr $insertbegin + $nlines - 1` # no need to -2. See below.
        else
            # Bug fix: pass the file name so the failure message and the
            # fix-failed log identify which file could not be fixed (the
            # original called FIXFAILED with no argument).
            FIXFAILED $f
            return
        fi
    fi
    # Copy line 1 up to insertbegin from original file
    xbegin=`expr $insertbegin - 1`
    if [ $xbegin -gt 0 ]; then
        sed -n -e "1,${xbegin}p" $f > $tmpfixfile
    else
        cp /dev/null $tmpfixfile # make it empty.
    fi
    # now the correct copyright file
    cat $COPYRIGHTFILE >> $tmpfixfile
    # the rest of the original file
    sed -n -e "${insertend},"'$p' $f >> $tmpfixfile
    # copy them all back to the original file
    cp $tmpfixfile $f
    # Bug fix: record the fixed file by name (the original called FIXED
    # with no argument, logging a blank line).
    FIXED $f
    rm -f $tmpfixfile
}
# ----------------------------------------------------------------------
# Per-file-type copyright checkers.
# All seven checkers previously duplicated the same case statement and
# differed only in which reference notice they compared against; the
# shared control flow now lives in CHECK_COPYRIGHT_AGAINST so it is
# maintained in one place.  The seven public names are unchanged.
# ----------------------------------------------------------------------
# Shared checker.
# $1 reference copyright notice file (one of the *_COPYRIGHT temp files).
# $2 file to check.
# On PASSED, record the file; on FAILED, record it, show the diff between
# the extracted notice and the reference, and attempt a repair when the
# -fix option was given.
CHECK_COPYRIGHT_AGAINST()
{
    xref=$1
    f=$2
    case `MATCH_COPYRIGHT $xref $f` in
        PASSED)
            PASSED $f
            return
            ;;
        FAILED)
            # show the difference
            FAILED $f
            $DIFF ${EXTRACTEDFILE} ${xref}
            if [ -n "$FIXIT" ]; then
                FIX_COPYRIGHT $xref $f
            fi
            ;;
    esac
}
# Check C and C++ source files
#
C_SOURCE()
{
    CHECK_COPYRIGHT_AGAINST $C_COPYRIGHT $1
}
# Check Fortran90 source files
#
FORTRAN_SOURCE()
{
    CHECK_COPYRIGHT_AGAINST $FTN_COPYRIGHT $1
}
# Check HTML Files
#
HTML_FILE()
{
    CHECK_COPYRIGHT_AGAINST $HTM_COPYRIGHT $1
}
# Check Shell script files which use the style of copyright notice of leading #'s.
#
SHELL_FILE()
{
    CHECK_COPYRIGHT_AGAINST $SH_COPYRIGHT $1
}
# Check files that use #'s as comments such as Makefile.
# The Copyright body of text happen to be the same as used by Shell script
# files.
#
MAKE_FILE()
{
    CHECK_COPYRIGHT_AGAINST $SH_COPYRIGHT $1
}
# Check Windows Batch files
#
BATCH_FILE()
{
    CHECK_COPYRIGHT_AGAINST $WINBAT_COPYRIGHT $1
}
# Check Configure.in type files
#
CONFIGURE_AC_FILE()
{
    CHECK_COPYRIGHT_AGAINST $CONFIGURE_AC_COPYRIGHT $1
}
# Guess the type of file.
# Inspect the first 5 lines to guess what type of file it is.
# Prints one of: SHELL_FILE, C_SOURCE, FORTRAN_SOURCE, SHELL_FILE,
# CONFIGURE_AC_FILE, HTML_FILE or UNKNOWN_TYPE on stdout.
#
GUESS_File_Type()
{
    if [ $# -ne 1 ]; then
        echo "wrong number of arguments($#)"
        return
    fi
    f=$1
    # Now guess the file type.
    head -5 < $f > $tmpfile
    if head -1 < $tmpfile | grep '^#!' > /dev/null; then
        # First line is "#!". It is likely a shell script or similar type.
        echo SHELL_FILE
    elif grep '\/\*' < $tmpfile > /dev/null; then
        # Found some lines containing '/*'. It may be a C/C++ style file.
        echo C_SOURCE
    elif grep '^!' < $tmpfile > /dev/null; then
        # Some lines start with a "!". It may be a Fortran 9X style file.
        echo FORTRAN_SOURCE
    elif grep '^#' < $tmpfile > /dev/null; then
        # Some lines start with a "#". It may be a shell like type.
        # Put this after C_SOURCE which may have #define and such lines.
        echo SHELL_FILE
    elif grep '^dnl' < $tmpfile > /dev/null; then
        # Some lines start with a "dnl". It may be a configure.ac type file.
        # NOTE(review): this test runs after the '^#' one, so an autoconf/m4
        # file whose first 5 lines also contain '#' comments is classified
        # as SHELL_FILE -- confirm that is the intended precedence.
        echo CONFIGURE_AC_FILE
    elif grep -i '^<html>' < $tmpfile > /dev/null || \
        grep '^<!--' < $tmpfile > /dev/null ; then
        # Some lines start with a "<html>" or having an html comment tag.
        # It may be an HTML file.
        echo HTML_FILE
    else
        # Unknown type.
        echo UNKNOWN_TYPE
    fi
}
# Check Unknown type file.
# First check if there is something that resemble a copyright notice in
# the first "page" ($NUMBEGINLINES lines). If so, then inspect the first
# 5 lines to guess what type of file it is and verify the Copyright
# notice according to the guessed type.
#
UNKNOWN_FILE()
{
    f=$1
    # Bug fix: the original grepped for the undefined variable
    # $COPYRIGHTSTR, whose empty expansion matches every line; grep for
    # the real marker $THGCOPYRIGHTSTR instead.
    if head -$NUMBEGINLINES < $f | grep "${THGCOPYRIGHTSTR}" > /dev/null; then
        xftype=`GUESS_File_Type $f`
        PRINTDEBUG f=$f xftype=$xftype > /dev/tty
        # Dispatch on the guessed type.  (The original had a duplicate
        # SHELL_FILE arm and no arm for CONFIGURE_AC_FILE even though
        # GUESS_File_Type can return it.)
        case $xftype in
            SHELL_FILE)        SHELL_FILE $f;;
            C_SOURCE)          C_SOURCE $f;;
            FORTRAN_SOURCE)    FORTRAN_SOURCE $f;;
            HTML_FILE)         HTML_FILE $f;;
            CONFIGURE_AC_FILE) CONFIGURE_AC_FILE $f;;
            UNKNOWN_TYPE)      UNKNOWN_TYPE $f;;
        esac
    else
        # No copyright notice found at all.
        UNKNOWN_TYPE $f
    fi
}
# ----------------------------------------------------------------------
# Result-recording helpers.  Each one optionally prints a progress line
# and appends the file name to the matching log file so the final report
# can count the results.
# ----------------------------------------------------------------------
# Passed checking.
# $1 file that has passed.
#
PASSED()
{
    [ X-$VERBOSE = X-yes ] && echo " PASSED"
    echo $1 >> $PASSEDLOG
}
# Unknown file type. Considered a fail.
# $1 name of unknown file.
#
UNKNOWN_TYPE()
{
    # tee both reports the failure and records it in the failed log.
    echo "UNKNOWN type: $1" | tee -a $FAILEDLOG
}
# Skip checking.
# $1 file that is skipped.
#
SKIP()
{
    [ X-$VERBOSE = X-yes ] && echo " SKIPPED"
    echo $1 >> $SKIPPEDLOG
}
# Failed checking.
# $1 file that has failed.
#
FAILED()
{
    echo "FAILED: $1"
    echo $1 >> $FAILEDLOG
}
# Copyright fixed.
# $1 file that has been fixed.
#
FIXED()
{
    [ X-$VERBOSE = X-yes ] && echo " FIXED"
    echo $1 >> $FIXEDLOG
}
# Copyright fix failed.
# $1 file that has failed.
#
FIXFAILED()
{
    echo "FIX FAILED: $1"
    echo $1 >> $FIXFAILEDLOG
}
#
# Main body
PARSE_OPTION "$@"
INITIALIZATION
# use find to list all those file names and process them
# one by one.
# When -fname was given, restrict the walk to matching names; otherwise
# walk everything except the pruned directories/paths in $EXCEPTIONDIRS.
# NOTE: the while loop runs in a pipeline subshell, which is why results
# are recorded in log files rather than shell counters.
if test -z "$FNAME" ; then
    find $DIRS $EXCEPTIONDIRS -type f -print
else
    find $DIRS -type f -name "${FNAME}" -print
fi |
while read file; do
    if test X-$VERBOSE = X-yes; then
        echo checking ${file}...
    fi
    # Files matching the "skip" patterns collected in $EXCEPTIONS are
    # recorded as skipped; everything else is dispatched on its name.
    if echo $file | egrep -f $EXCEPTIONS > /dev/null; then
        SKIP ${file}
    else
        case ${file} in
            *.c | *.h | *.cpp )
                C_SOURCE ${file}
                ;;
            *.f90 )
                FORTRAN_SOURCE ${file}
                ;;
            *.htm | *.html )
                HTML_FILE ${file}
                ;;
            *.sh | *.sh.in )
                SHELL_FILE ${file}
                ;;
            *.pl )
                # Perl script files are similar to Shell files
                SHELL_FILE ${file}
                ;;
            *Makefile | *Makefile.in | *Makefile.am | Makefile.dist )
                MAKE_FILE ${file}
                ;;
            configure.ac )
                CONFIGURE_AC_FILE ${file}
                ;;
            *.bat | *.BAT )
                # Windows Batch files
                BATCH_FILE ${file}
                ;;
            *.h5 | *.hdf5 )
                # Ignore HDF5 data files
                continue
                ;;
            *.jpg | *.obj | *.gif | *.png | *.pdf | \
            *.JPG | *.OBJ | *.GIF | *.PNG | *.PDF )
                # Ignore binary data files
                continue
                ;;
            *.zip | *.dsp | *.dsw | *.js | *.sln )
                # Ignore Windows binary or special files.
                # .dsp & .dsw are Visual Studio project files.
                # .sln are .NET solution files.
                # .js are Microsoft Java Script files.
                continue
                ;;
            *CVS/* )
                # Ignore CVS control files.
                continue
                ;;
            *.txt | *.TXT )
                # Ignore text files.
                continue
                ;;
            *)
                # No recognized name: sniff the content instead.
                UNKNOWN_FILE $file
                ;;
        esac
    fi
done
# check results
# Tally the per-category counts from the log files (a log file exists only
# if at least one file fell into that category).
if [ -f $PASSEDLOG ]; then
    NPASSEDFILES=`wc -l < $PASSEDLOG`
fi
if [ -f $FAILEDLOG ]; then
    NFAILEDFILES=`wc -l < $FAILEDLOG`
fi
if [ -f $FIXEDLOG ]; then
    NFIXEDFILES=`wc -l < $FIXEDLOG`
fi
if [ -f $FIXFAILEDLOG ]; then
    NFIXFAILEDFILES=`wc -l < $FIXFAILEDLOG`
fi
# Cleanup of all temporary files.
# Bug fix: the original left $WINBAT_COPYRIGHT, $CONFIGURE_AC_COPYRIGHT,
# $tmpfile and $tmpfixfile behind in /tmp.
rm -f $C_COPYRIGHT $FTN_COPYRIGHT $HTM_COPYRIGHT $SH_COPYRIGHT $SH_COPYRIGHT2
rm -f $WINBAT_COPYRIGHT $CONFIGURE_AC_COPYRIGHT
rm -f $EXCEPTIONS $EXTRACTEDFILE $tmpfile $tmpfixfile
rm -f $PASSEDLOG $SKIPPEDLOG $FAILEDLOG $FIXEDLOG $FIXFAILEDLOG
# Report Results
# Results are not total accurate--e.g., Skipped are not counted, thus not
# reported.
#
echo "*******************"
echo " REPORT"
echo "*******************"
echo Number of passed files = $NPASSEDFILES
echo Number of failed files = $NFAILEDFILES
echo Number of fixed files = $NFIXEDFILES
echo Number of fix failed files = $NFIXFAILEDFILES
if [ $NFIXEDFILES -gt 0 ]; then
    FIXCAUTIONMSG
fi
# Exit non-zero when any file failed the check so callers (e.g. CI) can
# detect the failure.
if [ $NFAILEDFILES -gt 0 ]; then
    exitcode=1
else
    exitcode=0
fi
exit $exitcode

@ -0,0 +1,380 @@
#! /bin/sh
# Build and Test HDF5 using cmake.
# Author: Allen Byrne
# Albert Cheng
# Creation Date: Nov 2012
# Modified:
# Changed to use the quick steps described in INSTALL_CMake.txt. (AKC 2014/1/1)
# Copyright: The HDF Group, 2012-14
# Debug Print: remove the comment hash if you want DPRINT to do echo
DPRINT=:
#DPRINT=echo
# variable names
# The "extra" number is the step number and easier to see all logfiles in
# the sorted order of steps
progname=`basename $0` # program name
configlog="#${progname}_1config.log"
makelog="#${progname}_2build.log"
testlog="#${progname}_3test.log"
packlog="#${progname}_4pack.log"
installlog="#${progname}_5install.log"
vflag=1 # verbose flag default to on.
config_summary=libhdf5.settings
exit_code=0
# This command should be in the source directory's bin/
# and should have invoked as "$srcdir/bin/$progname" or
# "bin/$progname". So, by stripping bin/$progname from $0,
# we can find $srcdir.
if [ "$0" = "bin/${progname}" ]; then
    srcdir="." # current directory
else
    # $0 is $srcdir/bin/$progname
    srcdir=`echo $0 | sed -e s%/bin/$progname\$%%`
fi
# Sanity check
if [ ! -r $srcdir/bin/$progname ]; then
    # Bug fix: the original message interpolated the undefined variable
    # $srdir (a typo) and always printed "srcdir()".
    echo "encountered error while trying to find srcdir($srcdir)"
    exit 1
fi
# Cmake build options
cacheinit=$srcdir/config/cmake/cacheinit.cmake
build_cpp_lib=-DHDF5_BUILD_CPP_LIB:BOOL=OFF # C++ interface default off
build_fortran=-DHDF5_BUILD_FORTRAN:BOOL=OFF # Fortran interface default off
build_hl_lib=-DHDF5_BUILD_HL_LIB:BOOL=ON # High Level interface default on
build_threadsafe=-DHDF5_ENABLE_THREADSAFE:BOOL=OFF # Threadsafe feature default off
build_testing=-DBUILD_TESTING:BOOL=ON # Build tests default on
build_test_shell=-DTEST_SHELL_SCRIPTS:BOOL=ON # Run shell script tests default on
build_tools=-DHDF5_BUILD_TOOLS:BOOL=ON # Build tools default on
with_zlib=-DHDF5_ENABLE_Z_LIB_SUPPORT=ON # enable zlib filter default on
with_szlib=-DHDF5_ENABLE_SZIP_SUPPORT=OFF # enables szip filter default off
szlib_path="" # szip lib path default off
shared_lib=-DBUILD_SHARED_LIBS:BOOL=ON # enables shared lib; default on
njobs="" # number of jobs (commands) to
# run simultaneously; default is
# value from $MAKE if defined,
# otherwise none (1)
#=============
# Function definitions
#=============
# Show user brief help page
# One-screen usage reminder; the main body prints this on every run.
HELP_BRIEF()
{
    echo "Usage: $progname [options]"
    echo "--help: shows details help page"
}
# Show user detail help page
# NOTE: the here-document delimiter is deliberately unquoted so $progname
# expands; any literal '$' in the help text must therefore be escaped.
# Bug fixes: escape \$MAKE (the original expanded the user's $MAKE into the
# help text), and correct the szlib description (it is the szip filter, and
# its default is off -- see with_szlib above).
HELP()
{
    cat << EOF
Usage: $progname [<options>]
where options are:
--enable-fortran | --disable-fortran:
enable or disable fortran API. Default is off.
--enable-cxx | --disable-cxx:
enable or disable c++ API. Default is off.
--enable-hl | --disable-hl:
enable or disable high level API. Default is on.
--enable-threadsafe | --disable-threadsafe:
enable or disable threadsafe feature. Default is off
--enable-shared | --disable-shared:
enable or disable shared lib. Default is on.
--enable-tools | --disable-tools:
enable or disable building tools. Default is on.
--enable-testing | --disable-testing:
enable or disable building tests. Default is on.
--with-zlib | --with-zlib=<libpath> | --without-zlib:
Use zlib library for external deflate I/O filter. Default is on.
--with-szlib | --with-szlib=<libpath> | --without-szlib:
Use szlib library for external szip I/O filter. Default is off.
--njobs=<-j N>:
number of jobs (commands) to run simultaneously; default is value from
\$MAKE if defined, otherwise none
--enable-verbose | --disable-verbose:
enable or disable verbose output. Default is on.
--help: shows details help page
<libpath>: the file path to the library, expect <libpath>/lib and <libpath>/include.
EOF
}
# Display a time stamp
# Emits the current date/time bracketed by "=====" markers so log files
# show when each step started and finished.
TIMESTAMP()
{
    printf '===== %s =====\n' "`date`"
}
# Do one step bracketed with time stamps
# The '< /dev/null' is needed to prevent some applications like MPI
# jobs blocked for reading when they read stdin unnecessary.
# $1 is banner message to be displayed.
# $2 is command to run
# $3 is logfile name for saving output from the command
# On failure the whole script aborts with exit status 1.
STEP()
{
    banner="$1"
    command="$2"
    logfile="$3"
    echo "$banner" with output saved in $logfile
    # Run the command in a subshell so its timestamps and all output land
    # together in $logfile; the subshell's exit status ($nerror) reports
    # whether the eval'ed command failed.
    (TIMESTAMP; nerror=0 ;
        echo "eval $command"
        eval $command || nerror=1 ;
        TIMESTAMP; exit $nerror) < /dev/null > "$logfile" 2>&1
    # Abort the whole script on the first failed step.
    if [ $? -ne 0 ]; then
        echo "error in '$banner'. $progname aborted."
        exit 1
    fi
}
# Install the generated installation image file. Different platform uses
# different image files.
# Linux: HDF5-<version>-Linux.sh file
# Mac OS X: Not implemented yet
# Others: Not implemented yet
# Returns non-zero for unsupported platforms.  Extra arguments ($*) are
# passed through to the Linux self-extracting installer.
INSTALL_HDF5()
{
    myos="`uname -s`"
    case "$myos" in
        Linux)
            install_file=./HDF5-${version}-Linux.sh
            $install_file --skip-license $*
            ;;
        Darwin) # Mac OSX DMG file
            # These steps were a kludge. Need proper support from Cmake engineering.
            echo Darwin install step needs proper implementation. Quit.
            return 1
            # NOTE: the code below is intentionally unreachable (dead after
            # the return above); it is kept as a sketch of the DMG-based
            # install until a proper implementation exists.
            #
            install_file=HDF5-${version}-Darwin.dmg
            test -d hdf5 || mkdir hdf5
            basename=`basename $install_file .dmg`
            # mount the DMG file as /Volumes/$basename
            # echo 'Y' as yes for license.
            echo Y | hdiutil mount $install_file
            # copy the contents to the install location
            cp -R "/Volumes/$basename/HDF_Group" hdf5
            # unmount the DMG file
            hdiutil unmount /Volumes/$basename
            ;;
        *) # unknown/unsupported OS.
            echo "INSTALL_HDF5: Error--unknown/unsupported OS($myos)"
            return 1
            ;;
    esac
}
# Print logfiles.
# $*: logfiles
# For each argument that names an existing file, print a banner, the file
# contents, and a trailer; arguments that do not exist are silently skipped.
DUMP_LOGFILE()
{
    for logdump in $*; do
        test -f $logdump || continue
        echo "=================================="
        echo "Dumping $logdump"
        echo "=================================="
        cat $logdump
        echo "==== END $logdump ====="
        echo
    done
}
#==========
# main
#==========
# Show a start time stamp
TIMESTAMP
# Initialize njobs if $MAKE is defined
if [ -n "$MAKE" ]; then
    # assume all arguments are for --jobs
    njobs=`echo $MAKE | cut -s -d' ' -f2-`
fi
# Parse Cmake configure options
# --enable-XXX or --disable-XXX will enable or disable feature XXX.
# XXX can be:
# fortran Fortran interface
# cxx C++ interface
# hl Highlevel interface
# testing Build tests
# tools Build tools
# Each option simply rewrites the corresponding -D cache variable that is
# later passed to cmake.
while [ $# -gt 0 ]; do
    case "$1" in
        --enable-fortran)
            build_fortran=-DHDF5_BUILD_FORTRAN:BOOL=ON
            ;;
        --disable-fortran)
            build_fortran=-DHDF5_BUILD_FORTRAN:BOOL=OFF
            ;;
        --enable-cxx)
            build_cpp_lib=-DHDF5_BUILD_CPP_LIB:BOOL=ON
            ;;
        --disable-cxx)
            build_cpp_lib=-DHDF5_BUILD_CPP_LIB:BOOL=OFF
            ;;
        --enable-hl)
            build_hl_lib=-DHDF5_BUILD_HL_LIB:BOOL=ON
            ;;
        --disable-hl)
            build_hl_lib=-DHDF5_BUILD_HL_LIB:BOOL=OFF
            ;;
        --enable-threadsafe)
            build_threadsafe=-DHDF5_ENABLE_THREADSAFE:BOOL=ON
            ;;
        --disable-threadsafe)
            build_threadsafe=-DHDF5_ENABLE_THREADSAFE:BOOL=OFF
            ;;
        --enable-shared)
            shared_lib=-DBUILD_SHARED_LIBS:BOOL=ON
            ;;
        --disable-shared)
            shared_lib=-DBUILD_SHARED_LIBS:BOOL=OFF
            ;;
        --enable-tools)
            build_tools=-DHDF5_BUILD_TOOLS:BOOL=ON
            ;;
        --disable-tools)
            build_tools=-DHDF5_BUILD_TOOLS:BOOL=OFF
            ;;
        --enable-testing)
            build_testing=-DBUILD_TESTING:BOOL=ON
            ;;
        --disable-testing)
            build_testing=-DBUILD_TESTING:BOOL=OFF
            ;;
        --enable-shell-testing)
            build_test_shell=-DTEST_SHELL_SCRIPTS:BOOL=ON
            ;;
        --disable-shell-testing)
            build_test_shell=-DTEST_SHELL_SCRIPTS:BOOL=OFF
            ;;
        --with-zlib)
            with_zlib=-DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=ON
            ;;
        --with-zlib=*)
            # Extract the path after '=' and point cmake's ZLIB_ROOT at it.
            xarg=`echo $1 | cut -d= -f2-`
            with_zlib="-DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=ON -DZLIB_ROOT=$xarg"
            ;;
        --without-zlib)
            with_zlib=-DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF
            ;;
        --with-szlib)
            with_szlib=-DHDF5_ENABLE_SZIP_SUPPORT:BOOL=ON
            szlib_path="" # szlib is in default paths
            ;;
        --with-szlib=*)
            # szip location is passed via the SZIP_INSTALL environment
            # variable (see the 'env ${szlib_path}' in the configure step).
            xarg=`echo $1 | cut -d= -f2-`
            with_szlib="-DHDF5_ENABLE_SZIP_SUPPORT:BOOL=ON"
            szlib_path="SZIP_INSTALL=$xarg"
            ;;
        --without-szlib)
            with_szlib=-DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF
            szlib_path="" # reset the path
            ;;
        --njobs=*)
            njobs=`echo $1 | cut -d= -f2-`
            ;;
        --enable-verbose)
            vflag=1
            ;;
        --disable-verbose)
            vflag=0
            ;;
        --help)
            # print the detail help page and exit
            HELP
            exit 0
            ;;
        *)
            echo "Unknown options: $1"
            HELP
            exit 1
            ;;
    esac
    shift
done
$DPRINT after option parsing vflag=$vflag
# Always display the brief help page
HELP_BRIEF
# Verify there is a valid hdf5 source directory present
if [ ! -d $srcdir ]; then
    echo $srcdir not found. Aborted.
    exit 1
fi
# figure out version information
vers=bin/h5vers
if [ ! -x $srcdir/$vers ]; then
    echo $srcdir/$vers not found or not executable. Aborted.
    exit 1
fi
version=`cd $srcdir; $vers`
if [ $? != 0 ]; then
    echo $vers failed. Aborted.
    exit 1
fi
# setup output of all the log files if verbose is on upon exit
# ($vflag is expanded now; the log file names are escaped so they are
# expanded when the trap fires.)
trap \
"if [ $vflag -ne 0 ]; then DUMP_LOGFILE \$configlog \$makelog \$testlog \$packlog \$installlog; fi" \
0
echo Running Cmake for HDF5-${version} ...
# 4. Configure the C library, tools and tests with this command:
# If successful, append the configure summary to the configure logfile.
STEP "Configure..." \
"env ${szlib_path} \
cmake \
-C $cacheinit \
$build_cpp_lib \
$build_fortran \
$build_hl_lib \
$build_threadsafe \
$shared_lib \
$build_testing \
$build_test_shell \
$build_tools \
$with_zlib \
$with_szlib \
$srcdir" $configlog &&\
cat $config_summary >> $configlog
# 5. Build the C library, tools and tests with this command:
STEP "Build the library, tools and tests, ..." "cmake --build . --config Release -- $njobs" $makelog
# 6. Test the C library and tools with this command:
STEP "Test the library and tools..." "ctest . -C Release $njobs" $testlog
# 7. Create an install image with this command:
STEP "Create an install image..." "cpack -C Release CPackConfig.cmake" $packlog
# The implementation of installation is incomplete (only works for linux).
# Screen it out for now till it is completed.
if false; then
# 8. Install with this command:
STEP "Install..." "INSTALL_HDF5" $installlog
fi
# save the last exit code
# NOTE(review): $? here is the status of the 'if false' compound (always 0),
# not of the last STEP; harmless because STEP itself exits on failure, but
# confirm the intent before relying on $exit_code.
exit_code=$?
# Show a closing time stamp
TIMESTAMP
exit $exit_code

@ -0,0 +1,348 @@
#! /bin/sh
# Wrapper for compilers which do not understand '-c -o'.
scriptversion=2018-03-07.03; # UTC
# Copyright (C) 1999-2020 Free Software Foundation, Inc.
# Written by Tom Tromey <tromey@cygnus.com>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# As a special exception to the GNU General Public License, if you
# distribute this file as part of a program that contains a
# configuration script generated by Autoconf, you may include it under
# the same distribution terms that you use for the rest of that program.
# This file is maintained in Automake, please report
# bugs to <bug-automake@gnu.org> or send patches to
# <automake-patches@gnu.org>.
nl='
'
# We need space, tab and new line, in precisely that order. Quoting is
# there to prevent tools from complaining about whitespace usage.
IFS=" "" $nl"
# Cached path-conversion flavour (mingw/cygwin/wine); determined lazily.
file_conv=
# func_file_conv build_file lazy
# Convert a $build file to $host form and store it in $file
# Currently only supports Windows hosts. If the determined conversion
# type is listed in (the comma separated) LAZY, no conversion will
# take place.
func_file_conv ()
{
    file=$1
    case $file in
        / | /[!/]*) # absolute file, and not a UNC file
            if test -z "$file_conv"; then
                # lazily determine how to convert abs files
                case `uname -s` in
                    MINGW*)
                        file_conv=mingw
                        ;;
                    CYGWIN* | MSYS*)
                        file_conv=cygwin
                        ;;
                    *)
                        file_conv=wine
                        ;;
                esac
            fi
            # Skip conversion when the detected flavour appears in the
            # caller-supplied lazy list ($2); otherwise translate the
            # path with the flavour-specific tool.
            case $file_conv/,$2, in
                *,$file_conv,*)
                    ;;
                mingw/*)
                    file=`cmd //C echo "$file " | sed -e 's/"\(.*\) " *$/\1/'`
                    ;;
                cygwin/* | msys/*)
                    file=`cygpath -m "$file" || echo "$file"`
                    ;;
                wine/*)
                    file=`winepath -w "$file" || echo "$file"`
                    ;;
            esac
            ;;
    esac
}
# func_cl_dashL linkdir
# Make cl look for libraries in LINKDIR
func_cl_dashL ()
{
    # Convert the directory to host form first (result lands in $file).
    func_file_conv "$1"
    if test -z "$lib_path"; then
        lib_path=$file
    else
        # Accumulate a ;-separated search path used by func_cl_dashl.
        lib_path="$lib_path;$file"
    fi
    linker_opts="$linker_opts -LIBPATH:$file"
}
# func_cl_dashl library
# Do a library search-path lookup for cl
# Searches the -L directories ($lib_path) and then the MSVC %LIB% path for
# the library named $1; leaves the resolved file name in $lib, falling
# back to "$1.lib" if nothing was found.
func_cl_dashl ()
{
    lib=$1
    found=no
    save_IFS=$IFS
    IFS=';'
    for dir in $lib_path $LIB
    do
        IFS=$save_IFS
        # Prefer the DLL import library unless -static was requested.
        if $shared && test -f "$dir/$lib.dll.lib"; then
            found=yes
            lib=$dir/$lib.dll.lib
            break
        fi
        if test -f "$dir/$lib.lib"; then
            found=yes
            lib=$dir/$lib.lib
            break
        fi
        if test -f "$dir/lib$lib.a"; then
            found=yes
            lib=$dir/lib$lib.a
            break
        fi
    done
    IFS=$save_IFS
    if test "$found" != yes; then
        # Not found: let the linker search for the bare .lib name.
        lib=$lib.lib
    fi
}
# func_cl_wrapper cl arg...
# Adjust compile command to suit cl
# Rewrites GCC-style arguments into their MSVC equivalents (-o -> -Fo/-Fe,
# -I/-L/-l path and library handling, -Wl,/-Xlinker -> -link options) and
# execs the resulting cl command line.  The rewriting works by appending
# translated arguments to "$@" with 'set x "$@" ...' while consuming the
# originals one per loop iteration.
func_cl_wrapper ()
{
    # Assume a capable shell
    lib_path=
    shared=:
    linker_opts=
    for arg
    do
        if test -n "$eat"; then
            # Previous iteration consumed this argument as an option value.
            eat=
        else
            case $1 in
                -o)
                    # configure might choose to run compile as 'compile cc -o foo foo.c'.
                    eat=1
                    case $2 in
                        *.o | *.[oO][bB][jJ])
                            # Object output: cl spells this -Fo.
                            func_file_conv "$2"
                            set x "$@" -Fo"$file"
                            shift
                            ;;
                        *)
                            # Executable output: cl spells this -Fe.
                            func_file_conv "$2"
                            set x "$@" -Fe"$file"
                            shift
                            ;;
                    esac
                    ;;
                -I)
                    eat=1
                    func_file_conv "$2" mingw
                    set x "$@" -I"$file"
                    shift
                    ;;
                -I*)
                    func_file_conv "${1#-I}" mingw
                    set x "$@" -I"$file"
                    shift
                    ;;
                -l)
                    eat=1
                    func_cl_dashl "$2"
                    set x "$@" "$lib"
                    shift
                    ;;
                -l*)
                    func_cl_dashl "${1#-l}"
                    set x "$@" "$lib"
                    shift
                    ;;
                -L)
                    eat=1
                    func_cl_dashL "$2"
                    ;;
                -L*)
                    func_cl_dashL "${1#-L}"
                    ;;
                -static)
                    shared=false
                    ;;
                -Wl,*)
                    # Split the comma-separated -Wl, list into individual
                    # linker options.
                    arg=${1#-Wl,}
                    save_ifs="$IFS"; IFS=','
                    for flag in $arg; do
                        IFS="$save_ifs"
                        linker_opts="$linker_opts $flag"
                    done
                    IFS="$save_ifs"
                    ;;
                -Xlinker)
                    eat=1
                    linker_opts="$linker_opts $2"
                    ;;
                -*)
                    # Any other option: pass through unchanged.
                    set x "$@" "$1"
                    shift
                    ;;
                *.cc | *.CC | *.cxx | *.CXX | *.[cC]++)
                    # C++ sources: force compilation as C++ with -Tp.
                    func_file_conv "$1"
                    set x "$@" -Tp"$file"
                    shift
                    ;;
                *.c | *.cpp | *.CPP | *.lib | *.LIB | *.Lib | *.OBJ | *.obj | *.[oO])
                    func_file_conv "$1" mingw
                    set x "$@" "$file"
                    shift
                    ;;
                *)
                    set x "$@" "$1"
                    shift
                    ;;
            esac
        fi
        shift
    done
    if test -n "$linker_opts"; then
        linker_opts="-link$linker_opts"
    fi
    exec "$@" $linker_opts
    # exec does not return; this guards against a failed exec.
    exit 1
}
eat=
# Top-level dispatch: handle no-command/help/version, divert MSVC-style
# compilers to func_cl_wrapper, and otherwise fall through to the generic
# '-c -o' emulation below.
case $1 in
    '')
        echo "$0: No command. Try '$0 --help' for more information." 1>&2
        exit 1;
        ;;
    -h | --h*)
        cat <<\EOF
Usage: compile [--help] [--version] PROGRAM [ARGS]
Wrapper for compilers which do not understand '-c -o'.
Remove '-o dest.o' from ARGS, run PROGRAM with the remaining
arguments, and rename the output as expected.
If you are trying to build a whole package this is not the
right script to run: please start by reading the file 'INSTALL'.
Report bugs to <bug-automake@gnu.org>.
EOF
        exit $?
        ;;
    -v | --v*)
        echo "compile $scriptversion"
        exit $?
        ;;
    cl | *[/\\]cl | cl.exe | *[/\\]cl.exe | \
    icl | *[/\\]icl | icl.exe | *[/\\]icl.exe )
        func_cl_wrapper "$@" # Doesn't return...
        ;;
esac
# Generic path: strip '-o OBJECT' from the arguments, remembering the
# requested object name ($ofile) and the C source name ($cfile).
ofile=
cfile=
for arg
do
    if test -n "$eat"; then
        eat=
    else
        case $1 in
            -o)
                # configure might choose to run compile as 'compile cc -o foo foo.c'.
                # So we strip '-o arg' only if arg is an object.
                eat=1
                case $2 in
                    *.o | *.obj)
                        ofile=$2
                        ;;
                    *)
                        set x "$@" -o "$2"
                        shift
                        ;;
                esac
                ;;
            *.c)
                cfile=$1
                set x "$@" "$1"
                shift
                ;;
            *)
                set x "$@" "$1"
                shift
                ;;
        esac
    fi
    shift
done
if test -z "$ofile" || test -z "$cfile"; then
    # If no '-o' option was seen then we might have been invoked from a
    # pattern rule where we don't need one. That is ok -- this is a
    # normal compilation that the losing compiler can handle. If no
    # '.c' file was seen then we are probably linking. That is also
    # ok.
    exec "$@"
fi
# Name of file we expect compiler to create.
cofile=`echo "$cfile" | sed 's|^.*[\\/]||; s|^[a-zA-Z]:||; s/\.c$/.o/'`
# Create the lock directory.
# Note: use '[/\\:.-]' here to ensure that we don't use the same name
# that we are using for the .o file. Also, base the name on the expected
# object file name, since that is what matters with a parallel build.
lockdir=`echo "$cofile" | sed -e 's|[/\\:.-]|_|g'`.d
while true; do
    # mkdir is atomic, so it doubles as a mutex across parallel builds
    # compiling the same source.
    if mkdir "$lockdir" >/dev/null 2>&1; then
        break
    fi
    sleep 1
done
# FIXME: race condition here if user kills between mkdir and trap.
trap "rmdir '$lockdir'; exit 1" 1 2 15
# Run the compile.
"$@"
ret=$?
# Move the compiler's default output (foo.o or foo.obj) to the name the
# caller asked for, if they differ.
if test -f "$cofile"; then
    test "$cofile" = "$ofile" || mv "$cofile" "$ofile"
elif test -f "${cofile}bj"; then
    test "${cofile}bj" = "$ofile" || mv "${cofile}bj" "$ofile"
fi
rmdir "$lockdir"
exit $ret
# Local Variables:
# mode: shell-script
# sh-indentation: 2
# eval: (add-hook 'before-save-hook 'time-stamp)
# time-stamp-start: "scriptversion="
# time-stamp-format: "%:y-%02m-%02d.%02H"
# time-stamp-time-zone: "UTC0"
# time-stamp-end: "; # UTC"
# End:

1667
bin/config.guess vendored

File diff suppressed because it is too large Load Diff

1793
bin/config.sub vendored

File diff suppressed because it is too large Load Diff

@ -0,0 +1,37 @@
#!/usr/bin/env perl
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
require 5.003;

# Reads H5O_open()/H5O_close() trace lines of the form ">ADDR" (open)
# or "<ADDR" (close), where ADDR is a decimal or 0x-prefixed hex object
# address, and reports any objects that were opened more times than
# they were closed.  Any other input line is ignored.
while (<>) {
    next unless /^([<>])(0x[\da-f]+|\d+)$/;
    my ($op, $addr) = ($1, $2);
    if ($op eq ">") {
        # Open object: bump the open count for this address.
        $obj{$addr} += 1;
    } else {
        # Close object: a close with no matching open is a hard error.
        # (Previously a bare 'die' with no message, which reported only
        # "Died at ... line N." -- include the address and input line.)
        die "unbalanced close for object $addr at input line $.\n"
            unless $obj{$addr} > 0;
        $obj{$addr} -= 1;
        delete $obj{$addr} unless $obj{$addr};
    }
}

# Report each still-open object with its outstanding open count.
for (sort keys %obj) {
    printf "%3d %s\n", $obj{$_}, $_;
}
exit 0;

@ -0,0 +1,791 @@
#! /bin/sh
# depcomp - compile a program generating dependencies as side-effects
scriptversion=2018-03-07.03; # UTC
# Copyright (C) 1999-2020 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# As a special exception to the GNU General Public License, if you
# distribute this file as part of a program that contains a
# configuration script generated by Autoconf, you may include it under
# the same distribution terms that you use for the rest of that program.
# Originally written by Alexandre Oliva <oliva@dcc.unicamp.br>.
# Handle the (single) command-line argument: empty -> usage error,
# -h/--help -> usage text, -v/--version -> version string.  Any other
# value falls through this case and is treated as the compiler command
# to run (handled by the mode dispatch further below).
case $1 in
'')
echo "$0: No command. Try '$0 --help' for more information." 1>&2
exit 1;
;;
# Print usage (the heredoc below is emitted verbatim).
-h | --h*)
cat <<\EOF
Usage: depcomp [--help] [--version] PROGRAM [ARGS]
Run PROGRAMS ARGS to compile a file, generating dependencies
as side-effects.
Environment variables:
depmode Dependency tracking mode.
source Source file read by 'PROGRAMS ARGS'.
object Object file output by 'PROGRAMS ARGS'.
DEPDIR directory where to store dependencies.
depfile Dependency file to output.
tmpdepfile Temporary file to use when outputting dependencies.
libtool Whether libtool is used (yes/no).
Report bugs to <bug-automake@gnu.org>.
EOF
exit $?
;;
# Print the script version (from $scriptversion, set near the top).
-v | --v*)
echo "depcomp $scriptversion"
exit $?
;;
esac
# Get the directory component of the given path, and save it in the
# global variables '$dir'. Note that this directory component will
# be either empty or ending with a '/' character. This is deliberate.
set_dir_from ()
{
  # Pure-shell equivalent of 'sed s|/[^/]*$|/|': keep everything up to
  # and including the final '/', or set the global '$dir' empty when
  # the argument has no directory component.
  case $1 in
    */*) dir=${1%/*}/ ;;
    *)   dir= ;;
  esac
}
# Get the suffix-stripped basename of the given path, and save it the
# global variable '$base'.
set_base_from ()
{
  # Strip any leading directories, then the final '.suffix' (if any),
  # storing the result in the global '$base'.  Uses parameter expansion
  # instead of the original echo|sed pipeline; results are identical.
  base=${1##*/}
  base=${base%.*}
}
# If no dependency file was actually created by the compiler invocation,
# we still have to create a dummy depfile, to avoid errors with the
# Makefile "include basename.Plo" scheme.
make_dummy_depfile ()
{
  # The compiler produced no dependency output; write a placeholder so
  # the Makefile's "include basename.Plo" scheme still finds a file.
  # Writes to the global '$depfile'.
  printf '#dummy\n' > "$depfile"
}
# Factor out some common post-processing of the generated depfile.
# Requires the auxiliary global variable '$tmpdepfile' to be set.
aix_post_process_depfile ()
{
# Reads the globals '$tmpdepfile', '$object', '$depfile', '$lower' and
# '$tab' (all set in the main script body).  Shared by the 'aix' and
# 'tru64' modes below.
# If the compiler actually managed to produce a dependency file,
# post-process it.
if test -f "$tmpdepfile"; then
# Each line is of the form 'foo.o: dependency.h'.
# Do two passes, one to just change these to
# $object: dependency.h
# and one to simply output
# dependency.h:
# which is needed to avoid the deleted-header problem.
{ sed -e "s,^.*\.[$lower]*:,$object:," < "$tmpdepfile"
sed -e "s,^.*\.[$lower]*:[$tab ]*,," -e 's,$,:,' < "$tmpdepfile"
} > "$depfile"
rm -f "$tmpdepfile"
else
make_dummy_depfile
fi
}
# A tabulation character.
# (NOTE: the single quotes on the next line contain a literal TAB.)
tab='	'
# A newline character.
nl='
'
# Character ranges might be problematic outside the C locale.
# These definitions help.
upper=ABCDEFGHIJKLMNOPQRSTUVWXYZ
lower=abcdefghijklmnopqrstuvwxyz
digits=0123456789
alpha=${upper}${lower}
if test -z "$depmode" || test -z "$source" || test -z "$object"; then
echo "depcomp: Variables source, object and depmode must be set" 1>&2
exit 1
fi
# Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po.
depfile=${depfile-`echo "$object" |
sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`}
tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`}
rm -f "$tmpdepfile"
# Avoid interferences from the environment.
gccflag= dashmflag=
# Some modes work just like other modes, but use different flags. We
# parameterize here, but still list the modes in the big case below,
# to make depend.m4 easier to write. Note that we *cannot* use a case
# here, because this file can only contain one case statement.
if test "$depmode" = hp; then
# HP compiler uses -M and no extra arg.
gccflag=-M
depmode=gcc
fi
if test "$depmode" = dashXmstdout; then
# This is just like dashmstdout with a different argument.
dashmflag=-xM
depmode=dashmstdout
fi
cygpath_u="cygpath -u -f -"
if test "$depmode" = msvcmsys; then
# This is just like msvisualcpp but w/o cygpath translation.
# Just convert the backslash-escaped backslashes to single forward
# slashes to satisfy depend.m4
cygpath_u='sed s,\\\\,/,g'
depmode=msvisualcpp
fi
if test "$depmode" = msvc7msys; then
# This is just like msvc7 but w/o cygpath translation.
# Just convert the backslash-escaped backslashes to single forward
# slashes to satisfy depend.m4
cygpath_u='sed s,\\\\,/,g'
depmode=msvc7
fi
if test "$depmode" = xlc; then
# IBM C/C++ Compilers xlc/xlC can output gcc-like dependency information.
gccflag=-qmakedep=gcc,-MF
depmode=gcc
fi
# Main dispatch: one arm per dependency-extraction strategy.  Each arm
# must run the compile AND leave a correct '$depfile' behind.  Several
# arms ('hp', 'xlc', 'dashXmstdout', 'msvc7msys', 'msvcmsys') exist
# only so that automake's depend.m4 can grep this script's text; they
# were remapped to other modes above and can never actually execute.
case "$depmode" in
gcc3)
## gcc 3 implements dependency tracking that does exactly what
## we want. Yay! Note: for some reason libtool 1.4 doesn't like
## it if -MD -MP comes after the -MF stuff. Hmm.
## Unfortunately, FreeBSD c89 acceptance of flags depends upon
## the command line argument order; so add the flags where they
## appear in depend2.am. Note that the slowdown incurred here
## affects only configure: in makefiles, %FASTDEP% shortcuts this.
for arg
do
case $arg in
-c) set fnord "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" "$arg" ;;
*) set fnord "$@" "$arg" ;;
esac
shift # fnord
shift # $arg
done
"$@"
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
mv "$tmpdepfile" "$depfile"
;;
gcc)
## Note that this doesn't just cater to obsosete pre-3.x GCC compilers.
## but also to in-use compilers like IMB xlc/xlC and the HP C compiler.
## (see the conditional assignment to $gccflag above).
## There are various ways to get dependency output from gcc. Here's
## why we pick this rather obscure method:
## - Don't want to use -MD because we'd like the dependencies to end
## up in a subdir. Having to rename by hand is ugly.
## (We might end up doing this anyway to support other compilers.)
## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like
## -MM, not -M (despite what the docs say). Also, it might not be
## supported by the other compilers which use the 'gcc' depmode.
## - Using -M directly means running the compiler twice (even worse
## than renaming).
if test -z "$gccflag"; then
gccflag=-MD,
fi
"$@" -Wp,"$gccflag$tmpdepfile"
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
echo "$object : \\" > "$depfile"
# The second -e expression handles DOS-style file names with drive
# letters.
sed -e 's/^[^:]*: / /' \
-e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile"
## This next piece of magic avoids the "deleted header file" problem.
## The problem is that when a header file which appears in a .P file
## is deleted, the dependency causes make to die (because there is
## typically no way to rebuild the header). We avoid this by adding
## dummy dependencies for each header file. Too bad gcc doesn't do
## this for us directly.
## Some versions of gcc put a space before the ':'. On the theory
## that the space means something, we add a space to the output as
## well. hp depmode also adds that space, but also prefixes the VPATH
## to the object. Take care to not repeat it in the output.
## Some versions of the HPUX 10.20 sed can't process this invocation
## correctly. Breaking it into two sed invocations is a workaround.
tr ' ' "$nl" < "$tmpdepfile" \
| sed -e 's/^\\$//' -e '/^$/d' -e "s|.*$object$||" -e '/:$/d' \
| sed -e 's/$/ :/' >> "$depfile"
rm -f "$tmpdepfile"
;;
hp)
# This case exists only to let depend.m4 do its work. It works by
# looking at the text of this script. This case will never be run,
# since it is checked for above.
exit 1
;;
sgi)
if test "$libtool" = yes; then
"$@" "-Wp,-MDupdate,$tmpdepfile"
else
"$@" -MDupdate "$tmpdepfile"
fi
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
if test -f "$tmpdepfile"; then # yes, the sourcefile depend on other files
echo "$object : \\" > "$depfile"
# Clip off the initial element (the dependent). Don't try to be
# clever and replace this with sed code, as IRIX sed won't handle
# lines with more than a fixed number of characters (4096 in
# IRIX 6.2 sed, 8192 in IRIX 6.5). We also remove comment lines;
# the IRIX cc adds comments like '#:fec' to the end of the
# dependency line.
tr ' ' "$nl" < "$tmpdepfile" \
| sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' \
| tr "$nl" ' ' >> "$depfile"
echo >> "$depfile"
# The second pass generates a dummy entry for each header file.
tr ' ' "$nl" < "$tmpdepfile" \
| sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \
>> "$depfile"
else
make_dummy_depfile
fi
rm -f "$tmpdepfile"
;;
xlc)
# This case exists only to let depend.m4 do its work. It works by
# looking at the text of this script. This case will never be run,
# since it is checked for above.
exit 1
;;
aix)
# The C for AIX Compiler uses -M and outputs the dependencies
# in a .u file. In older versions, this file always lives in the
# current directory. Also, the AIX compiler puts '$object:' at the
# start of each line; $object doesn't have directory information.
# Version 6 uses the directory in both cases.
set_dir_from "$object"
set_base_from "$object"
if test "$libtool" = yes; then
tmpdepfile1=$dir$base.u
tmpdepfile2=$base.u
tmpdepfile3=$dir.libs/$base.u
"$@" -Wc,-M
else
tmpdepfile1=$dir$base.u
tmpdepfile2=$dir$base.u
tmpdepfile3=$dir$base.u
"$@" -M
fi
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
exit $stat
fi
# The depfile may land in any of several candidate locations
# depending on compiler version; take the first that exists.
for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
do
test -f "$tmpdepfile" && break
done
aix_post_process_depfile
;;
tcc)
# tcc (Tiny C Compiler) understand '-MD -MF file' since version 0.9.26
# FIXME: That version still under development at the moment of writing.
# Make that this statement remains true also for stable, released
# versions.
# It will wrap lines (doesn't matter whether long or short) with a
# trailing '\', as in:
#
# foo.o : \
# foo.c \
# foo.h \
#
# It will put a trailing '\' even on the last line, and will use leading
# spaces rather than leading tabs (at least since its commit 0394caf7
# "Emit spaces for -MD").
"$@" -MD -MF "$tmpdepfile"
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
# Each non-empty line is of the form 'foo.o : \' or ' dep.h \'.
# We have to change lines of the first kind to '$object: \'.
sed -e "s|.*:|$object :|" < "$tmpdepfile" > "$depfile"
# And for each line of the second kind, we have to emit a 'dep.h:'
# dummy dependency, to avoid the deleted-header problem.
sed -n -e 's|^ *\(.*\) *\\$|\1:|p' < "$tmpdepfile" >> "$depfile"
rm -f "$tmpdepfile"
;;
## The order of this option in the case statement is important, since the
## shell code in configure will try each of these formats in the order
## listed in this file. A plain '-MD' option would be understood by many
## compilers, so we must ensure this comes after the gcc and icc options.
pgcc)
# Portland's C compiler understands '-MD'.
# Will always output deps to 'file.d' where file is the root name of the
# source file under compilation, even if file resides in a subdirectory.
# The object file name does not affect the name of the '.d' file.
# pgcc 10.2 will output
# foo.o: sub/foo.c sub/foo.h
# and will wrap long lines using '\' :
# foo.o: sub/foo.c ... \
# sub/foo.h ... \
# ...
set_dir_from "$object"
# Use the source, not the object, to determine the base name, since
# that's sadly what pgcc will do too.
set_base_from "$source"
tmpdepfile=$base.d
# For projects that build the same source file twice into different object
# files, the pgcc approach of using the *source* file root name can cause
# problems in parallel builds. Use a locking strategy to avoid stomping on
# the same $tmpdepfile.
lockdir=$base.d-lock
trap "
echo '$0: caught signal, cleaning up...' >&2
rmdir '$lockdir'
exit 1
" 1 2 13 15
numtries=100
i=$numtries
while test $i -gt 0; do
# mkdir is a portable test-and-set.
if mkdir "$lockdir" 2>/dev/null; then
# This process acquired the lock.
"$@" -MD
stat=$?
# Release the lock.
rmdir "$lockdir"
break
else
# If the lock is being held by a different process, wait
# until the winning process is done or we timeout.
while test -d "$lockdir" && test $i -gt 0; do
sleep 1
i=`expr $i - 1`
done
fi
i=`expr $i - 1`
done
trap - 1 2 13 15
if test $i -le 0; then
echo "$0: failed to acquire lock after $numtries attempts" >&2
echo "$0: check lockdir '$lockdir'" >&2
exit 1
fi
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
# Each line is of the form `foo.o: dependent.h',
# or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'.
# Do two passes, one to just change these to
# `$object: dependent.h' and one to simply `dependent.h:'.
sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile"
# Some versions of the HPUX 10.20 sed can't process this invocation
# correctly. Breaking it into two sed invocations is a workaround.
sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" \
| sed -e 's/$/ :/' >> "$depfile"
rm -f "$tmpdepfile"
;;
hp2)
# The "hp" stanza above does not work with aCC (C++) and HP's ia64
# compilers, which have integrated preprocessors. The correct option
# to use with these is +Maked; it writes dependencies to a file named
# 'foo.d', which lands next to the object file, wherever that
# happens to be.
# Much of this is similar to the tru64 case; see comments there.
set_dir_from "$object"
set_base_from "$object"
if test "$libtool" = yes; then
tmpdepfile1=$dir$base.d
tmpdepfile2=$dir.libs/$base.d
"$@" -Wc,+Maked
else
tmpdepfile1=$dir$base.d
tmpdepfile2=$dir$base.d
"$@" +Maked
fi
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile1" "$tmpdepfile2"
exit $stat
fi
for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2"
do
test -f "$tmpdepfile" && break
done
if test -f "$tmpdepfile"; then
sed -e "s,^.*\.[$lower]*:,$object:," "$tmpdepfile" > "$depfile"
# Add 'dependent.h:' lines.
sed -ne '2,${
s/^ *//
s/ \\*$//
s/$/:/
p
}' "$tmpdepfile" >> "$depfile"
else
make_dummy_depfile
fi
rm -f "$tmpdepfile" "$tmpdepfile2"
;;
tru64)
# The Tru64 compiler uses -MD to generate dependencies as a side
# effect. 'cc -MD -o foo.o ...' puts the dependencies into 'foo.o.d'.
# At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put
# dependencies in 'foo.d' instead, so we check for that too.
# Subdirectories are respected.
set_dir_from "$object"
set_base_from "$object"
if test "$libtool" = yes; then
# Libtool generates 2 separate objects for the 2 libraries. These
# two compilations output dependencies in $dir.libs/$base.o.d and
# in $dir$base.o.d. We have to check for both files, because
# one of the two compilations can be disabled. We should prefer
# $dir$base.o.d over $dir.libs/$base.o.d because the latter is
# automatically cleaned when .libs/ is deleted, while ignoring
# the former would cause a distcleancheck panic.
tmpdepfile1=$dir$base.o.d # libtool 1.5
tmpdepfile2=$dir.libs/$base.o.d # Likewise.
tmpdepfile3=$dir.libs/$base.d # Compaq CCC V6.2-504
"$@" -Wc,-MD
else
tmpdepfile1=$dir$base.d
tmpdepfile2=$dir$base.d
tmpdepfile3=$dir$base.d
"$@" -MD
fi
stat=$?
if test $stat -ne 0; then
rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
exit $stat
fi
for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3"
do
test -f "$tmpdepfile" && break
done
# Same post-processing that is required for AIX mode.
aix_post_process_depfile
;;
msvc7)
if test "$libtool" = yes; then
showIncludes=-Wc,-showIncludes
else
showIncludes=-showIncludes
fi
"$@" $showIncludes > "$tmpdepfile"
stat=$?
grep -v '^Note: including file: ' "$tmpdepfile"
if test $stat -ne 0; then
rm -f "$tmpdepfile"
exit $stat
fi
rm -f "$depfile"
echo "$object : \\" > "$depfile"
# The first sed program below extracts the file names and escapes
# backslashes for cygpath. The second sed program outputs the file
# name when reading, but also accumulates all include files in the
# hold buffer in order to output them again at the end. This only
# works with sed implementations that can handle large buffers.
sed < "$tmpdepfile" -n '
/^Note: including file: *\(.*\)/ {
s//\1/
s/\\/\\\\/g
p
}' | $cygpath_u | sort -u | sed -n '
s/ /\\ /g
s/\(.*\)/'"$tab"'\1 \\/p
s/.\(.*\) \\/\1:/
H
$ {
s/.*/'"$tab"'/
G
p
}' >> "$depfile"
echo >> "$depfile" # make sure the fragment doesn't end with a backslash
rm -f "$tmpdepfile"
;;
msvc7msys)
# This case exists only to let depend.m4 do its work. It works by
# looking at the text of this script. This case will never be run,
# since it is checked for above.
exit 1
;;
#nosideeffect)
# This comment above is used by automake to tell side-effect
# dependency tracking mechanisms from slower ones.
dashmstdout)
# Important note: in order to support this mode, a compiler *must*
# always write the preprocessed file to stdout, regardless of -o.
"$@" || exit $?
# Remove the call to Libtool.
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
# Remove '-o $object'.
IFS=" "
for arg
do
case $arg in
-o)
shift
;;
$object)
shift
;;
*)
set fnord "$@" "$arg"
shift # fnord
shift # $arg
;;
esac
done
test -z "$dashmflag" && dashmflag=-M
# Require at least two characters before searching for ':'
# in the target name. This is to cope with DOS-style filenames:
# a dependency such as 'c:/foo/bar' could be seen as target 'c' otherwise.
"$@" $dashmflag |
sed "s|^[$tab ]*[^:$tab ][^:][^:]*:[$tab ]*|$object: |" > "$tmpdepfile"
rm -f "$depfile"
cat < "$tmpdepfile" > "$depfile"
# Some versions of the HPUX 10.20 sed can't process this sed invocation
# correctly. Breaking it into two sed invocations is a workaround.
tr ' ' "$nl" < "$tmpdepfile" \
| sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \
| sed -e 's/$/ :/' >> "$depfile"
rm -f "$tmpdepfile"
;;
dashXmstdout)
# This case only exists to satisfy depend.m4. It is never actually
# run, as this mode is specially recognized in the preamble.
exit 1
;;
makedepend)
"$@" || exit $?
# Remove any Libtool call
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
# X makedepend
shift
cleared=no eat=no
for arg
do
case $cleared in
no)
set ""; shift
cleared=yes ;;
esac
if test $eat = yes; then
eat=no
continue
fi
case "$arg" in
-D*|-I*)
set fnord "$@" "$arg"; shift ;;
# Strip any option that makedepend may not understand. Remove
# the object too, otherwise makedepend will parse it as a source file.
-arch)
eat=yes ;;
-*|$object)
;;
*)
set fnord "$@" "$arg"; shift ;;
esac
done
obj_suffix=`echo "$object" | sed 's/^.*\././'`
touch "$tmpdepfile"
${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@"
rm -f "$depfile"
# makedepend may prepend the VPATH from the source file name to the object.
# No need to regex-escape $object, excess matching of '.' is harmless.
sed "s|^.*\($object *:\)|\1|" "$tmpdepfile" > "$depfile"
# Some versions of the HPUX 10.20 sed can't process the last invocation
# correctly. Breaking it into two sed invocations is a workaround.
sed '1,2d' "$tmpdepfile" \
| tr ' ' "$nl" \
| sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \
| sed -e 's/$/ :/' >> "$depfile"
rm -f "$tmpdepfile" "$tmpdepfile".bak
;;
cpp)
# Important note: in order to support this mode, a compiler *must*
# always write the preprocessed file to stdout.
"$@" || exit $?
# Remove the call to Libtool.
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
# Remove '-o $object'.
IFS=" "
for arg
do
case $arg in
-o)
shift
;;
$object)
shift
;;
*)
set fnord "$@" "$arg"
shift # fnord
shift # $arg
;;
esac
done
"$@" -E \
| sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
-e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
| sed '$ s: \\$::' > "$tmpdepfile"
rm -f "$depfile"
echo "$object : \\" > "$depfile"
cat < "$tmpdepfile" >> "$depfile"
sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile"
rm -f "$tmpdepfile"
;;
msvisualcpp)
# Important note: in order to support this mode, a compiler *must*
# always write the preprocessed file to stdout.
"$@" || exit $?
# Remove the call to Libtool.
if test "$libtool" = yes; then
while test "X$1" != 'X--mode=compile'; do
shift
done
shift
fi
IFS=" "
for arg
do
case "$arg" in
-o)
shift
;;
$object)
shift
;;
"-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI")
set fnord "$@"
shift
shift
;;
*)
set fnord "$@" "$arg"
shift
shift
;;
esac
done
"$@" -E 2>/dev/null |
sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::\1:p' | $cygpath_u | sort -u > "$tmpdepfile"
rm -f "$depfile"
echo "$object : \\" > "$depfile"
sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::'"$tab"'\1 \\:p' >> "$depfile"
echo "$tab" >> "$depfile"
sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::\1\::p' >> "$depfile"
rm -f "$tmpdepfile"
;;
msvcmsys)
# This case exists only to let depend.m4 do its work. It works by
# looking at the text of this script. This case will never be run,
# since it is checked for above.
exit 1
;;
none)
exec "$@"
;;
*)
echo "Unknown depmode $depmode" 1>&2
exit 1
;;
esac
exit 0
# Local Variables:
# mode: shell-script
# sh-indentation: 2
# eval: (add-hook 'before-save-hook 'time-stamp)
# time-stamp-start: "scriptversion="
# time-stamp-format: "%:y-%02m-%02d.%02H"
# time-stamp-time-zone: "UTC0"
# time-stamp-end: "; # UTC"
# End:

@ -0,0 +1,26 @@
#!/bin/bash
#
# Recursively format all C & C++ sources and header files, except those in the
# 'config' directory and generated files, such as H5LTanalyze.c, etc.
#
# Note that any files or directories that are excluded here should also be
# added to the 'exclude' list in .github/workflows/clang-format-check.yml
#
# (Remember to update both bin/format_source and bin/format_source_patch)
#
# The -iname patterns are quoted so the shell cannot glob-expand them
# against matching files in the current directory before find sees them
# (unquoted '*.h' etc. would break whenever such files exist in cwd).

find . \( -type d -path ./config -prune -and -not -path ./config \) \
    -or \( \( \! \( \
        -name H5LTanalyze.c \
        -or -name H5LTparse.c \
        -or -name H5LTparse.h \
        -or -name H5Epubgen.h \
        -or -name H5Einit.h \
        -or -name H5Eterm.h \
        -or -name H5Edefin.h \
        -or -name H5version.h \
        -or -name H5overflow.h \
        \) \) \
        -and \( -iname '*.h' -or -iname '*.c' -or -iname '*.cpp' -or -iname '*.hpp' -or -iname '*.java' \) \) \
    | xargs -P0 -n1 clang-format -style=file -i -fallback-style=none

exit 0

@ -0,0 +1,265 @@
#! /bin/bash
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# This script runs flex/lex and bison/yacc to generate parser code for
# the high-level library. It used to be a part of autogen.sh, but many
# people encountered problems with installing flex and bison on their
# system and the parser code rarely changes, so those parts of the
# script were moved to their own file.
#
# NOTE CAREFULLY!
#
# There is NO dependency in either the autotools or CMake to regenerate
# the parser code. If you modify H5LT analyze.l or H5LTparse.y, you
# will need to run this script manually on a system with a suitable
# lexer and parser generator.
#
# IMPORTANT OS X NOTE
#
# If you are using OS X, you will probably not have flex or bison
# installed. In addition, even if you do have bison installed, the bison
# version you have installed may also have a bug that makes it unable to
# process our input files.
#
# The easiest way to fix this is to install everything via Homebrew:
#
# http://brew.sh/
#
# After you install the base packages, install flex/bison.
#
# brew install flex
# brew install bison
#
# END IMPORTANT OS X NOTE
#
# If you want to use a particular version of flex or bison, the paths
# to each tool can be overridden using the following environment
# variables:
#
# HDF5_FLEX
# HDF5_BISON
#
# This script takes two potential options:
#
# -p
#
# When this is selected, the flex/bison versions are set to the paths
# and versions used by The HDF Group to produce the released versions
# of the library.
#
# NOTE: This is probably temporary. Once we update our dev machines
# to have recent versions of the autotools this option will probably
# be removed.
#
# -v
#
# This emits some extra information, mainly tool versions.
echo
echo "*******************************************"
echo "* HDF5 high-level parser generator script *"
echo "*******************************************"
echo

# Defaults.  'production' was previously left unset unless -p was seen
# and only tested later via string comparison; give it an explicit
# default alongside 'verbose' so the later [ "$production" = true ]
# check never reads an unset variable.
verbose=false
production=false

optspec=":hpv-"
while getopts "$optspec" optchar; do
    case "${optchar}" in
    h)
        # Print usage and exit successfully.
        echo "usage: $0 [OPTIONS] /path/to/hl/src/directory"
        echo
        echo " -h Print this help message."
        echo
        echo " -p Used by THG to use hard-coded flex/bison"
        echo " paths on THG machines. Not for non-HDF-Group"
        echo " users!"
        echo
        echo " -v Show more verbose output."
        echo
        echo " NOTE: Each tool can be set via an environment variable."
        echo " These are documented inside this script."
        echo
        exit 0
        ;;
    p)
        echo "Setting THG production mode..."
        echo
        production=true
        ;;
    v)
        echo "Setting verbosity: high"
        echo
        verbose=true
        ;;
    *)
        # Unknown option: warn unless getopts is in silent mode with
        # default OPTERR handling.
        if [ "$OPTERR" != 1 ] || [ "${optspec:0:1}" = ":" ]; then
            echo "Non-option argument: '-${OPTARG}'" >&2
        fi
        ;;
    esac
done
# Get the path to the hl src directory (first non-option argument).
shift $(($OPTIND - 1))
path_to_hl_src=$1

# Fail early when no path was given.  The expansion is quoted so an
# empty value is still a single (empty) argument to 'test', and the
# exit status is 1 rather than the non-portable 'exit -1' (exit codes
# must be in 0-255; -1 only happens to map to 255 in some shells).
if test -z "${path_to_hl_src}"; then
    echo "*** ERROR *** - Path to hl/src not set"
    echo "Please add the path to the hl/src directory as a parameter"
    echo "See $0 -h for more help."
    echo
    exit 1
fi
if [ "$production" = true ] ; then
    # Production mode
    #
    # Hard-code canonical HDF Group tool locations.
    # If paths to tools are not specified, assume they are
    # located in /usr/hdf/bin/AUTOTOOLS and set paths accordingly.
    # (Expansions are quoted so 'test -z' always sees exactly one
    # argument, even for empty or whitespace-containing values.)
    if test -z "${HDF5_BISON}"; then
        HDF5_BISON=/usr/hdf/bin/AUTOTOOLS/bison
    fi
    if test -z "${HDF5_FLEX}"; then
        HDF5_FLEX=/usr/hdf/bin/AUTOTOOLS/flex
    fi
else
    # Not in production mode
    #
    # If paths to autotools are not specified, use whatever the system
    # has installed as the default. We use 'which <tool>' to
    # show exactly what's being used.
    if test -z "${HDF5_BISON}"; then
        HDF5_BISON=$(which bison)
    fi
    if test -z "${HDF5_FLEX}"; then
        HDF5_FLEX=$(which flex)
    fi
fi # production
# Prepend the directories holding the chosen flex and bison binaries to
# PATH, so these exact versions of the tools are the ones found first.
BISON_DIR=$(dirname ${HDF5_BISON})
FLEX_DIR=$(dirname ${HDF5_FLEX})
PATH=${FLEX_DIR}:${BISON_DIR}:$PATH
# Run flex and bison
# automatically generates hl/src/H5LTanalyze.c and hl/src/H5LTparse.c
# Note that, as of Xcode 6.1 (2015), the default bison version on OS X
# is old enough to have the circular dependency bug. You'll have
# to install a later version of bison. See the OS X note at the top
# of this script.
echo
echo "Generating H5LT parser code (requires yacc/bison):"
echo "Generate hl/src/H5LTparse.c from hl/src/H5LTparse.y"
# HDF5_BISON is set via the environment or 'which bison', above.
# (Quoted so 'test -z' behaves for empty or space-containing values.)
if test -z "${HDF5_BISON}"; then
    echo
    echo "*************************"
    echo " ERROR - bison not found"
    echo "*************************"
    echo "bison is required to generate parser code in H5LT"
    echo
    exit 127
fi
if [ "$verbose" = true ] ; then
    ${HDF5_BISON} --version
fi
# Quote the path arguments so a hl/src path containing whitespace does
# not get word-split.
${HDF5_BISON} -pH5LTyy -o "${path_to_hl_src}/H5LTparse.c" -d "${path_to_hl_src}/H5LTparse.y"

echo
echo "Generating H5LT lexer code (requires lex/flex):"
echo "Generate hl/src/H5LTanalyze.c from hl/src/H5LTanalyze.l"
# HDF5_FLEX is set via the environment or 'which flex', above
if test -z "${HDF5_FLEX}"; then
    echo
    echo "************************"
    echo " ERROR - flex not found"
    echo "************************"
    echo "flex is required to generate lexer code in H5LT"
    echo
    exit 127
fi
if [ "$verbose" = true ] ; then
    ${HDF5_FLEX} --version
fi
${HDF5_FLEX} --nounistd -PH5LTyy -o "${path_to_hl_src}/H5LTanalyze.c" "${path_to_hl_src}/H5LTanalyze.l"

# Fix H5LTparse.c and H5LTparse.h to declare H5LTyyparse return type as an
# hid_t instead of int. Currently the generated function H5LTyyparse is
# generated with a return value of type int, which is a mapping to the
# flex yyparse function. The return value in the HL library should be
# an hid_t.
# I propose to not use flex to generate this function, but for now I am
# adding a perl command to find and replace this function declaration in
# H5LTparse.c.
perl -0777 -pi -e 's/int yyparse/hid_t yyparse/igs' "${path_to_hl_src}/H5LTparse.c"
perl -0777 -pi -e 's/int\nyyparse/hid_t\nyyparse/igs' "${path_to_hl_src}/H5LTparse.c"
perl -0777 -pi -e 's/int H5LTyyparse/hid_t H5LTyyparse/igs' "${path_to_hl_src}/H5LTparse.c"
perl -0777 -pi -e 's/int yyparse/hid_t yyparse/igs' "${path_to_hl_src}/H5LTparse.h"
perl -0777 -pi -e 's/int\nyyparse/hid_t\nyyparse/igs' "${path_to_hl_src}/H5LTparse.h"
perl -0777 -pi -e 's/int H5LTyyparse/hid_t H5LTyyparse/igs' "${path_to_hl_src}/H5LTparse.h"
# Add code that disables warnings in the flex/bison-generated code.
#
# Note that the GCC pragmas did not exist until gcc 4.2. Earlier versions
# will simply ignore them, but we want to avoid those warnings.
#
# Note also that although clang defines __GNUC__, it doesn't support every
# warning that GCC does.
#
# Implementation note: the pragma block is written to tmp.out first and
# the generated file is appended after it, so the pragmas end up at the
# very top of the file, ahead of any generated declarations.
for f in ${path_to_hl_src}/H5LTparse.c ${path_to_hl_src}/H5LTanalyze.c
do
echo '#if defined (__GNUC__) ' >> tmp.out
echo '#if ((__GNUC__ * 100) + __GNUC_MINOR__) >= 402 ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wconversion" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wimplicit-function-declaration" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wmissing-prototypes" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wnested-externs" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wold-style-definition" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wredundant-decls" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wsign-compare" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wsign-conversion" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wstrict-overflow" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wstrict-prototypes" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wimplicit-fallthrough" ' >> tmp.out
echo '#if !defined (__clang__) ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wlarger-than=" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wsuggest-attribute=const" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wsuggest-attribute=pure" ' >> tmp.out
echo '#endif ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wswitch-default" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wunused-function" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wunused-macros" ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wunused-parameter" ' >> tmp.out
echo '#endif ' >> tmp.out
echo '#if ((__GNUC__ * 100) + __GNUC_MINOR__) >= 600 ' >> tmp.out
echo '#pragma GCC diagnostic ignored "-Wnull-dereference" ' >> tmp.out
echo '#endif ' >> tmp.out
echo '#elif defined __SUNPRO_CC ' >> tmp.out
echo '#pragma disable_warn ' >> tmp.out
echo '#elif defined _MSC_VER ' >> tmp.out
echo '#pragma warning(push, 1) ' >> tmp.out
echo '#endif ' >> tmp.out
cat $f >> tmp.out
mv tmp.out $f
done
echo
exit 0

@ -0,0 +1,391 @@
#! /bin/sh
##
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
##
# This tool is adapted from the mpicc command of the MPICH Software.
############################################################################
## ##
## Things You May Have to Modify: ##
## ##
## If the following paths don't point to the place where HDF5 is installed ##
## on your system (i.e., you received a binary distribution or moved the ##
## files from the originally installed directory to another directory) ##
## then modify them accordingly to represent the new paths. ##
## ##
############################################################################
prefix="/mnt/Storage/Documents/Code/LibraryTest/lib/hdf5-1.14.1"
exec_prefix="${prefix}"
libdir="${exec_prefix}/lib"
includedir="${prefix}/include"
HL="hl"
############################################################################
## ##
## Things You Can Modify to Override HDF5 Library Build Components: ##
## ##
## (Advanced usage - know what you're doing - you're on your own here.) ##
## The four variables below can be used to insert paths and flags in ##
## CPPFLAGS, CFLAGS, LDFLAGS, or LIBS in the h5cc compile line: ##
## $CLINKER $H5BLD_CPPFLAGS $CPPFLAGS $H5BLD_CFLAGS $CFLAGS $LDFLAGS ##
## $LIBS $clibpath $link_objs $link_args $shared_link ##
## ##
## These settings can be overridden by setting HDF5_CFLAGS, ##
## HDF5_CPPFLAGS, HDF5_LDFLAGS, or HDF5_LIBS in the environment. ##
## ##
############################################################################
CFLAGSBASE=""
CPPFLAGSBASE=""
LDFLAGSBASE=""
LIBSBASE=""
############################################################################
## ##
## You shouldn't have to modify anything below this line. ##
## ##
############################################################################
# Constants definitions
EXIT_SUCCESS=0
EXIT_FAILURE=1
host_os="linux-gnu"
prog_name="`basename $0`"
allargs=""
compile_args=""
libraries=""
link_args=""
link_objs=""
clibpath=""
do_link="yes"
do_compile="no"
dash_o="no"
dash_c="no"
get_output_file="no"
SHOW="eval"
CCBASE="gcc"
CLINKERBASE="gcc"
# CFLAGS, CPPFLAGS and LDFLAGS are reserved for use by the script user.
# FLAGS brought from the hdf5 build are put in H5BLD_*FLAGS.
# User's CPPFLAGS and CFLAGS come after their H5BLD counterparts. User's
# LDFLAGS come just before clibpath, user's LIBS come after $link_objs and
# before the hdf5 libraries in $link_args, followed by any external library
# paths and libraries from AM_LDFLAGS, LDFLAGS, AM_LIBS or LIBS carried in
# from the hdf5 build. The order of the flags is intended to give precedence
# to the user's flags.
H5BLD_CFLAGS=" "
H5BLD_CPPFLAGS=" -I/mnt/Storage/Documents/Code/LibraryTest/lib/zlib-1.2.13/include "
H5BLD_LDFLAGS=" -L/mnt/Storage/Documents/Code/LibraryTest/lib/zlib-1.2.13/lib -L/mnt/Storage/Documents/Code/LibraryTest/lib/zlib-1.2.13/lib -Wl,-rpath,/mnt/Storage/Documents/Code/LibraryTest/lib/zlib-1.2.13/lib"
H5BLD_LIBS="-lz -ldl -lm "
CC="${HDF5_CC:-$CCBASE}"
CLINKER="${HDF5_CLINKER:-$CLINKERBASE}"
CFLAGS="${HDF5_CFLAGS:-$CFLAGSBASE}"
CPPFLAGS="${HDF5_CPPFLAGS:-$CPPFLAGSBASE}"
LDFLAGS="${HDF5_LDFLAGS:-$LDFLAGSBASE}"
LIBS="${HDF5_LIBS:-$LIBSBASE}"
# If a static library is available, the default will be to use it. If the only
# available library is shared, it will be used by default. The user can
# override either default, although choosing an unavailable library will result
# in link errors.
STATIC_AVAILABLE="yes"
if test "${STATIC_AVAILABLE}" = "yes"; then
USE_SHARED_LIB="${HDF5_USE_SHLIB:-no}"
else
USE_SHARED_LIB="${HDF5_USE_SHLIB:-yes}"
fi
# Print the help/usage text for this compiler wrapper and exit with
# failure status (help requests and bad invocations both land here).
usage() {
# A wonderfully informative "usage" message.
echo "usage: $prog_name [OPTIONS] <compile line>"
echo " OPTIONS:"
echo " -help This help message."
echo " -echo Show all the shell commands executed"
echo " -prefix=DIR Prefix directory to find HDF5 lib/ and include/"
echo " subdirectories [default: $prefix]"
echo " -show Show the commands without executing them"
echo " -showconfig Show the HDF5 library configuration summary"
echo " -shlib Compile with shared HDF5 libraries [default for hdf5 built"
echo " without static libraries]"
echo " -noshlib Compile with static HDF5 libraries [default for hdf5 built"
echo " with static libraries]"
echo " "
echo " <compile line> - the normal compile line options for your compiler."
echo " $prog_name uses the same compiler you used to compile"
echo " HDF5. Check with your compiler's man pages for more"
echo " information on which options are needed."
echo " "
echo " You can override the compiler, linker, and whether or not to use static"
echo " or shared libraries to compile your program by setting the following"
echo " environment variables accordingly:"
echo " "
echo " HDF5_CC - use a different C compiler"
echo " HDF5_CLINKER - use a different linker"
echo " HDF5_USE_SHLIB=[yes|no] - use shared or static version of the HDF5 library"
echo " [default: no except when built with only"
echo " shared libraries]"
echo " "
echo " You can also add or change paths and flags to the compile line using"
echo " the following environment variables or by assigning them to their counterparts"
# Bug fix: the closing double quote used to sit before "section of",
# leaving the tail of the line as unquoted extra echo arguments.
echo " in the 'Things You Can Modify to Override...' section of $prog_name"
echo " "
echo " Variable Current value to be replaced"
echo " HDF5_CPPFLAGS \"$CPPFLAGSBASE\""
echo " HDF5_CFLAGS \"$CFLAGSBASE\""
echo " HDF5_LDFLAGS \"$LDFLAGSBASE\""
echo " HDF5_LIBS \"$LIBSBASE\""
echo " "
echo " Note that adding library paths to HDF5_LDFLAGS where another hdf5 version"
echo " is located may link your program with that other hdf5 library version."
echo " "
exit $EXIT_FAILURE
}
# Show the configuration summary of the library recorded in the
# libhdf5.settings file residing in the lib directory.
# Sets the global $status to the exit status of cat for the caller.
showconfigure()
{
    # Quote the path so an installation prefix containing spaces works.
    cat "${libdir}/libhdf5.settings"
    status=$?
}
# Main
status=$EXIT_SUCCESS
if test "$#" = "0"; then
# No parameters specified, issue usage statement and exit.
usage
fi
case "$CC" in
gcc)
kind="gcc"
;;
mpicc|mpcc|mpicc_r)
# Is this gcc masquarading as an MPI compiler?
if test "`${CC} -v 2>&1 | sed -n 2p | cut -c1-3`" = "gcc"; then
kind="gcc"
else
# Nope
kind="$host_os"
fi
;;
*)
kind="$host_os"
;;
esac
# Parse the command line, sorting each argument into compile arguments,
# link arguments, libraries, or object files; the -c/-o interplay
# decides whether we compile, link, or both.
# Bug fixes: iterate over "$@" (quoted) so arguments containing spaces
# survive, and append $arg -- not the stale $qarg -- in the default arm.
for arg in "$@" ; do
    if test "x$get_output_file" = "xyes"; then
        # The previous argument was -o: this one is the output file name.
        link_args="$link_args $arg"
        output_file="$arg"
        get_output_file="no"
        continue
    fi
    case "$arg" in
        -c)
            allargs="$allargs $arg"
            compile_args="$compile_args $arg"
            # If -o already appeared (link mode assumed), redirect the
            # compiler output to that file instead.
            if test "x$do_link" = "xyes" -a -n "$output_file"; then
                compile_args="$compile_args -o $output_file"
            fi
            do_link="no"
            dash_c="yes"
            ;;
        -o)
            allargs="$allargs $arg"
            dash_o="yes"
            if test "x$dash_c" = "xyes"; then
                compile_args="$compile_args $arg"
            else
                link_args="$link_args $arg"
                do_link="yes"
                get_output_file="yes"
            fi
            ;;
        -E|-M|-MT)
            # Preprocess-only / dependency generation: never link.
            allargs="$allargs $arg"
            compile_args="$compile_args $arg"
            dash_c="yes"
            do_link="no"
            ;;
        -l*)
            libraries=" $libraries $arg "
            allargs="$allargs $arg"
            ;;
        -prefix=*)
            prefix="`expr "$arg" : '-prefix=\(.*\)'`"
            ;;
        -echo)
            set -x
            ;;
        -show)
            SHOW="echo"
            ;;
        -showconfig)
            showconfigure
            exit $status
            ;;
        -shlib)
            USE_SHARED_LIB="yes"
            ;;
        -noshlib)
            USE_SHARED_LIB="no"
            ;;
        -help)
            usage
            ;;
        *\"*)
            # Re-quote arguments that contain double quotes.
            qarg="'"$arg"'"
            allargs="$allargs $qarg"
            ;;
        *\'*)
            # Re-quote arguments that contain single quotes.
            qarg='\"'"$arg"'\"'
            allargs="$allargs $qarg"
            ;;
        *)
            # Default arm: the original appended $qarg here, i.e. the
            # leftover value from a previous quoted argument (or nothing).
            allargs="$allargs $arg"
            if test -s "$arg"; then
                # Existing non-empty file: classify it by extension.
                ext=`expr "$arg" : '.*\(\..*\)'`
                if test "x$ext" = "x.c"; then
                    do_compile="yes"
                    compile_args="$compile_args $arg"
                    fname=`basename $arg .c`
                    link_objs="$link_objs $fname.o"
                elif test "x$ext" = "x.o"; then
                    if test "x$dash_c" = "xyes"; then
                        compile_args="$compile_args $arg"
                    else
                        do_link="yes"
                        link_objs="$link_objs $arg"
                    fi
                elif test "x$ext" = "x.a"; then
                    # This is an archive that we're linking in
                    libraries=" $libraries $arg "
                else
                    # Unrecognized file type: pass it to both phases.
                    compile_args="$compile_args $arg"
                    link_args="$link_args $arg"
                fi
            else
                # Not an existing file: assume a flag for both phases.
                compile_args="$compile_args $arg"
                link_args="$link_args $arg"
            fi
            ;;
    esac
done
if test "$dash_c" = "yes" -a "$do_compile" = no -a "$do_link" = no ; then
# -c was specified. Force do_compile on.
do_compile=yes
fi
if test "x$do_compile" = "xyes"; then
if test "x$dash_c" != "xyes"; then
compile_args="-c $compile_args"
fi
$SHOW $CC -I$includedir $H5BLD_CPPFLAGS $CPPFLAGS $H5BLD_CFLAGS $CFLAGS $compile_args
status=$?
if test "$status" != "0"; then
exit $status
fi
fi
if test "x$do_link" = "xyes"; then
shared_link=""
# conditionally link with the hl library
if test "X$HL" = "Xhl"; then
libraries=" $libraries -lhdf5_hl -lhdf5 "
else
libraries=" $libraries -lhdf5 "
fi
link_args="$link_args -L${libdir}"
case "$kind" in
gcc|linux*)
# MacOS X doesn't support the "-Wl,-rpath -Wl," style of linker flags.
# It appears to want none of them specified.
case "$host_os" in
darwin*) flag="" ;;
*) flag="-Wl,-rpath -Wl," ;;
esac
;;
hpux*) flag="-Wl,+b -Wl," ;;
freebsd*|solaris*) flag="-R" ;;
rs6000*|aix*) flag="-L" ;;
sgi) flag="-rpath " ;;
*) flag="" ;;
esac
if test -n "$flag"; then
shared_link="${flag}${libdir}"
fi
if test "x$USE_SHARED_LIB" != "xyes"; then
# The "-lhdf5" & "-lhdf5_hl" flags are in here already...This is a static
# compile though, so change it to the static version (.a) of the library.
new_libraries=""
for lib in $libraries; do
case "$lib" in
-lhdf5)
new_libraries="$new_libraries ${libdir}/libhdf5.a"
;;
-lhdf5_hl)
new_libraries="$new_libraries ${libdir}/libhdf5_hl.a"
;;
*)
new_libraries="$new_libraries $lib"
;;
esac
done
libraries="$new_libraries"
fi
for lib in $libraries; do
if echo $link_args | grep " $lib " > /dev/null ||
echo $link_args | grep " $lib$" > /dev/null; then
:
else
link_args="$link_args $lib "
fi
done
# The LIBS are just a bunch of -l* libraries necessary for the HDF5
# module. It's okay if they're included twice in the compile line.
link_args="$link_args $H5BLD_LDFLAGS $H5BLD_LIBS"
# User's CPPFLAGS and CFLAGS come after their H5BLD counterparts. User's
# LDFLAGS come just before clibpath, user's LIBS come after $link_objs and
# before the hdf5 libraries in $link_args, followed by any external library
# paths and libraries from AM_LDFLAGS, LDFLAGS, AM_LIBS or LIBS carried in
# from the hdf5 build. The order of the flags is intended to give precedence
# to the user's flags.
$SHOW $CLINKER $H5BLD_CPPFLAGS $CPPFLAGS $H5BLD_CFLAGS $CFLAGS $LDFLAGS $clibpath $link_objs $LIBS $link_args $shared_link
status=$?
fi
exit $status

@ -0,0 +1,391 @@
#! /bin/sh
##
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
##
# This tool is adapted from the mpicc command of the MPICH Software.
############################################################################
## ##
## Things You May Have to Modify: ##
## ##
## If the following paths don't point to the place where HDF5 is installed ##
## on your system (i.e., you received a binary distribution or moved the ##
## files from the originally installed directory to another directory) ##
## then modify them accordingly to represent the new paths. ##
## ##
############################################################################
prefix="@prefix@"
exec_prefix="@exec_prefix@"
libdir="@libdir@"
includedir="@includedir@"
HL="@HL@"
############################################################################
## ##
## Things You Can Modify to Override HDF5 Library Build Components: ##
## ##
## (Advanced usage - know what you're doing - you're on your own here.) ##
## The four variables below can be used to insert paths and flags in ##
## CPPFLAGS, CFLAGS, LDFLAGS, or LIBS in the h5cc compile line: ##
## $CLINKER $H5BLD_CPPFLAGS $CPPFLAGS $H5BLD_CFLAGS $CFLAGS $LDFLAGS ##
## $LIBS $clibpath $link_objs $link_args $shared_link ##
## ##
## These settings can be overridden by setting HDF5_CFLAGS, ##
## HDF5_CPPFLAGS, HDF5_LDFLAGS, or HDF5_LIBS in the environment. ##
## ##
############################################################################
CFLAGSBASE=""
CPPFLAGSBASE=""
LDFLAGSBASE=""
LIBSBASE=""
############################################################################
## ##
## You shouldn't have to modify anything below this line. ##
## ##
############################################################################
# Constants definitions
EXIT_SUCCESS=0
EXIT_FAILURE=1
host_os="@host_os@"
prog_name="`basename $0`"
allargs=""
compile_args=""
libraries=""
link_args=""
link_objs=""
clibpath=""
do_link="yes"
do_compile="no"
dash_o="no"
dash_c="no"
get_output_file="no"
SHOW="eval"
CCBASE="@CC@"
CLINKERBASE="@CC@"
# CFLAGS, CPPFLAGS and LDFLAGS are reserved for use by the script user.
# FLAGS brought from the hdf5 build are put in H5BLD_*FLAGS.
# User's CPPFLAGS and CFLAGS come after their H5BLD counterparts. User's
# LDFLAGS come just before clibpath, user's LIBS come after $link_objs and
# before the hdf5 libraries in $link_args, followed by any external library
# paths and libraries from AM_LDFLAGS, LDFLAGS, AM_LIBS or LIBS carried in
# from the hdf5 build. The order of the flags is intended to give precedence
# to the user's flags.
H5BLD_CFLAGS="@AM_CFLAGS@ @CFLAGS@"
H5BLD_CPPFLAGS="@AM_CPPFLAGS@ @CPPFLAGS@"
H5BLD_LDFLAGS="@AM_LDFLAGS@ @LDFLAGS@"
H5BLD_LIBS="@LIBS@"
CC="${HDF5_CC:-$CCBASE}"
CLINKER="${HDF5_CLINKER:-$CLINKERBASE}"
CFLAGS="${HDF5_CFLAGS:-$CFLAGSBASE}"
CPPFLAGS="${HDF5_CPPFLAGS:-$CPPFLAGSBASE}"
LDFLAGS="${HDF5_LDFLAGS:-$LDFLAGSBASE}"
LIBS="${HDF5_LIBS:-$LIBSBASE}"
# If a static library is available, the default will be to use it. If the only
# available library is shared, it will be used by default. The user can
# override either default, although choosing an unavailable library will result
# in link errors.
STATIC_AVAILABLE="@enable_static@"
if test "${STATIC_AVAILABLE}" = "yes"; then
USE_SHARED_LIB="${HDF5_USE_SHLIB:-no}"
else
USE_SHARED_LIB="${HDF5_USE_SHLIB:-yes}"
fi
# Print the help/usage text for this compiler wrapper and exit with
# failure status (help requests and bad invocations both land here).
usage() {
# A wonderfully informative "usage" message.
echo "usage: $prog_name [OPTIONS] <compile line>"
echo " OPTIONS:"
echo " -help This help message."
echo " -echo Show all the shell commands executed"
echo " -prefix=DIR Prefix directory to find HDF5 lib/ and include/"
echo " subdirectories [default: $prefix]"
echo " -show Show the commands without executing them"
echo " -showconfig Show the HDF5 library configuration summary"
echo " -shlib Compile with shared HDF5 libraries [default for hdf5 built"
echo " without static libraries]"
echo " -noshlib Compile with static HDF5 libraries [default for hdf5 built"
echo " with static libraries]"
echo " "
echo " <compile line> - the normal compile line options for your compiler."
echo " $prog_name uses the same compiler you used to compile"
echo " HDF5. Check with your compiler's man pages for more"
echo " information on which options are needed."
echo " "
echo " You can override the compiler, linker, and whether or not to use static"
echo " or shared libraries to compile your program by setting the following"
echo " environment variables accordingly:"
echo " "
echo " HDF5_CC - use a different C compiler"
echo " HDF5_CLINKER - use a different linker"
echo " HDF5_USE_SHLIB=[yes|no] - use shared or static version of the HDF5 library"
echo " [default: no except when built with only"
echo " shared libraries]"
echo " "
echo " You can also add or change paths and flags to the compile line using"
echo " the following environment variables or by assigning them to their counterparts"
# Bug fix: the closing double quote used to sit before "section of",
# leaving the tail of the line as unquoted extra echo arguments.
echo " in the 'Things You Can Modify to Override...' section of $prog_name"
echo " "
echo " Variable Current value to be replaced"
echo " HDF5_CPPFLAGS \"$CPPFLAGSBASE\""
echo " HDF5_CFLAGS \"$CFLAGSBASE\""
echo " HDF5_LDFLAGS \"$LDFLAGSBASE\""
echo " HDF5_LIBS \"$LIBSBASE\""
echo " "
echo " Note that adding library paths to HDF5_LDFLAGS where another hdf5 version"
echo " is located may link your program with that other hdf5 library version."
echo " "
exit $EXIT_FAILURE
}
# Show the configuration summary of the library recorded in the
# libhdf5.settings file residing in the lib directory.
# Sets the global $status to the exit status of cat for the caller.
showconfigure()
{
    # Quote the path so an installation prefix containing spaces works.
    cat "${libdir}/libhdf5.settings"
    status=$?
}
# Main
status=$EXIT_SUCCESS
if test "$#" = "0"; then
# No parameters specified, issue usage statement and exit.
usage
fi
case "$CC" in
gcc)
kind="gcc"
;;
mpicc|mpcc|mpicc_r)
# Is this gcc masquerading as an MPI compiler?
if test "`${CC} -v 2>&1 | sed -n 2p | cut -c1-3`" = "gcc"; then
kind="gcc"
else
# Nope
kind="$host_os"
fi
;;
*)
kind="$host_os"
;;
esac
# Parse the command line, sorting each argument into compile arguments,
# link arguments, libraries, or object files; the -c/-o interplay
# decides whether we compile, link, or both.
# Bug fixes: iterate over "$@" (quoted) so arguments containing spaces
# survive, and append $arg -- not the stale $qarg -- in the default arm.
for arg in "$@" ; do
    if test "x$get_output_file" = "xyes"; then
        # The previous argument was -o: this one is the output file name.
        link_args="$link_args $arg"
        output_file="$arg"
        get_output_file="no"
        continue
    fi
    case "$arg" in
        -c)
            allargs="$allargs $arg"
            compile_args="$compile_args $arg"
            # If -o already appeared (link mode assumed), redirect the
            # compiler output to that file instead.
            if test "x$do_link" = "xyes" -a -n "$output_file"; then
                compile_args="$compile_args -o $output_file"
            fi
            do_link="no"
            dash_c="yes"
            ;;
        -o)
            allargs="$allargs $arg"
            dash_o="yes"
            if test "x$dash_c" = "xyes"; then
                compile_args="$compile_args $arg"
            else
                link_args="$link_args $arg"
                do_link="yes"
                get_output_file="yes"
            fi
            ;;
        -E|-M|-MT)
            # Preprocess-only / dependency generation: never link.
            allargs="$allargs $arg"
            compile_args="$compile_args $arg"
            dash_c="yes"
            do_link="no"
            ;;
        -l*)
            libraries=" $libraries $arg "
            allargs="$allargs $arg"
            ;;
        -prefix=*)
            prefix="`expr "$arg" : '-prefix=\(.*\)'`"
            ;;
        -echo)
            set -x
            ;;
        -show)
            SHOW="echo"
            ;;
        -showconfig)
            showconfigure
            exit $status
            ;;
        -shlib)
            USE_SHARED_LIB="yes"
            ;;
        -noshlib)
            USE_SHARED_LIB="no"
            ;;
        -help)
            usage
            ;;
        *\"*)
            # Re-quote arguments that contain double quotes.
            qarg="'"$arg"'"
            allargs="$allargs $qarg"
            ;;
        *\'*)
            # Re-quote arguments that contain single quotes.
            qarg='\"'"$arg"'\"'
            allargs="$allargs $qarg"
            ;;
        *)
            # Default arm: the original appended $qarg here, i.e. the
            # leftover value from a previous quoted argument (or nothing).
            allargs="$allargs $arg"
            if test -s "$arg"; then
                # Existing non-empty file: classify it by extension.
                ext=`expr "$arg" : '.*\(\..*\)'`
                if test "x$ext" = "x.c"; then
                    do_compile="yes"
                    compile_args="$compile_args $arg"
                    fname=`basename $arg .c`
                    link_objs="$link_objs $fname.o"
                elif test "x$ext" = "x.o"; then
                    if test "x$dash_c" = "xyes"; then
                        compile_args="$compile_args $arg"
                    else
                        do_link="yes"
                        link_objs="$link_objs $arg"
                    fi
                elif test "x$ext" = "x.a"; then
                    # This is an archive that we're linking in
                    libraries=" $libraries $arg "
                else
                    # Unrecognized file type: pass it to both phases.
                    compile_args="$compile_args $arg"
                    link_args="$link_args $arg"
                fi
            else
                # Not an existing file: assume a flag for both phases.
                compile_args="$compile_args $arg"
                link_args="$link_args $arg"
            fi
            ;;
    esac
done
if test "$dash_c" = "yes" -a "$do_compile" = no -a "$do_link" = no ; then
# -c was specified. Force do_compile on.
do_compile=yes
fi
if test "x$do_compile" = "xyes"; then
if test "x$dash_c" != "xyes"; then
compile_args="-c $compile_args"
fi
$SHOW $CC -I$includedir $H5BLD_CPPFLAGS $CPPFLAGS $H5BLD_CFLAGS $CFLAGS $compile_args
status=$?
if test "$status" != "0"; then
exit $status
fi
fi
if test "x$do_link" = "xyes"; then
shared_link=""
# conditionally link with the hl library
if test "X$HL" = "Xhl"; then
libraries=" $libraries -lhdf5_hl -lhdf5 "
else
libraries=" $libraries -lhdf5 "
fi
link_args="$link_args -L${libdir}"
case "$kind" in
gcc|linux*)
# MacOS X doesn't support the "-Wl,-rpath -Wl," style of linker flags.
# It appears to want none of them specified.
case "$host_os" in
darwin*) flag="" ;;
*) flag="-Wl,-rpath -Wl," ;;
esac
;;
hpux*) flag="-Wl,+b -Wl," ;;
freebsd*|solaris*) flag="-R" ;;
rs6000*|aix*) flag="-L" ;;
sgi) flag="-rpath " ;;
*) flag="" ;;
esac
if test -n "$flag"; then
shared_link="${flag}${libdir}"
fi
if test "x$USE_SHARED_LIB" != "xyes"; then
# The "-lhdf5" & "-lhdf5_hl" flags are in here already...This is a static
# compile though, so change it to the static version (.a) of the library.
new_libraries=""
for lib in $libraries; do
case "$lib" in
-lhdf5)
new_libraries="$new_libraries ${libdir}/libhdf5.a"
;;
-lhdf5_hl)
new_libraries="$new_libraries ${libdir}/libhdf5_hl.a"
;;
*)
new_libraries="$new_libraries $lib"
;;
esac
done
libraries="$new_libraries"
fi
for lib in $libraries; do
if echo $link_args | grep " $lib " > /dev/null ||
echo $link_args | grep " $lib$" > /dev/null; then
:
else
link_args="$link_args $lib "
fi
done
# The LIBS are just a bunch of -l* libraries necessary for the HDF5
# module. It's okay if they're included twice in the compile line.
link_args="$link_args $H5BLD_LDFLAGS $H5BLD_LIBS"
# User's CPPFLAGS and CFLAGS come after their H5BLD counterparts. User's
# LDFLAGS come just before clibpath, user's LIBS come after $link_objs and
# before the hdf5 libraries in $link_args, followed by any external library
# paths and libraries from AM_LDFLAGS, LDFLAGS, AM_LIBS or LIBS carried in
# from the hdf5 build. The order of the flags is intended to give precedence
# to the user's flags.
$SHOW $CLINKER $H5BLD_CPPFLAGS $CPPFLAGS $H5BLD_CFLAGS $CFLAGS $LDFLAGS $clibpath $link_objs $LIBS $link_args $shared_link
status=$?
fi
exit $status

@ -0,0 +1,215 @@
#! /bin/sh
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
## Update HDF5 compiler tools after the HDF5 software has been installed ##
## in a new location. ##
## For help page, use "h5redeploy -help" ##
## ##
# Constants definitions
EXIT_SUCCESS=0
EXIT_FAILURE=1
# Function definitions
# show help page
# A wonderfully informative "usage" message, emitted as one
# here-document instead of a string of echo commands, followed by an
# exit with failure status.
usage() {
    cat <<USAGE_END
usage: $prog_name [OPTIONS]
 OPTIONS:
 -help|help This help message
 -echo Show all the shell commands executed
 -force No prompt, just do it
 -prefix=DIR New directory to find HDF5 lib/ and include/
 subdirectories [default: current directory]
 -exec-prefix=DIR New directory to find HDF5 lib/
 subdirectory [default: <prefix>]
 -libdir=DIR New directory for the HDF5 lib directory
 [default: <exec-prefix>/lib]
 -includedir=DIR New directory for the HDF5 header files
 [default: <prefix>/include]
 -tool=TOOL Tool to update. TOOL must be in the current
 directory and writable. [default: $h5tools]
 -show Show the commands without executing them
 
USAGE_END
    exit $EXIT_FAILURE
}
# Debugging aid: display the values of the variables this script uses.
dump_vars(){
    echo "====Showing all variable values====="
    echo "prefix=$prefix"
    echo "h5tools=$h5tools"
    echo "====End Showing====="
}
# Show the user which tools will be modified and how: for each found
# tool, print its currently recorded prefix and the new one.
show_action()
{
echo "Update the following tools because they are now installed at a new directory"
for t in $foundtools; do
echo "${t}:"
# The sed script prints only the line beginning with "prefix=", with
# the "prefix=" key stripped off; every other line is deleted.
echo " current setting=`sed -e '/^prefix=/s/prefix=//p' -e d $t`"
echo " new setting="\""$prefix"\"
done
}
# Report an error: print a banner line followed by the given message.
ERROR()
{
    printf '%s\n' "***ERROR***" "$1"
}
# Main
#
############################################################################
## Installation directories: ##
## prefix architecture-independent files. ##
## exec_prefix architecture-dependent files, default is <prefix>. ##
## libdir libraries, default is <exec_prefix>/lib. ##
## includedir header files, default is <prefix/include>. ##
## Not used here: ##
## bindir executables, <exec_prefix/bin>. ##
############################################################################
# Initialization
h5tools="h5cc h5pcc h5fc h5pfc h5c++" # possible hdf5 tools
foundtools= # tools found and will be modified
fmode= # force mode, default is off
prefix=
exec_prefix=
libdir=
includedir=
# Parse options
for arg in $@ ; do
case "$arg" in
-prefix=*)
prefix="`echo $arg | cut -f2 -d=`"
;;
-exec-prefix=*)
exec_prefix="`echo $arg | cut -f2 -d=`"
;;
-libdir=*)
libdir="`echo $arg | cut -f2 -d=`"
;;
-includedir=*)
includedir="`echo $arg | cut -f2 -d=`"
;;
-echo)
set -x
;;
-show)
SHOW="echo"
;;
-tool=*)
h5tools="`echo $arg | cut -f2 -d=`"
;;
-help|help)
usage
;;
-force)
fmode=yes
;;
*)
ERROR "Unknown Option($arg)"
usage
exit $EXIT_FAILURE
;;
esac
done
# Set to default value, one above where i am, if not given by user
if [ -z "$prefix" ]; then
prefix=`(cd ..;pwd)`
fi
if [ -z "$exec_prefix" ]; then
exec_prefix='${prefix}' # use single quotes to prevent expansion of $
fi
if [ -z "$libdir" ]; then
libdir='${exec_prefix}'/lib # use single quotes to prevent expansion of $
fi
if [ -z "$includedir" ]; then
includedir='${prefix}'/include # use single quotes to prevent expansion of $
fi
for x in $h5tools; do
if [ -f $x ]; then
foundtools="$foundtools $x"
if [ ! -w $x ]; then
ERROR "h5tool($x) is not writable"
exit $EXIT_FAILURE
fi
fi
done
if [ -z "$foundtools" ]; then
ERROR "found no tools to modify"
exit $EXIT_FAILURE
fi
# Show actions to be taken and get consent
show_action
# Ask confirmation unless fmode is on
if [ x-$fmode = x- ]; then
echo "Continue? (yes/no)"
read ansx
ans=`echo $ansx | tr "[A-Z]" "[a-z]"`
if [ x-$ans != x-yes ]; then
echo ABORT. No tools changed.
exit $EXIT_FAILURE
fi
fi
# Create the ed(1) command script that rewrites the path variables in
# each tool. Use mktemp so the file name in world-writable /tmp is
# unpredictable and the file is created atomically with safe modes --
# the old fixed name /tmp/h5redeploy.$$ created via touch-then-chmod
# was open to a symlink race.
CMDFILE="`mktemp /tmp/h5redeploy.XXXXXX`" || exit $EXIT_FAILURE
chmod 0600 "$CMDFILE"
# For each variable, an ed "change" command: replace the whole
# assignment line (/^var=/c ... .) with the newly quoted value.
echo "/^prefix=/c" >> $CMDFILE
echo prefix=\""$prefix"\" >> $CMDFILE
echo . >> $CMDFILE
echo "/^exec_prefix=/c" >> $CMDFILE
echo exec_prefix=\""$exec_prefix"\" >> $CMDFILE
echo . >> $CMDFILE
echo "/^libdir=/c" >> $CMDFILE
echo libdir=\""$libdir"\" >> $CMDFILE
echo . >> $CMDFILE
echo "/^includedir=/c" >> $CMDFILE
echo includedir=\""$includedir"\" >> $CMDFILE
echo . >> $CMDFILE
# Write the file back and quit the editor.
(echo w; echo q) >> $CMDFILE
# Update them (or, in -show mode, just display what would be run)
if [ "$SHOW" = "echo" ]; then
echo "===Update commands are:===="
cat $CMDFILE
echo "===End Update commands====="
fi
for t in $foundtools; do
echo Update $t ...
COMMAND="ed - $t"
if [ "$SHOW" = "echo" ]; then
echo $COMMAND
else
$COMMAND < $CMDFILE
fi
done
# Cleanup
rm -f $CMDFILE
exit $EXIT_SUCCESS

@ -0,0 +1,531 @@
#! /bin/sh
perl -x -S $0 "$@"
exit
#! perl
require 5.003;
use strict;
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Robb Matzke
# 17 July 1998
### Purpose
# Increments the hdf5 version number by changing the value of
# constants in the src/H5public.h file. The new version number is
# printed on the standard output. An alternate source file name can be
# specified as an argument. In any case, the original file is saved
# by appending a tilde `~' to the name.
### Usage:
# h5vers [OPTIONS] [FILE]
# Without options this program only displays the current version and
# doesn't modify any files or create backups. The default is to print
# the version number like X.Y.Z-A where X is the major version number,
# Y is the minor version number, Z is the release number, and A is
# a short annotation string (the `-' is printed only if A is not empty).
# If the `-v' switch is given the version will be printed like:
#
# version X.Y release Z (A)
#
# The space and parentheses around A are only printed if A is not empty.
#
# The `-s VERSION' switch will set the version as specified. If the
# string contains a dotted triple then it will be used as the version
# number, otherwise up to three numbers will be read from the end of
# the string and used as the major version, minor version, and release
# number. If any numbers are missing then zero is assumed. This
# allows versions to be specified like `-s "version 2.1 release 8"' or
# `-s hdf5-2.1.8.tar.bz2'. If the new version is less than the old
# version then a warning message is generated on standard error. The
# annotation string, A, is set only if it appears immediately after the
# third number, separated by a dash (e.g., `1.2.3-pre1') or in parentheses
# (e.g., `version 1.2 release 3 (pre1)').
#
# The `-i [major|minor|release|annot|last]' option increments the major
# number, minor number, release number, or annotation string. The `last'
# switch increments the annotation string if present, otherwise the
# release number. If the release number is incremented then the annotation
# string is cleared. If the minor number is incremented then the release
# number is set to zero and the annotation string is cleared; if the major
# number is incremented then the minor and release numbers are set to zero
# and the annotation string is cleared.
#
# If a file is specified then that file is used instead of
# ./H5public.h or ./src/H5public.h.
#
# If the version number is changed (either `-s' or `-i' was used on
# the command line) then the version line of the README.md and RELEASE.txt files
# one directory above the H5public.h file is also modified so it looks
# something like: This is hdf5-1.2.3-pre1 currently under development.
# The AC_INIT macro in configure.ac will also change in this case to be
# something like: AC_INIT([HDF5], [hdf5-1.2.3-pre1], [help@hdfgroup.org])
# Version changes are also reflected in the Windows-maintained H5pubconf.h
# file.
#
# Whenever the version changes, this script will increment the revision
# field in HDF5's libtool shared library version in config/lt_vers.am,
# which is included in src/Makefile.am. Incrementing the revision field
# indicates that the source code has changed since the last version
# (which it probably has).
##############################################################################
# Extract the four version components from the text of H5public.h.
# Argument: the file contents as one string. Returns a four-element
# list (major, minor, release, subrelease annotation); the first three
# come from numeric #define values, the last from a quoted string.
sub getvers {
# Binding the contents to $_ lets the bare //m matches below apply.
local ($_) = @_;
my (@vers);
($vers[0]) = /^\#\s*define\s+H5_VERS_MAJOR\s+(\d+)/m;
($vers[1]) = /^\#\s*define\s+H5_VERS_MINOR\s+(\d+)/m;
($vers[2]) = /^\#\s*define\s+H5_VERS_RELEASE\s+(\d+)/m;
($vers[3]) = /^\#\s*define\s+H5_VERS_SUBRELEASE\s+\"([^\"]*)\"/m;
return @vers;
}
# Rewrite the version #defines in the file contents to the version in
# @vers. NOTE: the contents are modified in place through $_[0] (the
# @_ aliasing mechanism), so the caller's string is updated directly.
# Returns the result of the last substitution (false if the
# H5_VERS_INFO line was not found).
sub setvers {
my ($contents, @vers) = @_;
$_[0] =~ s/^(\#\s*define\s+H5_VERS_MAJOR\s+)\d+/$1$vers[0]/m;
$_[0] =~ s/^(\#\s*define\s+H5_VERS_MINOR\s+)\d+/$1$vers[1]/m;
$_[0] =~ s/^(\#\s*define\s+H5_VERS_RELEASE\s+)\d+/$1$vers[2]/m;
$_[0] =~ s/^(\#\s*define\s+H5_VERS_SUBRELEASE\s+\")[^\"]*/$1$vers[3]/m;
# H5_VERS_INFO is the human-readable string, e.g.
# "HDF5 library version: 1.14.1-2"; the "-" is emitted only when the
# subrelease annotation is non-empty.
$_[0] =~ s/^(\#\s*define\s+H5_VERS_INFO\s+\")[^\"]*/
sprintf("%sHDF5 library version: %d.%d.%d%s%s", $1, @vers[0,1,2],
$vers[3]?"-":"", $vers[3])/me;
}
sub usage {
my ($prog) = $0 =~ /([^\/]+)$/;
print STDERR <<EOF;
Usage: $prog [OPTS] [FILE]
-i major|minor|release|annot
Increment specified version component and set following components
to zero.
-s VERSION
Set the version as specified. The version number can be embedded in
some other string such as \"hdf5-1.1.0-pre1.tar.bz2\" or even
\"this is HDF5 library version 1.1 release 0 (pre1)\" for convenience.
-v
Instead of displaying only a dotted triple version number a line such
as \"version 1.1 release 0 (pre1)\" will be printed.
FILE
The name of the file that contains version information. This is
seldom necessary since files H5public.h, src/H5public.h and
../src/H5public.h are automatically checked.
EOF
exit 1;
}
# Command-line state: -v flag, -s value, -i field, explicit file name.
my ($verbose, $set, $inc, $file, $rc);
# Default locations to search for H5public.h, in order of preference.
my (@files) = ("H5public.h", "src/H5public.h", "../src/H5public.h");
# Hand-rolled option parser; any non-option argument is the file name.
while ($_ = shift) {
$_ eq "-s" && do {
die "-s switch needs a version number\n" unless @ARGV;
$set = shift;
next;
};
$_ eq "-i" && do {
# -i takes an optional field name; with none, "last" increments the
# annotation if present, otherwise the release number.
if (@ARGV && $ARGV[0]=~/^(major|minor|release|annot)$/) {
$inc = shift;
} else {
$inc = "last";
}
next;
};
$_ eq "-v" && do {
$verbose = 1;
next;
};
/^-(h|\?|-?help)$/ && usage;
/^-/ && die "unrecognized option: $_\n";
die "only one file name can be specified\n" if $file;
$file = $_;
}
# -s (set) and -i (increment) cannot both be requested.
die "mutually exclusive options given\n" if $set && $inc;
# Determine file to use as H5public.h, README.md,
# release_docs/RELEASE.txt, configure.ac, windows/src/H5pubconf.h
# config/lt_vers.am and config/cmake/scripts/HDF5config.cmake.
# The README.md, release_docs/RELEASE.txt, configure.ac,
# windows/src/H5pubconf.h, config/lt_vers.am and
# config/cmake/scripts/HDF5config.cmake
# files are always in the directory above H5public.h
unless ($file) {
for (@files) {
($file=$_,last) if -f $_;
}
}
die "unable to find source files\n" unless defined $file;
die "unable to read file: $file\n" unless -r $file;
# Derive the paths of the companion files that also carry the version
# number; each lives relative to the directory containing H5public.h.
# Bug fix: every readability check below used to test $file (which was
# already verified above) instead of the derived file that the error
# message names, so a missing companion file went undetected here.
# config/lt_vers.am
my $LT_VERS = $file;
$LT_VERS =~ s/[^\/]*$/..\/config\/lt_vers.am/;
die "unable to read file: $LT_VERS\n" unless -r $LT_VERS;
# config/cmake/scripts/HDF5config.cmake
my $HDF5CONFIGCMAKE = $file;
$HDF5CONFIGCMAKE =~ s/[^\/]*$/..\/config\/cmake\/scripts\/HDF5config.cmake/;
die "unable to read file: $HDF5CONFIGCMAKE\n" unless -r $HDF5CONFIGCMAKE;
# README.md
my $README = $file;
$README =~ s/[^\/]*$/..\/README.md/;
die "unable to read file: $README\n" unless -r $README;
# release_docs/RELEASE.txt
my $RELEASE = $file;
$RELEASE =~ s/[^\/]*$/..\/release_docs\/RELEASE.txt/;
die "unable to read file: $RELEASE\n" unless -r $RELEASE;
# configure.ac
my $CONFIGURE = $file;
$CONFIGURE =~ s/[^\/]*$/..\/configure.ac/;
die "unable to read file: $CONFIGURE\n" unless -r $CONFIGURE;
# c++/src/cpp_doc_config
my $CPP_DOC_CONFIG = $file;
$CPP_DOC_CONFIG =~ s/[^\/]*$/..\/c++\/src\/cpp_doc_config/;
die "unable to read file: $CPP_DOC_CONFIG\n" unless -r $CPP_DOC_CONFIG;
# java/src/hdf/hdf5lib/H5.java
my $H5_JAVA = $file;
$H5_JAVA =~ s/[^\/]*$/..\/java\/src\/hdf\/hdf5lib\/H5.java/;
die "unable to read file: $H5_JAVA\n" unless -r $H5_JAVA;
# java/test/TestH5.java
my $TESTH5_JAVA = $file;
$TESTH5_JAVA =~ s/[^\/]*$/..\/java\/test\/TestH5.java/;
die "unable to read file: $TESTH5_JAVA\n" unless -r $TESTH5_JAVA;
# h5repack plugin-version test expected-output file
my $REPACK_LAYOUT_PLUGIN_VERSION = $file;
$REPACK_LAYOUT_PLUGIN_VERSION =~ s/[^\/]*$/..\/tools\/test\/h5repack\/testfiles\/h5repack_layout.h5-plugin_version_test.ddl/;
die "unable to read file: $REPACK_LAYOUT_PLUGIN_VERSION\n" unless -r $REPACK_LAYOUT_PLUGIN_VERSION;
# Get the current version number.
open FILE, $file or die "unable to open $file: $!\n";
my ($contents) = join "", <FILE>;
close FILE;
# getvers (defined earlier in this script) extracts (major, minor,
# release, annotation) from the H5public.h contents.
my (@curver) = getvers $contents;
# Determine the new version number.
my @newver; #new version
if ($set) {
    # --set: accept "1.2.3", "1.2.3-annot", "1.2.3 (annot)", "1.2", or "1".
    # BUG FIX: when the optional annotation group does not participate in
    # the match, $5 is undef; storing it verbatim made $newver[3] undef and
    # triggered warnings on every later "$newver[3] eq ..." comparison.
    # Normalize a missing annotation to the empty string.
    if ($set =~ /(\d+)\.(\d+)\.(\d+)(-([\da-zA-Z]\w*))?/) {
        @newver = ($1, $2, $3, defined $5 ? $5 : "");
    } elsif ($set =~ /(\d+)\D+(\d+)\D+(\d+)(\s*\(([a-zA-Z]\w*)\))?\D*$/) {
        @newver = ($1, $2, $3, defined $5 ? $5 : "");
    } elsif ($set =~ /(\d+)\D+(\d+)\D*$/) {
        @newver = ($1, $2, 0, "");
    } elsif ($set =~ /(\d+)\D*$/) {
        @newver = ($1, 0, 0, "");
    } else {
        die "illegal version number specified: $set\n";
    }
} elsif ($inc) {
    # --increment: bump one field and zero/clear everything less significant.
    # "last" means: bump the annotation if one exists, else the release.
    $inc = $curver[3] eq "" ? 'release' : 'annot' if $inc eq 'last';
    if ($inc eq "major") {
        $newver[0] = $curver[0]+1;
        @newver[1,2,3] = (0,0,"");
    } elsif ($inc eq "minor") {
        $newver[0] = $curver[0];
        $newver[1] = $curver[1]+1;
        @newver[2,3] = (0,"");
    } elsif ($inc eq "release") {
        @newver[0,1] = @curver[0,1];
        $newver[2] = $curver[2]+1;
        $newver[3] = "";
    } elsif ($inc eq "annot") {
        @newver[0,1,2] = @curver[0,1,2];
        $newver[3] = $curver[3];
        # Increment the trailing digits of the annotation ("alpha1" -> "alpha2").
        $newver[3] =~ s/(\d+)\D*$/$1+1/e or
            die "Annotation \"".$newver[3]."\" cannot be incremented.\n";
    } else {
        die "unknown increment field: $inc\n";
    }
} else {
    # Nothing to do but print result; blank the file paths so the update
    # sections below become no-ops.
    # NOTE(review): $H5_JAVA, $TESTH5_JAVA and $REPACK_LAYOUT_PLUGIN_VERSION
    # are not blanked here, so a plain query still rewrites those files
    # (with the unchanged version) — presumably benign; confirm intent.
    $README = "";
    $RELEASE = "";
    $CONFIGURE = "";
    $CPP_DOC_CONFIG = "";
    $LT_VERS = "";
    $HDF5CONFIGCMAKE = "";
    @newver = @curver;
}
# Note if the version increased or decreased
my $version_increased="";
# Print a warning if the version got smaller (don't check annot field).
# Versions are collapsed into one comparable integer: major*1e6 + minor*1e3 + release.
if ($newver[0]*1000000 + $newver[1]*1000 + $newver[2] <
$curver[0]*1000000 + $curver[1]*1000 + $curver[2]) {
printf STDERR "Warning: version decreased from %d.%d.%d to %d.%d.%d\n",
@curver[0,1,2], @newver[0,1,2];
}
if ($newver[0]*1000000 + $newver[1]*1000 + $newver[2] >
$curver[0]*1000000 + $curver[1]*1000 + $curver[2]) {
$version_increased="true";
}
# Update the version number if it changed.
# Numeric fields use numeric compares; the annotation uses a string compare.
if ($newver[0]!=$curver[0] ||
$newver[1]!=$curver[1] ||
$newver[2]!=$curver[2] ||
$newver[3]ne$curver[3]) {
# setvers (defined earlier) patches the version macros inside $contents;
# keep a "$file~" backup before rewriting H5public.h in place.
setvers $contents, @newver or die "unable to set version\n";
rename $file, "$file~" or die "unable to save backup file\n";
open FILE, ">$file" or die "unable to open $file but backup saved!\n";
print FILE $contents;
close FILE;
}
# Update the libtool shared library version in src/Makefile.am if
# the version number has increased.
# This whole section is intentionally inert: lt_vers.am is read but the
# actual rewrite is commented out (see the explanation below).
if ($LT_VERS && $version_increased) {
open FILE, $LT_VERS or die "$LT_VERS: $!\n";
my ($contentsy) = join "", <FILE>;
close FILE;
local($_) = $contentsy;
# As of the HDF5 v1.8.16 release, h5vers should not increment
# the LT_VERS numbers, so the next 6 lines are commented out.
# A future version may copy the numbers to H5public.h, so this
# section is retained for future reference.
# my ($lt_revision) = /^LT_VERS_REVISION\s*=\s*(\d+)/m;
# my $new_lt_revision = $lt_revision+1;
# ($contentsy) =~ s/^(LT_VERS_REVISION\s*=\s*)\d+/$1$new_lt_revision/m;
# open FILE, ">$LT_VERS" or die "$LT_VERS: $!\n";
# print FILE $contentsy;
# close FILE;
}
# Update the README.md file
# The first line of README.md is replaced wholesale with the version banner.
if ($README) {
open FILE, $README or die "$README: $!\n";
my @contents = <FILE>;
close FILE;
# Five conversions (%d %d %d %s %s): three version numbers, the optional
# "-annotation" suffix, and the status string (which carries the newline).
$contents[0] = sprintf("HDF5 version %d.%d.%d%s %s",
@newver[0,1,2],
$newver[3] eq "" ? "" : "-".$newver[3],
"currently under development\n");
open FILE, ">$README" or die "$README: $!\n";
print FILE @contents;
close FILE;
}
# Update the release_docs/RELEASE.txt file
# Same first-line replacement as for README.md above.
if ($RELEASE) {
open FILE, $RELEASE or die "$RELEASE: $!\n";
my @contents = <FILE>;
close FILE;
$contents[0] = sprintf("HDF5 version %d.%d.%d%s %s",
@newver[0,1,2],
$newver[3] eq "" ? "" : "-".$newver[3],
"currently under development\n");
open FILE, ">$RELEASE" or die "$RELEASE: $!\n";
print FILE @contents;
close FILE;
}
# Update the c++/src/cpp_doc_config file
# Rewrites the Doxygen PROJECT_NUMBER line with the new version.
if ($CPP_DOC_CONFIG) {
    my $data = read_file($CPP_DOC_CONFIG);
    # Optional sub-release suffix, e.g. "-alpha1, currently under development".
    my $sub_rel_ver_str = (
        $newver[3] eq ""
        ? ""
        : "-".$newver[3].", currently under development"
    );
    # BUG FIX: the format previously was "\"%d.%d.%d%s%s\"" — five
    # conversions for only four arguments, so the trailing %s silently
    # expanded to the empty string (with a warning under -w).  The stray
    # %s has been removed.
    my $version_string = sprintf("\"%d.%d.%d%s\"",
        @newver[0,1,2],
        $sub_rel_ver_str);
    $data =~ s/PROJECT_NUMBER\s*=.*/PROJECT_NUMBER = $version_string/;
    write_file($CPP_DOC_CONFIG, $data);
}
# Update the config/cmake/scripts/HDF5config.cmake file
# Rewrites the CTEST_SOURCE_VERSION / CTEST_SOURCE_VERSEXT set() lines.
if ($HDF5CONFIGCMAKE) {
my $data = read_file($HDF5CONFIGCMAKE);
# my $sub_rel_ver_str = "";
# Quoted annotation suffix for VERSEXT, e.g. "\"-alpha1\"" (or "\"\"").
my $sub_rel_ver_str = (
$newver[3] eq ""
? sprintf("\"%s\"", "")
: sprintf("\"%s\"", "-".$newver[3])
);
my $version_string = sprintf("\"%d.%d.%d\"", @newver[0,1,2]);
$data =~ s/set \(CTEST_SOURCE_VERSION .*\)/set \(CTEST_SOURCE_VERSION $version_string\)/;
$data =~ s/set \(CTEST_SOURCE_VERSEXT .*\)/set \(CTEST_SOURCE_VERSEXT $sub_rel_ver_str\)/;
write_file($HDF5CONFIGCMAKE, $data);
}
# Update the java/src/hdf/hdf5lib/H5.java file
# Rewrites the @version javadoc tag and the LIB_VERSION array literal.
if ($H5_JAVA) {
    my $data = read_file($H5_JAVA);
    # CLEANUP: removed an unused $sub_rel_ver_str local — it was computed
    # here but never referenced by either substitution below.
    my $version_string1 = sprintf("%d.%d.%d", @newver[0,1,2]);
    my $version_string2 = sprintf("%d, %d, %d", @newver[0,1,2]);
    $data =~ s/\@version HDF5 .* <BR>/\@version HDF5 $version_string1 <BR>/;
    $data =~ s/ public final static int LIB_VERSION\[\] = \{\d*,.\d*,.\d*\};/ public final static int LIB_VERSION[] = \{$version_string2\};/;
    write_file($H5_JAVA, $data);
}
# Update the java/test/TestH5.java file
# Rewrites the expected libversion array and the majnum/minnum/relnum line.
if ($TESTH5_JAVA) {
    my $data = read_file($TESTH5_JAVA);
    # CLEANUP: removed an unused $sub_rel_ver_str local — it was computed
    # here but never referenced by either substitution below.
    my $version_string1 = sprintf("%d, %d, %d", @newver[0,1,2]);
    my $version_string2 = sprintf("int majnum = %d, minnum = %d, relnum = %d", @newver[0,1,2]);
    $data =~ s/ int libversion\[\] = \{.*\};/ int libversion\[\] = \{$version_string1\};/;
    $data =~ s/ int majnum = \d*, minnum = \d*, relnum = \d*;/ $version_string2;/;
    write_file($TESTH5_JAVA, $data);
}
# Update the tools/test/h5repack/testfiles/h5repack_layout.h5-plugin_version_test.ddl file
# The expected-output .ddl embeds the library version in the plugin's
# "PARAMS { 9 maj min rel }" lines; keep them in sync so the test passes.
if ($REPACK_LAYOUT_PLUGIN_VERSION) {
my $data = read_file($REPACK_LAYOUT_PLUGIN_VERSION);
my $version_string = sprintf("%d %d %d", @newver[0,1,2]);
$data =~ s/ PARAMS \{ 9 \d* \d* \d* \}/ PARAMS \{ 9 $version_string \}/g;
write_file($REPACK_LAYOUT_PLUGIN_VERSION, $data);
}
# helper function to read the file for updating c++/src/cpp_doc_config,
# config/cmake/scripts/HDF5Config.cmake, and java files.
# The version string in that file is not at the top, so the string replacement
# is not for the first line, and reading/writing the entire file as one string
# facilitates the substring replacement.
#Presumably these will also work for resetting the version in HDF5config.cmake.
# Slurp an entire file into a single scalar and return it; dies on open failure.
sub read_file {
    my ($filename) = @_;
    open my $in, $filename or die "Could not open '$filename' for reading $!";
    # Disable the input record separator inside a do-block so the whole
    # file comes back in one read.
    my $all = do { local $/; <$in> };
    close $in;
    return $all;
}
# helper function to write the file for updating c++/src/cpp_doc_config,
# config/cmake/scripts/HDF5config.cmake and java files.
# Overwrite $filename with $content; dies on open failure, returns nothing.
sub write_file {
    my ($filename, $content) = @_;
    open my $out, ">$filename"
        or die "Could not open '$filename' for writing $!";
    print {$out} $content;
    close $out;
    return;
}
# Rewrite the AC_INIT line of a configure.ac with the new version, then
# re-run autogen.sh so the generated configure matches.  Uses the global
# @newver; aborts the whole script if autogen.sh fails.
sub gen_configure {
my ($name, $conf) = @_;
open FILE, $conf or die "$conf: $!\n";
my @contents = <FILE>;
close FILE;
# NOTE(review): "$i < $#contents" never examines the last line; harmless
# here because AC_INIT sits near the top of configure.ac.
for (my $i = 0; $i < $#contents; ++$i) {
if ($contents[$i] =~ /^AC_INIT/) {
$contents[$i] = sprintf("AC_INIT([$name], [%d.%d.%d%s], [help\@hdfgroup.org])\n",
@newver[0,1,2],
$newver[3] eq "" ? "" : "-".$newver[3]);
last;
}
}
open FILE, ">$conf" or die "$conf: $!\n";
print FILE @contents;
close FILE;
# Run autogen.sh from the directory containing configure.ac (if any path
# prefix was given), discarding its output and the autom4te cache.
$conf =~ /^(.*?)\/?configure.ac$/;
if ($1) {
$rc = system("cd $1 && ./autogen.sh >/dev/null 2>/dev/null && rm -rf autom4te.cache");
} else {
$rc = system("./autogen.sh >/dev/null 2>/dev/null && rm -rf autom4te.cache");
}
# NOTE(review): $rc is the raw wait status from system() (exit code << 8),
# so the message below prints the raw status, not the exit code — confirm
# whether ">> 8" was intended.
if ($rc) {
printf("./autogen.sh failed with exit code %d. Aborted.\n", $rc);
exit 1;
}
}
# Update the configure.ac files and regenerate them
gen_configure("HDF5", $CONFIGURE) if $CONFIGURE;
# Rewrite the H5_PACKAGE* / H5_VERSION defines in a pubconf-style header.
#   $name    - package name ("HDF5"); lower/upper-cased as needed
#   $pubconf - path of the header to rewrite in place
#   @vers    - (major, minor, release[, annotation]) to stamp in
# NOTE(review): the annotation suffix uses the global $newver[3] rather than
# $vers[3] — presumably because callers always pass the new version; confirm
# before relying on @vers alone.
sub gen_h5pubconf {
    my ($name, $pubconf, @vers) = @_;
    my $namelc = lc($name);
    my $nameuc = uc($name);
    open FILE, $pubconf or die "$pubconf: $!\n";
    my @contents = <FILE>;
    close FILE;
    # BUG FIX: the loop previously ran while "$i < $#contents", which
    # silently skipped the file's last line; a define on the final line
    # was never updated.  Iterate over every line ("<=").
    for (my $i = 0; $i <= $#contents; ++$i) {
        if ($contents[$i] =~ /\#\s*define\s+H5_PACKAGE\s+/) {
            $contents[$i] = "\#define H5_PACKAGE \"$namelc\"\n";
        } elsif ($contents[$i] =~ /\#\s*define\s+H5_PACKAGE_NAME\s+/) {
            $contents[$i] = "\#define H5_PACKAGE_NAME \"$nameuc\"\n";
        } elsif ($contents[$i] =~ /\#\s*define\s+H5_PACKAGE_STRING\s+/) {
            $contents[$i] = sprintf("\#define H5_PACKAGE_STRING \"$nameuc %d.%d.%d%s\"\n",
                @vers[0,1,2],
                $newver[3] eq "" ? "" : "-".$newver[3]);
        } elsif ($contents[$i] =~ /\#\s*define\s+H5_PACKAGE_TARNAME\s+/) {
            $contents[$i] = "\#define H5_PACKAGE_TARNAME \"$namelc\"\n";
        } elsif ($contents[$i] =~ /\#\s*define\s+H5_PACKAGE_VERSION\s+/) {
            $contents[$i] = sprintf("\#define H5_PACKAGE_VERSION \"%d.%d.%d%s\"\n",
                @vers[0,1,2],
                $newver[3] eq "" ? "" : "-".$newver[3]);
        } elsif ($contents[$i] =~ /\#\s*define\s+H5_VERSION\s+/) {
            $contents[$i] = sprintf("\#define H5_VERSION \"%d.%d.%d%s\"\n",
                @vers[0,1,2],
                $newver[3] eq "" ? "" : "-".$newver[3]);
        }
    }
    open FILE, ">$pubconf" or die "$pubconf: $!\n";
    print FILE @contents;
    close FILE;
}
# Print the new version number
# Verbose form: "version M.m release R (annot)"; terse form: "M.m.R-annot".
if ($verbose) {
printf("version %d.%d release %d%s\n", @newver[0,1,2],
$newver[3] eq "" ? "" : " (".$newver[3].")");
} else {
printf("%d.%d.%d%s\n", @newver[0,1,2],
$newver[3] eq "" ? "" : "-".$newver[3]);
}
exit 0;
# Because the first line of this file looks like a Bourne shell script, we
# must tell XEmacs explicitly that this is really a perl script.
#
# Local Variables:
# mode:perl
# End:

@ -0,0 +1,529 @@
#!/bin/sh
# install - install a program, script, or datafile
# NOTE(review): this is the stock FSF/automake auxiliary script; keep in
# sync with upstream rather than patching locally.
scriptversion=2018-03-11.20; # UTC
# This originates from X11R5 (mit/util/scripts/install.sh), which was
# later released in X11R6 (xc/config/util/install.sh) with the
# following copyright and license.
#
# Copyright (C) 1994 X Consortium
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC-
# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# Except as contained in this notice, the name of the X Consortium shall not
# be used in advertising or otherwise to promote the sale, use or other deal-
# ings in this Software without prior written authorization from the X Consor-
# tium.
#
#
# FSF changes to this file are in the public domain.
#
# Calling this script install-sh is preferred over install.sh, to prevent
# 'make' implicit rules from creating a file called install from it
# when there is no Makefile.
#
# This script is compatible with the BSD install script, but was written
# from scratch.
# Restrict word-splitting to space, tab and newline only.
tab='	'
nl='
'
IFS=" $tab$nl"
# Set DOITPROG to "echo" to test this script.
doit=${DOITPROG-}
doit_exec=${doit:-exec}
# Put in absolute file names if you don't have them in your path;
# or use environment vars.
chgrpprog=${CHGRPPROG-chgrp}
chmodprog=${CHMODPROG-chmod}
chownprog=${CHOWNPROG-chown}
cmpprog=${CMPPROG-cmp}
cpprog=${CPPROG-cp}
mkdirprog=${MKDIRPROG-mkdir}
mvprog=${MVPROG-mv}
rmprog=${RMPROG-rm}
stripprog=${STRIPPROG-strip}
# Set lazily below, after probing whether mkdir supports POSIX -p/-m.
posix_mkdir=
# Desired mode of installed file.
mode=0755
# Command fragments assembled from the options parsed below.
chgrpcmd=
chmodcmd=$chmodprog
chowncmd=
mvcmd=$mvprog
rmcmd="$rmprog -f"
stripcmd=
src=
dst=
dir_arg=
dst_arg=
copy_on_change=false
is_target_a_directory=possibly
usage="\
Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE
or: $0 [OPTION]... SRCFILES... DIRECTORY
or: $0 [OPTION]... -t DIRECTORY SRCFILES...
or: $0 [OPTION]... -d DIRECTORIES...
In the 1st form, copy SRCFILE to DSTFILE.
In the 2nd and 3rd, copy all SRCFILES to DIRECTORY.
In the 4th, create DIRECTORIES.
Options:
--help display this help and exit.
--version display version info and exit.
-c (ignored)
-C install only if different (preserve the last data modification time)
-d create directories instead of installing files.
-g GROUP $chgrpprog installed files to GROUP.
-m MODE $chmodprog installed files to MODE.
-o USER $chownprog installed files to USER.
-s $stripprog installed files.
-t DIRECTORY install into DIRECTORY.
-T report an error if DSTFILE is a directory.
Environment variables override the default commands:
CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG
RMPROG STRIPPROG
"
# Parse command-line options; stop at "--" or the first non-option argument.
while test $# -ne 0; do
case $1 in
-c) ;;
-C) copy_on_change=true;;
-d) dir_arg=true;;
-g) chgrpcmd="$chgrpprog $2"
shift;;
--help) echo "$usage"; exit $?;;
-m) mode=$2
# Reject modes containing whitespace or glob characters, which would
# break the unquoted $mode expansions used later.
case $mode in
*' '* | *"$tab"* | *"$nl"* | *'*'* | *'?'* | *'['*)
echo "$0: invalid mode: $mode" >&2
exit 1;;
esac
shift;;
-o) chowncmd="$chownprog $2"
shift;;
-s) stripcmd=$stripprog;;
-t)
is_target_a_directory=always
dst_arg=$2
# Protect names problematic for 'test' and other utilities.
case $dst_arg in
-* | [=\(\)!]) dst_arg=./$dst_arg;;
esac
shift;;
-T) is_target_a_directory=never;;
--version) echo "$0 $scriptversion"; exit $?;;
--) shift
break;;
-*) echo "$0: invalid option: $1" >&2
exit 1;;
*) break;;
esac
shift
done
# We allow the use of options -d and -T together, by making -d
# take the precedence; this is for compatibility with GNU install.
if test -n "$dir_arg"; then
if test -n "$dst_arg"; then
echo "$0: target directory not allowed when installing a directory." >&2
exit 1
fi
fi
if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then
# When -d is used, all remaining arguments are directories to create.
# When -t is used, the destination is already specified.
# Otherwise, the last argument is the destination. Remove it from $@.
for arg
do
if test -n "$dst_arg"; then
# $@ is not empty: it contains at least $arg.
set fnord "$@" "$dst_arg"
shift # fnord
fi
shift # arg
dst_arg=$arg
# Protect names problematic for 'test' and other utilities.
case $dst_arg in
-* | [=\(\)!]) dst_arg=./$dst_arg;;
esac
done
fi
if test $# -eq 0; then
if test -z "$dir_arg"; then
echo "$0: no input file specified." >&2
exit 1
fi
# It's OK to call 'install-sh -d' without argument.
# This can happen when creating conditional directories.
exit 0
fi
if test -z "$dir_arg"; then
if test $# -gt 1 || test "$is_target_a_directory" = always; then
if test ! -d "$dst_arg"; then
echo "$0: $dst_arg: Is not a directory." >&2
exit 1
fi
fi
fi
if test -z "$dir_arg"; then
# Exit with the conventional code for the signal that interrupted us
# (HUP/INT/PIPE/TERM), so callers see an accurate status.
do_exit='(exit $ret); exit $ret'
trap "ret=129; $do_exit" 1
trap "ret=130; $do_exit" 2
trap "ret=141; $do_exit" 13
trap "ret=143; $do_exit" 15
# Set umask so as not to create temps with too-generous modes.
# However, 'strip' requires both read and write access to temps.
case $mode in
# Optimize common cases.
*644) cp_umask=133;;
*755) cp_umask=22;;
*[0-7])
if test -z "$stripcmd"; then
u_plus_rw=
else
u_plus_rw='% 200'
fi
cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;;
*)
if test -z "$stripcmd"; then
u_plus_rw=
else
u_plus_rw=,u+rw
fi
cp_umask=$mode$u_plus_rw;;
esac
fi
# Main loop: install (or, with -d, create) each remaining argument.
for src
do
# Protect names problematic for 'test' and other utilities.
case $src in
-* | [=\(\)!]) src=./$src;;
esac
if test -n "$dir_arg"; then
dst=$src
dstdir=$dst
test -d "$dstdir"
dstdir_status=$?
else
# Waiting for this to be detected by the "$cpprog $src $dsttmp" command
# might cause directories to be created, which would be especially bad
# if $src (and thus $dsttmp) contains '*'.
if test ! -f "$src" && test ! -d "$src"; then
echo "$0: $src does not exist." >&2
exit 1
fi
if test -z "$dst_arg"; then
echo "$0: no destination specified." >&2
exit 1
fi
dst=$dst_arg
# If destination is a directory, append the input filename.
if test -d "$dst"; then
if test "$is_target_a_directory" = never; then
echo "$0: $dst_arg: Is a directory" >&2
exit 1
fi
dstdir=$dst
dstbase=`basename "$src"`
case $dst in
*/) dst=$dst$dstbase;;
*) dst=$dst/$dstbase;;
esac
dstdir_status=0
else
dstdir=`dirname "$dst"`
test -d "$dstdir"
dstdir_status=$?
fi
fi
# Normalize $dstdir to always carry a trailing slash for temp-name building.
case $dstdir in
*/) dstdirslash=$dstdir;;
*) dstdirslash=$dstdir/;;
esac
obsolete_mkdir_used=false
if test $dstdir_status != 0; then
case $posix_mkdir in
'')
# First time through: probe whether 'mkdir -m -p' behaves per POSIX.
# Create intermediate dirs using mode 755 as modified by the umask.
# This is like FreeBSD 'install' as of 1997-10-28.
umask=`umask`
case $stripcmd.$umask in
# Optimize common cases.
*[2367][2367]) mkdir_umask=$umask;;
.*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;;
*[0-7])
mkdir_umask=`expr $umask + 22 \
- $umask % 100 % 40 + $umask % 20 \
- $umask % 10 % 4 + $umask % 2
`;;
*) mkdir_umask=$umask,go-w;;
esac
# With -d, create the new directory with the user-specified mode.
# Otherwise, rely on $mkdir_umask.
if test -n "$dir_arg"; then
mkdir_mode=-m$mode
else
mkdir_mode=
fi
posix_mkdir=false
case $umask in
*[123567][0-7][0-7])
# POSIX mkdir -p sets u+wx bits regardless of umask, which
# is incompatible with FreeBSD 'install' when (umask & 300) != 0.
;;
*)
# Note that $RANDOM variable is not portable (e.g. dash); Use it
# here however when possible just to lower collision chance.
tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$
trap 'ret=$?; rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir" 2>/dev/null; exit $ret' 0
# Because "mkdir -p" follows existing symlinks and we likely work
# directly in world-writeable /tmp, make sure that the '$tmpdir'
# directory is successfully created first before we actually test
# 'mkdir -p' feature.
if (umask $mkdir_umask &&
$mkdirprog $mkdir_mode "$tmpdir" &&
exec $mkdirprog $mkdir_mode -p -- "$tmpdir/a/b") >/dev/null 2>&1
then
if test -z "$dir_arg" || {
# Check for POSIX incompatibilities with -m.
# HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or
# other-writable bit of parent directory when it shouldn't.
# FreeBSD 6.1 mkdir -m -p sets mode of existing directory.
test_tmpdir="$tmpdir/a"
ls_ld_tmpdir=`ls -ld "$test_tmpdir"`
case $ls_ld_tmpdir in
d????-?r-*) different_mode=700;;
d????-?--*) different_mode=755;;
*) false;;
esac &&
$mkdirprog -m$different_mode -p -- "$test_tmpdir" && {
ls_ld_tmpdir_1=`ls -ld "$test_tmpdir"`
test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1"
}
}
then posix_mkdir=:
fi
rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir"
else
# Remove any dirs left behind by ancient mkdir implementations.
rmdir ./$mkdir_mode ./-p ./-- "$tmpdir" 2>/dev/null
fi
trap '' 0;;
esac;;
esac
if
$posix_mkdir && (
umask $mkdir_umask &&
$doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir"
)
then :
else
# The umask is ridiculous, or mkdir does not conform to POSIX,
# or it failed possibly due to a race condition. Create the
# directory the slow way, step by step, checking for races as we go.
case $dstdir in
/*) prefix='/';;
[-=\(\)!]*) prefix='./';;
*) prefix='';;
esac
# Split $dstdir on '/' into positional parameters, globbing disabled.
oIFS=$IFS
IFS=/
set -f
set fnord $dstdir
shift
set +f
IFS=$oIFS
prefixes=
for d
do
test X"$d" = X && continue
prefix=$prefix$d
if test -d "$prefix"; then
prefixes=
else
if $posix_mkdir; then
(umask=$mkdir_umask &&
$doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break
# Don't fail if two instances are running concurrently.
test -d "$prefix" || exit 1
else
case $prefix in
*\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;;
*) qprefix=$prefix;;
esac
prefixes="$prefixes '$qprefix'"
fi
fi
prefix=$prefix/
done
if test -n "$prefixes"; then
# Don't fail if two instances are running concurrently.
(umask $mkdir_umask &&
eval "\$doit_exec \$mkdirprog $prefixes") ||
test -d "$dstdir" || exit 1
obsolete_mkdir_used=true
fi
fi
fi
if test -n "$dir_arg"; then
# -d mode: apply ownership/group/mode to the directory itself.
{ test -z "$chowncmd" || $doit $chowncmd "$dst"; } &&
{ test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } &&
{ test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false ||
test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1
else
# Make a couple of temp file names in the proper directory.
dsttmp=${dstdirslash}_inst.$$_
rmtmp=${dstdirslash}_rm.$$_
# Trap to clean up those temp files at exit.
trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0
# Copy the file name to the temp name.
(umask $cp_umask &&
{ test -z "$stripcmd" || {
# Create $dsttmp read-write so that cp doesn't create it read-only,
# which would cause strip to fail.
if test -z "$doit"; then
: >"$dsttmp" # No need to fork-exec 'touch'.
else
$doit touch "$dsttmp"
fi
}
} &&
$doit_exec $cpprog "$src" "$dsttmp") &&
# and set any options; do chmod last to preserve setuid bits.
#
# If any of these fail, we abort the whole thing. If we want to
# ignore errors from any of these, just make sure not to ignore
# errors from the above "$doit $cpprog $src $dsttmp" command.
#
{ test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } &&
{ test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } &&
{ test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } &&
{ test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } &&
# If -C, don't bother to copy if it wouldn't change the file.
# Compare mode/owner/group/size columns of 'ls -l' plus file contents.
if $copy_on_change &&
old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` &&
new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` &&
set -f &&
set X $old && old=:$2:$4:$5:$6 &&
set X $new && new=:$2:$4:$5:$6 &&
set +f &&
test "$old" = "$new" &&
$cmpprog "$dst" "$dsttmp" >/dev/null 2>&1
then
rm -f "$dsttmp"
else
# Rename the file to the real destination.
$doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null ||
# The rename failed, perhaps because mv can't rename something else
# to itself, or perhaps because mv is so ancient that it does not
# support -f.
{
# Now remove or move aside any old file at destination location.
# We try this two ways since rm can't unlink itself on some
# systems and the destination file might be busy for other
# reasons. In this case, the final cleanup might fail but the new
# file should still install successfully.
{
test ! -f "$dst" ||
$doit $rmcmd -f "$dst" 2>/dev/null ||
{ $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null &&
{ $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; }
} ||
{ echo "$0: cannot unlink or rename $dst" >&2
(exit 1); exit 1
}
} &&
# Now rename the file to the real destination.
$doit $mvcmd "$dsttmp" "$dst"
}
fi || exit 1
trap '' 0
fi
done
# Local variables:
# eval: (add-hook 'before-save-hook 'time-stamp)
# time-stamp-start: "scriptversion="
# time-stamp-format: "%:y-%02m-%02d.%02H"
# time-stamp-time-zone: "UTC0"
# time-stamp-end: "; # UTC"
# End:

@ -0,0 +1,96 @@
#!/usr/bin/env perl
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Usage: pipe the output of Linux's `strace' program into the stdin of
# this command, and the output of this command into gnuplot.
#
# With --fast only call/byte totals are kept; otherwise every (offset,
# size, read/write) access is tallied and "total-bytes offset" pairs are
# printed for plotting.  $fd and $pos are deliberately undeclared package
# globals (the script does not use strict).
# NOTE(review): when no arguments are given, $ARGV[0] is undef and the
# match below would warn under -w — presumably invoked with a filename.
my ($fast,$npasses);
if ($ARGV[0] =~ /^--?fast$/) {
$fast = 1;
shift;
}
# File whose I/O is profiled; defaults to the HDF5 test file tstab2.h5.
my $filename = shift || "tstab2.h5";
my $total = 0;
my %What; # What{pos}{nbytes}{r|w} = naccesses
my($total_writes, $total_bytes_out, $total_reads, $total_bytes_in);
while (<>) {
# Track only the open()/close() of $filename; ignore all other fds.
if (!defined $fd) {
if (/^open\("(.*?)".*=\s+(\d+)/ && $1 eq $filename) {
$fd = $2;
$pos = 0;
}
} elsif (/^close\((\d+)/ && $1==$fd) {
$fd = undef;
} elsif (!$fast &&
/^lseek\((\d+), -?\d+,.*= (\d+)/ &&
$1==$fd && $2>=0) {
# Successful seek: remember the new file offset.
$pos = $2;
} elsif (!$fast && /^lseek\((\d+),/ && $1==$fd) {
# Unparsable lseek on our fd: bail out rather than mis-count.
die $_;
} elsif (/^write\((\d+), ".*?"(\.\.\.)?, \d+\)\s*= (\d+)/ &&
$1==$fd && $3>=0) {
my $nbytes = $3;
if ($fast) {
$total_writes++;
$total_bytes_out += $nbytes;
} else {
$What{$pos}{$nbytes}{w}++;
printf "%d %d\n", $total, $pos;
$pos += $nbytes;
$total += $nbytes;
}
} elsif (/^write\((\d+),/ && $1==$fd) {
die $_;
} elsif (/^read\((\d+), ".*?"(\.\.\.)?, \d+\)\s*= (\d+)/ &&
$1==$fd && $3>=0) {
my $nbytes = $3;
if ($fast) {
$total_reads++;
$total_bytes_in += $nbytes;
} else {
$What{$pos}{$nbytes}{r}++;
printf "%d %d\n", $total, $pos;
$pos += $nbytes;
$total += $nbytes;
}
} elsif (/^read\((\d+),/ && $1==$fd) {
die $_;
}
}
# Per-(offset,size) summary table; totals are accumulated as a side effect.
if (!$fast) {
print "="x36, "\n";
printf "%8s %8s %8s %8s\n","Position","NBytes","NReads","NWrites";
for $pos (sort {$a<=>$b} keys %What) {
for $nbytes (sort {$a<=>$b} keys %{$What{$pos}}) {
# NOTE(review): {r} or {w} may be undef when only one direction was
# seen at this (pos,nbytes); %d prints it as 0, warning-free only
# because the script does not enable warnings.
printf("%8d %8d %8d %8d\n", $pos, $nbytes,
$What{$pos}{$nbytes}{r},
$What{$pos}{$nbytes}{w});
$total_writes += $What{$pos}{$nbytes}{w};
$total_reads += $What{$pos}{$nbytes}{r};
$total_bytes_out += $What{$pos}{$nbytes}{w} * $nbytes;
$total_bytes_in += $What{$pos}{$nbytes}{r} * $nbytes;
}
}
}
print "="x36, "\n";
printf("Write: %8d calls, %10d total bytes, %10g average bytes\n",
$total_writes, $total_bytes_out, $total_bytes_out/$total_writes)
if $total_writes;
printf("Read: %8d calls, %10d total bytes, %10g average bytes\n",
$total_reads, $total_bytes_in, $total_bytes_in/$total_reads)
if $total_reads;

File diff suppressed because it is too large Load Diff

@ -0,0 +1,397 @@
#!/usr/bin/env perl
require 5.003;
# Indent width (spaces) for generated output — presumably used by the
# emitter subs later in this file; TODO confirm.
$indent=4;
use warnings;
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Create error headers
#
# Read in the error description text file and create the appropriate headers
# needed by the library.
#
# Programmer: Quincey Koziol
# Creation Date: 2003/08/12
##############################################################################
# Print the copyright into an open file
#
# Write the boxed HDF5 copyright banner (as a C block comment) to the
# given filehandle.  The banner text is fixed; alignment padding inside
# each line is significant and must not be reflowed.
sub print_copyright ($) {
my $fh = shift;
print $fh "/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\n";
print $fh " * Copyright by The HDF Group. *\n";
print $fh " * All rights reserved. *\n";
print $fh " * *\n";
print $fh " * This file is part of HDF5. The full HDF5 copyright notice, including *\n";
print $fh " * terms governing use, modification, and redistribution, is contained in *\n";
print $fh " * the COPYING file, which can be found at the root of the source code *\n";
print $fh " * distribution tree, or in https://www.hdfgroup.org/licenses. *\n";
print $fh " * If you do not have access to either file, you may request a copy from *\n";
print $fh " * help\@hdfgroup.org. *\n";
print $fh " * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */\n";
}
##############################################################################
# Print the "do not change this file" warning
#
# Emit the standard generated-file banner to the supplied filehandle.
sub print_warning ($) {
    my $fh = shift;
    print $fh "\n/* Generated automatically by bin/make_err -- do not edit */\n",
        "/* Add new errors to H5err.txt file */\n\n";
}
##############################################################################
# Print start of ifdef's to prevent a file from being re-included
#
# Write "#ifndef <stem>_H / #define <stem>_H" to $fh, where <stem> is the
# header filename with its ".h" suffix removed.
sub print_startprotect ($$) {
    my ($fh, $file) = @_;
    # Derive the guard token by clipping the ".h" off the filename.
    (my $guard = $file) =~ s/(\w*)\.h/$1/;
    print $fh "\n#ifndef ${guard}_H\n#define ${guard}_H\n";
}
##############################################################################
# Print end of ifdef's to prevent a file from being re-included
#
# Write the matching "#endif /* <stem>_H */" for print_startprotect.
sub print_endprotect ($$) {
    my ($fh, $file) = @_;
    # Derive the guard token by clipping the ".h" off the filename.
    (my $guard = $file) =~ s/(\w*)\.h/$1/;
    print $fh "\n#endif /* ${guard}_H */\n";
}
##############################################################################
# Parse a meaningful line (not a comment or blank line) into the appropriate
# data structure
#
# Recognizes three record kinds from H5err.txt — "MAJOR, name, desc",
# "MINOR, section, name, desc" and "SECTION, name, desc" — and fills the
# package-global hashes %major, %minor, %section and %section_list.
# Dies on duplicate names, unknown sections, or unrecognized keywords.
sub parse_line ($) {
my $line = shift; # Get the line to parse
my $name; # The name of the error message
my $desc; # The description of the error message
# Parse major error lines
#print "line=$line\n";
if($line =~ /^\s*MAJOR,/) {
# Get the major error's name & description
($name, $desc) = ($line =~ /^\s*MAJOR,\s*(\w*),\s*(.*)\n/);
#print "MAJOR: name=$name, desc=$desc\n";
# Check if the name already exists as a major or minor error message
if(exists($major{$name}) || exists($minor{$name})) {
die "duplicated name: $name";
}
# Store the major errors in a hash table, indexed by the name
$major{$name}=$desc;
}
# Parse minor error lines
elsif($line =~ /^\s*MINOR,/) {
my $min_section; # Minor errors have a section they belong to also
# Get the minor error's section, name & description
($min_section, $name, $desc) = ($line =~ /^\s*MINOR,\s*(\w*),\s*(\w*),\s*(.*)\n/);
#print "MINOR: min_section=$min_section, name=$name, desc=$desc\n";
# Check for valid section (sections must be declared before their minors)
if(!exists($section{$min_section})) {
die "unknown section: $min_section";
}
# Check if the name already exists as a major or minor error message
if(exists($major{$name}) || exists($minor{$name})) {
die "duplicated name: $name";
}
# Store the minor errors in a hash table, indexed by the name
$minor{$name}=$desc;
# Add the minor error to the list for the section
push @{$section_list{$min_section}}, $name;
}
# Parse section lines
elsif($line =~ /^\s*SECTION,/) {
# Get the section's name & description
($name, $desc) = ($line =~ /^\s*SECTION,\s*(\w*),\s*(.*)\n/);
#print "SECTION: name=$name, desc=$desc\n";
# Check if the section has already been defined
if(exists($section{$name})) {
die "duplicated name: $name";
}
# Store the section in a hash table, indexed by the name
$section{$name}=$desc;
}
# Unknown keyword
else {
die "unknown keyword: $line";
}
}
##############################################################################
# Create the generated portion of the public header file
#
# Writes "H5Epubgen.h" under the directory given in the prefix argument,
# using the %major, %minor, %section and %section_list tables built by
# parse_line().  For each error it emits a "#define NAME (H5OPEN NAME_g)"
# wrapper plus an "H5_DLLVAR hid_t NAME_g" declaration, wrapped in an
# 'extern "C"' block for C++ consumers.  Dies if the output file cannot
# be opened.
# NOTE: iteration uses Perl hash order, so the emitted symbol order is not
# fixed between runs (only section_list preserves input order).
#
sub create_public ($) {
my $prefix = shift; # Get the prefix for the generated file
my $file = "H5Epubgen.h"; # Name of file to generate
my $name; # Name of error message
my $desc; # Description of error message
my $sect_name; # Section of minor error messages
my $sect_desc; # Description of section
# Rename previous file
# rename "${prefix}${file}", "${prefix}${file}~" or die "unable to make backup";
# Open new header file (clobbers any existing generated copy)
open HEADER, ">${prefix}${file}" or die "unable to modify source";
# Create file contents
print_copyright(*HEADER);
print_warning(*HEADER);
print_startprotect(*HEADER, $file);
# Begin extern C block
print HEADER "\n";
print HEADER "#ifdef __cplusplus\n";
print HEADER "extern \"C\" {\n";
print HEADER "#endif\n";
# Iterate over all the major errors
print HEADER "\n/*********************/\n";
print HEADER "/* Major error codes */\n";
print HEADER "/*********************/\n\n";
# First pass: #define wrappers that route through H5OPEN
foreach $name (keys %major) {
printf HEADER "#define %-20s (H5OPEN %s_g)\n",$name,$name;
}
# Second pass: exported hid_t declarations for the same symbols
foreach $name (keys %major) {
printf HEADER "H5_DLLVAR hid_t %-20s /* %s */\n","${name}_g;",$major{$name};
}
# Iterate over all the minor error sections
print HEADER "\n/*********************/\n";
print HEADER "/* Minor error codes */\n";
print HEADER "/*********************/\n";
while ( ($sect_name, $sect_desc) = each (%section)) {
print HEADER "\n/* $sect_desc */\n";
# Iterate over all the minor errors in each section
for $name ( @{$section_list{$sect_name}}) {
printf HEADER "#define %-20s (H5OPEN %s_g)\n",$name,$name;
}
for $name ( @{$section_list{$sect_name}}) {
printf HEADER "H5_DLLVAR hid_t %-20s /* %s */\n","${name}_g;",$minor{$name};
}
}
# End extern C block
print HEADER "\n";
print HEADER "#ifdef __cplusplus\n";
print HEADER "}\n";
print HEADER "#endif\n";
print_endprotect(*HEADER, $file);
# Close header file
close HEADER;
}
##############################################################################
# Create the generated portion of the H5E initialization code
#
# Writes "H5Einit.h" under the directory given in the prefix argument.  For
# every major and minor error it emits a C fragment that creates the error
# message with H5E__create_msg() and registers it with H5I_register(),
# storing the resulting id in the NAME_g global.
# NOTE(review): the emitted code references `cls` and `msg` variables and
# the HGOTO_ERROR macro, which must be provided by whichever C function
# #includes this fragment — presumably H5E's init routine; confirm there.
#
sub create_init ($) {
my $prefix = shift; # Get the prefix for the generated file
my $file = "H5Einit.h"; # Name of file to generate
my $name; # Name of error message
my $desc; # Description of error message
my $sect_name; # Section of minor error messages
my $sect_desc; # Description of section
# Rename previous file
# rename "${prefix}${file}", "${prefix}${file}~" or die "unable to make backup";
# Open new header file (clobbers any existing generated copy)
open HEADER, ">${prefix}${file}" or die "unable to modify source";
# Create file contents
print_copyright(*HEADER);
print_warning(*HEADER);
print_startprotect(*HEADER, $file);
# Iterate over all the major errors
print HEADER "\n/*********************/\n";
print HEADER "/* Major error codes */\n";
print HEADER "/*********************/\n\n";
# Note: " "x(0*$indent) deliberately emits no indent; only the HGOTO_ERROR
# continuation lines get one level of indentation.
foreach $name (keys %major) {
print HEADER " "x(0*$indent),"HDassert(${name}_g==(-1));\n";
print HEADER " "x(0*$indent),"if((msg = H5E__create_msg(cls, H5E_MAJOR, \"${major{$name}}\"))==NULL)\n";
print HEADER " "x(1*$indent),"HGOTO_ERROR(H5E_ERROR, H5E_CANTINIT, FAIL, \"error message initialization failed\")\n";
print HEADER " "x(0*$indent),"if((${name}_g = H5I_register(H5I_ERROR_MSG, msg, FALSE))<0)\n";
print HEADER " "x(1*$indent),"HGOTO_ERROR(H5E_ERROR, H5E_CANTREGISTER, FAIL, \"can't register error message\")\n";
}
# Iterate over all the minor error sections
print HEADER "\n/*********************/\n";
print HEADER "/* Minor error codes */\n";
print HEADER "/*********************/\n\n";
while ( ($sect_name, $sect_desc) = each (%section)) {
print HEADER "\n"," "x(0*$indent),"/* $sect_desc */\n";
# Iterate over all the minor errors in each section
for $name ( @{$section_list{$sect_name}}) {
print HEADER " "x(0*$indent),"HDassert(${name}_g==(-1));\n";
print HEADER " "x(0*$indent),"if((msg = H5E__create_msg(cls, H5E_MINOR, \"${minor{$name}}\"))==NULL)\n";
print HEADER " "x(1*$indent),"HGOTO_ERROR(H5E_ERROR, H5E_CANTINIT, FAIL, \"error message initialization failed\")\n";
print HEADER " "x(0*$indent),"if((${name}_g = H5I_register(H5I_ERROR_MSG, msg, FALSE))<0)\n";
print HEADER " "x(1*$indent),"HGOTO_ERROR(H5E_ERROR, H5E_CANTREGISTER, FAIL, \"can't register error message\")\n";
}
}
print_endprotect(*HEADER, $file);
# Close header file
close HEADER;
}
##############################################################################
# Create the generated portion of the H5E termination code
#
# Writes "H5Eterm.h" under the directory given in the prefix argument.  The
# output resets every NAME_g id back to -1 using a single chained C
# assignment per group: each loop iteration prints "NAME_g=" on its own
# line, and the single trailing " (-1);" completes the chain, i.e.
#     A_g=
#     B_g= (-1);
# which C parses as A_g = B_g = (-1);
#
sub create_term ($) {
my $prefix = shift; # Get the prefix for the generated file
my $file = "H5Eterm.h"; # Name of file to generate
my $name; # Name of error message
my $desc; # Description of error message
my $sect_name; # Section of minor error messages
my $sect_desc; # Description of section
# Rename previous file
# rename "${prefix}${file}", "${prefix}${file}~" or die "unable to make backup";
# Open new header file (clobbers any existing generated copy)
open HEADER, ">${prefix}${file}" or die "unable to modify source";
# Create file contents
print_copyright(*HEADER);
print_warning(*HEADER);
print_startprotect(*HEADER, $file);
# Iterate over all the major errors, building one chained assignment
print HEADER "\n/* Reset major error IDs */\n";
foreach $name (keys %major) {
print HEADER " "x($indent),"\n${name}_g=";
}
# Terminate the chained assignment for the major errors
print HEADER " (-1);\n";
# Iterate over all the minor error sections, building a second chain
print HEADER "\n/* Reset minor error IDs */\n";
while ( ($sect_name, $sect_desc) = each (%section)) {
print HEADER "\n"," "x(0*$indent),"\n/* $sect_desc */";
# Iterate over all the minor errors in each section
for $name ( @{$section_list{$sect_name}}) {
print HEADER " "x($indent),"\n${name}_g=";
}
}
# Terminate the chained assignment for the minor errors
print HEADER " (-1);\n";
print_endprotect(*HEADER, $file);
# Close header file
close HEADER;
}
##############################################################################
# Create the generated portion of the error code definitions
#
# Writes "H5Edefin.h" under the directory given in the prefix argument,
# emitting the definition (initialized to FAIL) of the NAME_g hid_t global
# for every major and minor error collected by parse_line().  This is the
# defining counterpart of the H5_DLLVAR declarations in H5Epubgen.h.
#
sub create_define ($) {
my $prefix = shift; # Get the prefix for the generated file
my $file = "H5Edefin.h"; # Name of file to generate
my $name; # Name of error message
my $desc; # Description of error message
my $sect_name; # Section of minor error messages
my $sect_desc; # Description of section
# Rename previous file
# rename "${prefix}${file}", "${prefix}${file}~" or die "unable to make backup";
# Open new header file (clobbers any existing generated copy)
open HEADER, ">${prefix}${file}" or die "unable to modify source";
# Create file contents
print_copyright(*HEADER);
print_warning(*HEADER);
print_startprotect(*HEADER, $file);
# Iterate over all the major errors (Perl hash order)
print HEADER "\n/* Major error IDs */\n";
foreach $name (keys %major) {
printf HEADER "hid_t %-20s = FAIL; /* %s */\n","${name}_g",$major{$name};
}
# Iterate over all the minor error sections
print HEADER "\n/* Minor error IDs */\n";
while ( ($sect_name, $sect_desc) = each (%section)) {
print HEADER "\n/* $sect_desc */\n";
# Iterate over all the minor errors in each section (input order)
for $name ( @{$section_list{$sect_name}}) {
printf HEADER "hid_t %-20s = FAIL; /* %s */\n","${name}_g",$minor{$name};
}
}
print_endprotect(*HEADER, $file);
# Close header file
close HEADER;
}
##############################################################################
# Read error file (given as command-line argument) in and process it into
# internal data structures, then create error header files.
#
for $file (@ARGV) {
    my $prefix;         # Local prefix for generated files

    # Check for a directory prefix on the input file and default to the
    # current directory otherwise.  Previously $prefix stayed undef for a
    # bare filename, producing "uninitialized value" noise when building
    # the output paths; this mirrors the handling in bin/make_vers.
    if ($file =~ /\//) {
        ($prefix) = ($file =~ /(^.*\/)/);
    }
    else {
        $prefix = "";
    }

    # Read in the entire file, handing every meaningful line to
    # parse_line() to build the %major/%minor/%section tables.
    open SOURCE, $file or die "$file: $!\n";
    while ( defined ($line=<SOURCE>) ) {
        # Skip blank lines and whole-line '#' comments
        if(!($line =~ /(^\s*#.*$)|(^\s*$)/)) {
            # Construct data structures for later printing
            parse_line($line);
        }
    }
    close SOURCE;

    # Create the four generated headers next to the input file
    print "Generating 'H5Epubgen.h'\n";
    create_public($prefix);
    print "Generating 'H5Einit.h'\n";
    create_init($prefix);
    print "Generating 'H5Eterm.h'\n";
    create_term($prefix);
    print "Generating 'H5Edefin.h'\n";
    create_define($prefix);
}

@ -0,0 +1,214 @@
#!/usr/bin/env perl
require 5.003;
use strict;
use warnings;
# Global settings
# List of supported C types to generate overflow assignment code for.
# Each element is a [name, signedness] array ref pushed by parse_line();
# the "( () )" initializer flattens to an empty list.
my @ctypes = ( () );
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Create assignment overflow #ifdefs
#
# Programmer: Quincey Koziol
# Creation Date: 2009/04/09
##############################################################################
# Parse one meaningful line (not a comment or blank line) of the type
# description file and append it to the @ctypes list.
#
sub parse_line ($) {
    my $line = shift;       # Line from the type description file

    # Accept only "<name>, SIGNED;" / "<name>, UNSIGNED;" lines (the first
    # pattern also matches UNSIGNED, since SIGNED is a substring of it).
    if ($line =~ /.*SIGNED\s*;\s*$/ || $line =~ /.*UNSIGNED\s*;\s*$/) {
        # Extract the type name and its signedness keyword.
        my ($name, $signed) = ($line =~ /^\s*(\w*)\s*,\s*(\w*)\s*;\s*$/);

        # Record the type for print_typemacros() as a [name, signedness] pair.
        push @ctypes, [$name, $signed];
    }
    else {
        # Anything else is a malformed input line.
        die "unknown keyword: $line";
    }
}
##############################################################################
# Print the copyright banner into an open file
#
sub print_copyright ($) {
    my $fh = shift;

    # Emit the standard HDF5 copyright notice as a C block comment.  A
    # single-quoted heredoc keeps the '@' in the e-mail address literal.
    print $fh <<'END_COPYRIGHT';
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Copyright by The HDF Group. *
 * All rights reserved. *
 * *
 * This file is part of HDF5. The full HDF5 copyright notice, including *
 * terms governing use, modification, and redistribution, is contained in *
 * the COPYING file, which can be found at the root of the source code *
 * distribution tree, or in https://www.hdfgroup.org/licenses. *
 * If you do not have access to either file, you may request a copy from *
 * help@hdfgroup.org. *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
END_COPYRIGHT
}
##############################################################################
# Print the "do not change this file" warning
#
sub print_warning ($) {
    my $fh = shift;

    # One blank line before and after, exactly as downstream tools expect.
    print $fh <<'END_WARNING';

/* Generated automatically by bin/make_overflow -- do not edit */
/* Add new types to H5overflow.txt file */

END_WARNING
}
##############################################################################
# Open the include guard that keeps the header from being re-included
#
sub print_startprotect ($$) {
    my ($out, $hdr) = @_;

    # Derive the guard macro name by dropping the ".h" suffix.
    $hdr =~ s/(\w*)\.h/$1/;

    # Emit the guard (closed later by print_endprotect).
    print $out "\n#ifndef ${hdr}_H\n",
               "#define ${hdr}_H\n";
}
##############################################################################
# Print assignment overflow macros for each type
#
# For every ordered pair of *distinct* types parsed into @ctypes, emits an
# ASSIGN_<src>_TO_<dst>(dst, dsttype, src, srctype) macro whose body is
# chosen at C preprocessing time by comparing H5_SIZEOF_<SRC> against
# H5_SIZEOF_<DST>, combined with the recorded signedness of each side.
# Element [0] of each @ctypes entry is the type name, element [1] is
# "SIGNED" or "UNSIGNED".
# NOTE(review): the ASSIGN_TO_* helper macros referenced here are defined
# elsewhere in the library (not in this script) — confirm against the
# C headers that include the generated file.
#
sub print_typemacros ($) {
my $fh = shift; # File handle for output file
my ($src_aref, $dst_aref); # References for each type's information
# Print the descriptive comment
print $fh "\n\n/* Each type in this file is tested for assignment to the other types,\n";
print $fh " * and range checks are defined for bad assignments at run-time.\n";
print $fh " */\n";
for $src_aref (@ctypes) {
# Print a descriptive comment
print $fh "\n/* Assignment checks for @$src_aref[0] */\n\n";
for $dst_aref (@ctypes) {
# Skip self-assignment; only distinct type pairs get macros
if (@$src_aref[0] ne @$dst_aref[0]) {
# Print a descriptive comment
print $fh "/* src: @$src_aref[0], dst: @$dst_aref[0] */\n";
# Case 1: destination is strictly larger than the source
print $fh "#if H5_SIZEOF_", uc @$src_aref[0], " < H5_SIZEOF_", uc @$dst_aref[0], "\n";
print $fh " #define ASSIGN_", @$src_aref[0], "_TO_", @$dst_aref[0], "(dst, dsttype, src, srctype) \\\n";
if ( @$src_aref[1] eq @$dst_aref[1]) {
print $fh " ASSIGN_TO_LARGER_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)\n";
} elsif ( @$src_aref[1] eq "SIGNED") {
print $fh " ASSIGN_TO_LARGER_SIZE_SIGNED_TO_UNSIGNED(dst, dsttype, src, srctype)\n";
} else {
print $fh " ASSIGN_TO_LARGER_SIZE_UNSIGNED_TO_SIGNED(dst, dsttype, src, srctype)\n";
}
# Case 2: destination is strictly smaller than the source
print $fh "#elif H5_SIZEOF_", uc @$src_aref[0], " > H5_SIZEOF_", uc @$dst_aref[0], "\n";
print $fh " #define ASSIGN_", @$src_aref[0], "_TO_", @$dst_aref[0], "(dst, dsttype, src, srctype) \\\n";
print $fh " ASSIGN_TO_SMALLER_SIZE(dst, dsttype, src, srctype)\n";
# Case 3: same size; only signedness differences matter
print $fh "#else /* H5_SIZEOF_", uc @$src_aref[0], " == H5_SIZEOF_", uc @$dst_aref[0], " */\n";
print $fh " #define ASSIGN_", @$src_aref[0], "_TO_", @$dst_aref[0], "(dst, dsttype, src, srctype) \\\n";
if ( @$src_aref[1] eq @$dst_aref[1]) {
print $fh " ASSIGN_TO_SAME_SIZE_SAME_SIGNED(dst, dsttype, src, srctype)\n";
} elsif ( @$src_aref[1] eq "SIGNED") {
print $fh " ASSIGN_TO_SAME_SIZE_SIGNED_TO_UNSIGNED(dst, dsttype, src, srctype)\n";
} else {
print $fh " ASSIGN_TO_SAME_SIZE_UNSIGNED_TO_SIGNED(dst, dsttype, src, srctype)\n";
}
print $fh "#endif /* src: @$src_aref[0] dst: @$dst_aref[0] */\n\n";
}
}
}
}
##############################################################################
# Close the include guard opened by print_startprotect
#
sub print_endprotect ($$) {
    my ($out, $hdr) = @_;

    # Strip ".h" so the comment names the same guard macro.
    $hdr =~ s/(\w*)\.h/$1/;

    # Terminate the include guard (trailing blank line, no leading one).
    printf $out "#endif /* %s_H */\n\n", $hdr;
}
##############################################################################
# Create the generated portion of the public header file
#
sub create_public ($) {
    my $dir = shift;                # Directory prefix for the output file
    my $hdr = "H5overflow.h";       # Header to (re)generate

    # Overwrite the generated header in place.
    open my $out, ">", "${dir}${hdr}" or die "unable to modify source";

    # Fixed layout: banner, warning, guard open, macros, guard close.
    # (The helpers copy their arguments, so $hdr is not modified here.)
    print_copyright($out);
    print_warning($out);
    print_startprotect($out, $hdr);
    print_typemacros($out);
    print_endprotect($out, $hdr);

    close $out;
}
##############################################################################
# Read the type description file (given as command-line argument) in and
# process it into internal data structures, then create the header file.
# (Banner previously said "symbol version file" — a copy-paste from
# bin/make_vers; this script reads H5overflow.txt-style type lists.)
#
my $file;   # Filename of input file
for $file (@ARGV) {
    my $prefix;     # Local prefix for generated files
    my $line;       # Line from input file

    # Check for a directory prefix on the input file and default to the
    # current directory otherwise.  Previously $prefix stayed undef for a
    # bare filename, triggering "uninitialized value" warnings when the
    # output path was built; this mirrors bin/make_vers.
    if ($file =~ /\//) {
        ($prefix) = ($file =~ /(^.*\/)/);
    }
    else {
        $prefix = "";
    }

    # Read in the entire file
    open SOURCE, $file or die "$file: $!\n";
    while ( defined ($line=<SOURCE>) ) {
        # Skip blank lines and those lines whose first character is a '#'
        if(!($line =~ /(^\s*#.*$)|(^\s*$)/)) {
            # Construct data structures for later printing
            parse_line($line);
        }
    }
    close SOURCE;

    # Create the generated header next to the input file
    print "Generating 'H5overflow.h'\n";
    create_public($prefix);
}

@ -0,0 +1,530 @@
#!/usr/bin/env perl
require 5.003;
# NOTE: this script intentionally runs without `use strict`; the version
# tables (%functions, %typedefs, %func_params, @func_vers, @type_vers) and
# the knobs below are package globals shared by all the subs that follow.
use warnings;
# Global settings
# (The max_idx parameter is the only thing that needs to be changed when adding
# support for a new major release. If support for a prior major release
# is added (like support for 1.4, etc), the min_sup_idx parameter will
# need to be decremented.)
# Max. library "index" (0 = v1.0, 1 = 1.2, 2 = 1.4, 3 = 1.6, 4 = 1.8, 5 = 1.10, 6 = 1.12, 7 = 1.14, etc)
$max_idx = 7;
# Min. supported previous library version "index" (0 = v1.0, 1 = 1.2, etc)
$min_sup_idx = 3;
# Number of spaces to indent preprocessor commands inside ifdefs
$indent = 2;
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Create public symbol version headers
#
# Read in the public symbol version description text file and create the
# appropriate headers needed by the library.
#
# Programmer: Quincey Koziol
# Creation Date: 2007/07/10
##############################################################################
# Print the copyright banner into an open file
#
sub print_copyright ($) {
    my $fh = shift;

    # Emit the standard HDF5 copyright notice as a C block comment.  A
    # single-quoted heredoc keeps the '@' in the e-mail address literal.
    print $fh <<'END_COPYRIGHT';
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Copyright by The HDF Group. *
 * All rights reserved. *
 * *
 * This file is part of HDF5. The full HDF5 copyright notice, including *
 * terms governing use, modification, and redistribution, is contained in *
 * the COPYING file, which can be found at the root of the source code *
 * distribution tree, or in https://www.hdfgroup.org/licenses. *
 * If you do not have access to either file, you may request a copy from *
 * help@hdfgroup.org. *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
END_COPYRIGHT
}
##############################################################################
# Print the "do not change this file" warning
#
sub print_warning ($) {
    my $fh = shift;

    # One blank line before and after, exactly as downstream tools expect.
    print $fh <<'END_WARNING';

/* Generated automatically by bin/make_vers -- do not edit */
/* Add new versioned symbols to H5vers.txt file */

END_WARNING
}
##############################################################################
# Open the include guard that keeps the header from being re-included
#
sub print_startprotect ($$) {
    my ($out, $hdr) = @_;

    # Derive the guard macro name by dropping the ".h" suffix.
    $hdr =~ s/(\w*)\.h/$1/;

    # Emit the guard (closed later by print_endprotect).
    print $out "\n#ifndef ${hdr}_H\n",
               "#define ${hdr}_H\n";
}
##############################################################################
# Print check for conflicting version macro settings
#
sub print_checkoptions ($) {
    my $fh = shift;     # File handle for output file

    # Build the "defined(H5_USE_1XX_API) || ..." clause once; it appears in
    # both the #if and the matching #endif comment.
    my $clause = join(" || ",
        map { "defined(H5_USE_1" . ($_ * 2) . "_API)" }
            ($min_sup_idx .. ($max_idx - 1)));

    # Describe the check being emitted.
    print $fh "\n\n/* Issue error if contradicting macros have been defined. */\n";
    print $fh "/* (Can't use an older (deprecated) API version if deprecated symbols have been disabled) */\n";

    # Emit the #if / #error / #endif triple.
    print $fh "#if ($clause) && defined(H5_NO_DEPRECATED_SYMBOLS)\n";
    print $fh ' ' x $indent, "#error \"Can't choose old API versions when deprecated APIs are disabled\"\n";
    print $fh "#endif /* ($clause) && defined(H5_NO_DEPRECATED_SYMBOLS) */\n";
}
##############################################################################
# Print "global" API version macro settings
#
sub print_globalapidefvers ($) {
    my $fh = shift;     # File handle for output file

    # Explain the macros that follow.
    print $fh "\n\n/* If a particular default \"global\" version of the library's interfaces is\n";
    print $fh " * chosen, set the corresponding version macro for API symbols.\n";
    print $fh " *\n";
    print $fh " */\n";

    # One #if/#define/#endif triple per supported older API version.
    for my $idx ($min_sup_idx .. ($max_idx - 1)) {
        my $mac = "H5_USE_1" . ($idx * 2) . "_API";     # e.g. H5_USE_16_API
        print $fh "\n#if defined(${mac}_DEFAULT) && !defined($mac)\n";
        print $fh " " x $indent, "#define $mac 1\n";
        print $fh "#endif /* ${mac}_DEFAULT && !$mac */\n";
    }
}
##############################################################################
# Print "global" API symbol version macro settings
#
# For each supported older API version index, emits an "#ifdef H5_USE_1XX_API"
# block that pins every versioned function's NAME_vers macro and every
# versioned typedef's NAME_t_vers macro to the value recorded for that
# library version in @func_vers / @type_vers (filled in by parse_line()).
# Individual per-symbol macros set by the application win, because each
# #define here is wrapped in "#if !defined(...)".
# NOTE: $name is an undeclared package global (this file runs without
# `use strict`).
#
sub print_globalapisymbolvers ($) {
my $fh = shift; # File handle for output file
my $curr_idx; # Current API version index
# Print the descriptive comment
print $fh "\n\n/* If a particular \"global\" version of the library's interfaces is chosen,\n";
print $fh " * set the versions for the API symbols affected.\n";
print $fh " *\n";
print $fh " * Note: If an application has already chosen a particular version for an\n";
print $fh " * API symbol, the individual API version macro takes priority.\n";
print $fh " */\n";
# Loop over supported older library APIs and define the appropriate macros
for $curr_idx ($min_sup_idx .. ($max_idx - 1)) {
# Print API version ifdef
print $fh "\n#ifdef H5_USE_1", ($curr_idx * 2), "_API\n";
# Print the version macro info for each function that is defined for
# this API version
print $fh "\n/*************/\n";
print $fh "/* Functions */\n";
print $fh "/*************/\n";
for $name (sort keys %{$func_vers[$curr_idx]}) {
print $fh "\n#if !defined(", $name, "_vers)\n";
print $fh " " x $indent, "#define ", $name, "_vers $func_vers[$curr_idx]{$name}\n";
print $fh "#endif /* !defined(", $name, "_vers) */\n";
}
# Print the version macro info for each typedef that is defined for
# this API version
print $fh "\n/************/\n";
print $fh "/* Typedefs */\n";
print $fh "/************/\n";
for $name (sort keys %{$type_vers[$curr_idx]}) {
print $fh "\n#if !defined(", $name, "_t_vers)\n";
print $fh " " x $indent, "#define ", $name, "_t_vers $type_vers[$curr_idx]{$name}\n";
print $fh "#endif /* !defined(", $name, "_t_vers) */\n";
}
# Print API version endif
print $fh "\n#endif /* H5_USE_1", ($curr_idx * 2), "_API */\n";
}
}
##############################################################################
# Print "default" API version macro settings
#
# For every versioned function in %functions and every versioned typedef in
# %typedefs, emits a #if/#elif/#else/#endif chain on NAME_vers (resp.
# NAME_t_vers) that maps the unversioned name to the selected versioned
# symbol (e.g. "#define H5Foo H5Foo2").  With the macro unset, the latest
# version is chosen and the macro is also #defined to it.  Any dependent
# parameter typedefs recorded in %func_params get matching "<T>_t ->
# <T><vers>_t" mappings.  An unknown macro value produces a #error.
#
sub print_defaultapivers ($) {
my $fh = shift; # File handle for output file
my $curr_name; # Current API function
# Print the descriptive comment
print $fh "\n\n/* Choose the correct version of each API symbol, defaulting to the latest\n";
print $fh " * version of each. The \"best\" name for API parameters/data structures\n";
print $fh " * that have changed definitions is also set. An error is issued for\n";
print $fh " * specifying an invalid API version.\n";
print $fh " */\n";
# Loop over function names that are versioned and set up the version macros
print $fh "\n/*************/\n";
print $fh "/* Functions */\n";
print $fh "/*************/\n";
for $curr_name (sort keys %functions) {
my $curr_vers_name; # Name of version macro for current function
my $curr_vers; # Version of function (starts at the latest, counts down)
my @param_list; # Typedefs for the function parameters
# Set up variables for later use
$curr_vers_name = $curr_name . "_vers";
$curr_vers = $functions{$curr_name};
# Split up parameter info
@param_list = split(/\s*,\s*/, $func_params{$curr_name});
#print "print_defaultapivers: param_list=(@param_list)\n";
# Set up default/latest version name mapping
print $fh "\n#if !defined($curr_vers_name) || $curr_vers_name == $curr_vers\n";
print $fh " " x $indent, "#ifndef $curr_vers_name\n";
print $fh " " x ($indent * 2), "#define $curr_vers_name $curr_vers\n";
print $fh " " x $indent, "#endif /* $curr_vers_name */\n";
print $fh " " x $indent, "#define $curr_name $curr_name$curr_vers\n";
# Print function's dependent parameter types
foreach(sort(@param_list)) {
print $fh " " x $indent, "#define ${_}_t $_${curr_vers}_t\n";
}
# Loop to print earlier version name mappings (one #elif per version)
$curr_vers--;
while($curr_vers > 0) {
print $fh "#elif $curr_vers_name == $curr_vers\n";
print $fh " " x $indent, "#define $curr_name $curr_name$curr_vers\n";
# Print function's dependent parameter types
foreach(sort(@param_list)) {
print $fh " " x $indent, "#define ${_}_t $_${curr_vers}_t\n";
}
$curr_vers--;
}
# Finish up with error for unknown version and endif
print $fh "#else /* $curr_vers_name */\n";
print $fh " " x $indent, "#error \"$curr_vers_name set to invalid value\"\n";
print $fh "#endif /* $curr_vers_name */\n";
}
# Loop over typedefs that are versioned and set up the version macros
print $fh "\n/************/\n";
print $fh "/* Typedefs */\n";
print $fh "/************/\n";
for $curr_name (sort keys %typedefs) {
my $curr_vers_name; # Name of version macro for current typedef
my $curr_vers; # Version of typedef (starts at the latest, counts down)
# Set up variables for later use
$curr_vers_name = $curr_name . "_t_vers";
$curr_vers = $typedefs{$curr_name};
# Set up default/latest version name mapping
print $fh "\n#if !defined($curr_vers_name) || $curr_vers_name == $curr_vers\n";
print $fh " " x $indent, "#ifndef $curr_vers_name\n";
print $fh " " x ($indent * 2), "#define $curr_vers_name $curr_vers\n";
print $fh " " x $indent, "#endif /* $curr_vers_name */\n";
print $fh " " x $indent, "#define ${curr_name}_t $curr_name${curr_vers}_t\n";
# Loop to print earlier version name mappings (one #elif per version)
$curr_vers--;
while($curr_vers > 0) {
print $fh "#elif $curr_vers_name == $curr_vers\n";
print $fh " " x $indent, "#define ${curr_name}_t $curr_name${curr_vers}_t\n";
$curr_vers--;
}
# Finish up with error for unknown version and endif
print $fh "#else /* $curr_vers_name */\n";
print $fh " " x $indent, "#error \"$curr_vers_name set to invalid value\"\n";
print $fh "#endif /* $curr_vers_name */\n\n";
}
}
##############################################################################
# Close the include guard opened by print_startprotect
#
sub print_endprotect ($$) {
    my ($out, $hdr) = @_;

    # Strip ".h" so the comment names the same guard macro.
    $hdr =~ s/(\w*)\.h/$1/;

    # Terminate the include guard (trailing blank line, no leading one).
    printf $out "#endif /* %s_H */\n\n", $hdr;
}
##############################################################################
# Parse a meaningful line (not a comment or blank line) into the appropriate
# data structure
#
# Grammar of accepted lines:
#   FUNCTION: <name>; <param typedefs, comma-sep>; <versions, comma-sep>
#   TYPEDEF:  <name>; <versions, comma-sep>
# Versions look like v16, v18, v110, v112, ... (plus the special v111).
# Populates:
#   %functions / %typedefs  - symbol name -> number of versions
#   %func_params            - function name -> raw parameter typedef list
#   @func_vers / @type_vers - per-library-version-index hash of
#                             symbol name -> version number in effect
# Dies on duplicate symbols, missing/invalid/duplicate version info, or an
# unrecognized keyword.
#
sub parse_line ($) {
my $line = shift; # Get the line to parse
# Parse API function lines
#print "line=$line\n";
if($line =~ /^\s*FUNCTION:/ || $line =~ /^\s*TYPEDEF:/) {
my $name; # The name of the function
my $params; # Typedefs for function parameters
my $vers; # The version info for the function
my @vers_list; # Version info, as a list
my @vers_nums; # Version info, as a numeric list
my $num_versions; # Number of versions for function
my %sym_versions; # Versions for a symbol
my $last_idx; # The previous version index seen for a function
my $last_vers; # The previous version # seen for a function
my $line_type; # Type of line we are parsing (1 = FUNCTION, 2 = TYPEDEF)
# Determine the type of the line to parse
if($line =~ /^\s*FUNCTION:/) {
$line_type = 1;
# Get the function's name & version info
($name, $params, $vers) = ($line =~ /^\s*FUNCTION:\s*(\w*);\s*(.*?)\s*;\s*(.*?)\s*$/);
#print "parse_line: name='$name', params='$params', vers='$vers'\n";
}
elsif($line =~ /^\s*TYPEDEF:/) {
$line_type = 2;
# Get the typedefs's name & version info
($name, $vers) = ($line =~ /^\s*TYPEDEF:\s*(\w*);\s*(.*?)\s*$/);
#print "parse_line: name='$name', vers='$vers'\n";
}
#print "parse_line: line_type='$line_type'\n";
# Check if the name already exists in the list of symbols
if(exists($functions{$name}) || exists($typedefs{$name})) {
die "duplicated symbol: $name";
}
# Check for no version info given
if($vers eq "") {
die "no version information: $name";
}
# Split up version info
@vers_list = split(/\s*,\s*/, $vers);
#print "parse_line: vers_list=(@vers_list)\n";
# Parse the version list into numbers, checking for invalid input
foreach(@vers_list) {
my $vers_idx; # Index of version in array
# Do some validation on the input
# Note: v111 is allowed because H5O functions were prematurely versioned
# in HDF5 1.10. Because users were affected by this, the versioning
# was rescinded but the H5O version 2 functions were kept to be
# called directly. Now that the version macros are added in 1.12,
# along with a 3rd version of the H5O functions, the H5O function
# version for default api=v110 should be version 1 to work correctly
# with 1.10 applications that were using unversioned H5O functions,
# and the H5O function version should be version 3 for default api=v112
# (the default api version for 1.12). Allowing a v111 entry and
# incrementing its index 13 lines below allows a version 2 that is
# never accessed via the H5O function macros.
if(!( $_ =~ /v1[02468]/ || $_ =~ /v11[02468]/ || $_ =~ /v111/ )) {
die "bad version information: $name";
}
# Each version may only appear once per symbol
if(exists($sym_versions{$_})) {
die "duplicate version information: $name";
}
# Store the versions for the function in a local hash table, indexed by the version
$sym_versions{$_}=$_;
#print "parse_line: _=$_\n";
# Get the index of the version: "v1XX" -> XX / 2 (so v16 -> 3, v18 -> 4, ...)
($vers_idx) = ($_ =~ /v1(\d+)/);
# Bump v111 to the v112 slot (see the long note above)
if($vers_idx == 11) {
$vers_idx++;
}
$vers_idx /= 2;
#print "parse_line: vers_idx='$vers_idx'\n";
push(@vers_nums, $vers_idx);
}
#print "parse_line: vers_nums=(@vers_nums)\n";
# Walk the version indices in order, filling in which version number of
# the symbol each intermediate library release exposes
$last_idx = -1;
$last_vers = 1;
foreach(sort(@vers_nums)) {
#print "parse_line: _=$_ last_idx='$last_idx'\n";
# Update intermediate versions of the library that included the API routine
if($last_idx >= 0) {
#print "parse_line: name='$name'\n";
#print "parse_line: last_vers='$last_vers'\n";
#print "parse_line: last_idx='$last_idx'\n";
# Add the function to the list of API routines available in
# different versions of the library
while($last_idx <= $_) {
if($line_type == 1) {
$func_vers[$last_idx]{$name} = $last_vers;
} elsif($line_type == 2) {
$type_vers[$last_idx]{$name} = $last_vers;
} else {
die "unknown line type: $line";
}
$last_idx++;
}
# Increment the version # of the function
$last_vers++;
}
# Keep track of last version index seen
$last_idx = $_;
}
# Finish updating versions of the library that included the API routine,
# propagating the final version through the newest supported release
if($last_idx >= 0) {
#print "parse_line: max_idx='$max_idx'\n";
# Add the function to the list of API routines available in
# different versions of the library
while($last_idx <= $max_idx) {
if($line_type == 1) {
$func_vers[$last_idx]{$name} = $last_vers;
} elsif($line_type == 2) {
$type_vers[$last_idx]{$name} = $last_vers;
} else {
die "unknown line type: $line";
}
$last_idx++;
}
}
# Store the number of symbol versions in a hash table, indexed by the name
if($line_type == 1) {
$functions{$name} = $#vers_list + 1;
# Store the function's parameter types for later
$func_params{$name} = $params;
} elsif($line_type == 2) {
$typedefs{$name} = $#vers_list + 1;
} else {
die "unknown line type: $line";
}
}
# Unknown keyword
else {
die "unknown keyword: $line";
}
}
##############################################################################
# Create the generated portion of the public header file
#
sub create_public ($) {
    my $dir = shift;                # Directory prefix for the output file
    my $hdr = "H5version.h";        # Header to (re)generate

    # Overwrite the generated header in place.
    open my $out, ">", "${dir}${hdr}" or die "unable to modify source";

    # Fixed layout: banner, warning, guard open, version-macro sections,
    # guard close.  (The helpers copy their arguments, so $hdr is not
    # modified here.)
    print_copyright($out);
    print_warning($out);
    print_startprotect($out, $hdr);
    print_globalapidefvers($out);
    print_checkoptions($out);
    print_globalapisymbolvers($out);
    print_defaultapivers($out);
    print_endprotect($out, $hdr);

    close $out;
}
##############################################################################
# Read symbol version file (given as command-line argument) in and process it
# into internal data structures, then create header files.
#
for $file (@ARGV) {
    # Derive the directory prefix; "" when the argument has no '/'.
    my $prefix = ($file =~ /(^.*\/)/) ? $1 : "";

    # Feed every meaningful line of the description file to parse_line().
    open SOURCE, $file or die "$file: $!\n";
    while (defined($line = <SOURCE>)) {
        # Skip blank lines and whole-line '#' comments.
        next if $line =~ /(^\s*#.*$)|(^\s*$)/;
        parse_line($line);
    }
    close SOURCE;

    # Emit the generated header next to the input file.
    print "Generating '", $prefix, "H5version.h'\n";
    create_public($prefix);
}

@ -0,0 +1,66 @@
# Print a help summary for the HDF5 Autotools Makefiles.
# Fixes to the help text: the target that removes installed example files is
# "uninstall-examples" (the original repeated "install-examples"); "make-install"
# is not a target ("make install" is); and the HDF5TestExpress sentence was
# garbled (missing the description of the value 3).
cat << EOF
Help for HDF5 Makefiles
For help with the make utility itself, try 'man make'.
HDF5 makefiles are used to build, test, and install HDF5. The exact
behavior and output will depend on your system's version of make, compiler,
etc.
Usage: make [make options] [TARGET]
Targets:
make help: print this help message
make, make all: (default if no target is specified)
builds all libraries, tools, and tests
make lib: builds libraries only
make progs: builds libraries and programs
make tests: builds libraries, programs, and tests.
Essentially the same as 'make all'.
make check, make tests,
make _test, make test: Run HDF5's test suite.
Make will exit with a failure if any tests fail.
make install: install HDF5 libraries, include files, and tools
make install-examples: installs example source files
make install-all: runs both make install and make install-examples
make check-install: test building examples with installed HDF5 library
make uninstall: remove installed files
make uninstall-examples: removes example source files
make uninstall-all: removes both installed libraries and examples
make check-clean: remove files generated by running tests
(allows tests to be re-run)
make mostlyclean: remove intermediate files (*.o files).
Doesn't clean libraries or executables.
make clean: remove all files generated by make or make check
make distclean: remove all files generated by make, make check, or
configure
make check-p: Only run parallel tests
make check-s: Only run serial tests
make check-vfd: Run tests with each virtual file driver
make check-vol: Run tests with each virtual object layer connector
HDF5 uses Automake, so any standard Automake targets not listed here
should also work.
Configure options that affect Makefile behavior:
--enable-fortran, --enable-cxx, --enable-parallel, and --disable-hl
enable or disable various interfaces. Make will only recurse into these
directories if they are specified at configure time.
--prefix=[path], --libdir=[path], --includedir=[path], etc. can be used
to change the directory into which make install puts files.
--enable-build-all causes make to build some files that are only
needed by developers (test file generation programs).
Environment variables that affect Makefile behavior:
Make will honor environment variables like CFLAGS that are used when building and linking.
The variable HDF5TestExpress can be used to control the running time
of the tests. HDF5TestExpress = 0 is a full run of the tests while
HDF5TestExpress = 3 is the shortest run. 1 and 2 are intermediate values. The default value is 1.
Available command-line options for make depend on the version of make installed
on your system. Try 'man make'.
EOF

@ -0,0 +1,215 @@
#! /bin/sh
# Common wrapper for a few potentially missing GNU programs.
scriptversion=2018-03-07.03; # UTC
# Copyright (C) 1996-2020 Free Software Foundation, Inc.
# Originally written by Fran,cois Pinard <pinard@iro.umontreal.ca>, 1996.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# As a special exception to the GNU General Public License, if you
# distribute this file as part of a program that contains a
# configuration script generated by Autoconf, you may include it under
# the same distribution terms that you use for the rest of that program.
# A PROGRAM argument is required.
if test $# -eq 0; then
echo 1>&2 "Try '$0 --help' for more information"
exit 1
fi
# Handle the wrapper's own options; anything else falls through and is
# treated as the program to run.
case $1 in
--is-lightweight)
# Used by our autoconf macros to check whether the available missing
# script is modern enough.
exit 0
;;
--run)
# Back-compat with the calling convention used by older automake.
shift
;;
-h|--h|--he|--hel|--help)
echo "\
$0 [OPTION]... PROGRAM [ARGUMENT]...
Run 'PROGRAM [ARGUMENT]...', returning a proper advice when this fails due
to PROGRAM being missing or too old.
Options:
-h, --help display this help and exit
-v, --version output version information and exit
Supported PROGRAM values:
aclocal autoconf autoheader autom4te automake makeinfo
bison yacc flex lex help2man
Version suffixes to PROGRAM as well as the prefixes 'gnu-', 'gnu', and
'g' are ignored when checking the name.
Send bug reports to <bug-automake@gnu.org>."
exit $?
;;
-v|--v|--ve|--ver|--vers|--versi|--versio|--version)
echo "missing $scriptversion (GNU Automake)"
exit $?
;;
-*)
echo 1>&2 "$0: unknown '$1' option"
echo 1>&2 "Try '$0 --help' for more information"
exit 1
;;
esac
# Run the given program, remember its exit status.
"$@"; st=$?
# If it succeeded, we are done.
test $st -eq 0 && exit 0
# Also exit now if it failed (or wasn't found), and '--version' was
# passed; such an option is passed most likely to detect whether the
# program is present and works.
case $2 in --version|--help) exit $st;; esac
# Exit code 63 means version mismatch. This often happens when the user
# tries to use an ancient version of a tool on a file that requires a
# minimum version.
if test $st -eq 63; then
msg="probably too old"
elif test $st -eq 127; then
# Program was missing.
msg="missing on your system"
else
# Program was found and executed, but failed. Give up.
exit $st
fi
# Upstream URLs used in the advice messages printed below.
perl_URL=https://www.perl.org/
flex_URL=https://github.com/westes/flex
gnu_software_URL=https://www.gnu.org/software
# Print where to obtain PROGRAM ($1) for the tools whose source package
# is not obvious from the name alone; other names print nothing.
program_details ()
{
case $1 in
aclocal|automake)
echo "The '$1' program is part of the GNU Automake package:"
echo "<$gnu_software_URL/automake>"
echo "It also requires GNU Autoconf, GNU m4 and Perl in order to run:"
echo "<$gnu_software_URL/autoconf>"
echo "<$gnu_software_URL/m4/>"
echo "<$perl_URL>"
;;
autoconf|autom4te|autoheader)
echo "The '$1' program is part of the GNU Autoconf package:"
echo "<$gnu_software_URL/autoconf/>"
echo "It also requires GNU m4 and Perl in order to run:"
echo "<$gnu_software_URL/m4/>"
echo "<$perl_URL>"
;;
esac
}
# Print user-facing advice explaining why program '$1' is needed and where
# to obtain it.  Relies on $msg (set by the caller) to say whether the tool
# is missing or too old.
give_advice ()
{
# Normalize program name to check for.
normalized_program=`echo "$1" | sed '
s/^gnu-//; t
s/^gnu//; t
s/^g//; t'`
printf '%s\n' "'$1' is $msg."
configure_deps="'configure.ac' or m4 files included by 'configure.ac'"
# Dispatch on the normalized tool name; each arm explains when the tool
# is required and, where applicable, calls program_details for the URLs.
case $normalized_program in
autoconf*)
echo "You should only need it if you modified 'configure.ac',"
echo "or m4 files included by it."
program_details 'autoconf'
;;
autoheader*)
echo "You should only need it if you modified 'acconfig.h' or"
echo "$configure_deps."
program_details 'autoheader'
;;
automake*)
echo "You should only need it if you modified 'Makefile.am' or"
echo "$configure_deps."
program_details 'automake'
;;
aclocal*)
echo "You should only need it if you modified 'acinclude.m4' or"
echo "$configure_deps."
program_details 'aclocal'
;;
autom4te*)
echo "You might have modified some maintainer files that require"
echo "the 'autom4te' program to be rebuilt."
program_details 'autom4te'
;;
bison*|yacc*)
echo "You should only need it if you modified a '.y' file."
echo "You may want to install the GNU Bison package:"
echo "<$gnu_software_URL/bison/>"
;;
lex*|flex*)
echo "You should only need it if you modified a '.l' file."
echo "You may want to install the Fast Lexical Analyzer package:"
echo "<$flex_URL>"
;;
help2man*)
echo "You should only need it if you modified a dependency" \
"of a man page."
echo "You may want to install the GNU Help2man package:"
echo "<$gnu_software_URL/help2man/>"
;;
makeinfo*)
echo "You should only need it if you modified a '.texi' file, or"
echo "any other file indirectly affecting the aspect of the manual."
echo "You might want to install the Texinfo package:"
echo "<$gnu_software_URL/texinfo/>"
echo "The spurious makeinfo call might also be the consequence of"
echo "using a buggy 'make' (AIX, DU, IRIX), in which case you might"
echo "want to install GNU make:"
echo "<$gnu_software_URL/make/>"
;;
*)
echo "You might have modified some files without having the proper"
echo "tools for further handling them. Check the 'README' file, it"
echo "often tells you about the needed prerequisites for installing"
echo "this package. You may also peek at any GNU archive site, in"
echo "case some other package contains this missing '$1' program."
;;
esac
}
# Emit the advice to stderr, prefixing the first line with "WARNING: " and
# indenting continuation lines so they line up under it.
give_advice "$1" | sed -e '1s/^/WARNING: /' \
-e '2,$s/^/ /' >&2
# Propagate the correct exit status (expected to be 127 for a program
# not found, 63 for a program that failed due to version mismatch).
exit $st
# Local variables:
# eval: (add-hook 'before-save-hook 'time-stamp)
# time-stamp-start: "scriptversion="
# time-stamp-format: "%:y-%02m-%02d.%02H"
# time-stamp-time-zone: "UTC0"
# time-stamp-end: "; # UTC"
# End:

@ -0,0 +1,103 @@
## Copyright by The HDF Group.
## All rights reserved.
##
## This file is part of HDF5. The full HDF5 copyright notice, including
## terms governing use, modification, and redistribution, is contained in
## the COPYING file, which can be found at the root of the source code
## distribution tree, or in https://www.hdfgroup.org/licenses.
## If you do not have access to either file, you may request a copy from
## help@hdfgroup.org.
# This contains function definitions of output filtering.
# This file should only be sourced in by another shell script.
#
# Programmer: Albert Cheng
# Created Date: 2011/5/3
# Comment added to address HDFFV-8270:
# As I understand it, the purpose of this file is to remove extraneous messages
# that appear in stdout and stderr on some machines that have been tested outside
# of the HDF Group realm. The purpose of this script is to filter those
# extraneous messages from stdout and stderr so that when the output files are
# compared to the expected output, the extra messages will not cause failures in
# the tests. The system messages in the comments below are out of date, meaning
# I suppose that while the script code to filter messages on the system was
# correct when last used, the output in the comments doesn't match the
# script code that follows. I don't currently have access to any of these
# systems to see the current output and the effect of the script code. If using
# this file in the future, please update the comments to match the scripts in use.
# Larry Knox 2017/3/15
# Some systems will dump some messages to stdout for various reasons.
# Remove them from the stdout result file.
# $1 is the file name of the file to be filtered.
# Cases of filter needed:
# 1. Sandia Red-Storm
# yod always prints these two lines at the beginning.
# LibLustre: NAL NID: 0004a605 (5)
# Lustre: OBD class driver Build Version: 1, info@clusterfs.com
# 2. LANL Lambda
# mpijob mpirun -np always adds an extra line at the end like:
# P4 procgroup file is /users/acheng/.lsbatch/host10524.l82
# Remove known benign system messages from a stdout result file, in place.
#
# $1 - path of the stdout file to filter (rewritten in place)
#
# Filters applied:
#   - Sandia Red-Storm "yod" banner lines ("LibLustre:", "Lustre:")
#   - LANL Lambda mpirun trailer ("P4 procgroup file is ...")
#
# Fix: all parameter expansions are now quoted so result files whose
# paths contain spaces or glob characters are handled correctly.
STDOUT_FILTER() {
    result_file="$1"
    tmp_file=/tmp/h5test_tmp_$$
    # Filter Sandia Red-Storm yod messages.
    cp "$result_file" "$tmp_file"
    sed -e '/^LibLustre:/d' -e '/^Lustre:/d' \
        < "$tmp_file" > "$result_file"
    # Filter LANL Lambda mpirun message.
    cp "$result_file" "$tmp_file"
    sed -e '/^P4 procgroup file is/d' \
        < "$tmp_file" > "$result_file"
    # cleanup
    rm -f "$tmp_file"
}
# Some systems will dump some messages to stderr for various reasons.
# Remove them from the stderr result file.
# $1 is the file name of the file to be filtered.
# Cases of filter needed:
# * LANL MPI:
# The LANL MPI will print some messages like the following,
# LA-MPI: *** mpirun (1.5.10)
# LA-MPI: *** 3 process(es) on 2 host(s): 2*fln21 1*fln22
# LA-MPI: *** libmpi (1.5.10)
# LA-MPI: *** Copyright 2001-2004, ACL, Los Alamos National Laboratory
# * h5diff debug output:
# Debug output all have prefix "h5diff debug: ".
# * AIX system prints messages like these when it is aborting:
# ERROR: 0031-300 Forcing all remote tasks to exit due to exit code 1 in task 0
# ERROR: 0031-250 task 4: Terminated
# ERROR: 0031-250 task 3: Terminated
# ERROR: 0031-250 task 2: Terminated
# ERROR: 0031-250 task 1: Terminated
# * LLNL Blue-Gene mpirun prints messages like these when it exits non-zero:
# <Apr 12 15:01:49.075658> BE_MPI (ERROR): The error message in the job record is as follows:
# <Apr 12 15:01:49.075736> BE_MPI (ERROR): "killed by exit(1) on node 0"
# Remove known benign system messages from a stderr result file, in place.
#
# $1 - path of the stderr file to filter (rewritten in place)
#
# Filters applied:
#   - LLNL Blue-Gene "BE_MPI (ERROR):" lines (serial and parallel runs)
#   - LANL MPI ("LA-MPI:"), LLNL srun, and AIX "ERROR:" lines, but only
#     in parallel mode ($pmode non-empty)
#   - h5diff debug output ("h5diff debug: " prefix)
#
# Fix: all parameter expansions are now quoted so result files whose
# paths contain spaces or glob characters are handled correctly.
STDERR_FILTER() {
    result_file="$1"
    tmp_file=/tmp/h5test_tmp_$$
    # Filter LLNL Blue-Gene error messages in both serial and parallel modes
    # since mpirun is used in both modes.
    cp "$result_file" "$tmp_file"
    sed -e '/ BE_MPI (ERROR): /d' \
        < "$tmp_file" > "$result_file"
    # Filter LANL MPI messages
    # and LLNL srun messages
    # and AIX error messages
    if test -n "$pmode"; then
        cp "$result_file" "$tmp_file"
        sed -e '/^LA-MPI:/d' -e '/^srun:/d' -e '/^ERROR:/d' \
            < "$tmp_file" > "$result_file"
    fi
    # Filter h5diff debug output
    cp "$result_file" "$tmp_file"
    sed -e '/^h5diff debug: /d' \
        < "$tmp_file" > "$result_file"
    # clean up temporary files.
    rm -f "$tmp_file"
}

@ -0,0 +1,164 @@
#! /bin/sh
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
## Remove paths to libraries used to build HDF5 when packaging HDF5
## binaries.
## For help page, use "h5rmflags -help"
# Constants definitions
EXIT_SUCCESS=0
EXIT_FAILURE=1
# Function definitions
# show help page
usage() {
    # A wonderfully informative "usage" message, printed as a single
    # printf (one line per argument) and followed by a failure exit so
    # callers can use "usage" for both help and error paths.
    printf '%s\n' \
        "usage: $prog_name [OPTIONS]" \
        " OPTIONS:" \
        " -help|help This help message" \
        " -echo Show all the shell commands executed" \
        " -force No prompt, just do it" \
        " -prefix=DIR New directory to find HDF5 lib/ and include/" \
        " subdirectories [default: current directory]" \
        " -tool=TOOL Tool to update. TOOL must be in the current" \
        " directory and writable. [default: $h5tools]" \
        " -show Show the commands without executing them" \
        " "
    exit $EXIT_FAILURE
}
# display variable values
dump_vars(){
    # Print the current settings so the user can inspect what the script
    # is about to act on.
    # Fix: expansions are quoted so values containing glob characters
    # (e.g. a prefix with '*') are printed verbatim instead of being
    # subject to pathname expansion by the unquoted echo.
    echo "====Showing all variable values====="
    echo "prefix=$prefix"
    echo "h5tools=$h5tools"
    echo "====End Showing====="
}
# show actions to be taken
# Announce which tool scripts will be edited: for each tool found, print
# the prefix currently embedded in the script (extracted with sed from its
# "prefix=..." line) and the new prefix that will replace it.
show_action()
{
echo "Update the following tools because they are now installed at a new directory"
for t in $foundtools; do
echo "${t}:"
# sed prints only the value part of the tool's "prefix=..." line.
echo " current setting=`sed -e '/^prefix=/s/prefix=//p' -e d $t`"
echo " new setting="\""$prefix"\"
done
}
# Report Error message
ERROR()
{
    # Report an error: a banner line followed by the supplied message ($1).
    printf '%s\n' "***ERROR***" "$1"
}
# Main
#
# Initialization
h5tools="h5cc h5pcc h5fc h5pfc h5c++" # possible hdf5 tools
foundtools= # tools found and will be modified
fmode= # force mode, default is off
# Default new prefix: the parent of the current directory.
prefix=`(cd ..;pwd)`
# Parse options
for arg in $@ ; do
case "$arg" in
-prefix=*)
prefix="`expr "$arg" : '-prefix=\(.*\)'`"
;;
-echo)
set -x
;;
-show)
SHOW="echo"
;;
-tool=*)
h5tools="`expr "$arg" : '-tool=\(.*\)'`"
;;
-help|help)
usage
;;
-force)
fmode=yes
;;
*)
ERROR "Unknown Option($arg)"
usage
exit $EXIT_FAILURE
;;
esac
done
# Sanity checks
#if [ ! -d $prefix ]; then
# ERROR "prefix($prefix) is not an existing directory"
# exit $EXIT_FAILURE
#fi
# Collect the tool scripts that exist in the current directory and verify
# each one is writable before touching any of them.
for x in $h5tools; do
if [ -f $x ]; then
foundtools="$foundtools $x"
if [ ! -w $x ]; then
ERROR "h5tool($x) is not writable"
exit $EXIT_FAILURE
fi
fi
done
if [ -z "$foundtools" ]; then
ERROR "found no tools to modify"
exit $EXIT_FAILURE
fi
# Show actions to be taken and get consent
show_action
# Update them
if [ "$SHOW" = "echo" ]; then
echo "===Update commands are:===="
# cat $CMDFILE
echo "===End Update commands====="
fi
# Strip build-time -I/-L path flags from each tool script in place using ed.
# NOTE: the lines between "<< end" and "end" are the ed script itself and
# must be fed to ed verbatim.
for t in $foundtools; do
echo Update $t ...
# COMMAND="ed - $t"
# if [ "$SHOW" = "echo" ]; then
# echo $COMMAND
# else
# $COMMAND < $CMDFILE
ed - $t << end
g/^H5BLD_CPPFLAGS/s/-I\/.*include //g
g/^H5BLD_LDFLAGS/s/-L\/.*lib //g
g/^H5BLD_LDFLAGS/s/-L\/.*lib64 //g
.
w
end
# fi
done
# Cleanup
#rm -f $CMDFILE
exit $EXIT_SUCCESS
# Some possible future features to add
# CCBASE - Name of the alternative C compiler
# CLINKERBASE - Name of the alternative linker
# LDFLAGS - Path to different libraries your application will link with
# (this path should include the path to the zlib library)
# LIBS - Libraries your application will link with

@ -0,0 +1,365 @@
#!/usr/bin/perl
# makeTarFiles.pl
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
#
#
use warnings;
use strict;
use Cwd;
use File::Basename;
# Map build-host name -> destination subdirectory (platform/compiler string)
# used when publishing that host's binaries.
my %destsubdir = ('emu' => 'sunos-5.11-sparc-32-sunc512',
'emu64' => 'sunos-5.11-sparc-64-sunc512',
'ostrich' => 'linux-el6-ppc64-gcc447',
'ostrichxl' => 'linux-el6-ppc64-xl13.1',
'platypus' => 'linux-centos6-x86_64-gcc447',
'platypus32' => 'linux-centos6-x86_64-32-gcc447',
'moohan' => 'linux-centos7-x86_64-gcc485',
'moohan32' => 'linux-centos7-x86_64-32-gcc485',
'kite' => 'osx-10.8-x86_64-clang5.1',
'quail' => 'osx-10.9-x86_64-clang6.0',
'osx1010test' => 'osx-10.10-x86_64-clang6.0');
# Map build-host name (plus optional "-static" suffix) -> directory of the
# szip library build to bundle with that host's binaries.
my %szipdir = ('emu' => '/mnt/hdf/packages/szip/shared/encoder/SunOS-5.10',
'emu-static' => '/mnt/hdf/packages/szip/static/encoder/SunOS-5.10',
'emu64' => '/mnt/hdf/packages/szip-PIC/shared/encoder/SunOS-5.11-64',
'emu64-static' => '/mnt/hdf/packages/szip-PIC/shared/encoder/SunOS-5.11-64',
'kite' => '/mnt/hdf/packages/szip/shared/encoder/MacOS-10.8',
'kite-static' => '/mnt/hdf/packages/szip-PIC/static/encoder/MacOS-10.8',
'ostrich32' => '/mnt/hdf/packages/szip/shared/encoder/Linux2.6-ppc64-gcc',
'ostrich32-static' => '/mnt/hdf/packages/szip/static/encoder/Linux2.6-ibmppc64-gcc-32',
'ostrich' => '/mnt/hdf/packages/szip/shared/encoder/Linux2.6-ppc64-gcc-64',
'ostrich-static' => '/mnt/hdf/packages/szip/static/encoder/Linux2.6-ibmppc64-gcc',
'ostrichxl' => '/mnt/hdf/packages/szip/shared/encoder/Linux2.6-ppc64-gcc-64',
'ostrichxl-static' => '/mnt/hdf/packages/szip/static/encoder/Linux2.6-ibmppc64-gcc',
'osx1010test' => '/mnt/hdf/packages/szip/shared/encoder/MacOS-10.8',
'osx1010test-static' => '/mnt/hdf/packages/szip-PIC/static/encoder/MacOS-10.8',
'moohan' => '/mnt/hdf/packages/szip/shared/encoder/Linux2.6-x86_64-gcc',
'moohan-static' => '/mnt/hdf/packages/szip/static/encoder/Linux2.6-x86_64-gcc',
'moohan32' => '/mnt/hdf/packages/szip/shared/encoder/Linux2.6-x86_64-gcc-m32',
'moohan32-static' => '/mnt/hdf/packages/szip-PIC/static/encoder/Linux2.6-x86_64-gcc-m32',
'platypus32' => '/mnt/hdf/packages/szip/shared/encoder/Linux2.6-x86_64-gcc-m32',
'platypus32-static' => '/mnt/hdf/packages/szip-PIC/static/encoder/Linux2.6-x86_64-gcc-m32',
'platypus' => '/mnt/hdf/packages/szip/shared/encoder/Linux2.6-x86_64-gcc',
'platypus-static' => '/mnt/hdf/packages/szip/static/encoder/Linux2.6-x86_64-gcc',
'quail' => '/mnt/hdf/packages/szip/shared/encoder/MacOS-10.8',
'quail-static' => '/mnt/hdf/packages/szip-PIC/static/encoder/MacOS-10.8');
# Map build-host name (plus optional "-static" suffix) -> directory of the
# zlib build to bundle with that host's binaries.  NOTE: some values carry
# a leading space, preserved here because the strings are only interpolated
# into shell commands where the extra space is harmless.
my %zlibdir = ('emu' => '/mnt/hdf/packages/zlib-125/shared/SunOS-5.10',
'emu-static' => '/mnt/hdf/packages/zlib-125/static/SunOS-5.10',
'emu64' => '/mnt/hdf/packages/zlib-123-PIC/SunOS-5.11-64',
'emu64-static' => '/mnt/hdf/packages/zlib-123-PIC/SunOS-5.11-64',
'kite' => ' /mnt/hdf/packages/zlib-125/shared/mac-intel-x86_64',
'kite-static' => ' /mnt/hdf/packages/zlib-125/static/mac-intel-x86_64',
'ostrich32' => '/mnt/hdf/packages/zlib-125/PIC/Linux2.6-ppc64-gcc',
'ostrich32-static' => '/mnt/hdf/packages/zlib-125/PIC/Linux2.6-ppc64-gcc',
'ostrich' => '/mnt/hdf/packages/zlib-125/PIC/Linux2.6-ppc64-gcc-64',
'ostrich-static' => '/mnt/hdf/packages/zlib-125/PIC/Linux2.6-ppc64-gcc-64',
'ostrichxl' => '/mnt/hdf/packages/zlib-125/PIC/Linux2.6-ppc64-gcc-64',
'ostrichxl-static' => '/mnt/hdf/packages/zlib-125/PIC/Linux2.6-ppc64-gcc-64',
'osx1010test' => ' /mnt/hdf/packages/zlib-125/shared/mac-intel-x86_64',
'osx1010test-static' => ' /mnt/hdf/packages/zlib-125/static/mac-intel-x86_64',
'moohan' => '/mnt/hdf/packages/zlib-125/shared/Linux2.6-x86_64-gcc',
'moohan-static' => '/mnt/hdf/packages/zlib-125/static/Linux2.6-x86_64-gcc',
'moohan32' => '/mnt/hdf/packages/zlib-128/Linux2.6-x86_64-gcc-m32',
'moohan32-static' => '/mnt/hdf/packages/zlib-128/Linux2.6-x86_64-gcc-m32',
'platypus32' => '/mnt/hdf/packages/zlib-128/Linux2.6-x86_64-gcc-m32',
'platypus32-static' => '/mnt/hdf/packages/zlib-128/Linux2.6-x86_64-gcc-m32',
'platypus' => '/mnt/hdf/packages/zlib-125/shared/Linux2.6-x86_64-gcc',
'platypus-static' => '/mnt/hdf/packages/zlib-125/static/Linux2.6-x86_64-gcc',
'quail' => ' /mnt/hdf/packages/zlib-125/shared/mac-intel-x86_64',
'quail-static' => ' /mnt/hdf/packages/zlib-125/static/mac-intel-x86_64');
# Input directory (built binaries) and output directory (tarball
# destination), both taken from the command line and defaulting to ".".
# Fix: the original wrote "." and then unconditionally overwrote it with
# shift's result, so the default was lost (undef) when an argument was
# omitted; the default is now applied only when the argument is missing.
my $indirectory = shift;
$indirectory = "." unless defined $indirectory;
my $outdirectory = shift;
$outdirectory = "." unless defined $outdirectory;
# Loop variable for the per-host driver loop below.
my $key = ".";
#$key = shift;
# Directory containing this script; used later to locate h5rmflags.
my $scriptdirname = dirname(__FILE__);
unless (-d $outdirectory) {
    print "$outdirectory not found. Create it or choose another one and try again.\n";
    exit 1;
}
print "Subdirectories of $indirectory will be tarred and stored in $outdirectory.\n";
my $tarfilename;   # name of the .tar.gz file to create
my $tardirname;    # versioned directory name placed inside the tarball
my $output;        # captured stdout of the last shell command
my $cmd;           # shell command about to be run
# I'm currently copying system zlibs for at least solaris and FreeBSD machines. Since this script runs on jam it may require scp to get the libs.
#For future reference
# command for getting szlib files and links for shared binaries:
# tar cvf - -C <szipDir>/lib . | tar xvf - -C <libdir> .
# libz.so.1.2.3 and the static files should just be copied because they're in directories
# with other files. Then create the libz.so.1 and libz.so symbolic links.
# Copy the szip and zlib libraries for platform key $dir into
# $indirectory/$dir/lib and create the symbolic links the binaries expect.
# Which files are copied depends on the host class: static builds get the
# .a archives, shared builds get versioned shared libraries plus
# "libsz.so"/"libz.so"-style links.
#
# Parameters:
#   $dir         - platform key ('emu64', 'moohan32', 'platypus-static', ...)
#   $indirectory - root directory holding the per-platform binary trees
#
# Refactor: the repeated "run command via backticks, print its output"
# triples are factored into the local $run closure; command order and
# output are unchanged.
sub addzandszlibs {
    my $dir = shift;
    my $indirectory = shift;    # shadows the file-level variable on purpose
    my $currentdir = getcwd();

    # Run a shell command and echo its captured stdout.
    my $run = sub {
        my $shellcmd = shift;
        my $out = `$shellcmd`;
        print $out;
    };

    if (-d "$indirectory/$dir" ) {
        my $szdir = $szipdir{$dir};    # szip install dir for this platform
        my $zldir = $zlibdir{$dir};    # zlib install dir for this platform
        if ($dir =~ /static/ || $dir =~ /ostrich/) {
            # Static builds (and all ostrich variants) use the .a archives.
            $run->("cp $szdir/lib/libsz.a $indirectory/$dir/lib");
            $run->("cp $zldir/lib/libz.a $indirectory/$dir/lib");
        } elsif ($dir eq "freedom" || $dir eq "loyalty") {
            $run->("cp $szdir/lib/libsz.so.2 $indirectory/$dir/lib");
            $run->("cp $zldir/lib/libz.so.5 $indirectory/$dir/lib");
            chdir "$indirectory/$dir/lib" or die "Couldn't change directory to $indirectory/$dir/lib, $!";
            $run->("ln -s libsz.so.2 libsz.so");
            $run->("ln -s libz.so.5 libz.so");
            chdir $currentdir or die "Couldn't change directory back to $currentdir, $!";
        } elsif ($dir eq "quail" || $dir eq "kite" || $dir eq "osx1010test") {
            # We've been using the static libraries for the macs - built with
            # -fPIC; the .a copies below stay disabled in favor of the
            # versioned dylibs:
            #   cp $szdir/lib/libsz.a $indirectory/$dir/lib
            #   cp $zldir/lib/libz.a $indirectory/$dir/lib
            $run->("cp $szdir/lib/libsz.2.0.0.dylib $indirectory/$dir/lib");
            $run->("cp $zldir/lib/libz.1.2.5.dylib $indirectory/$dir/lib");
            chdir "$indirectory/$dir/lib" or die "Couldn't change directory to $indirectory/$dir/lib, $!";
            $run->("ln -s libsz.2.0.0.dylib libsz.2.dylib");
            $run->("ln -s libsz.2.0.0.dylib libsz.dylib");
            $run->("ln -s libz.1.2.5.dylib libz.1.dylib");
            $run->("ln -s libz.1.2.5.dylib libz.dylib");
            chdir $currentdir or die "Couldn't change directory back to $currentdir, $!";
        } elsif ($dir eq "emu64") {
            $run->("cp $szdir/lib/libsz.so.2.0.0 $indirectory/$dir/lib");
            $run->("cp $zldir/lib/libz.a $indirectory/$dir/lib");
            chdir "$indirectory/$dir/lib" or die "Couldn't change directory to $indirectory/$dir/lib, $!";
            $run->("ln -s libsz.so.2.0.0 libsz.so.2");
            $run->("ln -s libsz.so.2.0.0 libsz.so");
            chdir $currentdir or die "Couldn't change directory back to $currentdir, $!";
        } elsif ($dir eq "platypus32" || $dir eq "moohan32") {
            $run->("cp $szdir/lib/libsz.so.2.0.0 $indirectory/$dir/lib");
            # zlib copy and links stay disabled for these 32-bit builds:
            #   cp $zldir/lib/libz.a $indirectory/$dir/lib
            chdir "$indirectory/$dir/lib" or die "Couldn't change directory to $indirectory/$dir/lib, $!";
            $run->("ln -s libsz.so.2.0.0 libsz.so.2");
            $run->("ln -s libsz.so.2.0.0 libsz.so");
            #   ln -s libz.so.1.2.8 libz.so.1
            #   ln -s libz.so.1.2.8 libz.so
            chdir $currentdir or die "Couldn't change directory back to $currentdir, $!";
        } else {
            # Generic shared-library platforms.
            $run->("cp $szdir/lib/libsz.so.2.0.0 $indirectory/$dir/lib");
            $run->("cp $zldir/lib/libz.so.1.2.5 $indirectory/$dir/lib");
            chdir "$indirectory/$dir/lib" or die "Couldn't change directory to $indirectory/$dir/lib, $!";
            $run->("ln -s libsz.so.2.0.0 libsz.so.2");
            $run->("ln -s libsz.so.2.0.0 libsz.so");
            $run->("ln -s libz.so.1.2.5 libz.so.1");
            $run->("ln -s libz.so.1.2.5 libz.so");
            chdir $currentdir or die "Couldn't change directory back to $currentdir, $!";
        }
    }
}
# Package one platform directory into a versioned .tar.gz archive.
#
# Parameters:
#   $destdir    - subdirectory of $outdirectory to receive the tarball
#   $origdir    - name of the source directory under $indirectory
#   $tarname    - file name of the .tar.gz to create
#   $versiondir - versioned directory name the tarball should unpack into
#
# The source tree is temporarily renamed to the versioned name so the
# archive extracts into a release-named directory, then renamed back.
sub makeTarFile {
    my ($destdir, $origdir, $tarname, $versiondir) = @_;

    # Temporarily give the tree its versioned name.
    $cmd = "mv $indirectory/$origdir $indirectory/$versiondir";
    $output = `$cmd`;
    print $output;

    # Hoist doc/hdf5/examples to the top level and drop the doc tree.
    if (-d "$indirectory/$versiondir/doc/hdf5/examples") {
        $cmd = "mv $indirectory/$versiondir/doc/hdf5/examples $indirectory/$versiondir/examples";
        $output = `$cmd`;
        print $output;
        $cmd = "rm -rf $indirectory/$versiondir/doc";
        $output = `$cmd`;
        print $output;
    }

    # Build the archive, then pause briefly before renaming back.
    $cmd = "tar zcvf $outdirectory/$destdir/$tarname -C $indirectory $versiondir";
    print "Need to run $cmd.\n";
    $output = `$cmd`;
    sleep 10;
    print "Create $tarname: $output\n";

    # Restore the original directory name.
    $cmd = "mv $indirectory/$versiondir $indirectory/$origdir";
    $output = `$cmd`;
    print $output;
}
# Main driver: for every known build host, find its binary tree (shared
# and/or -static), prepare README files, bundle the szip/zlib libraries,
# strip build paths from the compiler wrappers, and tar each tree into
# $outdirectory.
foreach $key (keys %destsubdir) {
print "Process ".$key."\n\n";
#skip unless there's a directory by the name of $key or $key-static
next unless -d $indirectory.'/'.$key || -d $indirectory.'/'.$key."-static";
my $version;
# This assumes a static directory. Probably the others should be checked if this
# doesn't exist.
# Extract the release version from the "HDF5 Version: x.y.z" line of
# libhdf5.settings (grep output lands in $_, then is trimmed in place).
$cmd = "grep \"HDF5 Version\" $indirectory/$key/lib/libhdf5.settings";
$_ = `$cmd`;
print $_, "\n";
s/HDF5 Version://;
s/^\s+//;
chomp;
$version = $_;
#my $directoryname = substr $destsubdir{$key}, 0, rindex($destsubdir{$key}, '-');
my $directoryname = $destsubdir{$key};
mkdir $outdirectory."/".$directoryname, 0755 unless -d $outdirectory."/".$directoryname;
my $staticdir = $key."-static";
print $indirectory."/$key tarfile will be put in " . $outdirectory."/".$directoryname."\n";
# Generate the per-platform README once per destination directory.
if (-e $outdirectory."/".$destsubdir{$key}."/README") {
print $outdirectory."/".$destsubdir{$key}."/README" . " has already been created.\n";
}
else {
print "Make the Outer README file: ";
$cmd = "perl ./makeOuterREADME.pl $indirectory/$key $outdirectory $directoryname";
print $cmd, "\n";
my $output = `$cmd`;
print $output;
}
my $file = "";
my @dirnames = "";
# Collect the "<key>" and "<key>-static" subdirectories that exist.
opendir(DIR, $indirectory) or die "can't open .: $!";
while (defined($file = readdir(DIR))) {
next unless ($file eq $key || $file eq "$key-static") && -d $indirectory."/".$file;
push @dirnames, $file;
}
foreach my $dir (@dirnames) {
next if $dir eq "";
print "Make the Inner README files.\n";
$cmd = "perl ./makeInternalREADME.pl $indirectory/$dir";
print $cmd, "\n";
$output = `$cmd`;
print $output;
print "Add the zlib and szip files for $dir.\n";
&addzandszlibs($dir, $indirectory);
my $currentdir = getcwd();
print "Remove all lib*.la files from $dir/lib*.\n";
$cmd = "rm $indirectory/$dir/lib*/lib*.la";
print $cmd, "\n";
$output = `$cmd`;
print $output;
# Strip build-time include/library paths from the compiler wrapper
# scripts (h5cc etc.) using the h5rmflags helper next to this script.
chdir "$indirectory/$dir/bin" or die "Couldn't change directory to $indirectory/$dir/bin, $!";
$cmd = "$scriptdirname/h5rmflags -force";
$output = `$cmd`;
print $output;
chdir $currentdir or die "Couldn't change directory back to $currentdir, $!";
print "Tar up the files into the output directory.\n";
if (-d $indirectory."/".$dir) {
if ($dir =~ /static/) {
$tarfilename = "hdf5-$version-$destsubdir{$key}-static.tar.gz";
$tardirname = "hdf5-$version-$destsubdir{$key}-static";
} else {
$tarfilename = "hdf5-$version-$destsubdir{$key}-shared.tar.gz";
$tardirname = "hdf5-$version-$destsubdir{$key}-shared";
}
&makeTarFile($directoryname, $dir, $tarfilename, $tardirname);
}
}
# If this copy is done after h5rmflags is run on all the directories the compile scripts
# in the utilities directory will already have the paths removed.
if (-d $indirectory."/".$staticdir) {
$cmd = "cp -prv $indirectory/$staticdir/bin $outdirectory/$directoryname/utilities";
$output = `$cmd`;
print $output;
}
}

@ -0,0 +1,620 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Make a release of hdf5.
# Function definitions
#
# Print Usage page
USAGE()
{
# Print bin/release's help text to stdout: options, supported
# compression/packaging methods, and example invocations.  The here-doc
# body is user-facing output and is left verbatim.
cat << EOF
Usage: $0 -d <dir> [--docver BRANCHNAME] [-h] [--private] <methods> ...
-d DIR The name of the directory where the release(s) should be
placed.
--docver BRANCHNAME This is added for 1.8 and beyond to get the correct
version of documentation files from the hdf5docs
repository. BRANCHNAME for v1.8 should be hdf5_1_8.
-h print the help page.
--private Make a private release with today's date in version information.
This must be run at the top level of the source directory.
The other command-line options are the names of the programs to use
for compressing the resulting tar archive (if none are given then
"tar" is assumed):
tar -- use tar and don't do any compressing.
gzip -- use gzip with "-9" and append ".gz" to the output name.
bzip2 -- use bzip2 with "-9" and append ".bz2" to the output name.
zip -- convert all text files to DOS style and form a zip file for Windows use.
cmake-tgz -- create a tar file using the gzip default level with a build-unix.sh
command file and all other CMake files needed to build HDF5 source
using CMake on unix machines.
cmake-zip -- convert all text files to DOS style and create a zip file including cmake
scripts and .bat files to build HDF5 source using CMake on Windows.
hpc-cmake-tgz
-- create a tar file using the gzip default level with a build-unix.sh
command file and all other CMake files needed to build HDF5 source
using CMake on unix machines, with HDF5options.cmake files for serial
and parallel builds on machines requiring batch jobs to run tests.
The default is for parallel build, with serial only build by changing
the HDF5options.cmake symlink to ser-HDF5options.cmake. More
information is available in the README_HPC file.
doc -- produce the latest doc tree in addition to the archive.
A sha256 checksum is produced for each archive created and stored in the sha256 file.
Examples:
$ bin/release -d /tmp
/tmp/hdf5-1.8.13-RELEASE.txt
/tmp/hdf5-1.8.13.sha256
/tmp/hdf5-1.8.13.tar
$ bin/release -d /tmp gzip
/tmp/hdf5-1.8.13-RELEASE.txt
/tmp/hdf5-1.8.13.sha256
/tmp/hdf5-1.8.13.tar.gz
$ bin/release -d /tmp tar gzip zip
/tmp/hdf5-1.8.13-RELEASE.txt
/tmp/hdf5-1.8.13.sha256
/tmp/hdf5-1.8.13.tar
/tmp/hdf5-1.8.13.tar.gz
/tmp/hdf5-1.8.13.tar.zip
EOF
}
# Function name: tar2zip
# Convert the release tarball to a Windows zipball.
#
# Steps:
# 1. untar the tarball in a temporary directory;
# Note: do this in a temporary directory to avoid changing
# the original source directory which may be around.
# 2. convert all its text files to DOS (LF-CR) style;
# 3. form a zip file which is usable by Windows users.
#
# Parameters:
# $1 version
# $2 release tarball
# $3 output zipball file name
#
# Returns 0 if successful; 1 otherwise
#
tar2zip()
{
    # Convert a release tarball into a Windows-friendly zipball.
    #   $1 version (the tarball's top-level directory, e.g. hdf5-1.8.13)
    #   $2 release tarball
    #   $3 output zipball file name
    # Returns 0 if successful; 1 otherwise.
    if [ $# -ne 3 ]; then
        # Bug fix: the old message omitted the <version> argument even
        # though the function requires three arguments.
        echo "usage: tar2zip <version> <tarfilename> <zipfilename>"
        return 1
    fi
    # Unique scratch directory keyed by our PID.
    ztmpdir=/tmp/ztmpdir$$
    mkdir -p $ztmpdir
    version=$1
    tarfile=$2
    zipfile=$3
    # step 1: untar tarball in ztmpdir
    (cd $ztmpdir; tar xf -) < $tarfile
    # sanity check
    if [ ! -d $ztmpdir/$version ]; then
        echo "untar did not create $ztmpdir/$version source dir"
        # cleanup
        rm -rf $ztmpdir
        return 1
    fi
    # step 2: convert text files to DOS line endings.
    # unix2dos options: -k keep the date stamp, -q quiet mode.
    # grep output is redirected to /dev/null because -q/-s are not portable.
    find $ztmpdir/$version | \
    while read inf; do \
        if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \
            unix2dos -q -k $inf; \
        fi\
    done
    # step 3: make zipball
    # zip options: -9 max compression, -y store symlinks as such,
    # -r recursive, -q quiet.
    (cd $ztmpdir; zip -9 -y -r -q $version.zip $version)
    mv $ztmpdir/$version.zip $zipfile
    # cleanup
    rm -rf $ztmpdir
}
# Function name: tar2cmakezip
# Convert the release tarball to a Windows zipball with files to run CMake build.
#
# Steps:
# 1. untar the tarball in a temporary directory;
# Note: do this in a temporary directory to avoid changing
# the original source directory which may be around.
# 2. add build-unix.sh script.
# 3. add LIBAEC.tar.gz, ZLib.tar.gz and cmake files to top level directory.
# 4. create gzipped tar file with these contents:
# build-unix.sh script
# hdf5-<version> source code directory extracted from tar file
# CTestScript.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# HDF5config.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# HDF5options.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# LIBAEC.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake
# ZLib.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake
# Parameters:
# $1 version
# $2 release tarball
# $3 output zipball file name
#
# Returns 0 if successful; 1 otherwise
#
# need function to create another temporary directory, extract the
# $tmpdir/$HDF5_VERS.tar into it, create build-VS*.bat files,
# add CTestScript.cmake, HDF5config.cmake, LIBAEC.tar.gz
# ZLib.tar.gz, HDF5 examples, and then zip it.
tar2cmakezip()
{
    # Convert a release tarball into a Windows zipball that also carries
    # batch files and support archives for a CMake build.
    #   $1 version (the tarball's top-level directory, e.g. hdf5-1.14.0)
    #   $2 release tarball
    #   $3 output zipball file name
    # Returns 0 if successful; 1 otherwise.
    # NOTE: relies on globals $HDF5_VERS and the /mnt/scr1 pre-release area.
    if [ $# -ne 3 ]; then
        # Bug fix: the old message omitted the <version> argument even
        # though the function requires three arguments.
        echo "usage: tar2cmakezip <version> <tarfilename> <zipfilename>"
        return 1
    fi
    cmziptmpdir=/tmp/cmziptmpdir$$
    cmziptmpsubdir=$cmziptmpdir/CMake-$HDF5_VERS
    mkdir -p $cmziptmpsubdir
    version=$1
    tarfile=$2
    zipfile=$3
    # step 1: untar tarball in cmziptmpdir (comment fixed: it said
    # "cmgztmpdir", the variable name from the tgz sibling function)
    (cd $cmziptmpsubdir; tar xf -) < $tarfile
    # sanity check
    if [ ! -d $cmziptmpsubdir/$version ]; then
        echo "untar did not create $cmziptmpsubdir/$version source dir"
        # cleanup
        rm -rf $cmziptmpdir
        return 1
    fi
    # step 2: add batch files for building with CMake on Windows
    (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2015 -C Release -V -O hdf5.log" > build-VS2015-32.bat; chmod 755 build-VS2015-32.bat)
    (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201564 -C Release -V -O hdf5.log" > build-VS2015-64.bat; chmod 755 build-VS2015-64.bat)
    (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2017 -C Release -V -O hdf5.log" > build-VS2017-32.bat; chmod 755 build-VS2017-32.bat)
    (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201764 -C Release -V -O hdf5.log" > build-VS2017-64.bat; chmod 755 build-VS2017-64.bat)
    (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2019 -C Release -V -O hdf5.log" > build-VS2019-32.bat; chmod 755 build-VS2019-32.bat)
    (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201964 -C Release -V -O hdf5.log" > build-VS2019-64.bat; chmod 755 build-VS2019-64.bat)
    (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2022 -C Release -V -O hdf5.log" > build-VS2022-32.bat; chmod 755 build-VS2022-32.bat)
    (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS202264 -C Release -V -O hdf5.log" > build-VS2022-64.bat; chmod 755 build-VS2022-64.bat)
    # step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files
    cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmziptmpsubdir
    cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmziptmpsubdir
    cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.3-Source.zip $cmziptmpsubdir
    cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-1_14_1.zip $cmziptmpsubdir
    cp $cmziptmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmziptmpsubdir
    cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmziptmpsubdir
    cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5options.cmake $cmziptmpsubdir
    # step 4: convert text files to DOS line endings.
    # unix2dos options: -k keep the date stamp, -q quiet mode.
    # grep output is redirected to /dev/null because -q/-s are not portable.
    find $cmziptmpsubdir/$version | \
    while read inf; do \
        if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \
            unix2dos -q -k $inf; \
        fi\
    done
    # step 5: make zipball (renumbered; it was mislabeled "step 3").
    # zip options: -9 max compression, -y store symlinks as such,
    # -r recursive, -q quiet.
    (cd $cmziptmpdir; zip -9 -y -r -q CMake-$version.zip *)
    mv $cmziptmpdir/CMake-$version.zip $zipfile
    # cleanup
    rm -rf $cmziptmpdir
}
# Function name: tar2cmaketgz
# Convert the release tarball to a gzipped tar file with files to run CMake build.
#
#
# Steps:
# 1. untar the tarball in a temporary directory;
# Note: do this in a temporary directory to avoid changing
# the original source directory which may be around.
# 2. add build-unix.sh script.
# 3. add LIBAEC.tar.gz, ZLib.tar.gz and cmake files to top level directory.
# 4. create gzipped tar file with these contents:
# build-unix.sh script
# hdf5-<version> source code directory extracted from tar file
# CTestScript.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# HDF5config.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# HDF5options.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# LIBAEC.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake
# ZLib.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake
# Parameters:
# $1 version
# $2 release tarball
# $3 output zipball file name
#
# Returns 0 if successful; 1 otherwise
#
# need function to create another temporary directory, extract the
# $tmpdir/$HDF5_VERS.tar into it, create build-unix.sh,
# add CTestScript.cmake, HDF5config.cmake, LIBAEC.tar.gz
# ZLib.tar.gz, HDF5 examples, and then tar.gz it.
tar2cmaketgz()
{
    # Convert a release tarball into a gzipped tar file that also carries
    # build-unix.sh and the CMake support files.
    #   $1 version (the tarball's top-level directory, e.g. hdf5-1.14.0)
    #   $2 release tarball
    #   $3 output tgz file name
    # Returns 0 if successful; 1 otherwise.
    # NOTE: relies on globals $HDF5_VERS and the /mnt/scr1 pre-release area.
    if [ $# -ne 3 ]; then
        # Bug fix: the old message omitted the <version> argument even
        # though the function requires three arguments.
        echo "usage: tar2cmaketgz <version> <tarfilename> <tgzfilename>"
        return 1
    fi
    cmgztmpdir=/tmp/cmgztmpdir$$
    cmgztmpsubdir=$cmgztmpdir/CMake-$HDF5_VERS
    mkdir -p $cmgztmpsubdir
    version=$1
    tarfile=$2
    tgzfile=$3
    # step 1: untar tarball in cmgztmpdir
    (cd $cmgztmpsubdir; tar xf -) < $tarfile
    # sanity check
    if [ ! -d $cmgztmpsubdir/$version ]; then
        echo "untar did not create $cmgztmpsubdir/$version source dir"
        # cleanup
        rm -rf $cmgztmpdir
        return 1
    fi
    # step 2: add build-unix.sh script
    (cd $cmgztmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=Unix -C Release -V -O hdf5.log" > build-unix.sh; chmod 755 build-unix.sh)
    # step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files
    cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmgztmpsubdir
    cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir
    cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.3-Source.tar.gz $cmgztmpsubdir
    cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-1_14_1.tar.gz $cmgztmpsubdir
    cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir
    cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir
    cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5options.cmake $cmgztmpsubdir
    # step 4: create the gzipped tar file.
    # Fix: write to $tgzfile, the output-path argument, instead of
    # re-deriving the identical path from the $DEST/$HDF5_VERS globals
    # (the only caller passes $DEST/CMake-$HDF5_VERS.tar.gz as $3).
    tar czf $tgzfile -C $cmgztmpdir . || exit 1
    # cleanup
    rm -rf $cmgztmpdir
}
# Function name: tar2hpccmaketgz
# Convert the release tarball to a gzipped tarfile with files to run CMake build
# and HDF5options.cmake files for parallel or serial only builds where build
# tests are run on compute nodes using batch scripts.
#
# Steps:
# 1. untar the tarball in a temporary directory;
# Note: do this in a temporary directory to avoid changing
# the original source directory which may be around.
# 2. add build-unix.sh script.
# 3. add LIBAEC.tar.gz, ZLib.tar.gz and cmake files to top level directory.
# 4. create gzipped tar file with these contents:
# build-unix.sh script
# hdf5-<version> source code directory extracted from tar file
# CTestScript.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# HDF5config.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# HDF5options.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts
# LIBAEC.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake
# ZLib.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake
#
# 5. For HPC-CMake tgz file the following are also needed in the top-level directory:
# README_HPC copied from release_docs
# ser-HDF5options.cmake copied from <hdf5 source code>/config/cmake/scripts/HPC
# par-HDF5options.cmake copied from <hdf5 source code>/config/cmake/scripts/HPC
# HDF5options.cmake symlink to par-HDF5options.cmake
#
# Parameters:
# $1 version
# $2 release tarball
# $3 output zipball file name
#
# Returns 0 if successful; 1 otherwise
#
# need function to create another temporary directory, extract the
# $tmpdir/$HDF5_VERS.tar into it, create build-unix.sh,
# add CTestScript.cmake, HDF5config.cmake, LIBAEC.tar.gz
# ZLib.tar.gz, HDF5 examples, and then tar.gz it.
tar2hpccmaketgz()
{
    # Convert a release tarball into a gzipped tar file with CMake support
    # files plus the HPC serial/parallel HDF5options.cmake variants
    # (HDF5options.cmake is symlinked to the parallel one by default).
    #   $1 version (the tarball's top-level directory, e.g. hdf5-1.14.0)
    #   $2 release tarball
    #   $3 output tgz file name
    # Returns 0 if successful; 1 otherwise.
    # NOTE: relies on globals $HDF5_VERS and the /mnt/scr1 pre-release area.
    if [ $# -ne 3 ]; then
        # Bug fix: the old message omitted the <version> argument even
        # though the function requires three arguments.
        echo "usage: tar2hpccmaketgz <version> <tarfilename> <tgzfilename>"
        return 1
    fi
    cmgztmpdir=/tmp/cmgztmpdir$$
    cmgztmpsubdir=$cmgztmpdir/HPC-CMake-$HDF5_VERS
    mkdir -p $cmgztmpsubdir
    version=$1
    tarfile=$2
    tgzfile=$3
    # step 1: untar tarball in cmgztmpdir
    (cd $cmgztmpsubdir; tar xf -) < $tarfile
    # sanity check
    if [ ! -d $cmgztmpsubdir/$version ]; then
        echo "untar did not create $cmgztmpsubdir/$version source dir"
        # cleanup
        rm -rf $cmgztmpdir
        return 1
    fi
    # step 2: add build-unix.sh script
    (cd $cmgztmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=Unix -C Release -V -O hdf5.log" > build-unix.sh; chmod 755 build-unix.sh)
    # step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files
    cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmgztmpsubdir
    cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir
    cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-2.0.3-Source.tar.gz $cmgztmpsubdir
    cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-1_14_1.tar.gz $cmgztmpsubdir
    cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir
    cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir
    # step 4: add the HPC extras -- README_HPC plus the serial and parallel
    # option files; HDF5options.cmake points at the parallel one by default.
    cp $cmgztmpsubdir/$version/release_docs/README_HPC $cmgztmpsubdir
    cp $cmgztmpsubdir/$version/config/cmake/scripts/HPC/ser-HDF5options.cmake $cmgztmpsubdir
    cp $cmgztmpsubdir/$version/config/cmake/scripts/HPC/par-HDF5options.cmake $cmgztmpsubdir
    (cd $cmgztmpsubdir; ln -s par-HDF5options.cmake HDF5options.cmake)
    # step 5: create the gzipped tar file.
    # Fix: write to $tgzfile, the output-path argument, instead of
    # re-deriving the identical path from the $DEST/$HDF5_VERS globals
    # (the only caller passes $DEST/HPC-CMake-$HDF5_VERS.tar.gz as $3).
    tar czf $tgzfile -C $cmgztmpdir . || exit 1
    # cleanup
    rm -rf $cmgztmpdir
}
# This command must be run at the top level of the hdf5 source directory.
# Verify this requirement.
if [ ! \( -f configure.ac -a -f bin/release \) ]; then
echo "$0 must be run at the top level of the hdf5 source directory"
exit 1
fi
# Defaults
# DEST: output directory for archives (overridden by -d).
# VERS: current version string as reported by bin/h5vers; abort if empty.
DEST=releases
VERS=`perl bin/h5vers`
VERS_OLD=
test "$VERS" || exit 1
verbose=yes
release_date=`date +%F`
today=`date +%Y%m%d`
pmode='no'
tmpdir="../#release_tmp.$$" # tmp work directory
DOC_URL=https://git@bitbucket.hdfgroup.org/scm/hdffv/hdf5doc.git
CPPLUS_RM_NAME=cpplus_RM
MAINT_MODE_ENABLED=""
# If maintainer mode is enabled in configure, it should be disabled for release,
# and enabled again after release files have been created. If already disabled
# there's no need to do either.
# The grep pipeline leaves MAINT_MODE_ENABLED non-empty only when
# configure.ac has an AM_MAINTAINER_MODE line containing "enable".
MAINT_MODE_ENABLED=`grep ^AM_MAINTAINER_MODE ./configure.ac | grep enable`
if [ "${MAINT_MODE_ENABLED}" != "" ]; then
bin/switch_maint_mode -disable ./configure.ac
fi
# Restore previous Version information
RESTORE_VERSION()
{
# Undo a private-release version bump: restore the saved
# config/lt_vers.am from $tmpdir and re-apply the original version
# with bin/h5vers. No-op unless VERS_OLD is set (only --private mode
# sets it), so it is safe as an exit trap.
if [ X-${VERS_OLD} != X- ]; then
echo restoring version information back to $VERS_OLD
rm -f config/lt_vers.am
cp $tmpdir/lt_vers.am config/lt_vers.am
bin/h5vers -s $VERS_OLD
VERS_OLD=
fi
}
# Command-line arguments
# The loop ends at the first unset (or empty) positional argument,
# since [ -n "$1" ] is then false.
while [ -n "$1" ]; do
arg=$1
shift
case "$arg" in
-d)
DEST=$1
shift
;;
-h)
USAGE
exit 0
;;
--private)
pmode=yes
;;
--docver)
DOCVERSION=$1
shift
;;
-*)
echo "Unknown switch: $arg" 1>&2
USAGE
exit 1
;;
*)
# Any non-option word is a compression method (tar, gzip, zip, ...).
methods="$methods $arg"
;;
esac
done
# Default method is tar
if [ "X$methods" = "X" ]; then
methods="tar"
fi
# Create the temporary work directory.
# mkdir (without -p) fails if it already exists, which guards against
# two releases sharing a work area.
if mkdir $tmpdir; then
echo "temporary work directory for release. "\
"Can be deleted after release completes." > $tmpdir/README
else
echo "Failed to mkdir tmpdir($tmpdir)"
exit 1
fi
# setup restoration in case of abort.
# trap on 0 (EXIT) runs RESTORE_VERSION however the script ends.
trap RESTORE_VERSION 0
if [ X$pmode = Xyes ]; then
VERS_OLD=$VERS
# Copy old version of config/lt_vers.am, since it's hard to
# "undo" changes to it.
cp config/lt_vers.am $tmpdir
# Set version information to m.n.r-of$today.
# (h5vers does not correctly handle just m.n.r-$today.)
VERS=`echo $VERS | sed -e s/-.*//`-of$today
echo Private release of $VERS
else
bin/h5vers -s $VERS
fi
# Store hdf5-$VERS ("hdf5-1.7.51", e.g.) to a variable to avoid typos
HDF5_VERS=hdf5-$VERS
test "$verbose" && echo "Releasing $HDF5_VERS to $DEST" 1>&2
if [ ! -d $DEST ]; then
echo "   Destination directory $DEST does not exist" 1>&2
exit 1
fi
# Create a symlink to the source so files in the tarball have the prefix
# we want (gnu's --transform isn't portable)
ln -s `pwd` $tmpdir/$HDF5_VERS || exit 1
# Save a backup copy of Makefile if exists.
# Makefile.dist is shipped in its place while the tarball is built.
test -f Makefile && mv Makefile $tmpdir/Makefile.x
cp -p Makefile.dist Makefile
# Update README.md and release_docs/RELEASE.txt with release information in
# line 1.
# Done by rewriting line 1 into $f.x, appending the rest (sed 1d), then
# moving the new file into place.
for f in README.md release_docs/RELEASE.txt; do
echo "HDF5 version $VERS released on $release_date" >$f.x
sed -e 1d $f >>$f.x
mv $f.x $f
# Make sure new files are of the right access mode
chmod 644 $f
done
# develop is different than branches.
if [ "${DOCVERSION}" ]; then
DOC_URL="$DOC_URL -b ${DOCVERSION}"
fi
# Create the tar file
# tar -h dereferences the $tmpdir/$HDF5_VERS symlink so the archive gets
# real files under the hdf5-<vers>/ prefix; --exclude-vcs drops .git etc.
test "$verbose" && echo "   Running tar..." 1>&2
(cd "$tmpdir" && exec tar -ch --exclude-vcs -f "$HDF5_VERS.tar" "./$HDF5_VERS" || exit 1 )
# Compress
# The sha256 file is truncated first; each method appends its checksum.
SHA256=$HDF5_VERS.sha256
cp /dev/null $DEST/$SHA256
for comp in $methods; do
case $comp in
tar)
cp -p $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.tar
(cd $DEST; sha256sum $HDF5_VERS.tar >> $SHA256)
;;
gzip)
test "$verbose" && echo "   Running gzip..." 1>&2
gzip -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.gz
(cd $DEST; sha256sum $HDF5_VERS.tar.gz >> $SHA256)
;;
cmake-tgz)
test "$verbose" && echo "   Creating CMake tar.gz file..." 1>&2
tar2cmaketgz $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/CMake-$HDF5_VERS.tar.gz 1>&2
(cd $DEST; sha256sum CMake-$HDF5_VERS.tar.gz >> $SHA256)
;;
hpc-cmake-tgz)
test "$verbose" && echo "   Creating HPC-CMake tar.gz file..." 1>&2
tar2hpccmaketgz $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/HPC-CMake-$HDF5_VERS.tar.gz 1>&2
(cd $DEST; sha256sum HPC-CMake-$HDF5_VERS.tar.gz >> $SHA256)
;;
bzip2)
test "$verbose" && echo "   Running bzip2..." 1>&2
bzip2 -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.bz2
(cd $DEST; sha256sum $HDF5_VERS.tar.bz2 >> $SHA256)
;;
zip)
test "$verbose" && echo "   Creating zip ball..." 1>&2
tar2zip $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2
(cd $DEST; sha256sum $HDF5_VERS.zip >> $SHA256)
;;
cmake-zip)
test "$verbose" && echo "       Creating CMake-zip ball..." 1>&2
tar2cmakezip $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/CMake-$HDF5_VERS.zip 1>&2
(cd $DEST; sha256sum CMake-$HDF5_VERS.zip >> $SHA256)
;;
doc)
if [ "${DOCVERSION}" = "" ]; then
DOCVERSION=master
fi
test "$verbose" && echo "   Creating docs..." 1>&2
# Check out docs from git repo
(cd $tmpdir; git clone -q $DOC_URL ${DOCVERSION} > /dev/null) || exit 1
# Create doxygen C++ RM
(cd c++/src && doxygen cpp_doc_config > /dev/null ) || exit 1
# Replace version of C++ RM with just-created version
rm -rf $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME || exit 1
mv c++/src/$CPPLUS_RM_NAME $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME || exit 1
# Compress the docs and move them to the release area
mv $tmpdir/${DOCVERSION} $tmpdir/${HDF5_VERS}_docs || exit 1
(cd $tmpdir && tar cf ${HDF5_VERS}_docs.tar ${HDF5_VERS}_docs) || exit 1
mv $tmpdir/${HDF5_VERS}_docs.tar $DEST || exit 1
;;
*)
echo "***Error*** Unknown method $comp"
exit 1
;;
esac
done
# If AM_MAINTAINER_MODE was enabled before running this script
# restore it to "enabled".
if [ "${MAINT_MODE_ENABLED}" != "" ]; then
bin/switch_maint_mode -enable ./configure.ac
fi
# Copy the RELEASE.txt to the release area.
cp release_docs/RELEASE.txt $DEST/$HDF5_VERS-RELEASE.txt
# Remove distributed Makefile and restore previous Makefile if existed.
rm -f Makefile
test -f $tmpdir/Makefile.x && mv $tmpdir/Makefile.x Makefile
# Restore OLD version information, then no need for trap.
# "trap 0" clears the EXIT trap installed earlier, since the version
# has just been restored explicitly.
if [ X$pmode = Xyes ]; then
RESTORE_VERSION
trap 0
fi
# Remove temporary things
rm -rf $tmpdir
echo "DONE"
exit 0

@ -0,0 +1,68 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# A script to clean up the action of autogen.sh
#
# If this script fails to clean up generated files on a particular
# platform, please contact help@hdfgroup.org or comment on the forum.

echo
echo "*******************************"
echo "* HDF5 autogen.sh undo script *"
echo "*******************************"
echo

echo "Remove autom4te.cache directory"
rm -rf autom4te.cache

echo "Remove configure script"
rm -f configure

echo "Remove Makefile.in files"
find . -type f -name 'Makefile.in' -exec rm {} \;

# Each group below deletes the outputs of one autotools helper; a single
# rm -f per group is equivalent to removing the files one at a time.
echo "Remove files generated by libtoolize"
rm -f bin/ltmain.sh m4/libtool.m4 m4/ltoptions.m4 \
      m4/ltsugar.m4 m4/ltversion.m4 m4/lt~obsolete.m4

echo "Remove files generated by automake"
rm -f bin/compile bin/config.guess bin/config.sub \
      bin/install-sh bin/missing bin/test-driver bin/depcomp

echo "Remove files generated by autoheader"
rm -f src/H5config.h.in

echo "Remove files generated by bin/make_err"
rm -f src/H5Epubgen.h src/H5Einit.h src/H5Eterm.h src/H5Edefin.h

echo "Remove files generated by bin/make_vers"
rm -f src/H5version.h

echo "Remove files generated by bin/make_overflow"
rm -f src/H5overflow.h

echo "Remove remaining generated files"
rm -f aclocal.m4

@ -0,0 +1,87 @@
#!/usr/bin/env perl
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Run program in background
#
require 5.003;
# Fix: the original enabled "use warnings" twice and assigned an unused
# global ($indent = 4) before "use strict"; both removed.
use warnings;
use strict;
use Carp;
use Time::HiRes;
use POSIX 'setsid';

# File-scoped state shared with start_child()/stop_child().
my $child_pid;      # PID of the spawned child process
my $child_proc;     # Win32::Process object (Windows only)
my $cmd = $ARGV[0]; # program to run, taken from the first argument
my $debug = 1;      # when true, the child keeps the parent's stdio

print "\nStart child process\n";
start_child();
print "\nStarted child process\n";
# Spawn $cmd as a detached background process and record its PID in
# $child_pid. On Windows this uses Win32::Process; on Unix it forks,
# detaches the child with setsid, and (unless $debug) redirects the
# child's stdio to /dev/null. Dies/confesses on spawn failure.
sub start_child {
die "cannot execute cmd: $cmd" unless -x $cmd;
if ($^O eq 'MSWin32') { # Windows
require Win32::Process;
Win32::Process::Create($child_proc, $cmd, $cmd, 0, 0, ".") || confess "Could not spawn child: $!";
$child_pid = $child_proc->GetProcessID();
}
else { # Unix
# Ignore SIGCHLD while forking so an early child exit is not missed
# as a zombie; restored to DEFAULT in the parent below.
$SIG{CHLD} = 'IGNORE';
$child_pid = fork();
unless (defined $child_pid) {
confess "Could not spawn child (Unix): $!";
}
if ($child_pid == 0) { # child
unless ($debug) {
open STDIN, "<", "/dev/null" or die "Can't read /dev/null: $!";
open STDOUT, ">", "/dev/null" or die "Can't write /dev/null: $!";
}
# Detach from the controlling terminal / parent session.
setsid or warn "setsid cannot start a new session: $!";
unless ($debug) {
open STDERR, '>&STDOUT' or die "Can't dup stdout: $!";
}
# Unbuffer output, then replace this process image with $cmd.
local $| = 1;
unless (exec($cmd)) {
confess "Could not start child: $cmd: $!";
CORE::exit(0);
}
}
# parent
$SIG{CHLD} = 'DEFAULT';
}
# catch early child exit, e.g. if program path is incorrect
# NOTE(review): Time::HiRes is loaded but its sleep() is not imported,
# so sleep(1.0) presumably resolves to CORE::sleep (whole seconds) --
# confirm if sub-second waits were intended.
sleep(1.0);
POSIX::waitpid(-1, POSIX::WNOHANG()); # clean up any defunct child process
if (kill(0,$child_pid)) {
print "\nStarted child process id $child_pid\n";
}
else {
warn "Child process exited quickly: $cmd: process $child_pid";
}
}
# Terminate the background child recorded in $child_pid by start_child().
# Uses Win32::Process::KillProcess on Windows and SIGKILL on Unix.
sub stop_child
{
    if ($^O eq 'MSWin32') { # Windows
        Win32::Process::KillProcess($child_pid,0);
    }
    else { # Unix
        # Bug fix: "kill 9, $child_pid || warn ..." parsed as
        # kill(9, ($child_pid || warn ...)) because || binds tighter than
        # the list operator's last argument, so the warning could never
        # fire when kill failed. Parenthesize the call and use
        # low-precedence "or".
        kill(9, $child_pid) or warn "could not kill process $child_pid: $!";
    }
    print "Stopped child process id $child_pid\n";
}

@ -0,0 +1,80 @@
#!/bin/sh
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Switch AM_MAINTAINER_MODE value in configure.ac
# Usage: See USAGE()
# Programmer: Dana Robinson
# Creation date: January 2016
USAGE()
{
# Print the help text for switch_maint_mode on stdout.
# The heredoc is user-facing output and is left untouched.
cat <<EOF
switch_maint_mode reverses the status of AM_MAINTAINER_MODE in
configure.ac from enable to disable or vice-versa. When enabled,
this feature forces the autotools to run when the input files are
older than the output files. This is the default for development
branches. When disabled, the autotools will NOT be re-run regardless
of their timestamps or any modifications. This is the default for
tarballs and release branches since it avoids having end-users
requiring the autotools.
Command Syntax
==============
switch_maint_mode [-help] [-enable|disable] <path-to-configure.ac>
EOF
}
# MODE holds the requested action; "notset" is a sentinel meaning no
# -enable/-disable flag was seen. CONFIG_AC_PATH is the lone non-option
# argument (path to configure.ac).
MODE="notset"
CONFIG_AC_PATH=
# Parse the command line.
while [ $# -gt 0 ]; do
    case "$1" in
        -enable)
            MODE="enable"
            ;;
        -disable)
            MODE="disable"
            ;;
        -help)
            USAGE
            exit 0
            ;;
        *)
            CONFIG_AC_PATH="$1"
            ;;
    esac
    shift
done
# Did we get a file path?
if test -z "$CONFIG_AC_PATH" ; then
    USAGE
    exit 1
fi
# Did we get a mode?
# Bug fix: MODE is initialized to the sentinel "notset", never the empty
# string, so the original 'test -z $MODE' could never detect a missing
# mode. Compare against the sentinel instead.
if test "$MODE" = "notset" ; then
    USAGE
    exit 1
fi
# Run perl over configure.ac, rewriting the AM_MAINTAINER_MODE([...])
# argument in place to the requested value.
if test "X-$MODE" = "X-enable" ; then
    perl -pi -e 's/^(AM_MAINTAINER_MODE\(\[)([a-z]+)(\]\))/$1enable$3/g' "$CONFIG_AC_PATH"
fi
if test "X-$MODE" = "X-disable" ; then
    perl -pi -e 's/^(AM_MAINTAINER_MODE\(\[)([a-z]+)(\]\))/$1disable$3/g' "$CONFIG_AC_PATH"
fi

@ -0,0 +1,148 @@
#! /bin/sh
# test-driver - basic testsuite driver script.
scriptversion=2018-03-07.03; # UTC
# Copyright (C) 2011-2020 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# As a special exception to the GNU General Public License, if you
# distribute this file as part of a program that contains a
# configuration script generated by Autoconf, you may include it under
# the same distribution terms that you use for the rest of that program.
# This file is maintained in Automake, please report
# bugs to <bug-automake@gnu.org> or send patches to
# <automake-patches@gnu.org>.
# Make unconditional expansion of undefined variables an error. This
# helps a lot in preventing typo-related bugs.
set -u
usage_error ()
{
  # Complain about a bad invocation on stderr, show the usage text,
  # and exit with status 2 (the conventional "usage error" status).
  {
    echo "$0: $*"
    print_usage
  } >&2
  exit 2
}
print_usage ()
{
# Print the command-line synopsis on stdout (used by --help and by
# usage_error, which redirects it to stderr). The heredoc is
# user-facing output and is left untouched.
cat <<END
Usage:
test-driver --test-name=NAME --log-file=PATH --trs-file=PATH
[--expect-failure={yes|no}] [--color-tests={yes|no}]
[--enable-hard-errors={yes|no}] [--]
TEST-SCRIPT [TEST-SCRIPT-ARGUMENTS]
The '--test-name', '--log-file' and '--trs-file' options are mandatory.
END
}
# Option defaults; the three path/name options are mandatory and
# validated below.
test_name= # Used for reporting.
log_file= # Where to save the output of the test script.
trs_file= # Where to save the metadata of the test run.
expect_failure=no
color_tests=no
enable_hard_errors=yes
# Consume test-driver's own options; everything after "--" (or the
# first non-option word) is the test script and its arguments.
while test $# -gt 0; do
case $1 in
--help) print_usage; exit $?;;
--version) echo "test-driver $scriptversion"; exit $?;;
--test-name) test_name=$2; shift;;
--log-file) log_file=$2; shift;;
--trs-file) trs_file=$2; shift;;
--color-tests) color_tests=$2; shift;;
--expect-failure) expect_failure=$2; shift;;
--enable-hard-errors) enable_hard_errors=$2; shift;;
--) shift; break;;
-*) usage_error "invalid option: '$1'";;
*) break;;
esac
shift
done
# Collect all missing mandatory options into one message so the user
# sees them all at once.
missing_opts=
test x"$test_name" = x && missing_opts="$missing_opts --test-name"
test x"$log_file" = x && missing_opts="$missing_opts --log-file"
test x"$trs_file" = x && missing_opts="$missing_opts --trs-file"
if test x"$missing_opts" != x; then
usage_error "the following mandatory options are missing:$missing_opts"
fi
if test $# -eq 0; then
usage_error "missing argument"
fi
if test $color_tests = yes; then
# Keep this in sync with 'lib/am/check.am:$(am__tty_colors)'.
red='' # Red.
grn='' # Green.
lgn='' # Light green.
blu='' # Blue.
mgn='' # Magenta.
std='' # No color.
else
red= grn= lgn= blu= mgn= std=
fi
# On HUP/INT/PIPE/TERM, delete the partial log/trs files and re-raise
# the conventional 128+signal exit status. do_exit is expanded at trap
# time, with $st set per-signal just before it.
do_exit='rm -f $log_file $trs_file; (exit $st); exit $st'
trap "st=129; $do_exit" 1
trap "st=130; $do_exit" 2
trap "st=141; $do_exit" 13
trap "st=143; $do_exit" 15
# Test script is run here.
"$@" >$log_file 2>&1
estatus=$?
# Exit status 99 is automake's "hard error"; when hard errors are
# disabled, downgrade it to an ordinary failure (1).
if test $enable_hard_errors = no && test $estatus -eq 99; then
tweaked_estatus=1
else
tweaked_estatus=$estatus
fi
# Map (exit status, expect-failure) to result keyword, display color,
# and the recheck / copy-in-global-log metadata flags.
case $tweaked_estatus:$expect_failure in
0:yes) col=$red res=XPASS recheck=yes gcopy=yes;;
0:*) col=$grn res=PASS recheck=no gcopy=no;;
77:*) col=$blu res=SKIP recheck=no gcopy=yes;;
99:*) col=$mgn res=ERROR recheck=yes gcopy=yes;;
*:yes) col=$lgn res=XFAIL recheck=no gcopy=yes;;
*:*) col=$red res=FAIL recheck=yes gcopy=yes;;
esac
# Report the test outcome and exit status in the logs, so that one can
# know whether the test passed or failed simply by looking at the '.log'
# file, without the need of also peaking into the corresponding '.trs'
# file (automake bug#11814).
echo "$res $test_name (exit status: $estatus)" >>$log_file
# Report outcome to console.
echo "${col}${res}${std}: $test_name"
# Register the test result, and other relevant metadata.
echo ":test-result: $res" > $trs_file
echo ":global-test-result: $res" >> $trs_file
echo ":recheck: $recheck" >> $trs_file
echo ":copy-in-global-log: $gcopy" >> $trs_file
# Local Variables:
# mode: shell-script
# sh-indentation: 2
# eval: (add-hook 'before-save-hook 'time-stamp)
# time-stamp-start: "scriptversion="
# time-stamp-format: "%:y-%02m-%02d.%02H"
# time-stamp-time-zone: "UTC0"
# time-stamp-end: "; # UTC"
# End:

@ -0,0 +1,548 @@
#!/usr/bin/env perl
##
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
##
require 5.003;
use warnings;
$Source = "";
##############################################################################
# A map from type name to type letter. We use this map for two reasons:
# 1. We want the debugging stuff in the source code to be as unobtrusive as
# possible, which means as compact as possible.
# 2. It's easier (faster) to parse these one and two-letter types in the C
# functions that display debugging results.
#
# All type strings are one or two characters. One-character strings
# are always lower case and should be used for common types.
# Two-character strings begin with an upper-case letter which is
# usually the same as the package name.
#
# Map from C type name to one- or two-character trace type code.
# Cleaned up: removed stray commas before '=>' (Perl silently ignores the
# resulting empty list elements, but they are confusing) and normalized
# quoting of the values.
%TypeString = ("haddr_t" => "a",
"H5A_info_t" => "Ai",
"H5A_operator1_t" => "Ao",
"H5A_operator2_t" => "AO",
"hbool_t" => "b",
"H5AC_cache_config_t" => "Cc",
"H5AC_cache_image_config_t" => "CC",
"double" => "d",
"H5D_alloc_time_t" => "Da",
"H5D_append_cb_t" => "DA",
"H5FD_mpio_collective_opt_t" => "Dc",
"H5D_selection_io_mode_t" => "DC",
"H5D_fill_time_t" => "Df",
"H5D_fill_value_t" => "DF",
"H5D_gather_func_t" => "Dg",
"H5FD_mpio_chunk_opt_t" => "Dh",
"H5D_mpio_actual_io_mode_t" => "Di",
"H5FD_file_image_callbacks_t" => "DI",
"H5D_chunk_index_t" => "Dk",
"H5D_layout_t" => "Dl",
"H5D_mpio_no_collective_cause_t" => "Dn",
"H5D_mpio_actual_chunk_opt_mode_t" => "Do",
"H5D_operator_t" => "DO",
"H5D_space_status_t" => "Ds",
"H5D_scatter_func_t" => "DS",
"H5FD_mpio_xfer_t" => "Dt",
"H5D_vds_view_t" => "Dv",
"H5FD_class_value_t" => "DV",
"H5D_chunk_iter_op_t" => "x",
"herr_t" => "e",
"H5E_auto1_t" => "Ea",
"H5E_auto2_t" => "EA",
"H5ES_event_complete_func_t" => "EC",
"H5E_direction_t" => "Ed",
"H5E_error_t" => "Ee",
"H5ES_event_insert_func_t" => "EI",
"H5ES_status_t" => "Es",
"H5E_type_t" => "Et",
"H5FD_class_t" => "FC",
"H5F_close_degree_t" => "Fd",
"H5F_fspace_strategy_t" => "Ff",
"H5F_flush_cb_t" => "FF",
"H5F_info2_t" => "FI",
"H5F_mem_t" => "Fm",
"H5F_scope_t" => "Fs",
"H5F_file_space_type_t" => "Ft",
"H5F_libver_t" => "Fv",
"H5G_iterate_t" => "Gi",
"H5G_obj_t" => "Go",
"H5G_stat_t" => "Gs",
"hsize_t" => "h",
"H5_atclose_func_t" => "Hc",
"hssize_t" => "Hs",
"H5E_major_t" => "i", # H5E_major_t is typedef'd to hid_t
"H5E_minor_t" => "i", # H5E_minor_t is typedef'd to hid_t
"hid_t" => "i",
"H5I_future_discard_func_t" => "ID",
"H5I_free_t" => "If",
"H5_index_t" => "Ii",
"H5I_iterate_func_t" => "II",
"H5_iter_order_t" => "Io",
"H5FD_subfiling_ioc_select_t" => "IO",
"H5I_future_realize_func_t" => "IR",
"int" => "Is",
"int32_t" => "Is",
"H5I_search_func_t" => "IS",
"H5I_type_t" => "It",
"unsigned" => "Iu",
"unsigned int" => "Iu",
"uint32_t" => "Iu",
"H5O_token_t" => "k",
"H5L_iterate1_t" => "Li",
"H5L_iterate2_t" => "LI",
"H5G_link_t" => "Ll", #Same as H5L_type_t now
"H5L_type_t" => "Ll",
"H5L_elink_traverse_t" => "Lt",
"H5MM_allocate_t" => "Ma",
"MPI_Comm" => "Mc",
"H5MM_free_t" => "Mf",
"MPI_Info" => "Mi",
"H5M_iterate_t" => "MI",
"H5FD_mem_t" => "Mt",
"off_t" => "o",
"H5O_iterate1_t" => "Oi",
"H5O_iterate2_t" => "OI",
"H5O_mcdt_search_cb_t" => "Os",
"H5O_type_t" => "Ot",
"H5P_class_t" => "p",
"H5P_cls_create_func_t" => "Pc",
"H5P_prp_create_func_t" => "PC",
"H5P_prp_delete_func_t" => "PD",
"H5P_prp_get_func_t" => "PG",
"H5P_iterate_t" => "Pi",
"H5P_cls_close_func_t" => "Pl",
"H5P_prp_close_func_t" => "PL",
"H5P_prp_compare_func_t" => "PM",
"H5P_cls_copy_func_t" => "Po",
"H5P_prp_copy_func_t" => "PO",
"H5P_prp_set_func_t" => "PS",
"hdset_reg_ref_t" => "Rd",
"hobj_ref_t" => "Ro",
"H5R_ref_t" => "Rr",
"H5R_type_t" => "Rt",
"char" => "s",
"unsigned char" => "s",
"H5S_class_t" => "Sc",
"H5S_seloper_t" => "Ss",
"H5S_sel_type" => "St",
"htri_t" => "t",
"H5T_cset_t" => "Tc",
"H5T_conv_t" => "TC",
"H5T_direction_t" => "Td",
"H5T_pers_t" => "Te",
"H5T_conv_except_func_t" => "TE",
"H5T_norm_t" => "Tn",
"H5T_order_t" => "To",
"H5T_pad_t" => "Tp",
"H5T_sign_t" => "Ts",
"H5T_class_t" => "Tt",
"H5T_str_t" => "Tz",
"unsigned long" => "Ul",
"unsigned long long" => "UL",
"uint64_t" => "UL",
"H5VL_attr_get_t" => "Va",
"H5VL_blob_optional_t" => "VA",
"H5VL_attr_specific_t" => "Vb",
"H5VL_blob_specific_t" => "VB",
"H5VL_dataset_get_t" => "Vc",
"H5VL_class_value_t" => "VC",
"H5VL_dataset_specific_t" => "Vd",
"H5VL_datatype_get_t" => "Ve",
"H5VL_datatype_specific_t" => "Vf",
"H5VL_file_get_t" => "Vg",
"H5VL_file_specific_t" => "Vh",
"H5VL_group_get_t" => "Vi",
"H5VL_group_specific_t" => "Vj",
"H5VL_link_create_t" => "Vk",
"H5VL_link_get_t" => "Vl",
"H5VL_get_conn_lvl_t" => "VL",
"H5VL_link_specific_t" => "Vm",
"H5VL_object_get_t" => "Vn",
"H5VL_request_notify_t" => "VN",
"H5VL_object_specific_t" => "Vo",
"H5VL_request_specific_t" => "Vr",
"H5VL_attr_optional_t" => "Vs",
"H5VL_subclass_t" => "VS",
"H5VL_dataset_optional_t" => "Vt",
"H5VL_datatype_optional_t" => "Vu",
"H5VL_file_optional_t" => "Vv",
"H5VL_group_optional_t" => "Vw",
"H5VL_link_optional_t" => "Vx",
"H5VL_object_optional_t" => "Vy",
"H5VL_request_optional_t" => "Vz",
"va_list" => "x",
"void" => "x",
"size_t" => "z",
"H5Z_SO_scale_type_t" => "Za",
"H5Z_class_t" => "Zc",
"H5Z_EDC_t" => "Ze",
"H5Z_filter_t" => "Zf",
"H5Z_filter_func_t" => "ZF",
"ssize_t" => "Zs",
# Types below must be defined here, as they appear in function arguments,
# but they are not yet supported in the H5_trace_args() routine. If
# they are used as an actual parameter type (and not just as a pointer
# to the type), they must have a "real" abbreviation added (like the ones
# above), moved to the section of entries above, and support for displaying
# the type must be added to H5_trace_args().
"H5ES_err_info_t" => "#",
"H5FD_t" => "#",
"H5FD_hdfs_fapl_t" => "#",
"H5FD_mirror_fapl_t" => "#",
"H5FD_onion_fapl_t" => "#",
"H5FD_ros3_fapl_t" => "#",
"H5FD_splitter_vfd_config_t" => "#",
"H5L_class_t" => "#",
"H5VL_class_t" => "#",
"H5VL_loc_params_t" => "#",
"H5VL_request_status_t" => "#",
);
##############################################################################
# Maximum length of H5TRACE macro line
# If the ColumnLimit in .clang-format is changed, this value will need to be updated
#
my $max_trace_macro_line_len = 110;
##############################################################################
# Print an error message.
#
my $found_errors = 0;
# Report an error for function $func in file $file, prefixed with the
# file name and the line number where the function appears in $Source.
# Sets the global $found_errors flag so the script exits non-zero later.
sub errmesg ($$@) {
my ($file, $func, @mesg) = @_;
my $text = join("", @mesg);
# Default to line 1 if the function cannot be located in the source.
my $lineno = 1;
if ($Source =~ /(.*?\n)($func)/s) {
# Count the newlines preceding the function name to get its line number.
my $preceding = $1;
$lineno = ($preceding =~ tr/\n//);
}
$found_errors = 1;
print "$file: in function \`$func\':\n";
print "$file:$lineno: $text\n";
}
##############################################################################
# Given a C data type return the type string that goes with it.
#
# Given a file name, function name, and C data type string, return the
# compact trace type code for that type (e.g. "hid_t" -> "i").
# The returned string is '*' repeated per remaining indirection level,
# an optional "[n]" array suffix, then the code from %TypeString.
# Unknown types yield the sentinel '!' (error deferred until the code is
# actually used); known-but-unsupported types map to '#' via %TypeString.
# NOTE: the substitutions below are order-dependent — do not reorder.
sub argstring ($$$) {
my ($file, $func, $atype) = @_;
# $tstr starts as the '!' sentinel (unknown type) until matched below.
my ($ptr, $tstr, $array) = (0, "!", "");
my ($fq_atype);
# Normalize the data type by removing redundant white space,
# certain type qualifiers, and indirection.
$atype =~ s/^\bconst\b//; # Leading const
$atype =~ s/\s*const\s*//; # const after type, possibly in the middle of '*'s
$atype =~ s/^\bstatic\b//;
$atype =~ s/\bH5_ATTR_UNUSED\b//g;
$atype =~ s/\bH5_ATTR_DEPRECATED_USED\b//g;
$atype =~ s/\bH5_ATTR_NDEBUG_UNUSED\b//g;
$atype =~ s/\bH5_ATTR_DEBUG_API_USED\b//g;
$atype =~ s/\bH5_ATTR_PARALLEL_UNUSED\b//g;
$atype =~ s/\bH5_ATTR_PARALLEL_USED\b//g;
$atype =~ s/\s+/ /g;
# Strip the '*'s out of the type, remembering the indirection count.
$ptr = length $1 if $atype =~ s/(\*+)//;
$atype =~ s/^\s+//;
$atype =~ s/\s+$//;
# Split off a trailing "[n]" array dimension, if any.
if ($atype =~ /(.*)\[(.*)\]$/) {
($array, $atype) = ($2, $1);
$atype =~ s/\s+$//;
}
# Fully-qualified type: base type plus its '*'s, e.g. "char**".
$fq_atype = $atype . ('*' x $ptr);
# Prefer an exact match for the pointer type, then one level less
# indirection, then the bare base type.
if ($ptr>0 && exists $TypeString{$fq_atype}) {
$ptr = 0;
$tstr = $TypeString{$fq_atype};
} elsif ($ptr>0 && exists $TypeString{"$atype*"}) {
--$ptr;
$tstr = $TypeString{"$atype*"};
} elsif (!exists $TypeString{$atype}) {
# Defer throwing error until type is actually used
# errmesg $file, $func, "untraceable type \`$atype", '*'x$ptr, "\'";
} else {
$tstr = $TypeString{$atype};
}
return ("*" x $ptr) . ($array ? "[$array]" : "") . $tstr;
}
##############################################################################
# Given information about an API function, rewrite that function with
# updated tracing information.
#
my $file_api = 0;
my $file_args = 0;
my $total_api = 0;
my $total_args = 0;
# Given the parts of an API function (file name, return type, function
# name, argument list, body), rebuild the function text with an updated
# H5TRACE macro (for public H5* API routines) and updated H5ARG_TRACE
# macros (for internal routines that carry them). Returns the complete,
# possibly-rewritten function text. On any parse error, reports via
# errmesg() and returns the function with its original argument list.
# Fix: '$index' is now declared with 'my' instead of leaking as an
# unintended package global.
sub rewrite_func ($$$$$) {
my ($file, $type, $name, $args, $body) = @_;
my ($arg, $trace, $argtrace);
my (@arg_name, @arg_str, @arg_type);
local $_;
# Keep copy of original arguments
my $orig_args = $args;
# Parse return value
my $rettype = argstring $file, $name, $type;
# Parse arguments
if ($args eq "void") {
$trace = "H5TRACE0(\"$rettype\", \"\");\n";
$argtrace = "H5ARG_TRACE0(\"\")";
} else {
# Split arguments. First convert `/*in,out*/' to get rid of the
# comma and remove lines beginning with a '#', then split the arguments
# on commas.
$args =~ s/(\/\*\s*in),\s*(out\s*\*\/)/$1_$2/g; # Get rid of comma in 'in,out'
$args =~ s/H5FL_TRACK_PARAMS//g; # Remove free list macro
$args =~ s/\n#.*?\n/\n/g; # Remove lines beginning with '#'
my @args = split /,[\s\n]*/, $args;
my $argno = 0;
my %names;
for $arg (@args) {
if($arg=~/\w*\.{3}\w*/){ # Skip "..." for varargs parameter
next;
}
# Parse one argument into: type, name, optional array, optional
# /*in*/ / /*out*/ / /*in_out*/ direction comment.
unless ($arg=~/^((\s*[a-z_A-Z](\w|\*)*\s+)+(\s*\*\s*|\s*const\s*|\s*volatile\s*)*)
([a-z_A-Z]\w*)(\[.*?\])?
(\s*\/\*\s*(in|out|in_out)\s*\*\/)?\s*$/x) {
errmesg $file, $name, "unable to parse \`$arg\'";
goto error;
} else {
my ($atype, $aname, $array, $adir) = ($1, $5, $6, $8);
$names{$aname} = $argno++;
$adir ||= "in";
$atype =~ s/\s+$//;
push @arg_name, $aname;
push @arg_type, $atype;
if ($adir eq "out") {
# Output-only arguments are traced as a generic pointer.
push @arg_str, "x";
} else {
if (defined $array) {
$atype .= "*";
# An array dimension of the form [/*name*/] refers back to a
# previously-seen argument; encode its argument index.
if ($array =~ /^\[\/\*([a-z_A-Z]\w*)\*\/\]$/) {
my $asize = $1;
if (exists $names{$asize}) {
$atype .= '[a' . $names{$asize} . ']';
} else {
warn "bad array size: $asize";
$atype .= "*";
}
}
}
push @arg_str, argstring $file, $name, $atype;
}
}
}
# Compose the trace macro
$trace = "H5TRACE" . scalar(@arg_str) . "(\"$rettype\", \"";
$argtrace = "H5ARG_TRACE" . scalar(@arg_str) . "(__func__, \"";
$trace .= join("", @arg_str) . "\"";
$argtrace .= join("", @arg_str) . "\"";
# Add 4 for indenting the line
my $len = 4 + length($trace);
for my $i (0 .. $#arg_name) {
# Handle wrapping
# Be VERY careful here! clang-format and this script MUST agree
# on which lines get wrapped or there will be churn as each tries
# to undo the other's output.
#
# TWO cases must be handled:
# 1) The argument is that last one and ');' will be appended
# 2) The argument is NOT the last one and ',' will be appended
#
# NB: clang-format does NOT consider terminal newlines when
# counting columns for the ColumnLimit
#
# The extra '2' added after $len includes the ', ' that would be
# added BEFORE the argument.
#
my $adjust = ($i + 1 == scalar(@arg_str)) ? 2 : 1;
my $len_if_added = $len + 2 + length($arg_name[$i]) + $adjust;
# Wrap lines that will be longer than the limit
if ($len_if_added > $max_trace_macro_line_len) {
# Wrap line, with indentation
$trace .= ",\n ";
$len = 13; # Set to 13, for indentation
# Indent an extra space to account for extra digit in 'H5TRACE' macro
if (scalar(@arg_str) >= 10) {
$trace .= " ";
$len++;
}
} else {
$trace .= ", ";
$len += 2; # Add 2, for ', '
}
# Append argument
$trace .= "$arg_name[$i]";
$argtrace .= ", $arg_name[$i]";
# Add length of appended argument name
$len += length($arg_name[$i]);
}
# Append final ');' for macro
$trace .= ");\n";
$argtrace .= ")";
}
# Check for API / non-API routine name
if( $name =~ /H5[A-Z]{0,2}[a-z].*/) {
# The H5TRACE() statement, for API routines
if ($body =~ /\/\*[ \t]*NO[ \t]*TRACE[ \t]*\*\//) {
# Ignored due to NO TRACE comment.
} else {
# Check for known, but unsupported type
if ( $trace =~ /(^#)|([^*]#)/ ) {
# Check for unsupported return type
if ( $type =~ /(^#)|([^*]#)/ ) {
errmesg $file, $name, "unsupported type in return type\nAdd to TypeString hash in trace script and update H5_trace_args()";
print "type = '$type'\n";
}
# Check for unsupported argument type
my $index = 0;
for (@arg_str) {
if ( $_ =~ /(^#)|([^*]#)/ ) {
errmesg $file, $name, "unsupported type in args\nAdd to TypeString hash in trace script and update H5_trace_args()";
print "type = $arg_type[$index]\n";
}
$index++;
}
goto error;
}
# Check for unknown (and therefore unsupported) type
if ( $trace =~ /(^!)|([^*]!)/ ) {
# Check for unsupported return type
if ( $type =~ /(^!)|([^*]!)/ ) {
errmesg $file, $name, "unknown type in return type\nAdd to TypeString hash in trace script and also update H5_trace_args() if used by value";
print "type = '$type'\n";
}
# Check for unsupported argument type
my $index = 0;
for (@arg_str) {
if ( $_ =~ /(^!)|([^*]!)/ ) {
errmesg $file, $name, "unknown type in args\nAdd to TypeString hash in trace script and also update H5_trace_args() if used by value";
print "type = $arg_type[$index]\n";
}
$index++;
}
goto error;
}
if ($body =~ s/((\n[ \t]*)H5TRACE\d+\s*\(.*?\);)\n/"$2$trace"/es) {
# Replaced an H5TRACE macro.
} elsif ($body=~s/((\n[ \t]*)FUNC_ENTER\w*[ \t]*(\(.*?\))?;??)\n/"$1$2$trace"/es) {
# Added an H5TRACE macro after a FUNC_ENTER macro.
} else {
errmesg $file, $name, "unable to insert tracing information";
print "body = ", $body, "\n";
goto error;
}
}
#Increment # of API routines modified
$file_api++;
}
# Check for H5ARG_TRACE macros in non-API routines
if ( $body =~ /H5ARG_TRACE/ ) {
# Check for untraceable type (deferred until $argtrace used)
if ( $argtrace =~ /(^!)|([^*]!)/ ) {
errmesg $file, $name, "untraceable type in args";
print "args = '$orig_args'\n";
goto error;
}
# Replace / update H5ARG_TRACE macro.
$body =~ s/(H5ARG_TRACE(\d+\s*\(.*?\))?)/"$argtrace"/esg;
#Increment # of non-API routines modified
$file_args++;
}
error:
return "\n$type\n$name($orig_args)\n$body";
}
##############################################################################
# Process each source file, rewriting API functions with updated
# tracing information.
#
# Main driver: for each source file named on the command line, read it,
# rewrite every API function via rewrite_func(), and write the result
# back (keeping a "file~" backup) only if anything changed.
for $file (@ARGV) {
$file_api = 0;
$file_args = 0;
# Ignore some files that do not need tracing macros
unless ($file eq "H5FDmulti.c" or $file eq "src/H5FDmulti.c" or $file eq "H5FDstdio.c" or $file eq "src/H5FDstdio.c" or $file eq "src/H5TS.c" or $file eq "src/H5FDperform.c") {
# Snarf up the entire file
open SOURCE, $file or die "$file: $!\n";
$Source = join "", <SOURCE>;
close SOURCE;
# Make a copy of the original data
my $original = $Source;
# Make modifications
# Match a function definition: return type on its own line, an H5*
# name, parenthesized args, then a brace-delimited body ending with
# '}' at the start of a line; each match is fed to rewrite_func().
$Source =~ s/\n([A-Za-z]\w*(\s+[A-Za-z]\w*)*\s*\**)\n #type
(H5[A-Z]{0,2}_?[a-zA-Z0-9_]\w*) #name
\s*\((.*?)\)\s* #args
(\{.*?\n\}[^\n]*) #body
/rewrite_func($file,$1,$3,$4,$5)/segx;
# If the source changed then print out the new version
if ($original ne $Source) {
printf "%s: instrumented %d API function%s and %d argument list%s\n",
$file, $file_api, (1 == $file_api ? "" : "s"),
$file_args, (1 == $file_args ? "" : "s");
# Keep the original as a backup before overwriting in place.
rename $file, "$file~" or die "unable to make backup";
open SOURCE, ">$file" or die "unable to modify source";
print SOURCE $Source;
close SOURCE;
$total_api += $file_api;
$total_args += $file_args;
}
}
}
# Final status: loud banner and non-zero exit if any function failed to
# parse, otherwise a summary of how many routines were instrumented.
if ($found_errors) {
print "\n";
print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n";
print "*** ERRORS FOUND *** ERRORS FOUND *** ERRORS FOUND ****\n";
print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n";
exit 1;
}
print "Finished processing HDF5 API calls:\n";
printf "\tinstrumented %d API function%s and %d argument list%s\n",
$total_api, ($total_api == 1 ? "" : "s"),
$total_args, ($total_args == 1 ? "" : "s");

@ -0,0 +1,549 @@
#!/usr/bin/env perl
require 5.003;
use warnings;
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# Quincey Koziol
# 9 Aug 2013
#
# Purpose: Given an input file containing the output from a build of the
# library, gather the file names and line numbers, alias
# identical types of warnings together into a single bin and
# report the number of warnings for each type of warning, each file
# and the total count of warnings
# Perl modules/settings
use strict;
use Getopt::Std;
# Global variables, for accumulating information
my $totalcount = 0;
my $notecount = 0;
my $dupcount = 0;
my %warn_count = ();
my $warncount;
my %warn_file = ();
my %warn_file_line = ();
my %warn_file_line_offset = ();
my %file_count = ();
my $filecount;
my $ignorecount = 0;
my @ignorenames;
my %ignored_files = ();
my %warn_file_indices = ();
my %file_warn_indices = ();
my @warn_match_strings;
my @file_match_strings;
my %file_warn = ();
my %file_warn_line = ();
my $current_warning = 0;
my $current_file = 0;
my $warn_index;
my $genericize = 1;
# Info about last name / line / offset for file
my $last_c_name;
my $last_fort_name;
my $last_fort_line;
my $last_fort_offset;
# Display usage
# Print the command-line usage text and exit.
# Fix: the usage line previously read "[-S <file string list]" with a
# missing '>'; now closed properly. Rewritten as a single here-doc so
# the text is easier to maintain than two dozen print statements.
sub do_help {
print <<"EOF";
Usage: 'warnhist [-h, --help] [-t <prefix>] [-w <n>] [-W] [-f <n>] [-F] [-s <warning string list>] [-S <file string list>] [-l] [-u] [-i <name list>] [file]'
\t-h, --help\tDisplay this usage
\t-t <prefix>\tTrim pathname prefix from filenames, <prefix>
\t-w <n>\tDisplay files for a given warning index list, <n>
\t\t<n> can be a single value, a range, or a comma separated list
\t\tFor example: '0' or '0,4' or '8-10' or '0,2-4,8-10,13'
\t-W\tDisplay files for all warnings
\t-f <n>\tDisplay warnings for a given file index list, <n>
\t\t<n> can be a single value, a range, or a comma separated list
\t\tFor example: '0' or '0,4' or '8-10' or '0,2-4,8-10,13'
\t-F\tDisplay warnings for all files
\t-s <warning string list>\tDisplay files for warnings which contain a string, <warning string list>
\t\t<warning string list> is a comma separated list, with no spaces
\t\tFor example: 'Wunused-dummy-argument' or 'Wunused-dummy-argument,Wunused-variable'
\t-S <file string list>\tDisplay warnings for files which contain a string, <file string list>
\t\t<file string list> is a comma separated list, with no spaces
\t\tFor example: 'H5Fint' or 'H5Fint,H5Gnode'
\t-l\tDisplay line numbers for file/warning
\t-u\tLeave 'unique' types in warnings, instead of genericizing them
\t-i <name list>\tIgnore named files, <name list>
\t\t<name list> is a comma separated list, with no spaces
\t\tFor example: 'H5LTparse' or 'H5LTparse,H5LTanalyze'
\tfile\tFilename containing build output
\t\tIf no file is given, standard input is used.
EOF
exit;
}
# Getopt::Std hook: invoked automatically when --help is given;
# delegates to do_help(), which prints usage and exits.
sub main::HELP_MESSAGE {
do_help();
}
# declare the Perl command line flags/options we want to allow
my %options=();
getopts("FWhut:w:f:s:S:i:l", \%options);
# Expand a comma-separated list of indices and/or ranges (e.g. "0,2-4,13")
# into the given hash ref, mapping each index to itself. Factored out of
# the formerly duplicated '-w' and '-f' parsing code.
sub parse_index_ranges {
my ($spec, $index_hash) = @_;
for my $x (split /,/, $spec) {
if ($x =~ /\-/) {
# Range of indices, e.g. "8-10"
my ($start_index, $end_index) = split /\-/, $x;
for my $y ($start_index..$end_index) {
$index_hash->{$y} = $y if !exists $index_hash->{$y};
}
}
else {
# Single index
$index_hash->{$x} = $x if !exists $index_hash->{$x};
}
}
}
# Display usage, if requested
if($options{h}) {
do_help();
}
# Parse list of file names to ignore
if(exists $options{i}) {
@ignorenames = split /,/, $options{i};
}
# Parse list of warning indices to expand file names
if(exists $options{w}) {
parse_index_ranges($options{w}, \%warn_file_indices);
}
# Parse list of warning strings to expand file names
if(exists $options{s}) {
@warn_match_strings = split /,/, $options{s};
}
# Parse list of file indices to expand warnings
if(exists $options{f}) {
parse_index_ranges($options{f}, \%file_warn_indices);
}
# Parse list of warning strings for files to expand warnings
if(exists $options{S}) {
@file_match_strings = split /,/, $options{S};
}
# Check if warnings should stay unique and not be "genericized"
if($options{u}) {
$genericize = 0;
}
# Main input loop: classify each line of build output, extract the
# (file, line, offset, warning-text) tuple for every compiler warning,
# optionally genericize the warning text, and accumulate counts in the
# global hashes. Lines that are not warnings (or are ignored/duplicate)
# are skipped via 'next PARSE_LINES'.
PARSE_LINES:
while (<>) {
my $name;
my $line;
my $prev_line;
my $toss;
my $offset;
my $warning;
my $extra;
my $extra2;
# Retain last FORTRAN compile line, which comes a few lines before warning
if($_ =~ /.*\.[fF]90:.*/) {
($last_fort_name, $last_fort_line, $last_fort_offset) = split /\:/, $_;
($last_fort_line, $toss) = split /\./, $last_fort_line;
}
# Retain last C/C++ compile line, which possibly comes a few lines before warning
if($_ =~ /.*[A-Za-z0-9_]\.[cC]:.*/) {
($last_c_name, $toss) = split /\:/, $_;
}
# Retain C/C++ compile line, which comes with the line of warning
if($_ =~ /.*[A-Za-z0-9_]\.[chC]\(.*[0-9]\):.*#.*/) {
$last_c_name = $_;
}
# Skip lines that don't have the word "warning"
next if $_ !~ /[Ww]arning/;
# Skip warnings from linker
next if $_ =~ /ld: warning:/;
# Skip warnings from build_py and install_lib
next if $_ =~ /warning: (build_py|install_lib)/;
# Skip variables with the word 'warning' in them
next if $_ =~ /_warning_/;
# Skip AMD Optimizing Compiler (aocc) lines "<#> warning(s) generated."
next if $_ =~ / warnings? generated\./;
# "Hide" the C++ '::' symbol until we've parsed out the parts of the line
# (the line is split on ':' below, so '::' would break field boundaries)
while($_ =~ /\:\:/) {
$_ =~ s/\:\:/@@@@/g;
}
# Check for weird formatting of warning message
$line = "??";
$offset = "??";
if($_ =~ /^cc1: warning:.*/) {
$name = $last_c_name;
($toss, $toss, $warning, $extra, $extra2) = split /\:/, $_;
# Check for CMAKE build with warning on first line and no filename
} elsif($_ =~ /^\s*[Ww]arning:.*/) {
$name = $last_c_name;
($toss, $warning, $extra, $extra2) = split /\:/, $_;
# Check for file-scope gcc Fortran warning output
} elsif($_ =~ /f\d\d\d: Warning:/) {
# These are interspersed with the "compiling a file" output
# when compiling with `make -j` and thus difficult to tie to
# any particular file. They are due to things like inappropriate
# build options and don't have a line number.
#
# They start with f, as in f951
$name = "(generic)";
$line = int(rand(1000000)); # Hack to avoid counting as duplictates
($warning) = $_ =~ /\[(.*)\]/x;
# Check for FORTRAN warning output
} elsif($_ =~ /^Warning:.*/) {
$name = $last_fort_name;
$line = $last_fort_line;
$offset = $last_fort_offset;
($toss, $warning, $extra, $extra2) = split /\:/, $_;
# Check for improperly parsed filename or line
if($name =~ /^$/) {
print "Filename is a null string! Input line #$. is: '$_'";
next
}
if($line =~ /^$/) {
print "Line is a null string! Input line #$. is: '$_'";
next
}
# Check for non-GCC warning (Solaris/Oracle?)
} elsif($_ =~ /^\".*, line [0-9]+: *[Ww]arning:.*/) {
($name, $toss, $warning, $extra, $extra2) = split /\:/, $_;
($name, $line) = split /\,/, $name;
$name =~ s/^\"//g;
$name =~ s/\"$//g;
$line =~ s/^\s*line\s*//g;
# Check for Intel icc warning
} elsif($_ =~ /.*[A-Za-z0-9_]\.[chC]\(.*[0-9]\):.*#.*/) {
($last_c_name, $toss, $warning) = split /\:/, $last_c_name;
($name, $line) = split /\(/, $last_c_name;
$line =~ s/\)//g;
} else {
# Check for 'character offset' field appended to file & line #
# (This is probably specific to GCC)
if($_ =~ /^.*[0-9]+\:[0-9]+\:/) {
($name, $line, $offset, $toss, $warning, $extra, $extra2) = split /\:/, $_;
} else {
($name, $line, $toss, $warning, $extra, $extra2) = split /\:/, $_;
}
}
# Check for extra ':' followed by more text in original warning string,
# and append the ':' and text back onto the parsed warning
# (Use 'length $extra' idiom to avoid warning when $extra is undefined)
if(length $extra ) {
$warning = join ':', $warning, $extra;
}
if(length $extra2 ) {
$warning = join ':', $warning, $extra2;
}
# Restore the C++ '::' symbol now that we've parsed out the parts of the line
while($warning =~ /@@@@/) {
$warning =~ s/@@@@/\:\:/g;
}
# Trim leading '..' paths from filename
while($name =~ /^\.\.\//) {
$name =~ s/^\.\.\///g;
}
# Check for trimming prefix
if((exists $options{t}) && ($name =~ /$options{t}/)) {
$name =~ s/^$options{t}\///g;
}
# Check for ignored file
if(exists $options{i}) {
for my $x (@ignorenames) {
if($name =~ /$x/) {
$ignorecount++;
if(!(exists $ignored_files{$name})) {
$ignored_files{$name} = $name;
}
next PARSE_LINES;
}
}
}
# Check for improperly parsed warning (usually an undefined warning string)
if(!defined $warning) {
print "Warning Undefined! Input line is: '$_'";
next
}
# Get rid of leading & trailing whitespace
$warning =~ s/^\s//g;
$warning =~ s/\s$//g;
# Check for improperly parsed warning
if($warning =~ /^$/) {
print "Warning is a null string! Input line is: '$_'";
next
}
# Convert all quotes to '
$warning =~ s/‘/'/g;
$warning =~ s/’/'/g;
$warning =~ s/"/'/g;
#
# These skipped messages & "genericizations" may be specific to GCC
# Skip supplemental warning message
if($warning =~ /near initialization for/) {
$notecount++;
next
}
# Skip C++ supplemental warning message
if($warning =~ /in call to/) {
$notecount++;
next
}
# Skip GCC warning that should be a note
if($_ =~ /\(this will be reported only once per input file\)/) {
$notecount++;
next
}
# Collapse compiler-specific details (type names, numbers, formats)
# so that equivalent warnings land in the same bin.
if($genericize) {
# Eliminate C/C++ "{aka <some type>}" and "{aka '<some type>'}" info
if($warning =~ /\s(\{|\()aka '?[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#]+[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#\ ]*'?(\}|\))/) {
$warning =~ s/\s(\{|\()aka '?[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#]+[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#\ ]*'?(\}|\))//g;
}
# Genericize C/C++ '<some type>', printf format '%<some format>', and
# "unknown warning group" into '-'
if($warning =~ /'[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#\-\=]+[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#\-\=\ ]*'/) {
$warning =~ s/'[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#\-\=]+[A-Za-z_0-9\(\)\*\,\[\]\.\<\>\&\:\+\#\-\=\ ]*'/'-'/g;
}
if($warning =~ /'%[\#0\-\ \+]*[,;\:_]?[0-9\*]*\.?[0-9\*]*[hjltzL]*[aAcdeEfFgGinosuxX]'/) {
$warning =~ s/'%[\#0\-\ \+]*[,;\:_]?[0-9\*]*\.?[0-9\*]*[hjltzL]*[aAcdeEfFgGinosuxX]'/'-'/g;
}
# Genericize C/C++ "<macro>" warnings into "-"
if($warning =~ /"[A-Za-z_0-9]*"/) {
$warning =~ s/"[A-Za-z_0-9]*"/"-"/g;
}
# Genericize [GCC?] C/C++ warning text about suggested attribute
if($warning =~ /attribute=[A-Za-z_0-9]*\]/) {
$warning =~ s/=[A-Za-z_0-9]*\]/=-\]/g;
}
# Genericize FORTRAN "at (<n>)" into "at (-)", "REAL(<n>)" into "REAL(-)",
# and "INTEGER(<n>)" into "INTEGER(-)"
if($warning =~ /.*at\s\([0-9]+\).*/) {
$warning =~ s/at\s\([0-9]+\)/at \(-\)/g;
}
if($warning =~ /.*REAL\([0-9]+\).*/) {
$warning =~ s/REAL\([0-9]+\)/REAL\(-\)/g;
}
if($warning =~ /.*INTEGER\([0-9]+\).*/) {
$warning =~ s/INTEGER\([0-9]+\)/INTEGER\(-\)/g;
}
# Genericize standalone numbers in warnings
if($warning =~ /(\s|')-?[0-9]+(\s|')/) {
$warning =~ s/-?[0-9]+/-/g;
}
# Genericize unusual GCC/G++/GFORTRAN warnings that aren't handled above
if($warning =~ /\[deprecation\] [A-Za-z_0-9]*\([A-Za-z_,0-9]*\) in [A-Za-z_0-9]* has been deprecated.*/) {
$warning =~ s/[A-Za-z_0-9]*\([A-Za-z_,0-9]*\) in [A-Za-z_0-9]*/-\(-\) in -/g;
}
}
# <end possible GCC-specific code>
# Check if we've already seen this warning on this line in this file
# (Can happen for warnings from inside header files)
if( !exists $warn_file_line_offset{$warning}{$name}{$line}{$offset} ) {
# Increment count for [generic] warning
$warn_count{$warning}++;
$warn_file{$warning}{$name}++;
$warn_file_line{$warning}{$name}{$line}++;
$warn_file_line_offset{$warning}{$name}{$line}{$offset}++;
# Increment count for filename
$file_count{$name}++;
$file_warn{$name}{$warning}++;
$file_warn_line{$name}{$warning}{$line}++;
# Increment total count of warnings
$totalcount++;
}
else {
# Increment count of duplicate warnings
$dupcount++;
}
# print STDERR "name = $name\n";
# print STDERR "line = $line\n";
# print STDERR "offset = $offset\n";
# print STDERR "warning = \"$warning\"\n";
}
# Reporting phase: overall counts, then warnings sorted by frequency,
# then files sorted by warning count, then the list of ignored files.
print "Total unique [non-ignored] warnings: $totalcount\n";
print "Ignored notes / supplemental warning lines [not counted in unique warnings]: $notecount\n";
print "Duplicated warning lines [not counted in unique warnings]: $dupcount\n";
print "Total ignored warnings: $ignorecount\n";
$warncount = keys %warn_count;
print "Total unique kinds of warnings: $warncount\n";
$filecount = keys %file_count;
print "Total files with warnings: $filecount\n\n";
# Print warnings in decreasing frequency
print "# of Warnings by frequency (file count)\n";
print "=======================================\n";
for my $x (sort {$warn_count{$b} <=> $warn_count{$a}} keys(%warn_count)) {
printf ("[%2d] %4d (%2d) - %s\n", $current_warning++, $warn_count{$x}, scalar(keys %{$warn_file{$x}}), $x);
# Per-warning file breakdown, shown when -W, -w or -s was given
if((exists $options{W}) || (exists $options{w}) || (exists $options{s})) {
my $curr_index = $current_warning - 1;
my $match = 0;
# Check for string from list in current warning
if(exists $options{s}) {
for my $y (@warn_match_strings) {
# print STDERR "y = '$y'\n";
if($x =~ /$y/) {
# print STDERR "matched warning = '$x'\n";
$match = 1;
last;
}
}
}
# Check if current warning index matches
if((exists $warn_file_indices{$curr_index}) && $curr_index == $warn_file_indices{$curr_index}) {
$match = 1;
}
if($match) {
for my $y (sort {$warn_file{$x}{$b} <=> $warn_file{$x}{$a}} keys(%{$warn_file{$x}})) {
printf ("\t%4d - %s\n", $warn_file{$x}{$y}, $y);
if(exists $options{l}) {
my $lines = join ", ", sort {$a <=> $b} keys %{$warn_file_line{$x}{$y}};
printf("\t\tLines: $lines \n");
}
}
}
}
}
# Print warnings in decreasing frequency, by filename
print "\n# of Warnings by filename (warning type)\n";
print "========================================\n";
for my $x (sort {$file_count{$b} <=> $file_count{$a}} keys(%file_count)) {
printf ("[%3d] %4d (%2d) - %s\n", $current_file++, $file_count{$x}, scalar(keys %{$file_warn{$x}}), $x);
# Per-file warning breakdown, shown when -F, -f or -S was given
if((exists $options{F}) || (exists $options{f}) || (exists $options{S})) {
my $curr_index = $current_file - 1;
my $match = 0;
# Check for string from list in current file
if(exists $options{S}) {
for my $y (@file_match_strings) {
# print STDERR "y = '$y'\n";
if($x =~ /$y/) {
# print STDERR "matched warning = '$x'\n";
$match = 1;
last;
}
}
}
# Check if current file index matches
if((exists $file_warn_indices{$curr_index}) && $curr_index == $file_warn_indices{$curr_index}) {
$match = 1;
}
if($match) {
for my $y (sort {$file_warn{$x}{$b} <=> $file_warn{$x}{$a}} keys(%{$file_warn{$x}})) {
printf ("\t%4d - %s\n", $file_warn{$x}{$y}, $y);
if(exists $options{l}) {
my $lines = join ", ", sort {$a <=> $b} keys %{$file_warn_line{$x}{$y}};
printf("\t\tLines: $lines \n");
}
}
}
}
}
# Print names of files that were ignored
# Check for ignored file
if(exists $options{i}) {
print "\nIgnored filenames\n";
print "=================\n";
for my $x (sort keys(%ignored_files)) {
print "$x\n";
}
}

@ -0,0 +1,18 @@
# CMake build script for the HDF5 C++ wrapper library.
cmake_minimum_required (VERSION 3.18)
project (HDF5_CPP CXX)
# The wrapper library itself is always built.
add_subdirectory (src)
#-----------------------------------------------------------------------------
# Build the CPP Examples
#-----------------------------------------------------------------------------
if (HDF5_BUILD_EXAMPLES)
add_subdirectory (examples)
endif ()
#-----------------------------------------------------------------------------
# Build the CPP unit tests
#-----------------------------------------------------------------------------
if (BUILD_TESTING)
add_subdirectory (test)
endif ()

File diff suppressed because it is too large Load Diff

@ -0,0 +1,49 @@
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
##
## Makefile.am
## Run automake to generate a Makefile.in from this file.
#
# Top-level HDF5-C++ Makefile(.in)
#
include $(top_srcdir)/config/commence.am
## Only descend into test/ when the tests were enabled by configure.
if BUILD_TESTS_CONDITIONAL
TEST_DIR = test
else
TEST_DIR=
endif
## Only recurse into subdirectories if C++ interface is enabled.
if BUILD_CXX_CONDITIONAL
SUBDIRS=src $(TEST_DIR)
endif
## 'make dist' packages every subdirectory regardless of configure options.
DIST_SUBDIRS = src test examples
# Install examples
## Delegate example (un)installation to the examples subdirectory; fail
## the whole target if the sub-make fails.
install-examples uninstall-examples:
@(cd examples && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1;
installcheck-local:
@(cd examples && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1;
# Clean examples when check-clean is invoked
## Double-colon rule: commence.am/conclude.am may contribute their own
## check-clean actions; this one also recurses into examples.
check-clean ::
@$(MAKE) $(AM_MAKEFLAGS) build-$@ || exit 1;
@for d in X $(SUBDIRS) examples; do \
if test $$d != X && test $$d != .; then \
(set -x; cd $$d && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1; \
fi; \
done
include $(top_srcdir)/config/conclude.am

File diff suppressed because it is too large Load Diff

@ -0,0 +1,87 @@
cmake_minimum_required (VERSION 3.18)
project (HDF5_CPP_EXAMPLES CXX)
# --------------------------------------------------------------------
# Notes: When creating examples they should be prefixed
# with "cpp_ex_". This allows for easier filtering of the examples.
# --------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Define examples
#-----------------------------------------------------------------------------
# Plain API examples.  CMakeTests.cmake consumes this list by name.
set (examples
    create
    readdata
    writedata
    compound
    extend_ds
    chunks
    h5group
)
# Tutorial examples.  CMakeTests.cmake consumes this list by name.
set (tutr_examples
    h5tutr_cmprss
    h5tutr_crtdat
    h5tutr_crtatt
    h5tutr_crtgrpar
    h5tutr_crtgrp
    h5tutr_crtgrpd
    h5tutr_extend
    h5tutr_rdwt
    h5tutr_subset
)
# Both example groups are configured identically, so build them all in a
# single loop (previously two byte-identical loops).  Target creation order
# is unchanged: all ${examples} first, then all ${tutr_examples}.
foreach (example ${examples} ${tutr_examples})
  add_executable (cpp_ex_${example} ${HDF5_CPP_EXAMPLES_SOURCE_DIR}/${example}.cpp)
  target_include_directories (cpp_ex_${example} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
  if (NOT BUILD_SHARED_LIBS)
    TARGET_C_PROPERTIES (cpp_ex_${example} STATIC)
    target_link_libraries (cpp_ex_${example} PRIVATE ${HDF5_CPP_LIB_TARGET} ${HDF5_LIB_TARGET})
  else ()
    TARGET_C_PROPERTIES (cpp_ex_${example} SHARED)
    target_link_libraries (cpp_ex_${example} PRIVATE ${HDF5_CPP_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
    if (MINGW AND HDF5_MINGW_STATIC_GCC_LIBS)
      # NOTE(review): these options are applied to the C++ *library* target,
      # not to cpp_ex_${example}; the example executable was presumably
      # intended -- confirm upstream intent.  Behavior kept as-is.
      target_link_options (${HDF5_CPP_LIBSH_TARGET}
          PRIVATE -static-libgcc -static-libstdc++
      )
    endif ()
  endif ()
  set_target_properties (cpp_ex_${example} PROPERTIES FOLDER examples/cpp)
  #-----------------------------------------------------------------------------
  # Add Target to clang-format
  #-----------------------------------------------------------------------------
  if (HDF5_ENABLE_FORMATTERS)
    clang_format (HDF5_CPP_EXAMPLES_${example}_FORMAT cpp_ex_${example})
  endif ()
endforeach ()
# Register the examples as CTest tests only for serial example testing.
if (BUILD_TESTING AND HDF5_TEST_CPP AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL)
  include (CMakeTests.cmake)
endif ()

@ -0,0 +1,122 @@
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
##############################################################################
##############################################################################
###           T E S T I N G                                                ###
##############################################################################
##############################################################################

# Register the clear/clean fixture pair for a group of example output files.
#   prefix      - test name prefix (e.g. CPP_ex)
#   fixture     - fixture name the example tests of this group will require
#   cleanfiles  - NAME of the list variable holding the files to remove
# Both tests remove any output files left over from a previous run: the
# FIXTURES_SETUP test before the group starts, FIXTURES_CLEANUP after it ends.
macro (ADD_H5CPP_EX_FIXTURE prefix fixture cleanfiles)
  add_test (
      NAME ${prefix}-clear-objects
      COMMAND ${CMAKE_COMMAND}
          -E remove ${${cleanfiles}}
  )
  set_tests_properties (${prefix}-clear-objects PROPERTIES
      FIXTURES_SETUP ${fixture}
      WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
  )
  add_test (
      NAME ${prefix}-clean-objects
      COMMAND ${CMAKE_COMMAND}
          -E remove ${${cleanfiles}}
  )
  set_tests_properties (${prefix}-clean-objects PROPERTIES
      FIXTURES_CLEANUP ${fixture}
      WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
  )
endmacro ()

# Register one example executable as test CPP_ex_${example}.
# Tests are serialized through the caller-scope variable ${last_test}
# (macros share the caller's scope, so the DEPENDS chain is preserved).
macro (ADD_H5CPP_EX_TEST example fixture)
  if (HDF5_ENABLE_USING_MEMCHECKER)
    # Under valgrind/memcheck, run the program directly so the checker wraps it.
    add_test (NAME CPP_ex_${example} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $<TARGET_FILE:cpp_ex_${example}>)
  else ()
    # Otherwise run through runTest.cmake, which captures output and checks
    # the exit code (output comparison is skipped).
    add_test (NAME CPP_ex_${example} COMMAND "${CMAKE_COMMAND}"
        -D "TEST_EMULATOR=${CMAKE_CROSSCOMPILING_EMULATOR}"
        -D "TEST_PROGRAM=$<TARGET_FILE:cpp_ex_${example}>"
        -D "TEST_ARGS:STRING="
        -D "TEST_EXPECT=0"
        -D "TEST_SKIP_COMPARE=TRUE"
        -D "TEST_OUTPUT=cpp_ex_${example}.txt"
        #-D "TEST_REFERENCE=cpp_ex_${example}.out"
        -D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
        -P "${HDF_RESOURCES_DIR}/runTest.cmake"
    )
  endif ()
  set_tests_properties (CPP_ex_${example} PROPERTIES FIXTURES_REQUIRED ${fixture})
  if (last_test)
    set_tests_properties (CPP_ex_${example} PROPERTIES DEPENDS ${last_test})
  endif ()
  set (last_test "CPP_ex_${example}")
endmacro ()

# Output files produced by the plain API examples.
set (CPP_EX_CLEANFILES
    Group.h5
    SDS.h5
    SDScompound.h5
    SDSextendible.h5
    Select.h5
)
ADD_H5CPP_EX_FIXTURE (CPP_ex clear_cppex CPP_EX_CLEANFILES)
foreach (example ${examples})
  ADD_H5CPP_EX_TEST (${example} clear_cppex)
endforeach ()
#the following dependencies are handled by the order of the files
#  SET_TESTS_PROPERTIES(CPP_ex_readdata PROPERTIES DEPENDS CPP_ex_create)
#  SET_TESTS_PROPERTIES(CPP_ex_chunks PROPERTIES DEPENDS CPP_ex_extend_ds)

# Output files produced by the tutorial examples.
set (CPP_EX_TUTR_CLEANFILES
    h5tutr_cmprss.h5
    h5tutr_dset.h5
    h5tutr_extend.h5
    h5tutr_group.h5
    h5tutr_groups.h5
    h5tutr_subset.h5
)
ADD_H5CPP_EX_FIXTURE (CPP_ex_tutr clear_cppex_tutr CPP_EX_TUTR_CLEANFILES)
foreach (example ${tutr_examples})
  ADD_H5CPP_EX_TEST (${example} clear_cppex_tutr)
endforeach ()
#the following dependencies are handled by the order of the files
#  SET_TESTS_PROPERTIES(CPP_ex_h5tutr_crtatt PROPERTIES DEPENDS CPP_ex_h5tutr_crtdat)
#  SET_TESTS_PROPERTIES(CPP_ex_h5tutr_rdwt PROPERTIES DEPENDS CPP_ex_h5tutr_crtdat)
#  SET_TESTS_PROPERTIES(CPP_ex_h5tutr_crtgrpd PROPERTIES DEPENDS CPP_ex_h5tutr_crtgrpar)

File diff suppressed because it is too large Load Diff

@ -0,0 +1,81 @@
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://www.hdfgroup.org/licenses.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
##
## Makefile.am
## Run automake to generate a Makefile.in from this file.
##
#
# HDF5 Library Examples Makefile(.in)
#
include $(top_srcdir)/config/commence.am
## Script installed alongside the examples so users can rebuild/run them.
INSTALL_SCRIPT_FILES = run-c++-ex.sh
# These are the programs that 'make all' or 'make prog' will build and
# which 'make check' will run.  List them in the order they should be run.
EXAMPLE_PROG=create readdata writedata compound extend_ds chunks h5group \
             h5tutr_cmprss h5tutr_crtatt h5tutr_crtdat h5tutr_crtgrpar \
             h5tutr_crtgrp h5tutr_crtgrpd h5tutr_extend h5tutr_rdwt \
             h5tutr_subset
TEST_SCRIPT=testh5c++.sh
TEST_EXAMPLES_SCRIPT=$(INSTALL_SCRIPT_FILES)
# These are the example files to be installed
INSTALL_FILES=create.cpp readdata.cpp writedata.cpp compound.cpp \
              extend_ds.cpp chunks.cpp h5group.cpp \
              h5tutr_cmprss.cpp h5tutr_crtatt.cpp h5tutr_crtdat.cpp \
              h5tutr_crtgrpar.cpp h5tutr_crtgrp.cpp h5tutr_crtgrpd.cpp \
              h5tutr_extend.cpp h5tutr_rdwt.cpp h5tutr_subset.cpp
# Some of the examples depend on files created by running other examples.
# The '.chkexe_' stamp files are touched by conclude.am when a test program
# passes; every prerequisite below must use the '_' suffix or the
# dependency never fires.  (Fixed: three prerequisites were missing the
# trailing '_', and one target was misspelled 'h5tutrcrtatt'.)
readdata.chkexe_: create.chkexe_
chunks.chkexe_: extend_ds.chkexe_
h5tutr_rdwt.chkexe_: h5tutr_crtdat.chkexe_
h5tutr_crtatt.chkexe_: h5tutr_crtdat.chkexe_
h5tutr_crtgrpd.chkexe_: h5tutr_crtgrpar.chkexe_
# Tell conclude.am that these are C++ tests.
CXX_API=yes
# Where to install examples
# Note: no '/' after DESTDIR. Explanation in commence.am
EXAMPLEDIR=${DESTDIR}$(examplesdir)/c++
EXAMPLETOPDIR=${DESTDIR}$(examplesdir)
# How to build programs using h5c++
$(EXTRA_PROG): $(H5CPP)
	$(H5CPP) $(H5CCFLAGS) $(CPPFLAGS) -o $@ $(srcdir)/$@.cpp
# List dependencies for each program.  Normally, automake would take
# care of this for us, but if we tell automake about the programs it
# will try to build them with the normal C++ compiler, not h5c++.  This is
# an inelegant way of solving the problem, unfortunately.
create: $(srcdir)/create.cpp
readdata: $(srcdir)/readdata.cpp
writedata: $(srcdir)/writedata.cpp
compound: $(srcdir)/compound.cpp
extend_ds: $(srcdir)/extend_ds.cpp
chunks: $(srcdir)/chunks.cpp
h5group: $(srcdir)/h5group.cpp
h5tutr_cmprss: $(srcdir)/h5tutr_cmprss.cpp
h5tutr_crtatt: $(srcdir)/h5tutr_crtatt.cpp
h5tutr_crtdat: $(srcdir)/h5tutr_crtdat.cpp
h5tutr_crtgrpar: $(srcdir)/h5tutr_crtgrpar.cpp
h5tutr_crtgrp: $(srcdir)/h5tutr_crtgrp.cpp
h5tutr_crtgrpd: $(srcdir)/h5tutr_crtgrpd.cpp
h5tutr_extend: $(srcdir)/h5tutr_extend.cpp
h5tutr_rdwt: $(srcdir)/h5tutr_rdwt.cpp
h5tutr_subset: $(srcdir)/h5tutr_subset.cpp
include $(top_srcdir)/config/examples.am
include $(top_srcdir)/config/conclude.am

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save