Initial commit

main
lib 2 years ago
commit 96dbec27e0
  1. 86
      .appveyor.yml
  2. 57
      .github/workflows/build_external_lib.yml
  3. 130
      .github/workflows/build_netcdf_no_hdf5.yml
  4. 129
      .github/workflows/build_test.yml
  5. 309
      .github/workflows/build_variant.yml
  6. 10
      .github/workflows/ci-build.sh
  7. 35
      .github/workflows/cla.yml
  8. 119
      .github/workflows/coverity-scan.yml
  9. 30
      .github/workflows/docker-exodus.yml
  10. 32
      .github/workflows/docker-seacas.yml
  11. 169
      .github/workflows/intel-build.yml
  12. 26
      .github/workflows/msys2.yml
  13. 34
      .github/workflows/python-linting.yml
  14. 19
      .github/workflows/stale.yml
  15. 11
      .github/workflows/trailing.yml
  16. 139
      .gitignore
  17. 19
      .lgtm.yml
  18. 47
      CMakeLists.txt
  19. 46
      CODE-OF-CONDUCT.md
  20. 85
      CONTRIBUTING.md
  21. 2
      DEPENDENCIES
  22. 26
      LICENSE
  23. 73
      NetCDF-Mapping.md
  24. 10
      PackagesList.cmake
  25. 11
      ProjectName.cmake
  26. 57
      README-KOKKOS.md
  27. 17
      README.Coverity
  28. 330
      README.md
  29. 109
      SEACAS-CLA.md
  30. BIN
      SEACAS-Test/1-block.es
  31. BIN
      SEACAS-Test/4-block.es
  32. BIN
      SEACAS-Test/8-block.es
  33. 20
      SEACAS-Test/base.fsq
  34. 34
      SEACAS-Test/base.g3i
  35. 160
      SEACAS-Test/base_o.fsq
  36. 5
      SEACAS-Test/base_o.g3i
  37. 22
      SEACAS-Test/base_sh.fsq
  38. 20
      SEACAS-Test/base_sh.g3i
  39. 24
      SEACAS-Test/baseline.gj
  40. 2
      SEACAS-Test/baseline.grp
  41. 166
      SEACAS-Test/baseline_o.fsq
  42. 17
      SEACAS-Test/cap.fsq
  43. 28
      SEACAS-Test/cap.g3i
  44. 34
      SEACAS-Test/common.h
  45. 33
      SEACAS-Test/common_2.h
  46. 21
      SEACAS-Test/names.grp
  47. 22
      SEACAS-Test/new_shell.gj
  48. 22
      SEACAS-Test/new_tape.gj
  49. 18
      SEACAS-Test/rigid.fsq
  50. 2
      SEACAS-Test/rigid.g3i
  51. 4
      SEACAS-Test/rigid.grp
  52. 29
      SEACAS-Test/scale_time.py
  53. 119
      SEACAS-Test/test_exo.py
  54. 16
      SETUP
  55. 156
      TPL-Manual-Install.md
  56. 45
      TPL/adios2/runcmake.sh
  57. 72
      TPL/catalyst2/runcmake.sh
  58. 65
      TPL/cgns/runcmake.sh
  59. 68
      TPL/compiler.sh
  60. 45
      TPL/faodel/runcmake.sh
  61. 0
      TPL/fmt/dummy
  62. 34
      TPL/gtest/runcmake.sh
  63. 80
      TPL/hdf5/runcmake.sh
  64. 66
      TPL/hdf5/runconfigure.sh
  65. 65
      TPL/kokkos/runcmake.sh
  66. 23
      TPL/matio/mpi.patch
  67. 78
      TPL/matio/runcmake.sh
  68. 41
      TPL/matio/runconfigure.sh
  69. 28
      TPL/metis/runconfigure.sh
  70. 81
      TPL/netcdf/runcmake.sh
  71. 37
      TPL/netcdf/runconfigure.sh
  72. 23
      TPL/parallel/runconfigure.sh
  73. 33
      TPL/parmetis/runconfigure.sh
  74. 42
      TPL/pnetcdf/runconfigure.sh
  75. 53
      TPL/szip/runcmake.sh
  76. 83
      TPLsList.cmake
  77. 5
      Version.cmake
  78. 12
      ci-msys2-build.sh
  79. 577
      cmake-config
  80. 90
      cmake-config-kokkos
  81. 401
      cmake-exodus
  82. 428
      cmake-faodel
  83. 221
      cmake-sems
  84. 164
      cmake-travis.sh
  85. 32
      cmake-use-example/CMakeLists.txt
  86. 719
      cmake-use-example/ExodusRead.f
  87. 587
      cmake-use-example/ExodusWrite.c
  88. 16
      cmake/FortranSettings.cmake
  89. 115
      cmake/ProjectCompilerPostConfig.cmake
  90. 6
      cmake/RepositoryDependenciesSetup.cmake
  91. 2
      cmake/TPLs/FindTPLCCOLAMD.cmake
  92. 59
      cmake/TPLs/FindTPLCGNS.cmake
  93. 129
      cmake/TPLs/FindTPLCUDA.cmake
  94. 59
      cmake/TPLs/FindTPLDLlib.cmake
  95. 39
      cmake/TPLs/FindTPLDataWarp.cmake
  96. 59
      cmake/TPLs/FindTPLGTest.cmake
  97. 61
      cmake/TPLs/FindTPLHDF5.cmake
  98. 60
      cmake/TPLs/FindTPLMETIS.cmake
  99. 62
      cmake/TPLs/FindTPLMatio.cmake
  100. 61
      cmake/TPLs/FindTPLNetcdf.cmake
  101. Some files were not shown because too many files have changed in this diff Show More

@ -0,0 +1,86 @@
# AppVeyor CI: build SEACAS with MSVC 2019 on Windows, with
# netcdf-c / matio / fmt supplied by vcpkg.
image: Visual Studio 2019

environment:
  matrix:
    - TARGET_ARCH: x64
      CONDA_INSTALL_LOCN: C:\\Miniconda-x64
      MSYS2_INSTALL_LOCN: C:\msys64
      MSYS2_BIN_LOCN: C:\msys64\usr\bin
      CMAKE_GENERATOR: "Visual Studio 16 2019"
      SUPPRESSIONS: "/wd4478"
      VCPKG_DEFAULT_TRIPLET: "x64-windows"
      VCPKG_BUILD_TYPE: "release"
      LIB_ROOT: C:\Tools\vcpkg\installed\%VCPKG_DEFAULT_TRIPLET%
      MINGW_DIR: C:\mingw-w64\x86_64-7.2.0-posix-seh-rt_v5-rev1\mingw64\bin

platform:
  - x64

branches:
  except:
    - /.*[.]dmh/
    - /.*[.]wif/

# Do not build feature branch with open Pull Requests
skip_branch_with_pr: true

init:
  - cmd: set PATH=%MINGW_DIR%;%PATH%

install:
  - cmd: set SRC_DIR=%cd%
  - cmd: set INSTALL_LOC=%SRC_DIR%\install
  - cmd: set PATH=%PATH%;%MSYS2_BIN_LOCN%;%INSTALL_LOC%\bin;%INSTALL_LOC%\lib
  - cmd: vcpkg install netcdf-c
  - cmd: vcpkg install matio
  - cmd: vcpkg install fmt
  - cmd: vcpkg integrate install

configuration: Release

build: off

# Run a custom script.
build_script:
  - cmd: mkdir build
  - cmd: cd build
  - cmd: cmake .. -G "%CMAKE_GENERATOR%" ^
      -DCMAKE_TOOLCHAIN_FILE=C:/Tools/vcpkg/scripts/buildsystems/vcpkg.cmake ^
      -DBUILD_SHARED_LIBS:BOOL=OFF ^
      -DCMAKE_BUILD_TYPE=Release ^
      -DCMAKE_CXX_FLAGS="%SUPPRESSIONS% /EHsc" ^
      -DCMAKE_C_FLAGS="%SUPPRESSIONS%" ^
      -DCMAKE_INSTALL_PREFIX=%INSTALL_LOC% ^
      -DHDF5_ROOT:PATH=%LIB_ROOT% ^
      -DMatio_INCLUDE_DIRS:PATH=%LIB_ROOT%\include ^
      -DMatio_LIBRARIES:PATH=%LIB_ROOT%\lib\libmatio.lib ^
      -DNetCDF_ROOT:PATH=%LIB_ROOT% ^
      -DSEACASExodus_ENABLE_THREADSAFE:BOOL=OFF ^
      -DSEACASIoss_ENABLE_THREADSAFE:BOOL=OFF ^
      -DSeacas_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON ^
      -DSeacas_ENABLE_ALL_PACKAGES:BOOL=ON ^
      -DSeacas_ENABLE_DOXYGEN:BOOL=OFF ^
      -DSeacas_ENABLE_Fortran=OFF ^
      -DSeacas_ENABLE_SEACAS:BOOL=ON ^
      -DSeacas_ENABLE_SECONDARY_TESTED_CODE:BOOL=ON ^
      -DSeacas_ENABLE_TESTS=ON ^
      -DSeacas_ENABLE_Zoltan:BOOL=OFF ^
      -DSeacas_HIDE_DEPRECATED_CODE:BOOL=ON ^
      -DSeacas_SKIP_FORTRANCINTERFACE_VERIFY_TEST:BOOL=ON ^
      -DTPL_ENABLE_ADIOS2:BOOL=OFF ^
      -DTPL_ENABLE_CGNS:BOOL=OFF ^
      -DTPL_ENABLE_Kokkos:BOOL=OFF ^
      -DTPL_ENABLE_MPI:BOOL=OFF ^
      -DTPL_ENABLE_Matio:BOOL=ON ^
      -DTPL_ENABLE_Netcdf:BOOL=ON ^
      -DTPL_ENABLE_Pamgen:BOOL=OFF ^
      -DTPL_ENABLE_Pthread:BOOL=OFF ^
      -DTPL_ENABLE_X11:BOOL=OFF ^
      -DCMAKE_SH="CMAKE_SH-NOTFOUND" ^
      -DTPL_ENABLE_gtest:BOOL=OFF
  - cmd: if errorlevel 1 exit 1
  - cmd: cmake --build . --config %configuration% -- /maxcpucount:4

test_script:
  - cmd: ctest --output-on-failure --build-config Release

@ -0,0 +1,57 @@
# Build SEACAS against the TPLs shipped by Ubuntu's package repositories
# (netcdf, hdf5, cgns, matio, fmt) rather than building them from source.
name: Build Using Ubuntu-installed libraries

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master
  workflow_dispatch:

jobs:
  build-deps:
    runs-on: ubuntu-22.04
    strategy:
      matrix:
        compiler: [ gnu, clang ]
    steps:
      - uses: actions/checkout@v3
      - name: Install System dependencies
        shell: bash -l {0}
        run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev openmpi-bin libopenmpi-dev libnetcdf-dev libhdf5-dev libcgns-dev libmatio-dev libfmt-dev
      - name: Run cmake
        shell: bash -l {0}
        run: |
          echo $HOME
          find /usr/include -name cgnslib.h
          mkdir build
          cd build
          NETCDF_PATH=/usr MODERN=YES NUMPROCS=2 COMPILER=${{ matrix.compiler }} INSTALL_PATH=${HOME} bash ../cmake-config
      - name: Build
        shell: bash -l {0}
        run: |
          cd build
          make -j 4
        if: ${{ success() }}
      - name: Install
        shell: bash -l {0}
        run: |
          cd build
          make -j 4 install
        if: ${{ success() }}
      - name: Run Tests
        shell: bash -l {0}
        run: |
          cd build
          ctest -j 4 --output-on-failure
        if: ${{ success() }}

@ -0,0 +1,130 @@
# This is a basic workflow to help you get started with Actions
name: SEACAS no-hdf5-netcdf builds
# Controls when the action will run. Triggers the workflow on push
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
build-deps:
runs-on: ubuntu-latest
strategy:
matrix:
compiler: [ gnu, clang, mpi ]
netcdf: [ 4.9.2 ]
steps:
- uses: actions/checkout@v3
- name: Install System dependencies
shell: bash -l {0}
run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev openmpi-bin libopenmpi-dev
###
# Installing TPL
###
# Cache the third-party libraries so they are only rebuilt when the key changes.
# Fixed: step id renamed from "cache-TPL-mpi" to "cache-TPL" — the build step
# below tests steps.cache-TPL.outputs.cache-hit, so the old id made that
# expression empty and the TPLs were rebuilt on every run, cache hit or not.
- name: Cache TPL-${{ matrix.compiler }}-${{ matrix.netcdf }}
  id: cache-TPL
  uses: actions/cache@v3
  with:
    path: ~/environments/${{ matrix.compiler }}-${{ matrix.netcdf }}
    key: TPL-v4-${{ runner.os }}-${{ matrix.compiler }}-${{ matrix.netcdf }}
- name: Build TPL-${{ matrix.compiler }}-${{ matrix.netcdf }}
  # Build the TPLs only when the cache did not supply them.
  if: steps.cache-TPL.outputs.cache-hit != 'true'
  run: |
    set -x
    COMPILER=${{ matrix.compiler }} HDF5=NO GNU_PARALLEL=OFF INSTALL_PATH=${HOME}/environments/${{ matrix.compiler }}-${{ matrix.netcdf }} ./install-tpl.sh
    ls ${HOME} && ls ${HOME}/environments && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.netcdf }} && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.netcdf }}/lib
build:
needs: build-deps
# The type of runner that the job will run on
name: ${{ matrix.config.name }}
runs-on: ${{ matrix.config.os }}
strategy:
fail-fast: false
matrix:
netcdf: [ 4.9.2 ]
config:
- {
name: "Debug serial build",
os: ubuntu-latest,
compiler: "gnu",
debug: "YES",
extra: "",
}
- {
name: "Debug parallel build",
os: ubuntu-latest,
compiler: "mpi",
debug: "YES",
extra: "",
}
- {
name: "Use modern CMake configure of netCDF package",
os: ubuntu-latest,
compiler: "mpi",
debug: "NO",
extra: "MODERN=YES",
}
steps:
- uses: actions/checkout@v3
- name: Install System dependencies
shell: bash -l {0}
run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev openmpi-bin libopenmpi-dev
- name: Fetch TPL Cache
id: cache-TPL
uses: actions/cache@v3
with:
path: ~/environments/${{ matrix.config.compiler }}-${{ matrix.netcdf }}
key: TPL-v4-${{ runner.os }}-${{ matrix.config.compiler }}-${{ matrix.netcdf }}
- name: Check Cache
shell: bash -l {0}
run: ls ${HOME} && ls ${HOME}/environments && ls ${HOME}/environments/${{ matrix.config.compiler }}-${{ matrix.netcdf }} && ls ${HOME}/environments/${{ matrix.config.compiler }}-${{ matrix.netcdf }}/lib
###
# Configure and build
###
- name: Run cmake
shell: bash -l {0}
run: |
echo $HOME
mkdir build
cd build
NUMPROCS=2 ${{ matrix.config.extra }} DEBUG=${{ matrix.config.debug }} COMPILER=${{ matrix.config.compiler }} INSTALL_PATH=${HOME}/environments/${{ matrix.config.compiler }}-${{ matrix.netcdf }} bash ../cmake-config
- name: Build
shell: bash -l {0}
run: |
cd build
make -j 4
if: ${{ success() }}
- name: Install
shell: bash -l {0}
run: |
cd build
make -j 4 install
if: ${{ success() }}
- name: Run Tests
shell: bash -l {0}
run: |
cd build
ctest -j 4 --output-on-failure
if: ${{ success() }}

@ -0,0 +1,129 @@
name: Build and run SEACAS tests
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
build-deps:
runs-on: ubuntu-latest
strategy:
matrix:
compiler: [ gnu, clang, mpi ]
hdf5: [ V18, V110, V114 ]
netcdf: [ 4.9.2 ]
cgns: [ 4.3.0 ]
steps:
- uses: actions/checkout@v3
- name: Install System dependencies
shell: bash -l {0}
run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev openmpi-bin libopenmpi-dev
###
# Installing TPL
###
- name: Cache TPL-${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
id: cache-TPL
uses: actions/cache@v3
with:
path: ~/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
key: TPL-v4-${{ runner.os }}-${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
- name: Build TPL-${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
if: steps.cache-TPL.outputs.cache-hit != 'true'
run: |
echo $HOME
set -x
COMPILER=${{ matrix.compiler }} H5VERSION=${{ matrix.hdf5}} GNU_PARALLEL=OFF INSTALL_PATH=${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} ./install-tpl.sh
ls ${HOME} && ls ${HOME}/environments && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}/lib
###
# Fetch Cache
###
seacas-build:
needs: build-deps
runs-on: ubuntu-latest
strategy:
matrix:
compiler: [ gnu, clang, mpi ]
hdf5: [ V18, V110, V114 ]
netcdf: [ 4.9.2 ]
cgns: [ 4.3.0 ]
steps:
- uses: actions/checkout@v3
- name: Install System dependencies
shell: bash -l {0}
run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev openmpi-bin libopenmpi-dev
- name: Fetch TPL Cache
id: cache-TPL
uses: actions/cache@v3
with:
path: ~/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
key: TPL-v4-${{ runner.os }}-${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
- name: Check Cache
shell: bash -l {0}
run: ls ${HOME} && ls ${HOME}/environments && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}/lib
###
# Configure and build
###
- name: Setup Python
uses: actions/setup-python@v3
with:
python-version: '3.11'
- name: Install NumPy
shell: bash -l {0}
run: pip install numpy
- name: Run cmake
shell: bash -l {0}
run: |
echo $HOME
mkdir build
cd build
NUMPROCS=2 COMPILER=${{ matrix.compiler }} INSTALL_PATH=${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} bash ../cmake-config
- name: Build
shell: bash -l {0}
run: |
cd build
make -j 4
if: ${{ success() }}
- name: Install
shell: bash -l {0}
run: |
cd build
make -j 4 install
if: ${{ success() }}
- name: Run Tests
shell: bash -l {0}
run: |
cd build
ctest -j 4 --output-on-failure
if: ${{ success() }}
- name: Run SEACAS-Test
shell: bash -l {0}
run: |
cd SEACAS-Test
make BASE=${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} test
if: ${{ success() }}

@ -0,0 +1,309 @@
# This is a basic workflow to help you get started with Actions
name: SEACAS Variant builds
# Controls when the action will run. Triggers the workflow on push
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
build-deps:
runs-on: ubuntu-latest
strategy:
matrix:
compiler: [ gnu, clang, mpi ]
hdf5: [ V110 ]
netcdf: [ 4.9.2 ]
cgns: [ 4.3.0 ]
steps:
- uses: actions/checkout@v3
- name: Install System dependencies
shell: bash -l {0}
run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev openmpi-bin libopenmpi-dev
###
# Installing TPL
###
# Cache the third-party libraries for the variant builds.
# Fixed: step id renamed from "cache-TPL-mpi" to "cache-TPL" — the cache-hit
# check in the next step references steps.cache-TPL, so the old id made the
# check always false and the TPLs were rebuilt on every run.
- name: Cache TPL-${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
  id: cache-TPL
  uses: actions/cache@v3
  with:
    path: ~/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
    key: TPL-v4-${{ runner.os }}-${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
- name: Build TPL-${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
  # Build the TPLs only when the cache did not supply them.
  if: steps.cache-TPL.outputs.cache-hit != 'true'
  run: |
    set -x
    COMPILER=${{ matrix.compiler }} H5VERSION=${{ matrix.hdf5}} GNU_PARALLEL=OFF INSTALL_PATH=${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} ./install-tpl.sh
    ls ${HOME} && ls ${HOME}/environments && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}/lib
build:
needs: build-deps
# The type of runner that the job will run on
name: ${{ matrix.config.name }}
runs-on: ${{ matrix.config.os }}
strategy:
fail-fast: false
matrix:
hdf5: [ V110 ]
netcdf: [ 4.9.2 ]
cgns: [ 4.3.0 ]
config:
- {
name: "Debug serial build",
os: ubuntu-latest,
compiler: "gnu",
debug: "YES",
threadsafe: "NO",
fortran: "YES",
use_kokkos: "NO",
use_adios2: "NO",
use_catalyst2: "NO",
gtest: "NO",
extra: "",
sanitizer: "NO" # address, integer, thread, memory, undefined
}
- {
name: "Debug parallel build",
os: ubuntu-latest,
compiler: "mpi",
debug: "YES",
threadsafe: "NO",
fortran: "YES",
use_kokkos: "NO",
use_adios2: "NO",
use_catalyst2: "NO",
gtest: "NO",
extra: "",
sanitizer: "NO" # address, integer, thread, memory, undefined
}
- {
name: "Threadsafe ON, No Fortran",
os: ubuntu-latest,
compiler: "gnu",
debug: "YES",
threadsafe: "YES",
fortran: "NO",
use_kokkos: "NO",
use_adios2: "NO",
use_catalyst2: "NO",
gtest: "NO",
extra: "",
sanitizer: "thread" # address, integer, thread, memory, undefined
}
- {
name: "Threadsafe ON, Parallel, No Fortran",
os: ubuntu-latest,
compiler: "mpi",
debug: "YES",
threadsafe: "YES",
fortran: "NO",
use_kokkos: "NO",
use_adios2: "NO",
use_catalyst2: "NO",
gtest: "NO",
extra: "",
sanitizer: "thread" # address, integer, thread, memory, undefined
}
- {
name: "Sanitize address, no fortran",
os: ubuntu-latest,
compiler: "clang",
debug: "YES",
threadsafe: "NO",
fortran: "NO",
use_kokkos: "NO",
use_adios2: "NO",
use_catalyst2: "NO",
gtest: "NO",
extra: "",
sanitizer: "address" # address, integer, thread, memory, undefined
}
- {
name: "Sanitize undefined",
os: ubuntu-latest,
compiler: "clang",
debug: "YES",
threadsafe: "NO",
fortran: "YES",
use_kokkos: "NO",
use_adios2: "NO",
use_catalyst2: "NO",
gtest: "NO",
extra: "",
sanitizer: "undefined" # address, integer, thread, memory, undefined
}
- {
name: "Sanitize integer",
os: ubuntu-latest,
compiler: "clang",
debug: "YES",
threadsafe: "NO",
fortran: "NO",
use_kokkos: "NO",
use_adios2: "NO",
use_catalyst2: "NO",
gtest: "NO",
extra: "",
sanitizer: "integer" # address, integer, thread, memory, undefined
}
- {
name: "GTest Enabled",
os: ubuntu-latest,
compiler: "gnu",
debug: "YES",
threadsafe: "NO",
fortran: "YES",
use_kokkos: "NO",
use_adios2: "NO",
use_catalyst2: "NO",
gtest: "YES",
extra: "",
sanitizer: "NO" # address, integer, thread, memory, undefined
}
- {
name: "No applications, legacy, or fortran",
os: ubuntu-latest,
compiler: "clang",
debug: "NO",
threadsafe: "NO",
fortran: "NO",
use_kokkos: "NO",
use_adios2: "NO",
use_catalyst2: "NO",
gtest: "NO",
extra: "APPLICATIONS=NO LEGACY=NO",
sanitizer: "NO" # address, integer, thread, memory, undefined
}
- {
name: "CATALYST2, serial",
os: ubuntu-latest,
compiler: "clang",
debug: "NO",
threadsafe: "NO",
fortran: "YES",
use_kokkos: "NO",
use_adios2: "NO",
use_catalyst2: "YES",
gtest: "NO",
extra: "",
sanitizer: "NO" # address, integer, thread, memory, undefined
}
- {
name: "KOKKOS, parallel",
os: ubuntu-latest,
compiler: "mpi",
debug: "NO",
threadsafe: "NO",
fortran: "YES",
use_kokkos: "YES",
use_adios2: "NO",
use_catalyst2: "NO",
gtest: "NO",
extra: "",
sanitizer: "NO" # address, integer, thread, memory, undefined
}
- {
name: "No deprecated exodus functions",
os: ubuntu-latest,
compiler: "mpi",
debug: "NO",
threadsafe: "NO",
fortran: "YES",
use_kokkos: "NO",
use_adios2: "NO",
use_catalyst2: "NO",
gtest: "NO",
extra: "OMIT_DEPRECATED=YES",
sanitizer: "NO" # address, integer, thread, memory, undefined
}
- {
name: "Use modern CMake configure of netCDF and HDF5 packages",
os: ubuntu-latest,
compiler: "mpi",
debug: "NO",
threadsafe: "NO",
fortran: "YES",
use_kokkos: "NO",
use_adios2: "NO",
use_catalyst2: "NO",
gtest: "NO",
extra: "MODERN=YES",
sanitizer: "NO" # address, integer, thread, memory, undefined
}
steps:
- uses: actions/checkout@v3
- name: Install System dependencies
shell: bash -l {0}
run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev openmpi-bin libopenmpi-dev
- name: Fetch TPL Cache
id: cache-TPL
uses: actions/cache@v3
with:
path: ~/environments/${{ matrix.config.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
key: TPL-v4-${{ runner.os }}-${{ matrix.config.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
- name: Check Cache
shell: bash -l {0}
run: ls ${HOME} && ls ${HOME}/environments && ls ${HOME}/environments/${{ matrix.config.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} && ls ${HOME}/environments/${{ matrix.config.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}/lib
# Install the optional TPLs (Kokkos / ADIOS2 / Catalyst2 / GTest) on top of
# the cached base TPLs when any variant config enables them.
# Fixed: the condition previously tested matrix.config.use_catalyst, a key
# that does not exist in the matrix (it defines use_catalyst2), so the
# "CATALYST2, serial" config never installed its extra TPL.
- name: install additional TPL
  if: ${{ matrix.config.use_kokkos == 'YES' || matrix.config.use_adios2 == 'YES' || matrix.config.gtest == 'YES' || matrix.config.use_catalyst2 == 'YES' }}
  run: |
    set -x
    COMPILER=${{ matrix.config.compiler }} KOKKOS=${{ matrix.config.use_kokkos }} ADIOS2=${{ matrix.config.use_adios2 }} CATALYST2=${{ matrix.config.use_catalyst2 }} GNU_PARALLEL=OFF GTEST=${{ matrix.config.gtest }} INSTALL_PATH=${HOME}/environments/${{ matrix.config.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} ./install-tpl.sh
    ls ${HOME} && ls ${HOME}/environments && ls ${HOME}/environments/${{ matrix.config.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} && ls ${HOME}/environments/${{ matrix.config.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}/lib
###
# Configure and build
###
- name: Run cmake
shell: bash -l {0}
run: |
echo $HOME
mkdir build
cd build
NUMPROCS=2 ${{ matrix.config.extra }} THREADSAFE=${{ matrix.config.threadsafe }} DEBUG=${{ matrix.config.debug }} FORTRAN=${{ matrix.config.fortran }} SANITIZER=${{ matrix.config.sanitizer }} COMPILER=${{ matrix.config.compiler }} INSTALL_PATH=${HOME}/environments/${{ matrix.config.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} bash ../cmake-config
- name: Build
shell: bash -l {0}
run: |
cd build
make -j 4
if: ${{ success() }}
- name: Install
shell: bash -l {0}
run: |
cd build
make -j 4 install
if: ${{ success() }}
- name: Run Non-sanitized Tests
shell: bash -l {0}
run: |
cd build
ctest -j 4 --output-on-failure
if: ${{ success() && matrix.config.sanitizer == 'NO' && matrix.config.threadsafe == 'NO' }}
- name: Run sanitized/threadsafe Tests
shell: bash -l {0}
run: |
cd build
ctest -j 4 --output-on-failure --exclude-regex test_exo
if: ${{ success() && (matrix.config.sanitizer != 'NO' || matrix.config.threadsafe != 'NO') }}

@ -0,0 +1,10 @@
#!/bin/bash
# CI helper: configure, build, and test SEACAS in a ./build directory.
# -e: stop at the first failing command; -u: error on unset variables;
# -o pipefail: a failure anywhere in a pipeline fails the pipeline.
set -euo pipefail

pwd
echo "$HOME"

# -p keeps the script re-runnable when the build directory already exists
# (plain `mkdir build` aborted the script under set -e on a second run).
mkdir -p build
cd build

# cmake-config reads its options from the environment (COMPILER, DEBUG, ...).
bash ../cmake-config
make -j4
ctest -j 4 --output-on-failure

@ -0,0 +1,35 @@
# CLA Assistant: asks contributors to sign the SEACAS CLA and records
# signatures in signatures/version1/cla.json.
name: "CLA Assistant"

on:
  # Fixed: issue_comment is required for the "recheck" / signature comments
  # tested in the step condition below — with only pull_request_target those
  # comments never trigger the workflow (see cla-assistant/github-action docs).
  issue_comment:
    types: [created]
  pull_request_target:
    types: [opened,closed,synchronize]

jobs:
  CLAssistant:
    runs-on: ubuntu-latest
    steps:
      - name: "CLA Assistant"
        if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target'
        # Beta Release
        uses: cla-assistant/github-action@v2.1.3-beta
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          # the below token should have repo scope and must be manually added by you in the repository's secret
          PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
        with:
          path-to-signatures: 'signatures/version1/cla.json'
          path-to-document: 'https://github.com/sandialabs/seacas/blob/master/SEACAS-CLA.md'
          # branch should not be protected
          branch: 'master'
          allowlist: user1,bot*,*@sandia.gov
          # below are the optional inputs - If the optional inputs are not given, then default values will be taken
          # remote-organization-name: enter the remote organization name where the signatures should be stored (Default is storing the signatures in the same repository)
          # remote-repository-name: enter the remote repository name where the signatures should be stored (Default is storing the signatures in the same repository)
          # create-file-commit-message: 'For example: Creating file for storing CLA Signatures'
          # signed-commit-message: 'For example: $contributorName has signed the CLA in #$pullRequestNo'
          # custom-notsigned-prcomment: 'pull request comment with Introductory message to ask new contributors to sign'
          # custom-pr-sign-comment: 'The signature to be committed in order to sign the CLA'
          # custom-allsigned-prcomment: 'pull request comment when all contributors has signed, defaults to **CLA Assistant Lite bot** All Contributors have signed the CLA.'
          # lock-pullrequest-aftermerge: false - if you don't want this bot to automatically lock the pull request after merging (default - true)
          # use-dco-flag: true - If you are using DCO instead of CLA

@ -0,0 +1,119 @@
name: coverity-scan
on:
push:
branches:
- coverity
jobs:
build-deps:
runs-on: ubuntu-latest
strategy:
matrix:
compiler: [ gnu ]
hdf5: [ V110 ]
netcdf: [ 4.9.2 ]
cgns: [ 4.3.0 ]
steps:
- uses: actions/checkout@v3
- name: Install System dependencies
shell: bash -l {0}
run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev openmpi-bin libopenmpi-dev
###
# Installing TPL
###
- name: Cache TPL-${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
id: cache-TPL
uses: actions/cache@v3
with:
path: ~/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
key: TPL-v2-${{ runner.os }}-${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
- name: Build TPL-${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
if: steps.cache-TPL.outputs.cache-hit != 'true'
run: |
echo $HOME
set -x
COMPILER=${{ matrix.compiler }} H5VERSION=${{ matrix.hdf5}} GNU_PARALLEL=OFF INSTALL_PATH=${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} ./install-tpl.sh
ls ${HOME} && ls ${HOME}/environments && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}/lib
###
# Fetch Cache
###
seacas-build:
needs: build-deps
runs-on: ubuntu-latest
strategy:
matrix:
compiler: [ gnu ]
hdf5: [ V110 ]
netcdf: [ 4.9.2 ]
cgns: [ 4.3.0 ]
steps:
- uses: actions/checkout@v3
- name: Install System dependencies
shell: bash -l {0}
run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev openmpi-bin libopenmpi-dev
- name: Fetch TPL Cache
id: cache-TPL
uses: actions/cache@v3
with:
path: ~/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
key: TPL-v2-${{ runner.os }}-${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
- name: Check Cache
shell: bash -l {0}
run: ls ${HOME} && ls ${HOME}/environments && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}/lib
###
# Configure and build
###
- name: Download Coverity Build Tool
run: |
wget -q https://scan.coverity.com/download/linux64 --post-data "token=$TOKEN&project=gsjaardema%2Fseacas" -O coverity_tool.tgz
mkdir cov-analysis-linux64
tar xzf coverity_tool.tgz --strip 1 -C cov-analysis-linux64
env:
TOKEN: ${{ secrets.COVERITY_SCAN_TOKEN }}
- name: Run cmake
shell: bash -l {0}
run: |
echo $HOME
mkdir build
cd build
ZOLTAN=NO INSTALL_PATH=${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} bash ../cmake-config coverity
- name: Build
shell: bash -l {0}
run: |
cd build
mkdir cov-int
../cov-analysis-linux64/bin/cov-build --dir cov-int make -j 4
if: ${{ success() }}
- name: Submit the result to Coverity Scan
run: |
cd build
tail cov-int/build-log.txt
tar czf seacas.tgz cov-int
curl \
--form token="$TOKEN" \
--form email=gsjaardema@gmail.com \
--form file=@seacas.tgz \
--form version="master" \
--form description="SEACAS Github Actions Coverity Scan" \
https://scan.coverity.com/builds?project=gsjaardema%2Fseacas
env:
TOKEN: ${{ secrets.COVERITY_SCAN_TOKEN }}
if: ${{ success() }}

@ -0,0 +1,30 @@
# Build and publish the exodus Docker image on pushes to master, then pull
# the published image and smoke-test that the python exodus3 module imports.
name: docker-exodus

on:
  push:
    branches: [ "master" ]

jobs:
  build-latest:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Docker login
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Docker build and push
        uses: docker/build-push-action@v3
        with:
          context: docker/exodus
          push: true
          tags: mrbuche/exodus:latest
      - name: Delay before pull
        # NOTE(review): fixed delay presumably gives the registry time to
        # propagate the freshly pushed tag before test-latest pulls it — confirm.
        run: sleep 234s

  test-latest:
    needs: build-latest
    runs-on: ubuntu-latest
    container: mrbuche/exodus
    steps:
      - name: Docker pull and test
        run: |
          python -c 'import exodus3 as exodus'

@ -0,0 +1,32 @@
# Build and publish the seacas Docker image on pushes to master, then pull
# the published image and run the test suites inside it.
name: docker-seacas

on:
  push:
    branches: [ "master" ]

jobs:
  build-latest:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Docker login
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Docker build and push
        uses: docker/build-push-action@v3
        with:
          context: docker/seacas
          push: true
          tags: mrbuche/seacas:latest
      - name: Delay before pull
        # NOTE(review): fixed delay presumably gives the registry time to
        # propagate the freshly pushed tag before test-latest pulls it — confirm.
        run: sleep 234s

  test-latest:
    needs: build-latest
    runs-on: ubuntu-latest
    container: mrbuche/seacas
    steps:
      - name: Docker pull and test
        run: |
          cd /seacas/build/ && ctest --output-on-failure && cd
          cd /seacas/SEACAS-Test/ && make test && cd
          python -c 'import exodus3 as exodus'

@ -0,0 +1,169 @@
name: Intel OneAPI build

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master

jobs:
  build-deps:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        compiler: [ intel, mpi ]
        hdf5: [ V114 ]
        netcdf: [ 4.9.2 ]
        cgns: [ 4.3.0 ]
    defaults:
      run:
        shell: bash --noprofile --norc {0}
    steps:
      - uses: actions/checkout@v3
      - name: setup repo
        run: |
          wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          rm GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          # NOTE: sudo on 'echo' is useless -- the pipe runs as the invoking
          # user either way; only 'tee' (which writes the file) needs root.
          echo "deb https://apt.repos.intel.com/oneapi all main" | sudo tee /etc/apt/sources.list.d/oneAPI.list
          sudo apt-get update
      - name: install
        run: |
          sudo apt-get install -y intel-oneapi-common-vars
          sudo apt-get install -y intel-oneapi-compiler-dpcpp-cpp-and-cpp-classic
          sudo apt-get install -y intel-oneapi-compiler-fortran
          sudo apt-get install -y intel-oneapi-mpi
          sudo apt-get install -y intel-oneapi-mpi-devel
          sudo apt-get install -y intel-oneapi-mkl
          sudo apt-get install -y ninja-build
      - name: Install System dependencies
        shell: bash -l {0}
        run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev

      ###
      # Installing TPL
      ###
      - name: Cache TPL-${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
        id: cache-TPL
        uses: actions/cache@v3
        with:
          path: ~/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
          key: TPL-v5intel-${{ runner.os }}-${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}

      - name: Build TPL-${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
        if: steps.cache-TPL.outputs.cache-hit != 'true'
        run: |
          echo $HOME
          set -x
          source /opt/intel/oneapi/setvars.sh
          printenv >> $GITHUB_ENV
          COMPILER=${{ matrix.compiler }} H5VERSION=${{ matrix.hdf5}} GNU_PARALLEL=OFF INSTALL_PATH=${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} ./install-tpl.sh
          ls ${HOME} && ls ${HOME}/environments && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}/lib

  seacas-build:
    needs: build-deps
    runs-on: ubuntu-latest
    strategy:
      matrix:
        compiler: [ intel, mpi ]
        hdf5: [ V114 ]
        netcdf: [ 4.9.2 ]
        cgns: [ 4.3.0 ]
    steps:
      - uses: actions/checkout@v3
      - name: setup repo
        run: |
          wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          rm GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
          # See note above: sudo belongs on 'tee', not on 'echo'.
          echo "deb https://apt.repos.intel.com/oneapi all main" | sudo tee /etc/apt/sources.list.d/oneAPI.list
          sudo apt-get update
      - name: install
        run: |
          sudo apt-get install -y intel-oneapi-common-vars
          sudo apt-get install -y intel-oneapi-compiler-dpcpp-cpp-and-cpp-classic
          sudo apt-get install -y intel-oneapi-compiler-fortran
          sudo apt-get install -y intel-oneapi-mpi
          sudo apt-get install -y intel-oneapi-mpi-devel
          sudo apt-get install -y intel-oneapi-mkl
          sudo apt-get install -y ninja-build
      - name: Install System dependencies
        shell: bash -l {0}
        run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev

      - name: Fetch TPL Cache
        id: cache-TPL
        uses: actions/cache@v3
        with:
          path: ~/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}
          key: TPL-v5intel-${{ runner.os }}-${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}

      - name: Check Cache
        shell: bash -l {0}
        run: ls ${HOME} && ls ${HOME}/environments && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} && ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}/lib

      ###
      # Configure and build
      ###
      - name: List directory
        shell: bash -l {0}
        run: |
          echo $HOME
          ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}/lib
          ls ${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }}/include

      - name: Run cmake
        shell: bash -l {0}
        run: |
          echo $HOME
          mkdir build
          cd build
          source /opt/intel/oneapi/setvars.sh
          printenv >> $GITHUB_ENV
          NUMPROCS=2 COMPILER=${{ matrix.compiler }} INSTALL_PATH=${HOME}/environments/${{ matrix.compiler }}-${{ matrix.hdf5 }}-${{ matrix.netcdf }}-${{ matrix.cgns }} bash ../cmake-config

      - name: Build
        shell: bash -l {0}
        run: |
          cd build
          make -j 4
        if: ${{ success() }}

      - name: Install
        shell: bash -l {0}
        run: |
          cd build
          make -j 4 install
        if: ${{ success() }}

      - name: Run Tests
        shell: bash -l {0}
        run: |
          cd build
          ctest -j 4 --output-on-failure
        if: ${{ success() }}

#      - name: Compile
#        run: |
#          source /opt/intel/oneapi/setvars.sh
#          printenv >> $GITHUB_ENV
#          ifort test.f90 -o test_program
#          mpiifort test2.f90 -o mpitest
#      - name: "Test the code"
#        run: |
#          echo "Running the tests using the Intel oneapi fortran compiler"
#          cd tests/test01
#          mpirun -np 1 $HOME/work/reponame/reponame/mpitest
#          ./$HOME/work/reponame/reponame/test_program

@ -0,0 +1,26 @@
name: SEACAS MSYS2 Build
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
build:
runs-on: windows-latest
defaults:
run:
shell: msys2 {0}
steps:
- uses: actions/checkout@v3
- uses: msys2/setup-msys2@v2
with:
msystem: MINGW64
update: true
install: git mingw-w64-x86_64-toolchain make mingw-w64-x86_64-hdf5 mingw-w64-x86_64-cgns mingw-w64-x86_64-netcdf mingw-w64-x86_64-zlib mingw-w64-x86_64-gcc-fortran mingw-w64-x86_64-gcc-libgfortran mingw-w64-x86_64-cmake mingw-w64-x86_64-fmt
- name: CI-Build
run: |
echo 'Running in MSYS2!'
./ci-msys2-build.sh

@ -0,0 +1,34 @@
name: Python Linting
on:
push:
branches:
- master
pull_request:
branches:
- master
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.10'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install flake8
pip install pylint
- name: Lint exodus.py
run: |
flake8 --ignore E501,W503 packages/seacas/scripts/exodus3.in.py
- name: Lint exomerge.py
if: success() || failure()
run: |
flake8 --ignore E501,W503,W504 packages/seacas/scripts/exomerge3.py
- name: Lint exodus tests
if: success() || failure()
run: |
flake8 --ignore E501,E402,W503,W605 packages/seacas/scripts/tests

@ -0,0 +1,19 @@
name: Mark stale issues and pull requests
on:
schedule:
- cron: "30 1 * * *"
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: 'Stale issue message'
stale-pr-message: 'Stale pull request message'
stale-issue-label: 'no-issue-activity'
stale-pr-label: 'no-pr-activity'

@ -0,0 +1,11 @@
name: CI Formatting
on:
workflow_dispatch:
jobs:
trailing:
name: Find Trailing Whitespace
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: harupy/find-trailing-whitespace@master

139
.gitignore vendored

@ -0,0 +1,139 @@
*.aux
*.exo
*.o
*.g
*.e
*.g.*
*.e.*
*.pyc
.libs
Makefile
Makefile.bak
bin
etc
inc
share
/lib
/include
SEACAS-Test/*.g3
SEACAS-Test/baseline.gt
SEACAS-Test/baseline.mat
SEACAS-Test/baseline.t
src/libraries/exodus/cbind/test/*.res
src/libraries/exodus/forbind/test/*.res
src/libraries/exodus/CMakeCache.txt
src/libraries/exodus/CMakeFiles/
src/libraries/exodus/DartConfiguration.tcl
src/libraries/exodus/cbind/CMakeFiles/
src/libraries/exodus/cbind/CTestTestfile.cmake
src/libraries/exodus/cbind/cmake_install.cmake
src/libraries/exodus/cbind/include/exodus_cfg.h
src/libraries/exodus/cbind/test/CreateEdgeFace
src/libraries/exodus/cbind/test/testrd-long-name
src/libraries/exodus/cbind/test/testrd-nfaced
src/libraries/exodus/cbind/test/testwt-long-name
src/libraries/exodus/cbind/test/testwt-nfaced
src/libraries/exodus/cbind/test/CMakeFiles/
src/libraries/exodus/cbind/test/CTestTestfile.cmake
src/libraries/exodus/cbind/test/ExoIICTests
src/libraries/exodus/cbind/test/ExoIICTests.cxx
src/libraries/exodus/cbind/test/cmake_install.cmake
src/libraries/exodus/cbind/test/create_mesh
src/libraries/exodus/cbind/test/rd_wt_mesh
src/libraries/exodus/cbind/test/testall
src/libraries/exodus/cbind/test/testcp
src/libraries/exodus/cbind/test/testcp_ln
src/libraries/exodus/cbind/test/testcp_nl
src/libraries/exodus/cbind/test/testcpd
src/libraries/exodus/cbind/test/testrd
src/libraries/exodus/cbind/test/testrd-partial
src/libraries/exodus/cbind/test/testrd1
src/libraries/exodus/cbind/test/testrd_nc
src/libraries/exodus/cbind/test/testrd_ss
src/libraries/exodus/cbind/test/testrdd
src/libraries/exodus/cbind/test/testrdv
src/libraries/exodus/cbind/test/testrdwt
src/libraries/exodus/cbind/test/testwt
src/libraries/exodus/cbind/test/testwt-one-attrib
src/libraries/exodus/cbind/test/testwt-partial
src/libraries/exodus/cbind/test/testwt-zeroe
src/libraries/exodus/cbind/test/testwt-zeron
src/libraries/exodus/cbind/test/testwt1
src/libraries/exodus/cbind/test/testwt2
src/libraries/exodus/cbind/test/testwt_clb
src/libraries/exodus/cbind/test/testwt_nc
src/libraries/exodus/cbind/test/testwt_nossnsdf
src/libraries/exodus/cbind/test/testwt_ss
src/libraries/exodus/cbind/test/testwtd
src/libraries/exodus/cbind/test/testwtm
src/libraries/exodus/cbind/test/testrd-nsided
src/libraries/exodus/cbind/test/testwt-nsided
src/libraries/exodus/cbind/test/testwt-groups
src/libraries/exodus/cbind/test/testrd-groups
src/libraries/exodus/forbind/test/testall
src/libraries/exodus/forbind/test/testcp
src/libraries/exodus/forbind/test/testcpln
src/libraries/exodus/forbind/test/testcpnl
src/libraries/exodus/forbind/test/testrd
src/libraries/exodus/forbind/test/testrd1
src/libraries/exodus/forbind/test/testrd_nsid
src/libraries/exodus/forbind/test/testrdd
src/libraries/exodus/forbind/test/testwt
src/libraries/exodus/forbind/test/testwt1
src/libraries/exodus/forbind/test/testwt2
src/libraries/exodus/forbind/test/testwt_nsid
src/libraries/exodus/forbind/test/testwtd
src/libraries/exodus/forbind/test/testwtm
src/libraries/exodus/cmake_install.cmake
src/libraries/svdi/post/post.cps
src/libraries/svdi/post/post.eps
src/libraries/svdi/post/post.pst
src/libraries/svdi/post/post.x11
.deps
*.lo
*.la
*.a
*.so
*.so.*
*.dylib
*.log
*.log.*
*~
src/libraries/zoltan/Utilities/Config/Config.SEACAS
src/libraries/zoltan/Utilities/Obj_SEACAS/
src/libraries/zoltan/Obj_SEACAS/
src/libraries/ioss/src/Trios_config.h
src/libraries/ioss/src/SEACASIoss_config.h
autom4te.cache
SNTools.project
*.cproject
*.project
*.settings
*ioss.doxygen.warn
*ioss.tags
SeacasRepoVersion.txt
TPL/adios2/ADIOS2/
TPL/cgns/CGNS/
TPL/data-warehouse-release
TPL/gtest/googletest/
TPL/hdf5/hdf5-*.tar.bz2/
TPL/hdf5/hdf5-*/
TPL/kokkos/kokkos
TPL/kokkos/kokkos-*
TPL/matio/matio/
TPL/metis/metis-5.1.0.1/
TPL/metis/v5.1.0.1.tar.gz
TPL/netcdf/netcdf-c/
TPL/parallel/parallel-*/
TPL/parallel/parallel-*.tar.bz2
TPL/parmetis/parmetis/
TPL/hdf5/hdf5-*.tar.bz2
TPL/pnetcdf/parallel-netcdf-*.tar.gz
TPL/pnetcdf/parallel-netcdf-*/
TPL/pnetcdf/pnetcdf-*.tar.gz
TPL/pnetcdf/pnetcdf-*/
TPL/faodel/faodel*/
TPL/catalyst2/catalyst/
TPL/fmt/fmt/
build/
.scannerwork

@ -0,0 +1,19 @@
path_classifiers:
external:
- "cmake"
- "packages/zoltan"
- "packages/seacas/libraries/ioss/src/visualization"
test:
- "SEACAS-Test"
docs:
- "docs"
- "packages/seacas/doc-source"
extraction:
cpp:
prepare:
packages:
- hdf5
- netcdf
configure:
command:
- ./cmake-travis.sh

@ -0,0 +1,47 @@
#
# A) Define your project name and set up major project options
#
# Must set the project name as a variable at very beginning before including anything else
# We set the project name in a separate file so CTest scripts can use it.
INCLUDE(${CMAKE_CURRENT_SOURCE_DIR}/ProjectName.cmake)

SET(Seacas_ENABLE_Zoltan_DEFAULT ON)
SET(TRIBITS_HIDE_DEPRECATED_INCLUDE_DIRECTORIES_OVERRIDE TRUE)

# Define the TriBITS minimum required CMake version
SET(TRIBITS_CMAKE_MINIMUM_REQUIRED 3.17.0)

# CMake requires this be in the top file and not in an include file :-(
CMAKE_MINIMUM_REQUIRED(VERSION ${TRIBITS_CMAKE_MINIMUM_REQUIRED} FATAL_ERROR)

# CMake requires that you declare the CMake project in the top-level file and
# not in an include file :-(
PROJECT(${PROJECT_NAME} NONE)

# CMP0074 (honor <Package>_ROOT variables in find_package) exists from CMake
# 3.12 onward.  Use VERSION_GREATER_EQUAL: the plain GREATER_EQUAL operator
# does a *numeric* comparison and mis-handles dotted version strings.
# (With the 3.17 minimum above this is always true, but the guard is kept in
# case the minimum is ever relaxed.)
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.12")
  cmake_policy(SET CMP0074 NEW)
endif()

#
# B) Pull in the TriBITS system and execute
#
INCLUDE(${CMAKE_CURRENT_SOURCE_DIR}/cmake/tribits/TriBITS.cmake)

SET(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules/")
SET(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${${PROJECT_NAME}_TRIBITS_DIR}/common_tpls/utils)

# Make Trilinos create <Package>Config.cmake files by default
SET(${PROJECT_NAME}_ENABLE_INSTALL_CMAKE_CONFIG_FILES_DEFAULT ON)
# Make Trilinos set up CPack support by default
SET(${PROJECT_NAME}_ENABLE_CPACK_PACKAGING_DEFAULT ON)
# Don't allow disabled subpackages to be excluded from tarball
SET(${PROJECT_NAME}_EXCLUDE_DISABLED_SUBPACKAGES_FROM_DISTRIBUTION_DEFAULT FALSE)

SET(TPL_ENABLE_Pthread OFF CACHE BOOL "")

OPTION(Seacas_ENABLE_DOXYGEN "Enable Doxygen documentation creation." OFF)

# Do all of the processing for this Tribits project
TRIBITS_PROJECT()

@ -0,0 +1,46 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at gsjaardema@gmail.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

@ -0,0 +1,85 @@
### Fork seacas
* If you have not already done so, create a fork of seacas on GitHub under your username.
* Sign on (via web) to https://github.com/sandialabs/seacas
* Make sure you are signed in to github
* Click on the 'Fork' button near the top right of the page.
* Clone your fork of seacas with
* `git clone git@github.com:<username>/seacas`.
* Or: `git clone https://github.com/<username>/seacas`
* Each time you clone your fork,
* `git remote add upstream git@github.com:sandialabs/seacas` to add the original seacas repository as the `upstream` remote.
* Or: `git remote add upstream https://github.com/sandialabs/seacas`
### Update the Main Development Branch
To keep your `master` branch up-to-date with `upstream`:
* `git fetch --all`
* `git checkout master`
* `git merge upstream/master`
* `git push origin master`
You want to do this before starting work on a new feature branch.
### Create a Feature Branch
Create a local branch off of `master` on which to make your changes:
* `git checkout master`
* `git checkout -b <branchName>`
`<branchName>` can be whatever you like, though we have some recommendations:
* Make the branch name descriptive; that is, avoid `fixSomeStuff`, `performanceTweaks`, and generic names along those lines.
* To indicate your branch is intended solely for your own use, preface the branch name with your username, as in `<username>/<restOfBranchName>`.
### Make Your Changes
Do whatever work is necessary to address the issue you're tackling,
breaking your work into logical, compilable commits. Feel free to
commit small chunks early and often in your local repository and then
use `git rebase -i` to reorganize your commits before sharing. Make
sure the commit messages you will be sharing reference the appropriate
GitHub issue numbers.
### Update Your Branch
While working on your feature in your local `<branchName>` branch,
other commits will likely make it into the real seacas `master`
branch. There are a variety of ways to merge these changes into your
local feature branch. One possibility is
* `git checkout <branchName>`
* `git fetch --all`
* `git merge upstream/master`
though there are others that are equally valid.
### Create a Pull Request
When your changes are ready to be integrated into seacas' `master` branch:
* Push your local feature branch up to your fork with `git push -u origin <branchName>`.
* Navigate to your fork of seacas on GitHub and create a new pull request:
* Be sure you choose:
* base fork: `sandialabs/seacas`
* base: `master`
* head fork: `<username>/seacas`
* compare: `<branchName>`
* On the new pull request creation page, you'll notice the *Description* field will be pre-populated with some text. Follow the instructions in that template to give us as much information as you can such that we can review and approve the issue as soon as is practicable.
### Feedback
At this point you'll enter into a stage where you and various seacas
developers will iterate back and forth until your changes are in an
acceptable state and can be merged in. If you need to make changes to
your pull request, make additional commits on your `<branchName>`
branch and push them up to your fork. Make sure you don't delete your
remote feature branch or your fork of seacas before your pull request
has been merged.
### Acknowledgement
Based on the `CONTRIBUTING.md` document from Trilinos.

@ -0,0 +1,2 @@
netcdf-c >= 4.6.1
hdf5 >= 1.10.3

@ -0,0 +1,26 @@
Copyright (c) 1988-2017, National Technology & Engineering Solutions
of Sandia, LLC (NTESS). Under the terms of Contract DE-NA0003525 with
NTESS, the U.S. Government retains certain rights in this software.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of NTESS nor the names of its contributors may
be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -0,0 +1,73 @@
# Mapping of exodus entities onto NetCDF
***If you are using NetCDF-4.5.1 or later, then you can ignore the information in this file.***
The distributed version of netcdf sets the following limits
on dimensions and variables:
* `#define NC_MAX_DIMS 1024`
* `#define NC_MAX_VARS 8192`
For use with Exodus, it is recommended that these be increased to:
* `#define NC_MAX_DIMS 65536`
* `#define NC_MAX_VARS 524288`
The reason for these increases is due to the mapping of Exodus onto
NetCDF. The sections below show the number of Dimensions (controlled
by NC_MAX_DIMS) and Variables (controlled by NC_MAX_VARS) that are
used in an Exodus file.
## Entities
* A mesh-entity is an individual node, edge, face, or element.
* An entity is a set or block consisting of a single mesh-entity type.
* Each entity can have variables, maps, and attributes which contain an entry per mesh-entity.
* Each entity has an optional name and a required id (32-bit or 64-bit) which is non-negative.
* A mesh-entity can be in one and only one entity block.
* A mesh-entity can be in zero or more entity sets.
* Currently there is only a single implicit node block containing all nodes in the model.
## Dimensions: (NC_MAX_DIMS)
* There are about 10 standard dimensions in every file.
* plus one for each set plus one if any attributes
* plus two for each block plus one if any attributes
* plus one for each transient variable on an entity (node, node set, element block, element set, ...)
## Variables: (NC_MAX_VARS)
* There are a few standard dimensions
* times
* names of each entity type (block set)
* ids of each entity type (block set)
* status of each entity type (block set)
* #ndim coordinates (1,2,3)
* Each block adds 1 + 2*#attr_on_block + #var_on_block
* Each set adds 2 + 2*#attr_on_set + #var_on_set
* Each sideset add 3 + 2*#attr_on_sset + #var_on_sset
* Each map adds 1
## Example
If we have an exodus file with:
* Nodes
* 5 Element blocks
* 4 transient variables per element block
* 2 attributes per element block
* 4 Node Sets
* Distribution Factors defined on each set
* 3 transient variables
* 3 Side Sets
* Distribution Factors defined on each set
* 2 transient variables
Then there would be about:
`10 + 5*(2+1) + 4*(2) + 3*(2) + 1 + 1 + 1 = 42` Dimensions
There would be about:
`5*(1+2*2+4) + 4*(2+3) + 3*(3+2) + 3*(5+4+3) + 3 + 1 = 120` Variables.
From this, you can see that a moderately complicated model would
quickly overflow the standard values for `NC_MAX_DIMS` and `NC_MAX_VARS`.

@ -0,0 +1,10 @@
#
# Define the Seacas packages
#
TRIBITS_REPOSITORY_DEFINE_PACKAGES(
Zoltan packages/zoltan PT
SEACAS packages/seacas PT # Depends on netcdf, optionally hdf5, pamgen
)
# Allow builds even if some packages are missing

@ -0,0 +1,11 @@
# Must set the project name at very beginning before including anything else
set(PROJECT_NAME Seacas)
# Turn on export dependency generation for WrapExternal package
set(${PROJECT_NAME}_GENERATE_EXPORT_FILE_DEPENDENCIES_DEFAULT ON)
set(${PROJECT_NAME}_GENERATE_REPO_VERSION_FILE_DEFAULT ON)
set(${PROJECT_NAME}_GENERATE_VERSION_DATE_FILES_DEFAULT ON)
set(SEACAS_GTest_TPL_name "GTest")

@ -0,0 +1,57 @@
A capability in testing is to build an "on-node parallel" version of
SEACAS using the Kokkos package. This includes a "Kokkos-aware" version of
the Ioss library. The modifications to the build process described in the
README file are shown below:
## Additional Libraries
You will need the following library if you want to use Kokkos with CUDA as the backend:
* [CUDA](#cuda)
### CUDA
* CUDA is already installed on many platforms. In many cases, typing something like `module load cuda` should be sufficient. Loading the module would typically set environment variables, such as CUDA_ROOT.
* If installing yourself, see www.nvidia.com/object/cuda_home_new.html
## Configure, Build, and Install SEACAS
Build as described in README-PARALLEL.md if using MPI or as described in README.md
otherwise. If you are using the `cmake-config-kokkos` script, change `MPI` to
`ON` or `OFF` as appropriate, change `CUDA` to `ON` or `OFF`
as appropriate, and then source the configure script.
```bash
source cmake-config-kokkos
```
If using your own cmake script or directly calling cmake, be sure to do the following.
* Enable the Kokkos package.
* If using OpenMP as the Kokkos backend, enable OpenMP and disable Pthread.
If using CUDA as the backend, OpenMP can still be enabled.
```bash
-D Seacas_ENABLE_OpenMP:Bool=ON
-D TPL_ENABLE_Pthread:Bool=OFF
```
* If using CUDA as the Kokkos backend, enable CUDA, set the CUDA root directory, and disable Pthread.
```bash
-D TPL_ENABLE_CUDA:Bool=ON
-D CUDA_TOOLKIT_ROOT_DIR:Path=${CUDA_PATH}
-D TPL_ENABLE_Pthread:Bool=OFF
```
* If using CUDA as the Kokkos backend, some environment variables need to be set. These can be set in your configure script, but then be sure to source the script rather than just running it so that your environment is correct when running `make`.
```bash
export OMPI_CXX=<SEACAS-source-directory>/packages/kokkos/config/nvcc_wrapper
export CUDA_MANAGED_FORCE_DEVICE_ALLOC=1
```
Finally, build and install
```bash
make
make install
```

@ -0,0 +1,17 @@
To trigger a scan by coverity, do the following:
* Commit all changes on the master branch
* Change to the "coverity_scan" branch
* git merge master
* make a change such that you can do a commit
* Commit and then push
This should fire off a run of coverity scan.
Do not make source code changes on the coverity_scan branch and don't
merge the branch back into master since the .travis.yml file is
different between the branch and master.
Commits to coverity_scan branch will only trigger the coverity scan
and won't do the normal build and test. Commits to master branch will
do the normal clang and gcc builds and run tests.

@ -0,0 +1,330 @@
# SEACAS [[Documentation](http://sandialabs.github.io/seacas-docs/)] [[Wiki](https://github.com/sandialabs/seacas/wiki)]
[![Codacy Badge](https://app.codacy.com/project/badge/Grade/838c6d845e9e4ce4a7cd02bd06b4d2ad)](https://www.codacy.com/gh/gsjaardema/seacas/dashboard?utm_source=github.com&amp;utm_medium=referral&amp;utm_content=gsjaardema/seacas&amp;utm_campaign=Badge_Grade)
[![Analysis Status](https://scan.coverity.com/projects/2205/badge.svg?flat=1)](https://scan.coverity.com/projects/gsjaardema-seacas)
[![Spack Version](https://img.shields.io/spack/v/seacas.svg)](https://spack.readthedocs.io/en/latest/package_list.html#seacas)
[![Appveyor Build](https://ci.appveyor.com/api/projects/status/pis4gok72yh0wwfs/branch/master?svg=true)](https://ci.appveyor.com/project/gsjaardema/seacas/branch/master)
[![SEACAS Docker](https://img.shields.io/github/actions/workflow/status/sandialabs/seacas/docker-seacas.yml?branch=master&label=SEACAS&logo=docker&logoColor=0db7ed)](https://hub.docker.com/r/mrbuche/seacas)
[![Exodus Docker](https://img.shields.io/github/actions/workflow/status/sandialabs/seacas/docker-exodus.yml?branch=master&label=Exodus&logo=docker&logoColor=0db7ed)](https://hub.docker.com/r/mrbuche/exodus)
[![Github Actions -- CI Serial](https://github.com/sandialabs/seacas/actions/workflows/build_test.yml/badge.svg)](https://github.com/sandialabs/seacas)
[![Github Actions -- CI Variants](https://github.com/sandialabs/seacas/actions/workflows/build_variant.yml/badge.svg)](https://github.com/sandialabs/seacas)
[![Github Actions -- CI Intel](https://github.com/sandialabs/seacas/actions/workflows/intel-build.yml/badge.svg)](https://github.com/sandialabs/seacas)
[![Github Actions -- CI MSYS2](https://github.com/sandialabs/seacas/actions/workflows/msys2.yml/badge.svg)](https://github.com/sandialabs/seacas)
* [Get the sources](#get-the-sources)
* [Build instructions](#build-instructions)
* [Configure, Build, and Install SEACAS](#configure-build-and-install-seacas)
* [Parallel Build](#parallel-build)
* [Testing](#testing)
* [Exodus](#exodus)
* [Trilinos](#trilinos)
* [SPACK](#spack)
* [Docker](#docker)
* [CMake Example Usage](#cmake-example-usage)
 * [Required Software: Mac](#required-software)
* [License](#license)
* [Contact information](#contact-information)
* NOTE: The old imake-based build has been removed.
## Get the sources
```sh
git clone https://github.com/sandialabs/seacas.git
```
This will create a directory that will be referred to as _seacas_ in
the instructions that follow. You can rename this directory to any
other name you desire. Set an environment variable pointing to this
location by doing:
```sh
cd seacas && export ACCESS=`pwd`
```
## Build instructions
### Automatically download and build dependencies (Third-Party Libraries)
There are a few externally developed third-party libraries (TPL) that
are required (or optional) to build SEACAS: HDF5, NetCDF, CGNS, MatIO,
Kokkos, and (if MPI set) PnetCDF libraries. You can build the
libraries using the `install-tpl.sh` script, or you can install them
manually as detailed in
[TPL-Manual-Install.md](TPL-Manual-Install.md).
* To use the script, simply type `./install-tpl.sh`
* The default behavior can be modified via a few environment variables:
| Variable | Values | Default | Description |
|-----------------|:---------------:|:-------:|-------------|
| INSTALL_PATH | path to install | pwd | Root of install path; default is current location |
| COMPILER        | clang, gnu, intel, ibm, nvidia | gnu | What compiler should be used for non-parallel build. Must have C++-17 capability. |
| MPI | YES, NO | NO | If YES, then build parallel capability |
| FORCE | YES, NO | NO | Force downloading and building even if lib is already installed. |
| BUILD | YES, NO | YES | Should TPLs be built and installed. |
| DOWNLOAD | YES, NO | YES | Should TPLs be downloaded. |
| USE_PROXY | YES, NO | NO | Sandia specific -- use proxy when downloading tar files |
| DEBUG           | YES, NO         | NO      | Build debug executable; default is optimized |
| SHARED | YES, NO | YES | Build shared libraries if YES, archive (.a) if NO |
| CRAY | YES, NO | YES | Is this a Cray system (special parallel options) |
| NEEDS_ZLIB | YES, NO | NO | If system does not have zlib installed, download and install it (HDF5 compression). |
| USE\_ZLIB\_NG | YES, NO | NO | Should the improved [zlib-ng](https://github.com/zlib-ng/zlib-ng) library be used to provide ZLIB capability |
| NEEDS_SZIP | YES, NO | NO | If system does not have szip installed, download and install it (HDF5 compression). |
| USE\_64BIT\_INT | YES, NO | NO | In CGNS, enable 64-bit integers |
| CGNS | YES, NO | YES | Should CGNS TPL be built. |
| MATIO | YES, NO | YES | Should matio TPL be built. |
| METIS | YES, NO | NO | Should metis TPL be built (parallel decomposition). |
| PARMETIS | YES, NO | NO | Should parmetis TPL be built (parallel decomposition). |
| ADIOS2 | YES, NO | NO | Should adios2 TPL be built. |
| CATALYST2 | YES, NO | NO | Should catalyst 2 TPL be built. |
| KOKKOS | YES, NO | NO | Should Kokkos TPL be built. |
| GNU_PARALLEL | YES, NO | YES | Should GNU parallel script be built. |
| FMT | YES, NO | YES | Should Lib::FMT TPL be built. |
| H5VERSION | V114, V110, V18 | V110 | Use HDF5-1.14.X, HDF5-1.10.X or HDF5-1.8.X |
| BB | YES, NO | NO | Enable Burst Buffer support in PnetCDF |
| JOBS | {count} | 2 | Number of "jobs" used for simultaneous compiles |
| SUDO | "" or sudo | "" | If need to be superuser to install |
* NOTE: The `DOWNLOAD` and `BUILD` options can be used to download all TPL source; move to a system with no outside internet access and then build/install the TPLs.
* The arguments can either be set in the environment as: `export COMPILER=gnu`, or passed on the script invocation line: `COMPILER=gnu ./install-tpl.sh`
## Configure, Build, and Install SEACAS
At this time, you should have all external TPL libraries built and
installed into `${ACCESS}/lib` and `${ACCESS}/include`. You are now ready
to configure the SEACAS CMake build.
* `cd $ACCESS`
* `mkdir build`
* `cd build`
 * edit the `${ACCESS}/cmake-config` file and adjust compilers and other settings as needed.
* enter the command `../cmake-config` and cmake should configure everything for the build.
* `make && make install`
* If everything works, your applications should be in `${ACCESS}/bin`
* To install in a different location, do `INSTALL_PATH={path_to_install} ../cmake-config`
* The default behavior can be modified via a few environment variables:
| Variable | Values | Default | Description |
|-----------------|:---------------:|:-------:|-------------|
| INSTALL_PATH | path to install | pwd | Root of install path; default is current location |
| BUILDDIR | {dir} | `pwd`/build | Directory to do config and build |
| COMPILER | clang, gnu, intel, ibm | gnu | What compiler should be used for non-parallel build |
| SHARED          | YES, NO         | YES     | Build and use shared libraries if YES |
| APPLICATIONS | YES, NO | YES | Should all SEACAS applications be built (see `cmake-config`) |
| LEGACY | YES, NO | YES | Should the legacy SEACAS applications be built (see `cmake-config`) |
| FORTRAN | YES, NO | YES | Should fortran libraries and applications be built (see `cmake-config`) |
| ZOLTAN | YES, NO | YES | Should zoltan library and nem_slice be built |
| BUILD_TYPE | debug, release | release | what type of build |
| MODERN | YES, NO | NO | Use "modern" CMake configuration files for netCDF and HDF5 |
| DEBUG | -none- | | If specified, then do a debug build. Can't be used with `BUILD_TYPE` |
| HAVE_X11 | YES, NO | YES | Does the system have X11 libraries and include files; used for blot, fastq |
| THREADSAFE | YES, NO | NO | Compile a thread-safe IOSS and Exodus library |
| USE_SRUN | YES, NO | NO | If MPI enabled, then use srun instead of mpiexec to run parallel tests |
| DOXYGEN | YES, NO | NO | Run doxygen on several packages during build to generate documentation |
| OMIT_DEPRECATED | YES, NO | NO | Should the deprecated code be omitted; NO will enable deprecated code |
| EXTRA_WARNINGS | YES, NO | NO | Build with extra warnings enabled; see list in `cmake-config` |
| SANITIZER | many | NO | If not NO, build using specified sanitizer; see list in `cmake-config` |
| GENERATOR | many | "Unix Makefiles" | what generator should CMake use; see cmake doc |
* The arguments can either be set in the environment as: `export COMPILER=gnu`, or passed on the script invocation line: `COMPILER=gnu ./install-tpl.sh`
## Parallel Build
For some areas of use, a parallel version of SEACAS is required. This
will build a "parallel-aware" version of the exodus library and a
parallel version of the Ioss library.
The only modification to the serial build described above is to make
sure that the mpicc parallel C compiler is in your path and to add the
`MPI=YES` argument to the `install-tpl.sh` script invocation when
building the TPLs. For example:
```sh
MPI=YES ./install-tpl.sh
```
This will download all requested libraries and build them with
parallel capability enabled (if applicable). You can then continue
with the steps outlined in the previous section.
## Testing
There are a few unit tests for zoltan, exodus, ioss, and aprepro that can be run via `make test` or `ctest` if you configured with `-D Seacas_ENABLE_TESTS=YES`.
There is also a system-level test that just verifies that the applications can read and write exodus files correctly. This test runs off of the installed applications. To run do:
* `make install`
* `cd ../SEACAS-Test`
* `make clean; make`
This will run through several of the SEACAS applications creating a mesh (exodus file) and then performing various manipulations on the mesh. If the test runs successfully, there is some hope that everything has built and is running correctly.
## Exodus
If you only want the exodus library, then follow most of the above instructions with the following exceptions:
* Clone entire source tree as above. (There used to be a zip file, but difficult to keep up-to-date)
* You only need the netcdf and optionally hdf5 libraries
* Use the `cmake-exodus` file instead of `cmake-config`.
* This will build, by default, a shared exodus library and also install the exodus.py and exomerge.py Python interfaces.
## Trilinos
Although SEACAS is included in Trilinos
(https://github.com/trilinos/Trilinos), it is also possible to use the
SEACAS code from this repository to override the possibly older SEACAS
code in Trilinos. The steps are to directly pull SEACAS from github
under Trilinos and then build SEACAS under Trilinos with that version
using `SEACAS_SOURCE_DIR_OVERRIDE`. Here is how you do it:
```sh
cd Trilinos/
git clone https://github.com/sandialabs/seacas.git
cd BUILD/
cmake -DSEACAS_SOURCE_DIR_OVERRIDE:STRING=seacas/packages/seacas -DTrilinos_ENABLE_SEACAS [other options] ..
```
## SPACK
The SPACK package manager (https://spack.io/) can be used to install
SEACAS and all dependent third-party libraries. SEACAS is a supported
package in SPACK as of December 2018.
```sh
git clone https://github.com/spack/spack.git
. spack/share/spack/setup-env.sh
spack install seacas~mpi # Serial build (most common)
```
## Docker
An Ubuntu-based Docker image, with SEACAS built and installed, is available on [Docker Hub](https://hub.docker.com/r/mrbuche/seacas).
```sh
docker pull mrbuche/seacas
```
SEACAS is located in `/seacas` when running the container. There is also a similar image available on [Docker Hub](https://hub.docker.com/r/mrbuche/exodus) with only Exodus built and installed.
```sh
docker pull mrbuche/exodus
```
## CMake Example Usage
A simple example of using the SEACAS Exodus library in your external project. Here is the CMakeLists.txt file:
```sh
cmake_minimum_required(VERSION 3.1...3.26)
project(ExodusCMakeExample VERSION 1.0 LANGUAGES C Fortran)
#### C ####
find_package(SEACASExodus CONFIG)
add_executable(ExodusWriteC ExodusWrite.c)
target_link_libraries(ExodusWriteC PRIVATE SEACASExodus::all_libs)
#### FORTRAN #####
IF ("${CMAKE_Fortran_COMPILER_ID}" MATCHES "GNU")
SET(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -fcray-pointer -fdefault-real-8 -fdefault-integer-8 -fno-range-check")
ELSEIF ("${CMAKE_Fortran_COMPILER_ID}" MATCHES "XL")
SET(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -qintsize=8 -qrealsize=8")
ELSEIF ("${CMAKE_Fortran_COMPILER_ID}" MATCHES "Cray")
SET(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -sdefault64")
ELSE()
SET(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -r8 -i8")
ENDIF()
find_package(SEACASExodus_for CONFIG)
add_executable(ExodusReadFor ExodusRead.f)
target_link_libraries(ExodusReadFor PRIVATE SEACASExodus_for::all_libs)
```
The `cmake-use-example` directory contains this sample
`CMakeLists.txt` file and a couple C and Fortran files which provide
an example of how to build and link a C or Fortran program with the
Exodus library installed as part of a build of this package.
To use this, copy the contents of the directory to your own filespace
and modify the contents as needed. The example provides a C
executable and a Fortran Executable which both are linked to the
Exodus library.
To configure and build, you would do something like:
```sh
mkdir build; cd build
CMAKE_PREFIX_PATH={path_to_root_of_seacas_install} cmake ..
make
```
And you would then get `ExodusWriteC` and `ExodusReadFor` compiled and linked against the Exodus library.
## Required Software
The SEACAS system requires that there be some libraries and
applications already existing on the chosen system prior to building
SEACAS and its required Third-Party Libraries.
These include:
* The `git` application is used to access the SEACAS and TPL git
repositories
* CMake is used to generate the build system.
 * C, C++, and (optionally) Fortran compilers.  The C compiler must
   support the C11 standard and C++ must support C++17.  GNU, Clang,
   Intel, and Cray compilers are supported and tested regularly.
* For parallel capability, an MPI library is needed. We have used
openmpi, mpich, mvapich, intel, and cray MPI libraries. It is
recommended to use as current an MPI library as possible.
* Automake is used to configure some of the TPL builds
* wget is needed to download some of the TPL library source code.
* python is required to use the `exodus.py` and `exomerge.py`
Python interfaces to Exodus databases. Python3 is recommended.
* To use the `blot` and `fastq` applications, an X11 development
environment is needed.
* Flex and Bison are optional if you are developing new capabilities
in aprepro.
* M4 is needed to build the netCDF library.
### Mac
On a mac system, I use the `brew` system which provides all of the
applications listed above. The X11 system I use is `XQuartz`. The
Mac also requires `XCode`.
### Linux
On an ubuntu system, the following is used to set up the basic
packages needed to compile SEACAS:
```sh
apt install -y libaec-dev zlib1g-dev automake autoconf \
 libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake \
 libzip-dev openmpi-bin libopenmpi-dev
```
### Windows
On windows, I have used the following packages for MINGW64:
```sh
git mingw-w64-x86_64-toolchain make mingw-w64-x86_64-hdf5 \
mingw-w64-x86_64-cgns mingw-w64-x86_64-netcdf mingw-w64-x86_64-zlib \
mingw-w64-x86_64-gcc-fortran mingw-w64-x86_64-gcc-libgfortran \
mingw-w64-x86_64-cmake mingw-w64-x86_64-fmt
```
There is also a Visual Studio build performed at each commit to the
SEACAS git repository. See the file `.appveyor.yml` for more details.
## License
SEACAS is licensed under the Modified BSD License. See the [LICENSE](LICENSE) file for details.
The following externally-developed software routines are used in some of the SEACAS applications and are under
a separate license:
| Routine | Where Used | License |
|---------|-------------|:-------:|
| getline | `packages/seacas/libraries/aprepro_lib/apr_getline_int.c` | [MIT](https://opensource.org/licenses/MIT) |
| getline | `packages/seacas/libraries/suplib_c/getline.c` | [BSD](https://opensource.org/licenses/BSD-3-Clause) |
| [GetLongOpt](https://searchcode.com/codesearch/view/64130032/) | `packages/seacas/libraries/suplib_cpp/GetLongOpt.C` | public domain |
| [adler hash](https://en.wikipedia.org/wiki/Adler-32) | `packages/seacas/libraries/suplib_c/adler.c` | [zlib](https://opensource.org/licenses/zlib) |
| [MurmurHash](https://github.com/aappleby/smhasher) | `packages/seacas/libraries/ioss/src/Ioss_FaceGenerator.C` | public domain |
| [json include file](http://jsoncpp.sourceforge.net) | `packages/seacas/libraries/ioss/src/visualization/` | [MIT](https://opensource.org/licenses/MIT) |
| [terminal_color](https://github.com/matovitch/trmclr) | `packages/seacas/libraries/aprepro_lib` | [zlib](https://opensource.org/licenses/zlib) |
| [Tessil Hash](https://github.com/Tessil/) | `packages/seacas/libraries/ioss/src/hash` | [MIT](https://opensource.org/licenses/MIT) |
| [doctest](https://github.com/doctest/doctest) | `packages/seacas/libraries/ioss/src/doctest.h` | [MIT](https://opensource.org/licenses/MIT) |
| [pdqsort](https://github.com/orlp/pdqsort) | `packages/seacas/libraries/ioss/src` | [Zlib License](https://github.com/orlp/pdqsort/blob/master/license.txt) |
## Contact information
Greg Sjaardema (<gsjaardema@gmail.com>, <gdsjaar@sandia.gov>)

@ -0,0 +1,109 @@
### SEACAS Individual Contributor License Agreement (“Agreement”)
Thank you for your interest in SEACAS (the “Project”). In order to
clarify the intellectual property license granted with Contributions
from any person or entity, the SEACAS copyright Holders (the
“Copyright Holders”) must have a Contributor License Agreement ("CLA")
on file that has been signed by each Contributor, indicating agreement
to the license terms below. This license is for your protection as a
Contributor as well as the protection of the Copyright Holders and
their users; it does not change your rights to use your own
Contributions for any other purpose.
You accept and agree to the following terms and conditions for Your
present and future Contributions submitted to the Copyright
Holders. In return, the Copyright Holders shall not use Your
Contributions in a way that is contrary to the public benefit. Except
for the license granted herein to the Copyright Holders and recipients
of software distributed by the Copyright Holders, You reserve all
right, title, and interest in and to Your Contributions.
1. Definitions.
"You" (or "Your") shall mean the copyright owner or legal entity
authorized by the copyright owner that is making this Agreement with
the Copyright Holders. For legal entities, the entity making a
Contribution and all other entities that control, are controlled by,
or are under common control with that entity are considered to be a
single Contributor. For the purposes of this definition, "control"
means (i) the power, direct or indirect, to cause the direction or
management of such entity, whether by contract or otherwise, or (ii)
ownership of fifty percent (50%) or more of the outstanding
shares, or (iii) beneficial ownership of such entity.
"Contribution" shall mean any original work of authorship, including
any modifications or additions to an existing work, that is
intentionally submitted by You to the Copyright Holders for inclusion
in, or documentation of, any of the products owned or managed by the
Copyright Holders (the "Work"). For the purposes of this definition,
"submitted" means any form of electronic, verbal, or written
communication sent to the Copyright Holders or their representatives,
including but not limited to communication on electronic mailing
lists, source code control systems, and issue tracking systems that
are managed by, or on behalf of, the Copyright Holders for the purpose
of discussing and improving the Work, but excluding communication that
is conspicuously marked or otherwise designated in writing by You as
"Not a Contribution."
2. Grant of Copyright License. Subject to the terms and conditions of
this Agreement, You hereby grant to the Copyright Holders and to
recipients of software distributed by the Copyright Holders a
perpetual, worldwide, non-exclusive, no-charge, royalty-free,
irrevocable copyright license to reproduce, prepare derivative works
of, publicly display, publicly perform, sublicense, and distribute
Your Contributions and such derivative works.
3. Grant of Patent License. Subject to the terms and conditions of
this Agreement, You hereby grant to the Copyright Holders and to
recipients of software distributed by the Copyright Holders a
perpetual, worldwide, non-exclusive, no-charge, royalty-free,
irrevocable (except as stated in this section) patent license to make,
have made, use, offer to sell, sell, import, and otherwise transfer
the Work, where such license applies only to those patent claims
licensable by You that are necessarily infringed by Your
Contribution(s) alone or by combination of Your Contribution(s) with
the Work to which such Contribution(s) was submitted. If any entity
institutes patent litigation against You or any other entity
(including a cross-claim or counterclaim in a lawsuit) alleging that
your Contribution, or the Work to which you have contributed,
constitutes direct or contributory patent infringement, then any
patent licenses granted to that entity under this Agreement for that
Contribution or Work shall terminate as of the date such litigation is
filed.
4. You represent that you are legally entitled to grant the above
license. If your employer(s) has rights to intellectual property that
you create that includes your Contributions, you represent that you
have received permission to make Contributions on behalf of that
employer, that your employer has waived such rights for your
Contributions to the Copyright Holders, or that your employer has
executed a separate Corporate CLA with the Copyright Holders.
5. You represent that each of Your Contributions is Your original
creation (see section 7 for submissions on behalf of others). You
represent that Your Contribution submissions include complete details
of any third-party license or other restriction (including, but not
limited to, related patents and trademarks) of which you are
personally aware and which are associated with any part of Your
Contributions.
6. You are not expected to provide support for Your Contributions,
except to the extent You desire to provide support. You may provide
support for free, for a fee, or not at all. Unless required by
applicable law or agreed to in writing, You provide Your Contributions
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied, including, without limitation, any
warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY,
or FITNESS FOR A PARTICULAR PURPOSE.
7. Should You wish to submit work that is not Your original creation,
You may submit it to the Copyright Holders separately from any
Contribution, identifying the complete details of its source and of
any license or other restriction (including, but not limited to,
related patents, trademarks, and license agreements) of which you are
personally aware, and conspicuously marking the work as "Submitted on
behalf of a third-party: \[named here\]".
8. You agree to notify the Copyright Holders of any facts or
circumstances of which you become aware that would make these
representations inaccurate in any respect.

Binary file not shown.

Binary file not shown.

Binary file not shown.

@ -0,0 +1,20 @@
$ {include("common.h")}
$ {I=2}
Title
Base - spline warped to 3d
Point 1 0.0 0.0
Point 2 {FR1 - rad1} 0.0
Point 3 {0.0} {FR1 - rad1}
Line 1 str 1 2 0 {[I*28]} {ratio = 0.95^(1/I)}
Line 2 circ 2 3 1 {[I*90]}
Line 3 str 1 3 0 {[I*28]} {ratio}
Region 1 {FireSet} -1 -2 -3
Scheme 1 x4s
NodeBC 11 2
NodeBC 1 1
NodeBC 100 3
Exit

@ -0,0 +1,34 @@
$ {include("common.h")}
$ {len1 = sqrt(8.5^2 - 7.428^2)}
$ {len2 = FR1 - rad1 - len1}
$ {th = 0.090}
spline 3
front
LEFT 0.0
right 0.0
$ {intr = 73} {_i=0}
{Loop(73)}
{_x = _i/(72) * len1} {sqrt( (8.5+th)^2 - _x^2)} $ {_i++}
{endloop}
$ {_i=1}
{loop(27)}
{_x = len1 + (_i/27) * len2} {7.428+.09} $ {_i++}
{endloop}
Back
LEFT 0.0
right 0.0
$ {intr = 151} {_i=0}
{Loop(151)}
{_x = _i/(150) * len1} {sqrt( (8.501)^2 - _x^2)} $ {_i++}
{endloop}
$ {_i=1}
{loop(100)}
{_x = len1 + (_i/100) * len2} {7.428} $ {_i++}
{endloop}
end
$ sideset back {FireBaseS}
sideset back 10
$ nodeset front 92
revolve x -90
shift y -7.338
EXIT

@ -0,0 +1,160 @@
{include("common.h")}
Title
Outer edge for base
$ {intv = 1}
$ {gap = T8-T4}
$ {ch = .015}
Point 1 {FR1 - rad1} 0.0
Point 10 {FR5} {T8/2+gap/3}
Point 17 {FR4+ch} {T8/2+gap/3}
Point 20 {FR4} {T8/2+gap/3}
Point 23 {FR4} {T8/2+gap/3+ch}
Point 30 {FR4} {T8}
Point 40 {FR3} {T8}
Point 50 {FR3} {-T6+T5}
Point 60 {FR2} {-T6+T5}
Point 70 {FR2} {-T6}
Point 80 {R3} {-T6}
Point 90 {R3} {-T7}
Point 95 {FR1+.05} {-T7}
$Point 100 {FR1} {-T7}
Point 105 {FR1} {-T7+.05}
Point 110 {FR1} {0.0}
Point 120 {FR1-rad1} {T8}
Point 130 {FR1-rad1} {T8+T8/2}
Point 140 {FR5} {T8+T8/2}
Line 10 str 10 17 0 {[intv * 1]}
Line 17 str 17 23 0 {[intv * 0]}
Line 20 str 23 30 0 {[intv * 0]}
Line 30 str 30 40 0 {[intv * 0]}
Line 40 str 40 50 0 {[intv * 0]}
Line 50 str 50 60 0 {[intv * 0]}
Line 60 str 60 70 0 {[intv * 0]}
Line 70 str 70 80 0 {[intv * 0]}
Line 80 str 80 90 0 {[intv * 0]}
Line 90 str 90 95 0 {[intv * 0]}
Line 95 str 95 105 0 {[intv * 0]}
Line 100 str 105 110 0 {[intv * 0]}
Line 110 circ 110 120 1 {[intv * 0]}
Line 120 str 120 130 0 {[intv * 0]}
Line 130 str 130 140 0 {[intv * 0]}
Line 140 str 140 10 0 {[intv * 4]}
Region 10 {FireSet} -10 -17 -20 -30 -40 -50 -60 -70 -80 -90 -95 -100 -110 -120 -130 -140
$ Tape Joint Tapes {TJw = .100} {TJh = .200}
Barset 11 {FireShell} 110 130
Point 200 {FR2 + TJw} {-T6}
Point 203 {FR2 + TJw-ch} {-T6}
Point 207 {FR2+ch} {-T6}
Point 210 {FR2} {-T6}
Point 213 {FR2} {-T6+ch}
Point 217 {FR2} {-T6 + TJh-ch}
Point 220 {FR2} {-T6 + TJh}
Point 223 {FR2+ch} {-T6 + TJh}
Point 227 {FR2 + TJw-ch} {-T6 + TJh}
Point 230 {FR2 + TJw} {-T6 + TJh}
Point 233 {FR2 + TJw} {-T6 + TJh-ch}
Point 237 {FR2 + TJw} {-T6+ch}
Line 203 str 203 207 0 {[intv * 0]}
Line 207 str 207 213 0 {[intv * 0]}
Line 213 str 213 217 0 {[intv * 0]}
Line 217 str 217 223 0 {[intv * 0]}
Line 223 str 223 227 0 {[intv * 0]}
Line 227 str 227 233 0 {[intv * 0]}
Line 233 str 233 237 0 {[intv * 0]}
Line 237 str 237 203 0 {[intv * 0]}
Region 20 {TapeJoint} -203 -207 -213 -217 -223 -227 -233 -237
$ Case - Near Tape Joint
Point 300 {R6} {-T10}
Point 305 {R6} {-T1}
Point 310 {R6} {T8/2}
Point 320 {R5} {T8/2}
Point 327 {R5} {T8-ch}
Point 330 {R5} {T8}
Point 333 {R5-ch} {T8}
Point 337 {FR3+ch} {T8}
Point 340 {FR3} {T8}
Point 343 {FR3} {T8-ch}
Point 347 {FR3} {-T3+ch}
Point 350 {FR3} {-T3}
Point 353 {FR3+ch} {-T3}
Point 360 {R4} {-T3}
Point 370 {R4} {-(T2+T3)}
Point 380 {R3} {-(T2+T3)}
Point 390 {R3} {-T1}
Point 394 {R3} {-T9}
Point 398 {R3} {-T10}
Line 300 str 300 305 0 {[intv * 0]}
Line 305 str 305 310 0 {[intv * 0]}
Line 310 str 310 320 0 {[intv * 0]}
Line 320 str 320 327 0 {[intv * 0]}
Line 327 str 327 333 0 {[intv * 0]}
Line 333 str 333 337 0 {[intv * 0]}
Line 337 str 337 343 0 {[intv * 0]}
Line 340 str 343 347 0 {[intv * 0]}
Line 347 str 347 353 0 {[intv * 0]}
Line 350 str 353 360 0 {[intv * 0]}
Line 360 str 360 370 0 {[intv * 0]}
Line 370 str 370 380 0 {[intv * 0]}
Line 380 str 380 390 0 {[intv * 0]}
Line 390 str 390 400 0 {[intv * 0]}
Line 394 str 450 394 0 {[intv * 0]}
Line 396 str 394 398 0 {[intv * 0]}
Line 398 str 398 300 0 {[intv * 0]}
Line 392 str 390 305 0 {[intv * 0]}
Region 30 {Case_} -305 -310 -320 -327 -333 -337 -340 -347 -350 -360 -370 -380 -392
Region 31 {Case_} -300 -392 -390 -450 -394 -396 -398
$ Locking Ring
Point 400 {(R2+R3)/2} {-.4180}
Point 410 {R2} {-T7}
Point 420 {R2} {-T6+TJh/2}
Point 430 {R1} {-T6+TJh/2}
Point 440 {R1} {-T9}
Point 450 {(R2+R3)/2} {-T9}
Line 400 str 400 410 0 {[intv * 0]}
Line 410 str 410 420 0 {[intv * 0]}
Line 420 str 420 430 0 {[intv * 0]}
Line 430 str 430 440 0 {[intv * 0]}
Line 440 str 440 450 0 {[intv * 0]}
Line 450 str 450 400 0 {[intv * 0]}
Region 40 {Ring} -400 -410 -420 -430 -440 -450
$ {size = .0275}
Size {size} 10
Size {size} 20
Size {size} 30
Size {2*size} 31
Size {2*size} 40
Size {size} 50
Scheme 10 x4s
$ Scheme 20 x4s
Scheme 30 x4s
Scheme 31 x4s
Scheme 40 x4s
Scheme 50 x4s
SideBC 10 10 17 20 30 40 50 60 70 80 90 95 100 110
SideBC 20 203 207 213 217 223 227 233 237
SideBC 30 305 310 320 327 333 337 340 347 350 360 370 380 390 400 410 420
NodeBC 11 120
NodeBC 1 520
NodeBC 10 398
SideBC 100 300 305
SideBC 102 140
SideBC 200 530 540 130

@ -0,0 +1,5 @@
Rotate 180 90 1 0
list block
NodeSet Front 1
NodeSet Back 100
Exit

@ -0,0 +1,22 @@
$ {include("common.h")}
$ {I=2}
Title
Base - spline warped to 3d
Point 1 0.0 0.0
Point 2 {FR1 - rad1} 0.0
Point 3 {0.0} {FR1 - rad1}
Line 1 str 1 2 0 {[I*28]} {ratio = 0.95^(1/I)}
Line 2 circ 2 3 1 {[I*90]}
Line 3 str 1 3 0 {[I*28]} {ratio}
Region 1 {FireShell} -1 -2 -3
Scheme 1 x4s
NodeBC 11 2
NodeBC 1 1
NodeBC 100 3
NodeBC 1001 1
NodeBC 1100 3
Exit

@ -0,0 +1,20 @@
$ {include("common.h")}
$ {len1 = sqrt(8.5^2 - 7.428^2)}
$ {len2 = FR1 - rad1 - len1}
$ {th = 0.090}
spline {th}
LEFT 0.0
right 0.0
$ {intr = 73} {_i=0}
{Loop(73)}
{_x = _i/(72) * len1} {sqrt( (8.5+th/2)^2 - _x^2)} $ {_i++}
{endloop}
$ {_i=1}
{loop(27)}
{_x = len1 + (_i/27) * len2} {7.428+th/2} $ {_i++}
{endloop}
end
sideset back 11
revolve x -90
shift y -7.338
EXIT

@ -0,0 +1,24 @@
baseline_o.g3
base.g3
Shift Y -0.090
EQUIV 11 11 1.0e-4 closest
equiv end
nset
delete 11
up
add
rigid_0.g3
equiv end
add
cap.g3
Shift Y -0.090
equiv end
nset
combine 1111 1112
up
Title
change
SEACAS Test Suite Tape Joint
up
FINISH
baseline.g3

@ -0,0 +1,2 @@
Shift Y -0.090
Exit

@ -0,0 +1,166 @@
Title
Baseline TapeJoint Design
{NOECHO}{Units("in-lbf-s")}
{ECHO}
$ {FireSet = 1}
$ {TapeJoint = 2}
$ {Case_ = 3}
$ {Ring = 4}
$ {Rigid = 5}
$ {intv = 1}
$ {FR1 = 11.765 / 2}
$ {FR3 = 11.518 / 2}
$ {FR2 = FR3 + 0.041}
$ {R1 = 10.640 / 2}
$ {R2 = 138.41~mm}
$ {R3 = 143.00~mm}
$ {R4 = 142.00~mm}
$ {R5 = FR3 - .006}
$ {R6 = R5 - .041}
$ {R7 = 132.5~mm}
$ {tapeH = 0.200}
$ {tapeW = .08}
$ {T1 = 0.178}
$ {T2 = 0.391}
$ {T3 = 0.673}
$ {T4 = T2 - tapeH}
$ {T5 = T4 + .213}
$ {T7 = .03}
$ {T8 = T7 + .3818}
$ {T6 = T8 + .080}
$ {T9 = .090}
$ {T10= 0.800}
$ {rad1 = 0.030}
$ {rad2 = 0.090}
Point 1 {FR3 - rad1} {0.0 - rad1}
Point 2 {R4 + rad2} {-(T3+rad2)}
Point 10 {FR1} {-T3}
Point 20 {FR1} {T9}
Point 30 {5.415} {T9}
Point 40 {5.415} 0.0
Point 50 {FR3 - rad1} {0.0}
Point 60 {FR3} {0.0 - rad1}
Point 70 {FR3} {-T1}
Point 80 {FR2} {-T1}
Point 90 {FR2} {-T2}
Point 100 {FR3} {-T2}
Point 110 {FR3} {-T3}
Line 10 str 10 20 0 {[intv * 0]}
Line 20 str 20 30 0 {[intv * 0]}
Line 30 str 30 40 0 {[intv * 3]}
Line 40 str 40 50 0 {[intv * 0]}
Line 50 circ 50 60 -1 {[intv * 0]}
Line 60 str 60 70 0 {[intv * 0]}
Line 70 str 70 80 0 {[intv * 0]}
Line 80 str 80 90 0 {[intv * 0]}
Line 90 str 90 100 0 {[intv * 0]}
Line 100 str 100 110 0 {[intv * 0]}
Line 110 str 110 10 0 {[intv * 4]}
Region 10 {FireSet} -10 -20 -30 -40 -50 -60 -70 -80 -90 -100 -110
Point 200 {FR2} {-T2}
Point 210 {FR2} {-T4}
Point 215 {FR2 - (1-.1) * tapeW} {-T4}
Point 220 {FR2 - tapeW} {-T4}
Point 225 {FR2 - tapeW} {-T4-.1 * tapeW}
Point 230 {FR2 - tapeW} {-T2}
Line 200 str 200 210 0 {[intv * 6]}
Line 210 str 210 215 0 {[intv * 3]}
Line 215 str 215 225 0 {[intv * 1]}
Line 220 str 225 230 0 {[intv * 6]}
Line 230 str 230 200 0 {[intv * 4]}
Region 20 {TapeJoint} -200 -210 -215 -220 -230
Point 310 {R4} {-T10}
Point 320 {R4} {-(T3 + rad2)}
Point 330 {R4 + rad2} {-T3}
Point 340 {R5} {-T3}
Point 350 {R5} {-T5}
Point 360 {R6} {-T5}
Point 370 {R6} {-T4}
Point 375 {R5-.1*tapeW} {-T4}
Point 380 {R5} {-T4}
Point 385 {R5} {-T4+.1*tapeW}
Point 390 {R5} {0.0 - rad1}
Point 400 {R5 - rad1} {0.0}
Point 410 {R3} {0.0}
Point 420 {R3} {-T7}
Point 430 {R3} {-T8}
Point 440 {R3} {-T6}
Point 450 {R2} {-T6}
Point 460 {R2} {-T10}
Line 310 str 310 320 0 {[intv * 0]}
Line 320 circ 320 330 -2 {[intv * 4]}
Line 330 str 330 340 0 {[intv * 2]}
Line 340 str 340 350 0 {[intv * 0]}
Line 350 str 350 360 0 {[intv * 0]}
Line 360 str 360 370 0 {[intv * 0]}
Line 370 str 370 375 0 {[intv * 0]}
Line 375 str 375 385 0 {[intv * 0]}
Line 380 str 385 390 0 {[intv * 0]}
Line 390 circ 390 400 1 {[intv * 0]}
Line 400 str 400 410 0 {[intv * 0]}
Line 410 str 410 420 0 {[intv * 0]}
Line 420 str 420 430 0 {[intv * 0]}
Line 430 str 430 440 0 {[intv * 4]}
Line 440 str 440 450 0 {[intv * 5]}
Line 450 str 450 460 0 {[intv * 7]}
Line 460 str 460 310 0 {[intv * 4]}
Region 30 {Case_} -310 -320 -330 -340 -350 -360 -370 -375 -380 -390 -400 -410 -420 -430 -440 -450 -460
Point 610 {R7} {-T8}
Point 620 {R7} {-T7}
Point 600 {R1} {-T8}
Point 650 {R1} {-T10}
Line 599 str 430 600 0 {[intv * 0]}
Line 600 str 600 610 0 {[intv * 0]}
Line 610 str 610 620 0 {[intv * 0]}
Line 620 str 620 420 0 {[intv * 0]}
Line 640 str 600 650 0 {[intv * 0]}
Line 650 str 650 460 0 {[intv * 2]}
Region 60 {Ring} -599 -600 -610 -620 -420
Region 61 {Ring} -599 -640 -650 -450 -440 -430
NodeBC 10 460 650
NodeBC 1 520
NodeBC 11 30
SideBC 10 40 50 60 70 80 90 100 110
SideBC 20 200 210 215 220 230
SideBC 30 340 350 360 370 375 380 390 400 410 620 610
$ SideBC 61 620 610
SideBC 100 10
$ {size = .025}
Size {size} 10
Size {size} 20
Size {size} 30
Size {size} 50
Size {2*size} 60
Size {2*size} 61
Scheme 10 x4s
Scheme 20 m
Scheme 30 x4s
Scheme 50 x4s
Scheme 60 x4s
Scheme 61 x4s

@ -0,0 +1,17 @@
$ {include("common.h")}
$ {I=0.5}
Title
Base - spline warped to 3d
Point 1 0.0 0.0
Point 2 {FR1 - rad1} 0.0
Point 3 {0.0} {FR1 - rad1}
Line 1 str 1 2 0 {[I*28]} ${ratio = 0.95^(1/I)}
Line 2 circ 2 3 1 {[I*90]}
Line 3 str 1 3 0 {[I*28]} ${ratio}
Region 1 {Cap_Rigid} -1 -2 -3
Scheme 1 x4s
Exit

@ -0,0 +1,28 @@
$ {include("common.h")}
$ {len1 = sqrt(8.5^2 - 7.428^2)}
$ {len2 = FR1 - rad1 - len1}
$ {th = 0.090}
$ There is a 5 mm (.2 in) gap between the bottom of the fireset
$ and the top of the cap. This gap is filled with silicon rubber.
$ Assume that rubber can compress to 50% of thickness and then lockup.
$ Use .2/2 as gap.
spline 1
Angular
front
LEFT 0.0
right 0.0
0.0 {8.5 - .2/2}
40.0 {8.5 - .2/2}
Back
LEFT 0.0
right 0.0
0.0 {8.5 - .2/2 - th}
40.0 {8.5 - .2/2 - th}
end
sideset front 103
nodeset front 1111
nodeset back 1112
revolve x -90
shift y -7.338
EXIT

@ -0,0 +1,34 @@
${rad1 = 0.090}
${FR1 = 11.010 / 2}
${FR2 = 11.169 / 2}
${FR3 = 11.267 / 2}
${FR4 = (11.500+.004) / 2}
${FR5 = 11.600 / 2}
${FR6 = 11.765 / 2}
${R1 = 10.640 / 2}
${R2 = 10.980 / 2}
${R3 = 11.275 / 2}
${R4 = 11.394 / 2}
${R5 = (11.492-.003) / 2}
${R6 = 11.620 / 2}
${T1 = 0.380}
${T2 = 0.210}
${T3 = 0.040}
${T4 = 0.070}
${T5 = 0.210}
${T6 = 0.240}
${T7 = 0.350}
${T8 = 0.090}
${T9 = 0.932} $ Scaled from drawing
${T10 = 1.010} $ Scaled from drawing
${T11 = T6}
${FireSet = 1}
${FireShell = 11}
${TapeJoint = 2}
${Case_ = 3}
${Ring = 4}
${Rigid = 5}
${Cap_Rigid = 6}

@ -0,0 +1,33 @@
$ Radii for Fireset Base
{ECHO(OFF)}
{rad1 = 0.090}
{FR1 = 11.010 / 2}
{FR2 = 11.169 / 2}
{FR3 = 11.267 / 2}
{FR4 = 11.474 / 2}
{FR5 = 11.600 / 2}
{R1 = 10.640 / 2}
{R2 = 10.980 / 2}
{R3 = 11.275 / 2}
{R4 = 11.394 / 2}
{R5 = 11.460 / 2}
{R6 = 11.620 / 2}
{T1 = 0.530}
{T2 = 0.210}
{T3 = 0.220}
{T4 = 0.000}
{T5 = 0.210}
{T6 = 0.420}
{T7 = 0.505}
{T8 = 0.090}
{T9 = 0.932+.150} $ Scaled from drawing
{T10 = 1.010+.150} $ Scaled from drawing
{T11 = .075 - T8}
{FireSet = 1}
{TapeJoint = 2}
{Case_ = 3}
{Ring = 4}
{Rigid = 5}

@ -0,0 +1,21 @@
name block 1 'Outer_Case'
name block 2 'Tape'
name block 3 'Inner_Case'
name block 4 'Cargo'
name block 5 'Rigid_Wall'
name block 6 'Cap'
name nset 100 'No_X'
name nset 10 'No_Y'
name nset 1 'No_Z'
name nset 111 'Fixed'
name nset 1111 'Bottom_Cap'
name sset 10 'Outer_Case_To_Tape'
name sset 20 'TapeSurface'
name sset 30 'Inner_Case_To_Tape'
name sset 101 'RigidSurface'
name sset 100 'To_Rigid'
name sset 103 'CapSurface'

@ -0,0 +1,22 @@
base_sh.g3
base_o.g3
EQUIV 11 11 1.0e-4 closest
equiv end
nset
delete 11
up
add
rigid.g3
equiv end
add
cap.g3
equiv end
nset
combine 1111 1112
up
Title
change
MAST Fireset Tape Joint - Proposed Design 3 - Shell
up
FINISH
new_shell.g3

@ -0,0 +1,22 @@
base.g3
base_o.g3
EQUIV 11 11 1.0e-4 closest
equiv end
nset
delete 11
up
add
rigid.g3
equiv end
add
cap.g3
equiv end
nset
combine 1111 1112
up
Title
change
MAST Fireset Tape Joint - Proposed Design 3
up
FINISH
new_tape.g3

@ -0,0 +1,18 @@
Title
Rigid Surface ( Moving )
$ {include("common.h")}
Point 1 {R6+.0} {-1.1 * T10}
Point 2 {R6 + 1} {-1.1 * T10}
Point 3 {R6 + 1} {4*T8} $ Make sure its above FS base
Point 4 {R6+.0} {4*T8}
Line 1 str 1 2 0 1
Line 2 str 2 3 0 1
Line 3 str 3 4 0 1
Line 4 str 4 1 0 1
Region {Rigid} {Rigid} -1 -2 -3 -4
SideBC 101 4
NodeBC 111 1 4

@ -0,0 +1,2 @@
Translate 1 6
Exit

@ -0,0 +1,4 @@
${include("common.h")}
Shift X {FR6 - R6}
Exit

@ -0,0 +1,29 @@
#!/usr/bin/env python
"""Scale and offset the time-step values of an Exodus database in place.

Opens 8-block.e in append mode and rewrites each time value t as
t * scale + offset.
"""
import sys
import exodus

DATABASE_PATH = "8-block.e"
# Open for append ("a") so time values can be rewritten in place.
EXO = exodus.exodus(DATABASE_PATH, mode="a", array_type="ctype")
print("Exodus file has title:", EXO.title())
print("Exodus file has", EXO.num_dimensions(), "dimensions")
print("Exodus file has", EXO.num_nodes(), "nodes")
print("Exodus file has", EXO.num_elems(), "elements")
print("Exodus file has", EXO.num_blks(), "blocks")
print("Exodus file has", EXO.num_node_sets(), "node sets")
print("Exodus file has", EXO.num_side_sets(), "side sets")
print("Exodus file has", EXO.num_times(), "time steps")

# Each time value t becomes t * scale + offset.
scale = 2.0
offset = 10.0
if EXO.num_times() > 0:
    TIMES = EXO.get_times()
    for step, time in enumerate(TIMES, start=1):
        new_time = time * scale + offset
        # Python 3 print call (the original used a Python 2 print
        # statement here, which is a SyntaxError under Python 3).
        print("Time = {}, New_Time = {}, Step = {}".format(time, new_time, step))
        # Write the scaled value back.  The original wrote the unmodified
        # `time`, which made the scale/offset computation a no-op.
        EXO.put_time(step, new_time)

@ -0,0 +1,119 @@
#!/usr/bin/env python
"""
Test routine for SEACAS exodus.py module.

Exercises the read API (metadata, coordinates, sets, variables) with both
the ctype and numpy array backends, then tests database copying via
copyTransfer() and the `copy` method (which calls the C API `ex_copy`).
"""
import sys
import exodus
DATABASE_PATH = "baseline.g"
# Test outputting c-type arrays and numpy arrays
ARRAY_TYPES = ['ctype', 'numpy']
for array_type in ARRAY_TYPES:
    # Re-open the same database once per array backend.
    EXO = exodus.exodus(DATABASE_PATH, array_type=array_type)
    print("Exodus file has title:", EXO.title())
    print("Exodus file has", EXO.num_dimensions(), "dimensions")
    print("Exodus file has", EXO.num_nodes(), "nodes")
    print("Exodus file has", EXO.num_elems(), "elements")
    print("Exodus file has", EXO.num_blks(), "blocks")
    print("Exodus file has", EXO.num_node_sets(), "node sets")
    print("Exodus file has", EXO.num_side_sets(), "side sets")
    print("Exodus file has", EXO.num_times(), "time steps")
    if EXO.num_times() > 0:
        TIMES = EXO.get_times()
        for time in TIMES:
            print("time = ", time)
    BLOCKS = EXO.get_elem_blk_ids()
    for block in BLOCKS:
        name = EXO.get_elem_blk_name(block)
        print("block id = {}, name = {}".format(block, name))
    SIDESETS = EXO.get_side_set_ids()
    for sideset in SIDESETS:
        print("side set id = ", sideset)
    NODESETS = EXO.get_node_set_ids()
    for nodeset in NODESETS:
        print("node set id = ", nodeset)
    COORDINATES = EXO.get_coords()
    print("Local Node Id 1 has COORDINATES: {} {} {}"
          .format(COORDINATES[0][0], COORDINATES[1][0], COORDINATES[2][0]))
    # NN: zero-based index of the last local node.
    NN = (EXO.num_nodes() - 1)
    print("Local Node Id {} has COORDINATES: {} {} {}"
          .format(EXO.num_nodes(), COORDINATES[0][NN], COORDINATES[1][NN], COORDINATES[2][NN]))
    print("Side Set Variable Names")
    SSVARNAMES = EXO.get_side_set_variable_names()
    for name in SSVARNAMES:
        print("ssvar = ", name)
    print("Side Set Cosa Variable Values")
    # NOTE(review): assumes side set 1 carries a variable named "cosa" in
    # baseline.g -- verify against the test fixture.
    step = 1
    if EXO.num_times() > 0:
        for time in TIMES:
            print("time = ", time)
            ssvals = EXO.get_side_set_variable_values(1, "cosa", step)
            for ssval in ssvals:
                print("value =", ssval)
            step += 1
    EXO.close()
# Test reading in data from exodus database, and then copying it into another database
for array_type in ARRAY_TYPES:
    # Strip the ".g" suffix and tag the copy with the backend that made it.
    new_DATABASE_PATH = DATABASE_PATH[:-2] + '_' + array_type + '_copy.e'
    exodus.copyTransfer(DATABASE_PATH, new_DATABASE_PATH, array_type=array_type)
    print("Database copied using " + array_type + " arrays.")
# Test the exodus.py `copy` function which calls the C API `ex_copy`
DB_PATH = "base_ioshell.g"
EXO = exodus.exodus(DB_PATH)
NEW_DATABASE_PATH = DB_PATH[:-2] + '_copy.e'
EXO_COPY = EXO.copy(NEW_DATABASE_PATH, True)
if sys.version_info[0] >= 3:
    # summarize() is only invoked under Python 3 here.
    EXO_COPY.summarize()
print("Exodus file has", EXO_COPY.num_blks(), "blocks")
BLOCKS = EXO_COPY.get_elem_blk_ids()
for block in BLOCKS:
    name = EXO_COPY.get_elem_blk_name(block)
    print("\tblock id = {}, name = {}".format(block, name))
print("Exodus file has", EXO_COPY.num_side_sets(), "side sets")
SIDESETS = EXO_COPY.get_side_set_ids()
for sideset in SIDESETS:
    name = EXO_COPY.get_side_set_name(sideset)
    print("\tside set id = {}, name = {}".format(sideset, name))
print("Exodus file has", EXO_COPY.num_node_sets(), "node sets")
NODESETS = EXO_COPY.get_node_set_ids()
for nodeset in NODESETS:
    name = EXO_COPY.get_node_set_name(nodeset)
    print("\tnode set id = {}, name = {}".format(nodeset, name))
COORDINATES = EXO_COPY.get_coords()
print("Local Node Id 1 has COORDINATES: {} {} {}"
      .format(COORDINATES[0][0], COORDINATES[1][0], COORDINATES[2][0]))
NN = (EXO_COPY.num_nodes() - 1)
print("Local Node Id {} has COORDINATES: {} {} {}"
      .format(EXO_COPY.num_nodes(), COORDINATES[0][NN], COORDINATES[1][NN], COORDINATES[2][NN]))
print("Exodus file has", EXO_COPY.num_times(), "time steps")
if EXO_COPY.num_times() > 0:
    TIMES = EXO_COPY.get_times()
    for time in TIMES:
        print("\ttime = ", time)
SSVARNAMES = EXO_COPY.get_side_set_variable_names()
print("Side Set Variable Names:")
for name in SSVARNAMES:
    print("\tSideSet Variable = ", name)
# NOTE(review): reads side set 2 / "SideBlock_2" at step 2 on the copy --
# presumably a smoke test of copied transient data; verify against
# base_ioshell.g.
step = 2
ssvals = EXO_COPY.get_side_set_variable_values(2, "SideBlock_2", step)
EXO_COPY.close()

16
SETUP

@ -0,0 +1,16 @@
#!/usr/bin/env bash
# Configure, build, and install this library into the given directory.
#
# Usage: setup.sh install_directory [netcdf_path [hdf5_path]]
if [ -z "$1" ]
then
    printf "Usage: setup.sh install_directory [dependencies].\n"
    # Usage error: exit non-zero (the original bare `exit` returned 0,
    # which made callers think setup succeeded).
    exit 1
fi
# $2 netcdf-c hdf5
# Quote the paths so directories containing spaces work; bail out if cd fails.
cd "$(dirname "$0")" || exit 1
# -p: tolerate an existing build/ directory on re-runs.
mkdir -p build
cd build || exit 1
STATIC=YES FORTRAN=NO MPI=NO INSTALL_PATH="$1" NETCDF_PATH="$2" HDF5_PATH="$3" ../cmake-exodus
make -j8
make install

@ -0,0 +1,156 @@
There are a few externally developed third-party libraries (TPL) that
are required to build SEACAS. You can build the libraries manually as
detailed below, or you can use the `install-tpl.sh` script as
described in the previous section.
* [Zoltan](#zoltan) -- required, supplied
* [HDF5](#hdf5) -- optional
* [NetCDF](#netcdf) -- required with possible modifications
* [Kokkos](#kokkos) -- optional
* [MatIO](#matio) -- optional, required for exo2mat and mat2exo
* [GNU Parallel](#gnu-parallel) -- optional
* [CGNS](#cgns) -- experimental optional
* [Faodel](#faodel) -- optional
## Zoltan
A snapshot of [zoltan_distrib\_v3.83.tar.gz](http://www.cs.sandia.gov/Zoltan/Zoltan_download.html) is provided in seacas/packages/zoltan. This will be built automatically as part of the SEACAS build process.
## HDF5
If you are using the netcdf-4 capability in the netcdf library or are using the MatIO library for conversion of exodus to/from matlab format, or using CGNS, then you will need the hdf5 library.
### WARNING
There are some issues with using HDF5-1.10.0 through HDF5-1.10.2 since
it will possibly create files which are unreadable by applications
using an earlier version of the library. As of HDF5-1.10.3 and later,
the HDF5 team added an option that makes it possible for the library
to create files readable by those applications. This flag is currently
being used by NetCDF and CGNS.
The hdf5 library is used for the netcdf4 capability in netcdf which in
turn is used by exodus. The netcdf4 capability is typically used for
large models (>150 million elements); if you are not planning to
create or read models of this size, you do not have to build hdf5.
* Download HDF5 from either:
* <https://www.hdfgroup.org/HDF5/release/obtain5.html> for HDF5-1.10.X or
* <https://support.hdfgroup.org/HDF5/release/obtain518.html> for HDF5-1.8.X
* Download to `seacas/TPL/hdf5` and untar it
* `cd` to that directory and enter the command:
```bash
sh ../runconfigure.sh
```
* `make && make install`
## NetCDF
The most recent released version is recommended. For use with Exodus, some local modifications to the netcdf.h include file are required if using versions prior to 4.5.1. See [NetCDF-Mapping.md](NetCDF-Mapping.md) for an explanation of why these modifications are required (or highly recommended)
* Download the latest netcdf-c release from <https://www.unidata.ucar.edu/downloads/netcdf/index.jsp> and put it inside `seacas/TPL/netcdf`
* `cd TPL/netcdf`
* `tar zxvf netcdf-4.6.1.tar.gz`
* If the version is *prior* to 4.5.1, then you need to modify the
following defines in
seacas/TPL/netcdf/netcdf-4.6.1/include/netcdf.h. Versions *4.5.1 or
later* do not check these limits and can be run unmodified.
```c
#define NC_MAX_DIMS 65536 /* max dimensions per file */
#define NC_MAX_VARS 524288 /* max variables per file */
```
* If you did *not* build HDF5, then you will need to edit the runcmake.sh script and remove all lines mentioning HDF5 and also set `ENABLE_NETCDF_4` to `OFF`
* `cd netcdf-4.6.1` and enter the command:
```bash
mkdir build
cd build
sh ../../runcmake.sh
```
* `make && make install`
## Kokkos
The most recent released version is recommended.
* Download the latest Kokkos release from <https://github.com/kokkos/kokkos/releases> and put it inside `seacas/TPL/kokkos`
* `cd TPL/kokkos`
* `tar zxvf 2.8.0.tar.gz`
* `cd kokkos-2.8.0` and enter the command:
```bash
mkdir build
cd build
sh ../../runcmake.sh
```
* The default `runcmake.sh` will only build the `serial` device. Modify build options to tailor for your system as documented in the Kokkos documentation.
* `make && make install`
## MatIO
The MatIO library is used in the exo2mat and mat2exo programs which convert an exodus file to and from a MATLAB binary file. To use this do:
* Download matio via git:
* `cd TPL/matio`
* `git clone https://github.com/tbeu/matio.git`
* `cd matio` and enter the command:
```bash
./autogen.sh
sh ../runconfigure.sh
```
* `make && make install`
## GNU Parallel
GNU Parallel is a shell tool for executing jobs in parallel using one or more computers. A job is typically a single command or a small script that has to be run for each of the lines in the input. The typical input is a list of files, a list of hosts, a list of users, or a list of tables. In SEACAS, this is only used by epup which runs multiple epu jobs concurrently. To build:
* Download the most recent version of the library from <ftp://ftp.gnu.org/gnu/parallel/parallel-latest.tar.bz2>.
```bash
cd TPL/parallel
tar jxvf /path/to/parallel-latest.tar.bz2
cd parallel-20150522
sh ../runconfigure.sh
```
* `make && make install`
## CGNS
The IOSS library supports using CGNS for structured and unstructured meshes. To use this capability, you will need to download and install the CGNS library:
* Download CGNS via git:
```bash
cd TPL/cgns
git clone https://github.com/CGNS/CGNS.git
```
* Build using CMake.
* Modify `TPL/cgns/runcmake.sh` to meet your environment
* `cd CGNS`
* `mkdir build`
* `cd build`
* `../../runcmake.sh`
* `make && make install`
## Faodel
Faodel is a collection of data management tools that Sandia is developing to improve how datasets migrate between memory and storage resources in a distributed system. For SEACAS Faodel support means adding a new backend to IOSS. This enables additional data storage capabilities and the chance to communicate data between execution spaces.
Faodel is available at [Faodel](https://github.com/faodel/faodel). And is built here as a SEACAS TPL.

@ -0,0 +1,45 @@
#! /usr/bin/env bash
# CMake configure wrapper for the ADIOS2 TPL build.
EXTRA_ARGS=$@
MPI="${MPI:-NO}"
echo "MPI set to ${MPI}"
### The following assumes you are building in a subdirectory of ACCESS Root
if [ "X$ACCESS" == "X" ] ; then
    ACCESS=$(cd ../../../..; pwd)
    echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
SHARED="${SHARED:-YES}"
# Shared CC/CXX selection used by all TPL build scripts.
. ${ACCESS}/TPL/compiler.sh
CFLAGS="-I${INSTALL_PATH}/include"; export CFLAGS
CPPFLAGS="-DNDEBUG"; export CPPFLAGS
rm -f CMakeCache.txt
# NOTE: the profiling option was previously spelled ADIOS_USE_Profiling
# (missing the "2"), so CMake silently ignored it.
cmake \
    ${RPATH} \
    -D BUILD_SHARED_LIBS:BOOL=${SHARED} \
    -D CMAKE_PREFIX_PATH:PATH=${INSTALL_PATH}/lib \
    -D CMAKE_INSTALL_PREFIX:PATH=${INSTALL_PATH} \
    -D CMAKE_INSTALL_LIBDIR:PATH=lib \
    -D ADIOS2_USE_MPI:BOOL=${MPI} \
    -D ADIOS2_BUILD_EXAMPLES:BOOL=OFF \
    -D ADIOS2_BUILD_TESTING:BOOL=OFF \
    -D INSTALL_GTEST:BOOL=OFF \
    -D ADIOS2_USE_Fortran:BOOL=OFF \
    -D ADIOS2_USE_Profiling=OFF \
    $EXTRA_ARGS \
    ..
echo ""
echo " MPI: ${MPI}"
echo " COMPILER: ${CC}"
echo "C++ COMPILER: ${CXX}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,72 @@
#! /usr/bin/env bash
EXTRA_ARGS=$@
MPI="${MPI:-NO}"
echo "MPI set to ${MPI}"
### The following assumes you are building in a subdirectory of ACCESS Root
if [ "X$ACCESS" == "X" ] ; then
ACCESS=$(cd ../../../..; pwd)
echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
SHARED="${SHARED:-YES}"
if [ "$MPI" == "YES" ] && [ "$CRAY" = "YES" ]
then
export CC=cc
export CXX=cxx
elif [ "$MPI" == "YES" ]
then
export CC=mpicc
export CXX=mpicxx
else
COMPILER="${COMPILER:-gnu}"
if [ "$COMPILER" == "gnu" ]
then
export CC=gcc
export CXX=g++
fi
if [ "$COMPILER" == "clang" ]
then
export CC=clang
export CXX=clang++
fi
if [ "$COMPILER" == "intel" ]
then
export CC=icc
export CXX=icpc
fi
if [ "$COMPILER" == "ibm" ]
then
export CC=xlc
export CXX=xlC
fi
fi
CFLAGS="-I${INSTALL_PATH}/include"; export CFLAGS
CPPFLAGS="-DNDEBUG"; export CPPFLAGS
rm -f CMakeCache.txt
cmake \
${RPATH} \
-D CATALYST_BUILD_SHARED_LIBS:BOOL=${SHARED} \
-D CMAKE_PREFIX_PATH:PATH=${INSTALL_PATH}/lib \
-D CMAKE_INSTALL_PREFIX:PATH=${INSTALL_PATH} \
-D CMAKE_INSTALL_LIBDIR:PATH=lib \
-D CATALYST_BUILD_STUB_IMPLEMENTATION:BOOL=ON \
-D CATALYST_USE_MPI:BOOL=${MPI} \
-D CATALYST_BUILD_TESTING:BOOL=OFF \
$EXTRA_ARGS \
..
echo ""
echo " MPI: ${MPI}"
echo " COMPILER: ${CC}"
echo "C++ COMPILER: ${CXX}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,65 @@
#! /usr/bin/env bash
EXTRA_ARGS=$@
#MPI="${MPI:-ON}"
MPI="${MPI:-NO}"
USE_64BIT_INT="${USE_64BIT_INT:-YES}"
echo "MPI set to ${MPI}"
### The following assumes you are building in a subdirectory of ACCESS Root
if [ "X$ACCESS" == "X" ] ; then
ACCESS=$(cd ../../../..; pwd)
echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
SHARED="${SHARED:-YES}"
DEBUG="${DEBUG:-NO}"
if [ "$DEBUG" == "YES" ]
then
BUILD_TYPE="DEBUG"
else
BUILD_TYPE="RELEASE"
fi
. ${ACCESS}/TPL/compiler.sh
CFLAGS="-I${INSTALL_PATH}/include"; export CFLAGS
CPPFLAGS="-DNDEBUG"; export CPPFLAGS
OS=$(uname -s)
rm -f CMakeCache.txt
if [ "$OS" == "Darwin" ] ; then
RPATH="-D CMAKE_MACOSX_RPATH:BOOL=ON -D CMAKE_INSTALL_RPATH:PATH=${INSTALL_PATH}/lib"
fi
cmake \
${RPATH} \
-D CGNS_BUILD_SHARED:BOOL=${SHARED} \
-D CGNS_ENABLE_HDF5:BOOL=ON \
-D HDF5_ROOT=${INSTALL_PATH} \
-D HDF5_LIBRARY:PATH=${INSTALL_PATH}/lib \
-D HDF5_NEED_ZLIB:BOOL=ON \
-D CGNS_ENABLE_64BIT:BOOL=${USE_64BIT_INT} \
-D CGNS_ENABLE_SCOPING:BOOL=ON \
-D CGNS_ENABLE_FORTRAN:BOOL=OFF \
-D CGNS_ENABLE_PARALLEL:BOOL=${MPI} \
-D CMAKE_PREFIX_PATH:PATH=${INSTALL_PATH}/lib \
-D CMAKE_INSTALL_PREFIX:PATH=${INSTALL_PATH} \
-D CMAKE_BUILD_TYPE=${BUILD_TYPE} \
-D HDF5_NEED_MPI:BOOL=${MPI} \
-D HDF5_IS_PARALLEL:BOOL=${MPI} \
$EXTRA_ARGS \
..
echo ""
echo " MPI: ${MPI}"
echo " COMPILER: ${CC}"
echo " 64BITINT: ${USE_64BIT_INT}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,68 @@
#! /usr/bin/env bash
# Select and export CC/CXX based on the COMPILER, MPI, and CRAY environment
# variables.  Sourced by the TPL run* scripts; its only side effects are
# exporting CC and CXX (and possibly defaulting VER).
COMPILER="${COMPILER:-gnu}"
MPI="${MPI:-NO}"
# COMPILER=mpi is shorthand for MPI=YES.
if [ "$COMPILER" == "mpi" ]
then
    MPI="YES"
fi
if [ "$MPI" == "YES" ] && [ "$CRAY" = "YES" ]
then
    # Cray compiler wrappers handle MPI themselves.
    export CC=cc
    export CXX=CC
elif [ "$MPI" == "YES" ]
then
    export CC=mpicc
    export CXX=mpicxx
else
    if [ "$COMPILER" == "gnu" ]
    then
        export CC=gcc
        export CXX=g++
    fi
    if [ "$COMPILER" == "clang" ]
    then
        export CC=clang
        export CXX=clang++
    fi
    if [ "$COMPILER" == "gnubrew" ]
    then
        VER=${VER:-10}
        # export so child processes (cmake/configure) see the values;
        # the original assigned without export in this branch only.
        export CXX=g++-${VER}
        export CC=gcc-${VER}
    fi
    if [ "$COMPILER" == "gnumacport" ]
    then
        VER=${VER:-5}
        export CXX=g++-mp-${VER}
        export CC=gcc-mp-${VER}
    fi
    if [ "$COMPILER" == "clangmacport" ]
    then
        VER=${VER:-7}
        export CXX=clang++-mp-${VER}.0
        export CC=clang-mp-${VER}.0
    fi
    if [ "$COMPILER" == "intel" ]
    then
        export CC=icc
        # icpc is Intel's C++ driver; the original set CXX=icc, which is
        # inconsistent with the other compiler-selection script in this repo.
        export CXX=icpc
    fi
    if [ "$COMPILER" == "analyzer" ]
    then
        export CXX=/opt/local/libexec/llvm-9.0/libexec/c++-analyzer
        export CC=/opt/local/libexec/llvm-9.0/libexec/ccc-analyzer
    fi
    if [ "$COMPILER" == "ibm" ]
    then
        export CC=xlc
        # xlC is IBM XL's C++ driver; the original set CXX=xlc, which is
        # inconsistent with the other compiler-selection script in this repo.
        export CXX=xlC
    fi
    if [ "$COMPILER" == "nvidia" ]
    then
        export CXX="nvcc -x c++"
        # The original had "expoort CC=nvcc" -- a typo that would have run a
        # nonexistent command and left CC unset for the nvidia toolchain.
        export CC=nvcc
    fi
fi

@ -0,0 +1,45 @@
#! /usr/bin/env bash
# CMake configure wrapper for the Faodel TPL build.
### The following assumes you are building in a subdirectory of ACCESS Root
if [ "X$ACCESS" == "X" ] ; then
    ACCESS=$(cd ../../../..; pwd)
    echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
. ${ACCESS}/TPL/compiler.sh
if [ "X$BOOST_ROOT" == "X" ] ; then
    echo "Faodel requires these Boost packages: atomic log log_setup serialization"
    echo "Please set BOOST_ROOT."
    # A missing BOOST_ROOT is a configuration failure: exit non-zero so
    # callers and CI notice.  The original did `exit 0` (reporting success)
    # after a dead assignment to BOOST_ROOT that was never used.
    exit 1
fi
rm -f CMakeCache.txt
echo "+++++++++++++++++++++ $(pwd)"
# export BOOST_ROOT=/opt/local
# export GTEST_ROOT=/opt/local
cmake .. \
    -DCMAKE_CXX_COMPILER=$CXX \
    -DCMAKE_C_COMPILER=$CC \
    -DCMAKE_INSTALL_PREFIX=${INSTALL_PATH} \
    -DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=ON \
    -DCMAKE_BUILD_TYPE=Release \
    -DBUILD_SHARED_LIBS:BOOL=OFF \
    -DGTEST_ROOT=${GTEST_ROOT} \
    -DBOOST_ROOT=${BOOST_ROOT} \
    -DFaodel_NETWORK_LIBRARY=nnti \
    -DFaodel_ENABLE_MPI_SUPPORT=${MPI} \
    -DFaodel_ENABLE_CEREAL:BOOL=ON \
    -DBUILD_TESTS=ON \
    -DBUILD_DOCS=OFF
echo ""
echo " MPI: ${MPI}"
echo " C COMPILER: ${CC}"
echo "CXX COMPILER: ${CXX}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,34 @@
#! /usr/bin/env bash
# Generic CMake configure wrapper for a TPL build; the BUILD_GMOCK option
# suggests this configures googletest -- TODO confirm against its directory.
EXTRA_ARGS=$@
### The following assumes you are building in a subdirectory of ACCESS Root
if [ "X$ACCESS" == "X" ] ; then
    ACCESS=$(cd ../../../..; pwd)
    echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
SHARED="${SHARED:-YES}"
# Shared CC/CXX selection used by all TPL build scripts.
. ${ACCESS}/TPL/compiler.sh
CFLAGS="-I${INSTALL_PATH}/include"; export CFLAGS
CPPFLAGS="-DNDEBUG"; export CPPFLAGS
rm -f CMakeCache.txt
cmake \
    ${RPATH} \
    -D BUILD_SHARED_LIBS:BOOL=${SHARED} \
    -D CMAKE_PREFIX_PATH:PATH=${INSTALL_PATH}/lib \
    -D CMAKE_INSTALL_PREFIX:PATH=${INSTALL_PATH} \
    -D CMAKE_INSTALL_LIBDIR:PATH=lib \
    -D BUILD_GMOCK:BOOL=OFF \
    $EXTRA_ARGS \
    ..
echo ""
echo " COMPILER: ${CC}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,80 @@
#! /usr/bin/env bash
### The following assumes you are building in a subdirectory of ACCESS Root
if [ "X$ACCESS" == "X" ] ; then
ACCESS=$(cd ../../../..; pwd)
echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
H5VERSION=${H5VERSION:-V110}
DEBUG="${DEBUG:-NO}"
if [ "$DEBUG" == "YES" ]
then
BUILD_TYPE="Debug"
else
BUILD_TYPE="Release"
fi
if [ "$CRAY" == "YES" ]
then
SHARED="${SHARED:-NO}"
else
SHARED="${SHARED:-YES}"
fi
if [[ "$SHARED" == "ON" || "$SHARED" == "YES" ]]
then
OS=$(uname -s)
if [ "$OS" = "Darwin" ] ; then
LD_EXT="dylib"
else
LD_EXT="so"
fi
else
LD_EXT="a"
EXTRA_DEPS="-DNC_EXTRA_DEPS=-ldl\;-lz"
fi
NEEDS_ZLIB="${NEEDS_ZLIB:-NO}"
if [ "$NEEDS_ZLIB" == "YES" ] || [ "$NEEDS_ZLIB" == "ON" ]
then
LOCAL_ZLIB="-DZLIB_INCLUDE_DIR:PATH=${INSTALL_PATH}/include -DZLIB_LIBRARY:FILEPATH=${INSTALL_PATH}/lib/libz.${LD_EXT}"
fi
NEEDS_SZIP="${NEEDS_SZIP:-NO}"
if [ "$NEEDS_SZIP" == "YES" ] || [ "$NEEDS_SZIP" == "ON" ]
then
LOCAL_SZIP="-DHDF5_ENABLE_SZIP_SUPPORT:BOOL=${NEEDS_SZIP} -DSZIP_INCLUDE_DIR:PATH=${INSTALL_PATH}/include -DSZIP_LIBRARY:FILEPATH=${INSTALL_PATH}/lib/libsz.${LD_EXT}"
fi
. ${ACCESS}/TPL/compiler.sh
# If using an XLF compiler on an IBM system, may need to add the following:
# -DCMAKE_Fortran_FLAGS="-qfixed=72" \
# -DCMAKE_EXE_LINKER_FLAGS:STRING="-lxl -lxlopt"
rm -f config.cache
cmake .. -DCMAKE_C_COMPILER:FILEPATH=${CC} \
-DBUILD_SHARED_LIBS:BOOL=${SHARED} \
-DBUILD_TESTING:BOOL=OFF \
-DCMAKE_INSTALL_PREFIX=${INSTALL_PATH} \
-DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
-DDEFAULT_API_VERSION=V18 \
-DHDF5_ENABLE_PARALLEL:BOOL=${MPI} \
-DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=ON \
-DHDF5_BUILD_CPP_LIB:BOOL=OFF \
-DHDF5_BUILD_FORTRAN:BOOL=OFF \
-DHDF5_BUILD_HL_LIB:BOOL=ON \
-DHDF5_DISABLE_COMPILER_WARNINGS:BOOL=ON \
${LOCAL_ZLIB} ${LOCAL_SZIP} \
${EXTRA_DEPS}
echo ""
echo " MPI: ${MPI}"
echo " COMPILER: ${CC}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,66 @@
#! /usr/bin/env bash
### The following assumes you are building in a subdirectory of ACCESS Root
if [ "X$ACCESS" == "X" ] ; then
ACCESS=$(cd ../../..; pwd)
echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
H5VERSION=${H5VERSION:-V110}
NEEDS_ZLIB="${NEEDS_ZLIB:-NO}"
if [ "$NEEDS_ZLIB" == "YES" ]
then
ZLIB_YES_NO="--with-zlib=${INSTALL_PATH}"
fi
NEEDS_SZIP="${NEEDS_SZIP:-NO}"
if [ "$NEEDS_SZIP" == "YES" ]
then
SZIP_YES_NO="--with-szlib=${INSTALL_PATH}"
fi
. ${ACCESS}/TPL/compiler.sh
DEBUG="${DEBUG:-NO}"
if [ "$DEBUG" == "YES" ]
then
BUILD_MODE="--enable-build-mode=debug"
CPPFLAGS='-g'; export CPPFLAGS
CFLAGS='-g'; export CFLAGS
else
BUILD_MODE="--enable-build-mode=production"
CPPFLAGS='-DNDEBUG'; export CPPFLAGS
fi
rm -f config.cache
### SEACAS does not use the Fortran, F90, or C++ versions of hdf5 library
FC=''; export FC
F90=''; export F90
SHARED="${SHARED:-YES}"
if [[ "$SHARED" == "ON" || "$SHARED" == "YES" ]]
then
USE_SHARED="--enable-shared"
else
USE_SHARED="--disable-shared"
fi
if [ "${H5VERSION}" == "V18" ]
then
./configure --prefix=${INSTALL_PATH} ${BUILD_MODE} ${USE_SHARED} --enable-static-exec $1
else
./configure --prefix=${INSTALL_PATH} ${ZLIB_YES_NO} ${SZIP_YES_NO} ${BUILD_MODE} ${USE_SHARED} --with-default-api-version=v18 --enable-static-exec $1
fi
echo ""
echo " MPI: ${MPI}"
echo " COMPILER: ${CC}"
echo " DEBUG: ${DEBUG} ${BUILD_MODE}"
echo " ACCESS: ${ACCESS}"
echo " NEEDS_SZIP: ${NEEDS_SZIP}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,65 @@
#! /usr/bin/env bash
EXTRA_ARGS=$@
MPI="${MPI:-NO}"
CUDA="${CUDA:-NO}"
if [ "$CUDA" == "ON" ] || [ "$CUDA" == "YES" ] ; then
if [ "X$CUDA_PATH" == "X" ] ; then
echo "ERROR: CUDA_PATH must be set if CUDA is enabled in kokkos build."
exit 1
else
echo "CUDA_PATH set to ${CUDA_PATH}"
fi
fi
echo "MPI set to ${MPI}"
### The following assumes you are building in a subdirectory of ACCESS Root
if [ "X$ACCESS" == "X" ] ; then
ACCESS=$(cd ../../../..; pwd)
echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
SHARED="${SHARED:-YES}"
. ${ACCESS}/TPL/compiler.sh
CFLAGS="-I${INSTALL_PATH}/include"; export CFLAGS
CPPFLAGS="-DNDEBUG"; export CPPFLAGS
rm -f CMakeCache.txt
if [ "$CUDA" == "YES" ]
then
export "OMPI_CXX=../config/nvcc_wrapper"
export CUDA_MANAGED_FORCE_DEVICE_ALLOC=1
KOKKOS_SYMBOLS="-D CUDA_TOOLKIT_ROOT_DIR:PATH=${CUDA_PATH}"
else
export OMPI_CXX=$(which gcc)
unset CUDA_MANAGED_FORCE_DEVICE_ALLOC
fi
cmake \
${RPATH} \
-D BUILD_SHARED_LIBS:BOOL=${SHARED} \
-D CMAKE_PREFIX_PATH:PATH=${INSTALL_PATH}/lib \
-D CMAKE_INSTALL_PREFIX:PATH=${INSTALL_PATH} \
-D CMAKE_INSTALL_LIBDIR:PATH=lib \
-D Kokkos_ENABLE_CUDA:BOOL=${CUDA} \
-D Kokkos_ENABLE_DEPRECATED_CODE:BOOL=OFF \
-D Kokkos_ENABLE_PTHREAD:BOOL=OFF \
-D Kokkos_ARCH_NONE=ON \
${KOKKOS_SYMBOLS} \
$EXTRA_ARGS \
..
echo ""
echo " CUDA: ${CUDA}"
echo " MPI: ${MPI}"
echo " COMPILER: ${CC}"
echo "C++ COMPILER: ${CXX}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,23 @@
From 126ed8b74e5c3118ca46222503d8cf350d8f1cb8 Mon Sep 17 00:00:00 2001
From: Greg Sjaardema <gsjaardema@gmail.com>
Date: Wed, 11 Jan 2023 13:27:50 -0700
Subject: [PATCH] Fix mpi build
---
cmake/thirdParties.cmake | 3 +++
1 file changed, 3 insertions(+)
diff --git a/cmake/thirdParties.cmake b/cmake/thirdParties.cmake
index ae8b4ce..12246ec 100644
--- a/cmake/thirdParties.cmake
+++ b/cmake/thirdParties.cmake
@@ -1,3 +1,6 @@
+include(CMakeFindDependencyMacro)
+find_dependency(MPI)
+
if(MATIO_USE_CONAN AND (MATIO_WITH_HDF5 OR MATIO_WITH_ZLIB))
conan_add_remote(NAME conan-center URL https://center.conan.io VERIFY_SSL False)
endif()
--
2.38.1

@ -0,0 +1,78 @@
#! /usr/bin/env bash
### The following assumes you are building in a subdirectory of ACCESS Root
if [ "X$ACCESS" == "X" ] ; then
ACCESS=$(cd ../../../..; pwd)
echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
. ${ACCESS}/TPL/compiler.sh
rm -f config.cache
HDF5="${HDF5:-YES}"
if [ "$HDF5" == "YES" ]
then
DEFAULT_VERSION="7.3"
else
DEFAULT_VERSION="5"
fi
DEBUG="${DEBUG:-NO}"
if [ "$DEBUG" == "YES" ]
then
BUILD_TYPE="DEBUG"
else
BUILD_TYPE="RELEASE"
fi
SHARED="${SHARED:-YES}"
if [[ "$SHARED" == "ON" || "$SHARED" == "YES" ]]
then
OS=$(uname -s)
if [ "$OS" = "Darwin" ] ; then
LD_EXT="dylib"
else
LD_EXT="so"
fi
else
LD_EXT="a"
EXTRA_DEPS="-DNC_EXTRA_DEPS=-ldl\;-lz"
fi
NEEDS_ZLIB="${NEEDS_ZLIB:-NO}"
if [ "$NEEDS_ZLIB" == "YES" ]
then
LOCAL_ZLIB="-DZLIB_INCLUDE_DIR:PATH=${INSTALL_PATH}/include -DZLIB_LIBRARY:FILEPATH=${INSTALL_PATH}/lib/libz.${LD_EXT}"
fi
if [ "$MPI" == "YES" ]
then
cd ..
patch -p1 < ../mpi.patch
cd -
fi
rm -f config.cache
cmake .. -DCMAKE_C_COMPILER:FILEPATH=${CC} \
-DBUILD_SHARED_LIBS:BOOL=${SHARED} \
-DMATIO_SHARED:BOOL=${SHARED} \
-DMATIO_DEFAULT_FILE_VERSION=${DEFAULT_VERSION} \
-DMATIO_MAT73:BOOL=${HDF5} \
-DCMAKE_INSTALL_PREFIX=${INSTALL_PATH} \
-DCMAKE_INSTALL_LIBDIR:PATH=lib \
-DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
-DMATIO_WITH_HDF5:BOOL=${HDF5} \
-DMATIO_WITH_ZLIB:BOOL=ON \
-DHDF5_ROOT:PATH=${INSTALL_PATH} \
-DHDF5_DIR:PATH=${INSTALL_PATH} \
${LOCAL_ZLIB} ${EXTRA_DEPS}
echo ""
echo " COMPILER: ${CC}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,41 @@
#! /usr/bin/env bash
# Autoconf configure wrapper for a TPL built against the ACCESS install tree;
# the --with-hdf5/--enable-mat73 flags indicate this is the MatIO build.
### The following assumes you are building in a subdirectory of ACCESS Root
if [ "X$ACCESS" == "X" ] ; then
    ACCESS=$(cd ../../..; pwd)
    echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
# Shared CC/CXX selection used by all TPL build scripts.
. ${ACCESS}/TPL/compiler.sh
rm -f config.cache
CFLAGS="-I${INSTALL_PATH}/include"; export CFLAGS
CPPFLAGS='-DNDEBUG'; export CPPFLAGS
# Find hdf5 library...
LDFLAGS="-L${INSTALL_PATH}/lib"; export LDFLAGS
SHARED="${SHARED:-YES}"
if [[ "$SHARED" == "ON" || "$SHARED" == "YES" ]]
then
    USE_SHARED="--enable-shared"
else
    USE_SHARED="--disable-shared"
fi
NEEDS_ZLIB="${NEEDS_ZLIB:-NO}"
if [ "$NEEDS_ZLIB" == "YES" ] || [ "$NEEDS_ZLIB" == "ON" ]
then
    LOCAL_ZLIB="--with-zlib=${INSTALL_PATH}"
fi
# $1 lets the caller append one extra configure argument.
./configure ${LOCAL_ZLIB} --with-hdf5=${INSTALL_PATH} --enable-mat73 ${USE_SHARED} --prefix=${INSTALL_PATH} $1
echo ""
echo " MPI: ${MPI}"
echo " COMPILER: ${CC}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,28 @@
#! /usr/bin/env bash
# CMake configure wrapper for a TPL build (out-of-source, in ./build).
### The following assumes you are building in a subdirectory of ACCESS Root
if [ "X$ACCESS" == "X" ] ; then
    ACCESS=$(cd ../../..; pwd)
    echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
SHARED="${SHARED:-YES}"
# BUILD_TYPE was previously never set in this script, so CMAKE_BUILD_TYPE
# was always empty; default it from DEBUG the same way the sibling TPL
# scripts do.
DEBUG="${DEBUG:-NO}"
if [ "$DEBUG" == "YES" ]
then
    BUILD_TYPE="DEBUG"
else
    BUILD_TYPE="RELEASE"
fi
. ${ACCESS}/TPL/compiler.sh
# -p: tolerate an existing build/ directory on re-runs.
mkdir -p build
cd build || exit 1
cmake -DCMAKE_C_COMPILER:FILEPATH=${CC} \
      -DBUILD_SHARED_LIBS:BOOL=${SHARED} \
      -DCMAKE_INSTALL_PREFIX=${INSTALL_PATH} \
      -DCMAKE_BUILD_TYPE=${BUILD_TYPE} ..
cd ..
echo ""
echo " MPI: ${MPI}"
echo " COMPILER: ${CC}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,81 @@
#! /usr/bin/env bash
### The following assumes you are building in a subdirectory of ACCESS Root
if [ "X$ACCESS" == "X" ] ; then
ACCESS=$(cd ../../../..; pwd)
echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
HDF5="${HDF5:-YES}"
DEBUG="${DEBUG:-NO}"
if [ "$DEBUG" == "YES" ]
then
BUILD_TYPE="DEBUG"
else
BUILD_TYPE="RELEASE"
fi
SHARED="${SHARED:-YES}"
if [[ "$SHARED" == "ON" || "$SHARED" == "YES" ]]
then
OS=$(uname -s)
if [ "$OS" = "Darwin" ] ; then
LD_EXT="dylib"
else
LD_EXT="so"
fi
else
LD_EXT="a"
EXTRA_DEPS="-DNC_EXTRA_DEPS=-ldl\;-lz"
fi
if [[ "$HDF5" == "ON" || "$HDF5" == "YES" ]]
then
HDF5_INFO="-DHDF5_ROOT:PATH=${INSTALL_PATH} -DHDF5_DIR:PATH=${INSTALL_PATH} -DENABLE_NETCDF4:BOOL=ON"
else
HDF5_INFO="-DENABLE_HDF5=OFF -DENABLE_NETCDF4:BOOL=OFF"
fi
NEEDS_ZLIB="${NEEDS_ZLIB:-NO}"
if [ "$NEEDS_ZLIB" == "YES" ]
then
LOCAL_ZLIB="-DZLIB_INCLUDE_DIR:PATH=${INSTALL_PATH}/include -DZLIB_LIBRARY:FILEPATH=${INSTALL_PATH}/lib/libz.${LD_EXT}"
fi
NEEDS_SZIP="${NEEDS_SZIP:-NO}"
if [ "$NEEDS_SZIP" == "YES" ]
then
LOCAL_SZIP="-DSZIP_INCLUDE_DIR:PATH=${INSTALL_PATH}/include -DSZIP_LIBRARY:FILEPATH=${INSTALL_PATH}/lib/libsz.${LD_EXT}"
fi
. ${ACCESS}/TPL/compiler.sh
# If using an XLF compiler on an IBM system, may need to add the following:
# -DCMAKE_Fortran_FLAGS="-qfixed=72" \
# -DCMAKE_EXE_LINKER_FLAGS:STRING="-lxl -lxlopt"
rm -f config.cache
cmake .. -DCMAKE_C_COMPILER:FILEPATH=${CC} \
-DBUILD_SHARED_LIBS:BOOL=${SHARED} \
-DBUILD_TESTING:BOOL=OFF \
-DCMAKE_INSTALL_PREFIX=${INSTALL_PATH} \
-DCMAKE_INSTALL_LIBDIR:PATH=lib \
-DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
-DENABLE_PNETCDF:BOOL=${MPI} \
-DENABLE_CDF5=ON \
-DENABLE_MMAP:BOOL=ON \
-DENABLE_DAP:BOOL=OFF \
-DENABLE_V2_API:BOOL=OFF \
${LOCAL_ZLIB} \
${LOCAL_SZIP} \
${EXTRA_DEPS} \
${HDF5_INFO} \
-DENABLE_CONVERSION_WARNINGS:BOOL=OFF
echo ""
echo " MPI: ${MPI}"
echo " COMPILER: ${CC}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,37 @@
#! /bin/sh
# Autoconf configure wrapper for the netcdf TPL (serial or parallel).
#
# NOTE: this script runs under /bin/sh, so tests must use the POSIX `=`
# operator.  The original used the bash-only `==`, which fails on systems
# where /bin/sh is dash or another strict POSIX shell.
### The following assumes you are building in a subdirectory of ACCESS Root
if [ "X$ACCESS" = "X" ] ; then
    ACCESS=$(cd ../../..; pwd)
    echo "ACCESS set to ${ACCESS}"
fi
MPI="${MPI:-NO}"
PARALLEL="${MPI:-NO}"
rm -f config.cache
export CFLAGS="-I${ACCESS}/include"
export CPPFLAGS="-DNDEBUG ${CFLAGS}"
# Find the hdf5 library
export LDFLAGS="-L${ACCESS}/lib"
if [ "$PARALLEL" = "NO" ] ; then
    export CC='gcc'
    PNETCDF=""
    PARALLEL_TESTS=""
else
    export CC='mpicc'
    PNETCDF="--enable-pnetcdf"
    PARALLEL_TESTS="--enable-parallel-tests"
fi
SHARED="--enable-shared"
./configure --enable-netcdf-4 ${PNETCDF} ${SHARED} ${PARALLEL_TESTS} --disable-v2 --disable-fsync --prefix=${ACCESS} --disable-dap $1
echo ""
echo " MPI: ${MPI}"
echo "COMPILER: ${CC}"
echo " ACCESS: ${ACCESS}"
echo ""

@ -0,0 +1,23 @@
#! /usr/bin/env bash
# Thin configure wrapper: set up include/lib search paths against the ACCESS
# install tree, select compilers via the shared script, then run ./configure.
### The following assumes you are building in a subdirectory of ACCESS Root
if [ -z "${ACCESS}" ] ; then
ACCESS=$(cd ../../..; pwd)
echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
# Source the shared compiler-selection script (defines CC, etc.).
. ${ACCESS}/TPL/compiler.sh
export CFLAGS="-I${INSTALL_PATH}/include"
export CPPFLAGS='-DNDEBUG'
export LDFLAGS="-L${INSTALL_PATH}/lib"
rm -f config.cache
# Any single extra argument ($1) is forwarded to configure.
./configure --prefix=${INSTALL_PATH} $1
echo ""
echo " COMPILER: ${CC}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,33 @@
#! /usr/bin/env bash
# Configure wrapper for ParMETIS via its `make config` build interface.
### The following assumes you are building in a subdirectory of ACCESS Root
if [ -z "${ACCESS}" ] ; then
ACCESS=$(cd ../../..; pwd)
echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
# Map the YES/ON convention onto the 0/1 flag `make config` expects.
SHARED="${SHARED:-YES}"
case "${SHARED}" in
ON|YES) USE_SHARED="1" ;;
*)      USE_SHARED="0" ;;
esac
# ParMETIS is inherently parallel; refuse to configure a serial build.
MPI="${MPI:-YES}"
if [ "$MPI" != "YES" ] ; then
echo "MPI Must be YES"
exit 1
fi
# Source the shared compiler-selection script (defines CC, etc.).
. ${ACCESS}/TPL/compiler.sh
METIS_PATH=${ACCESS}/TPL/metis/METIS-5.1.0.1
# Point at the bundled METIS sources and its GKlib subtree.
make config cc=${CC} prefix=${INSTALL_PATH} shared=${USE_SHARED} metis_path=${METIS_PATH}/src gklib_path=${METIS_PATH}/src/GKlib
echo ""
echo " MPI: ${MPI}"
echo " COMPILER: ${CC}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,42 @@
#! /bin/sh
### Configure wrapper for PnetCDF (parallel netCDF) in the SEACAS TPL tree.
### The following assumes you are building in a subdirectory of ACCESS Root
# NOTE: runs under /bin/sh, so tests use POSIX `=` and `[ ]` (the bash-only
# `==` and `[[ ]]` fail under dash and other strict POSIX shells).
if [ "X$ACCESS" = "X" ] ; then
ACCESS=$(cd ../../..; pwd)
echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
# BB=YES enables PnetCDF's burst-buffering (DataWarp) driver.
if [ "$BB" = "YES" ]
then
USE_BB="--enable-burst-buffering"
fi
SHARED="${SHARED:-YES}"
if [ "$SHARED" = "ON" ] || [ "$SHARED" = "YES" ]
then
USE_SHARED="--enable-shared"
else
USE_SHARED="--disable-shared"
fi
rm -f config.cache
# On Cray systems the `cc` compiler wrapper already provides MPI.
if [ "$CRAY" = "YES" ]
then
CC=cc
else
CC=mpicc
fi
# BUGFIX: was `MPICC=$(CC)`, which *executes* a command named "CC" (command
# substitution) instead of expanding the variable; use parameter expansion.
MPICC=${CC}; export MPICC
CFLAGS="-fPIC -I${INSTALL_PATH}/include"; export CFLAGS
CPPFLAGS="-DNDEBUG"; export CPPFLAGS
AR_FLAGS="cru"; export AR_FLAGS
LDFLAGS="-L${INSTALL_PATH}/lib"; export LDFLAGS
./configure --disable-fortran ${USE_SHARED} ${USE_BB} --disable-cxx --prefix=${INSTALL_PATH}
echo ""
echo " COMPILER: ${CC}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,53 @@
#! /usr/bin/env bash
# CMake configure wrapper for a TPL build in the SEACAS ACCESS tree.
# Any extra command-line arguments are forwarded verbatim to cmake.
EXTRA_ARGS=$@
MPI="${MPI:-NO}"
echo "MPI set to ${MPI}"
### The following assumes you are building in a subdirectory of ACCESS Root
if [ -z "${ACCESS}" ] ; then
ACCESS=$(cd ../../../..; pwd)
echo "ACCESS set to ${ACCESS}"
fi
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
SHARED="${SHARED:-YES}"
DEBUG="${DEBUG:-NO}"
# DEBUG=YES selects a DEBUG CMake build type; anything else builds RELEASE.
case "${DEBUG}" in
YES) BUILD_TYPE="DEBUG" ;;
*)   BUILD_TYPE="RELEASE" ;;
esac
# Source the shared compiler-selection script (defines CC, etc.).
. ${ACCESS}/TPL/compiler.sh
export CFLAGS="-I${INSTALL_PATH}/include"
export CPPFLAGS="-DNDEBUG"
OS=$(uname -s)
rm -f CMakeCache.txt
# macOS needs the rpath policy enabled explicitly; RPATH stays empty elsewhere.
if [ "$OS" = "Darwin" ] ; then
RPATH="-D CMAKE_MACOSX_RPATH:BOOL=YES"
fi
cmake \
${RPATH} \
-D CMAKE_INSTALL_RPATH:PATH=${INSTALL_PATH}/lib \
-D BUILD_SHARED_LIBS:BOOL=${SHARED} \
-D CMAKE_INSTALL_LIBDIR:PATH=lib \
-D CMAKE_INSTALL_PREFIX:PATH=${INSTALL_PATH} \
-D CMAKE_BUILD_TYPE=${BUILD_TYPE} \
$EXTRA_ARGS \
..
echo ""
echo " MPI: ${MPI}"
echo " COMPILER: ${CC}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo ""

@ -0,0 +1,83 @@
#
# Define the list of TPLs, their find module names, and their classification
#
# TPL_NAME:
#
#   The name of the TPL used in the CMake cache variables TPL_ENABLE_${TPL_NAME}
#
# TPL_FINDMOD:
#
#   The name of the find module under that is used to get the names of the
#   TPLs. If ends in '/' then this gives the directory and the standard module
#   name will be used which is FindTPL${TPL_NAME}.cmake.
#
# TPL_CLASSIFICATION:
#
# NOTE: the entries below use the current TriBITS test-group names PT, ST,
# TT, and EX; older TriBITS documentation called these PS ("Primary
# Stable"), SS ("Secondary Stable"), and TS ("Tertiary Stable").
#
# PT (formerly PS): Primary Tested TPL
#
#   Primary Tested TPLs are those TPLs that a developer must have installed
#   on their machine in order to be able to do development. For example,
#   BLAS, LAPACK, and MPI must be installed in order to do development.
#   These are fundamental dependencies that are needed in order to do
#   precheckin testing.
#
# ST (formerly SS): Secondary Tested TPL
#
#   Secondary Tested TPLs are those TPLs that are not required in order to
#   be able to develop and test before checkins but are none the less
#   officially supported. Support for ST TPLs is tested as part of the
#   nightly testing process.
#
# TT (formerly TS): Tertiary Tested TPL
#
#   Tertiary Tested TPLs are supported TPLs that can not be included in the
#   set of ST TPLs because they may conflict with other ST code. For
#   example, METIS is listed as a TT TPL because it conflicts with ParMETIS
#   which is declared as an ST TPL.
#
# EX: Experimental TPL
#
#   Experimental TPLs are not officially supported. They represent
#   experimental capabilities. Support for EX TPLs is never tested as part
#   of the main nightly testing process. However, package developers are
#   encouraged to set up their own nightly testing for their EX TPLs for
#   their packages.
#
# The default enable for all TPLs is empty "" regardless of the category.
# The idea is that the enabling of the TPL will be done by the package and
# other enables that the user has to set.
#
# NOTE: The TPLs must be listed in the order of increasing dependencies (if
# such dependencies exist).
#
SET( Seacas_TPLS_FINDMODS_CLASSIFICATIONS
GTest "cmake/TPLs/" PT
Zlib "cmake/TPLs/" ST
Pthread "cmake/TPLs/" PT
MPI "${${PROJECT_NAME}_TRIBITS_DIR}/core/std_tpls/" PT
HDF5 "${${PROJECT_NAME}_TRIBITS_DIR}/common_tpls/" ST
Pnetcdf "${${PROJECT_NAME}_TRIBITS_DIR}/common_tpls/" ST
Netcdf "${${PROJECT_NAME}_TRIBITS_DIR}/common_tpls/" ST
CGNS "${${PROJECT_NAME}_TRIBITS_DIR}/common_tpls/" ST
DataWarp "cmake/TPLs/" ST
METIS "cmake/TPLs/" TT
ParMETIS "cmake/TPLs/" ST
Pamgen "cmake/TPLs/" ST
Matio "cmake/TPLs/" ST
X11 "cmake/TPLs/" ST
DLlib "cmake/TPLs/" ST
CUDA "cmake/TPLs/" ST
fmt "cmake/TPLs/" ST
Kokkos "${${PROJECT_NAME}_TRIBITS_DIR}/common_tpls/" ST
Faodel "${PROJECT_SOURCE_DIR}/packages/seacas/cmake/tpls/" ST
Cereal "${PROJECT_SOURCE_DIR}/packages/seacas/cmake/tpls/" ST
ADIOS2 "${PROJECT_SOURCE_DIR}/packages/seacas/cmake/tpls/FindTPLADIOS2.cmake" EX
Catalyst2 "${PROJECT_SOURCE_DIR}/packages/seacas/cmake/tpls/FindTPLCatalyst2.cmake" EX
# TPLs defined in Zoltan/cmake/Dependencies.cmake that are never enabled
PaToH "cmake/TPLs/" EX
Scotch "cmake/TPLs/" EX
CCOLAMD "cmake/TPLs/" EX
OVIS "cmake/TPLs/" EX
)

@ -0,0 +1,5 @@
# Project version information for the Seacas TriBITS build.
SET(Seacas_VERSION 2.0)
SET(Seacas_MAJOR_VERSION 2)
# Zero-padded numeric form MMmmpp (major/minor/patch): 2.0.0 -> 020000.
SET(Seacas_MAJOR_MINOR_VERSION 020000)
SET(Seacas_VERSION_STRING "2.0 (Dev)")
SET(Seacas_ENABLE_DEVELOPMENT_MODE_DEFAULT ON) # Change to 'OFF' for a release

@ -0,0 +1,12 @@
#!/bin/bash
# CI build driver (MSYS2/mingw64): configure SEACAS against the mingw64
# TPLs, then build and install.  `set -e` aborts on the first failure.
set -e
pwd
echo $HOME
# -p: do not fail if a previous (cached) build directory already exists,
# which would otherwise abort the script under `set -e`.
mkdir -p build
cd build
HAVE_X11=NO NETCDF_PATH=/mingw64 HDF5_PATH=/mingw64 CGNS_PATH=/mingw64 MPI=NO bash ../cmake-config
# -k: keep going past independent failures so the log shows all errors.
make -k -j4
make install
#ctest -j 4 --output-on-failure

@ -0,0 +1,577 @@
#! /usr/bin/env bash
# Text color variables
if [[ $TERM != *"xterm"* ]]; then
export TERM=dumb
fi
txtred=$(tput setaf 1) # Red
txtgrn=$(tput setaf 2) # Green
txtylw=$(tput setaf 3) # Yellow
#txtblu=$(tput setaf 4) # Blue
#txtpur=$(tput setaf 5) # Purple
txtcyn=$(tput setaf 6) # Cyan
#txtwht=$(tput setaf 7) # White
txtrst=$(tput sgr0) # Text reset
if [ "${TRAVIS}" == "true" ]
then
BUILDDIR=${1:-build}
mkdir $BUILDDIR && cd $BUILDDIR
else
EXTRA_ARGS=$@
fi
### The following assumes you are building in a subdirectory of ACCESS Root
### If not, then define "ACCESS" to point to the root of the SEACAS source code.
if [ "$ACCESS" == "" ]
then
ACCESS=$(cd ..; pwd)
fi
### The SEACAS code will install in ${INSTALL_PATH}/bin, ${INSTALL_PATH}/lib, and ${INSTALL_PATH}/include.
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
# Validate a YES/NO-style option variable, passed by *name* (uses bash
# indirect expansion ${!1}).  Echoes the canonical "YES" or "NO" so callers
# can normalize with VAR=$(check_valid VAR).
# NOTE(review): callers invoke this in a command substitution, so the
# `exit 1` below terminates only the subshell; the caller's variable ends
# up holding the error text.  Kept as-is to preserve the call pattern.
function check_valid()
{
# BUGFIX: valid values previously did `return 1` (failure status); a
# direct call under `set -e` would abort.  Valid values now return 0.
if [ "${!1}" == "YES" ] || [ "${!1}" == "ON" ]; then
echo "YES"
return 0
fi
if [ "${!1}" == "NO" ] || [ "${!1}" == "OFF" ]; then
echo "NO"
return 0
fi
echo "Invalid value for $1 (${!1}) -- Must be ON, YES, NO, or OFF"
exit 1
}
### Possible subset of what is built ---
APPLICATIONS=${APPLICATIONS:-YES}
APPLICATIONS=$(check_valid APPLICATIONS)
LEGACY=${LEGACY:-YES}
LEGACY=$(check_valid LEGACY)
FORTRAN=${FORTRAN:-YES}
FORTRAN=$(check_valid FORTRAN)
ZOLTAN=${ZOLTAN:-YES}
ZOLTAN=$(check_valid ZOLTAN)
### TPLs --
### Make sure these point to the locations to find the libraries and includes in lib and include
### subdirectories of the specified paths.
### For example, netcdf.h should be in ${NETCDF_PATH}/include
NETCDF_PATH=${NETCDF_PATH:-${INSTALL_PATH}}
PNETCDF_PATH=${PNETCDF_PATH:-${INSTALL_PATH}}
MATIO_PATH=${MATIO_PATH:-${INSTALL_PATH}}
HDF5_PATH=${HDF5_PATH:-${INSTALL_PATH}}
CGNS_PATH=${CGNS_PATH:-${INSTALL_PATH}}
FAODEL_PATH=${FAODEL_PATH:-${INSTALL_PATH}}
ADIOS2_PATH=${ADIOS2_PATH:-${INSTALL_PATH}}
CATALYST2_PATH=${CATALYST2_PATH:-${INSTALL_PATH}}
GTEST_PATH=${GTEST_PATH:-${INSTALL_PATH}}
KOKKOS_PATH=${KOKKOS_PATH:-${INSTALL_PATH}}
METIS_PATH=${METIS_PATH:-${INSTALL_PATH}}
PARMETIS_PATH=${PARMETIS_PATH:-${INSTALL_PATH}}
FMT_PATH=${FMT_PATH:-${INSTALL_PATH}}
### Set to ON for parallel compile; otherwise OFF for serial (default)
if [ "${MPI}" = "" ]
then
if [[ -f "$NETCDF_PATH/bin/nc-config" ]]; then
netcdf_parallel=$($NETCDF_PATH/bin/nc-config --has-parallel)
if [ "${netcdf_parallel}" == "yes" ]
then
MPI=YES
else
MPI=NO
fi
else
echo "Unable to determine whether netCDF is parallel or serial. Assuming serial"
echo "Set either \"NETCDF_PATH\" or \"MPI\" manually if the assumption is incorrect."
MPI=NO
fi
fi
MPI=$(check_valid MPI)
echo "${txtgrn}MPI set to ${MPI}${txtrst}"
if [ "${MPI}" == "NO" ]
then
### Change this to point to the compilers you want to use
## Travis build (and others) set this to EXTERNAL to set
## CXX, CC, and FC externally.
COMPILER=${COMPILER:-gnu}
if [ "$COMPILER" == "gnu" ]
then
CXX=g++
CC=gcc
FC=gfortran
CFLAGS="-Wall -Wunused -pedantic -Wshadow -std=c11"
CXXFLAGS="-Wall -Wextra -Wunused -pedantic -Wshadow"
fi
if [ "$COMPILER" == "gnubrew" ]
then
VER=${VER:-10}
CXX=g++-${VER}
CC=gcc-${VER}
FC=gfortran-${VER}
CFLAGS="-Wall -Wunused -pedantic -Wshadow -std=c11"
CXXFLAGS="-Wall -Wextra -Wunused -pedantic -Wshadow"
fi
if [ "$COMPILER" == "gnumacport" ]
then
VER=${VER:-10}
CXX=g++-mp-${VER}
CC=gcc-mp-${VER}
FC=gfortran-mp-${VER}
CFLAGS="-Wall -Wunused -pedantic -Wshadow -std=c11"
CXXFLAGS="-Wall -Wextra -Wunused -pedantic -Wshadow"
fi
if [ "$COMPILER" == "clangmacport" ]
then
VER=${VER:-9}
CXX=clang++-mp-${VER}.0
CC=clang-mp-${VER}.0
FC=gfortran
CFLAGS="-Wall -Wunused -pedantic -Wshadow -std=c11"
CXXFLAGS="-Wall -Wextra -Wunused -pedantic -Wshadow"
fi
if [ "$COMPILER" == "nvidia" ]
then
CXX="nvcc -x c++"
CC=nvcc
FC=gfortran
fi
if [ "$COMPILER" == "clang" ]
then
CXX=clang++
CC=clang
FC=${FC:-gfortran}
CFLAGS="-Wall -Wunused -pedantic -Wshadow"
CXXFLAGS="-Wall -Wextra -Wunused -pedantic -Wshadow"
fi
if [ "$COMPILER" == "intel" ]
then
CXX=icpx
CC=icx
FC=ifort
CFLAGS="-Wall -Wunused"
CXXFLAGS="-Wall -Wextra -Wunused"
fi
# When building: "scan-build make -j8"
if [ "$COMPILER" == "analyzer" ]
then
CXX=/opt/local/libexec/llvm-9.0/libexec/c++-analyzer
CC=/opt/local/libexec/llvm-9.0/libexec/ccc-analyzer
FC=gfortran
CFLAGS="-Wall -Wunused"
CXXFLAGS="-Wall -Wunused"
FORTRAN="NO"
fi
if [ "$COMPILER" == "ibm" ]
then
CXX=xlC
CC=xlc
FC=xlf
fi
fi
if [ "${APPLICATIONS}" == "YES" ] && [ "${LEGACY}" == "YES" ]
then
SUBSET_OPTIONS="-DSeacas_ENABLE_ALL_PACKAGES:BOOL=ON \
-DSeacas_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON \
-DSeacas_ENABLE_SECONDARY_TESTED_CODE:BOOL=ON"
else
SUBSET_OPTIONS="-DSeacas_ENABLE_ALL_PACKAGES:BOOL=OFF \
-DSeacas_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=OFF \
-DSeacas_ENABLE_SECONDARY_TESTED_CODE:BOOL=OFF \
-DSeacas_ENABLE_SEACASIoss:BOOL=ON \
-DSeacas_ENABLE_SEACASExodus:BOOL=ON \
-DSeacas_ENABLE_SEACASExodus_for:BOOL=ON \
-DSeacas_ENABLE_SEACASExoIIv2for32:BOOL=ON"
if [ "${APPLICATIONS}" == "YES" ]
then
SUBSET_OPTIONS="${SUBSET_OPTIONS} \
-DSeacas_ENABLE_SEACASAprepro:BOOL=ON \
-DSeacas_ENABLE_SEACASAprepro_lib:BOOL=ON \
-DSeacas_ENABLE_SEACASConjoin:BOOL=ON \
-DSeacas_ENABLE_SEACASCpup:BOOL=ON \
-DSeacas_ENABLE_SEACASEjoin:BOOL=ON \
-DSeacas_ENABLE_SEACASEpu:BOOL=ON \
-DSeacas_ENABLE_SEACASExo2mat:BOOL=ON \
-DSeacas_ENABLE_SEACASExo_format:BOOL=ON \
-DSeacas_ENABLE_SEACASExodiff:BOOL=ON \
-DSeacas_ENABLE_SEACASMat2exo:BOOL=ON \
-DSeacas_ENABLE_SEACASNemslice:BOOL=ON \
-DSeacas_ENABLE_SEACASSlice:BOOL=ON \
-DSeacas_ENABLE_SEACASZellij:BOOL=ON \
-DSeacas_ENABLE_SEACASNemspread:BOOL=ON"
if [ "${FORTRAN}" == "YES" ]
then
SUBSET_OPTIONS="${SUBSET_OPTIONS} \
-DSeacas_ENABLE_SEACASExplore:BOOL=ON \
-DSeacas_ENABLE_SEACASGrepos:BOOL=ON"
fi
elif [ "${LEGACY}" == "YES" ] && [ "${FORTRAN}" == "YES" ]
then
SUBSET_OPTIONS="${SUBSET_OPTIONS} \
-DSeacas_ENABLE_SEACASAlgebra:BOOL=ON \
-DSeacas_ENABLE_SEACASBlot:BOOL=ON \
-DSeacas_ENABLE_SEACASEx1ex2v2:BOOL=ON \
-DSeacas_ENABLE_SEACASEx2ex1v2:BOOL=ON \
-DSeacas_ENABLE_SEACASExomatlab:BOOL=ON \
-DSeacas_ENABLE_SEACASExotec2:BOOL=ON \
-DSeacas_ENABLE_SEACASExotxt:BOOL=ON \
-DSeacas_ENABLE_SEACASFastq:BOOL=ON \
-DSeacas_ENABLE_SEACASGen3D:BOOL=ON \
-DSeacas_ENABLE_SEACASGenshell:BOOL=ON \
-DSeacas_ENABLE_SEACASGjoin:BOOL=ON \
-DSeacas_ENABLE_SEACASMapvar:BOOL=ON \
-DSeacas_ENABLE_SEACASMapvar-kd:BOOL=ON \
-DSeacas_ENABLE_SEACASNemesis:BOOL=ON \
-DSeacas_ENABLE_SEACASTxtexo:BOOL=ON"
fi
fi
MODERN=${MODERN:-NO}
if [ "${MODERN}" == "YES" ]
then
# MODERN_ARG="-D TPL_ENABLE_HDF5:BOOL=ON -D Netcdf_ALLOW_MODERN:BOOL=ON"
MODERN_ARG="-DNetcdf_FORCE_MODERN:BOOL=ON -DCGNS_FORCE_MODERN:BOOL=ON"
else
MODERN_ARG="-D HDF5_NO_SYSTEM_PATHS=YES"
fi
GENERATOR=${GENERATOR:-"Unix Makefiles"}
# If using an XLF compiler on an IBM system, may need to add the following:
# -DCMAKE_Fortran_FLAGS="-qfixed=72" \
# -DCMAKE_EXE_LINKER_FLAGS:STRING="-lxl -lxlopt"
CRAY="${CRAY:-NO}"
CRAY=$(check_valid CRAY)
if [ "${CRAY}" == "YES" ]
then
SHARED="${SHARED:-NO}"
else
SHARED="${SHARED:-YES}"
fi
SHARED=$(check_valid SHARED)
if [ "${CRAY}" == "YES" ] && [ "${SHARED}" == "NO" ]
then
# Assumes we build our own static zlib with CRAY
EXTRA_LIB=-DSeacas_EXTRA_LINK_FLAGS=${INSTALL_PATH}/lib/libz.a
fi
### Switch for Debug or Release build:
### Check that both `DEBUG` and `BUILD_TYPE` are not set
if [ ! -z ${DEBUG+x} ] && [ ! -z ${BUILD_TYPE+x} ]
then
echo "ERROR: Both DEBUG and BUILD_TYPE are set. Only one is allowed."
exit
fi
BUILD_TYPE="${BUILD_TYPE:-RELEASE}"
if [ ! -z ${DEBUG+x} ]
then
if [ "${DEBUG}" == "ON" ] || [ "${DEBUG}" == "YES" ]
then
BUILD_TYPE="DEBUG"
elif [ "${DEBUG}" == "OFF" ] || [ "${DEBUG}" == "NO" ]
then
BUILD_TYPE="RELEASE"
else
echo "ERROR: Invalid value for DEBUG ('$DEBUG'). Must be 'ON', 'OFF', 'YES', 'NO'."
exit
fi
fi
### If you do not have the X11 developer package on your system
### which provides X11/Xlib.h and the libX11, then change the "YES"
### below to "NO". It will disable blot and fastq
HAVE_X11=${HAVE_X11:-YES}
HAVE_X11=$(check_valid HAVE_X11)
### Set to ON to enable the building of a thread-safe version of the Exodus and IOSS libraries.
THREADSAFE=${THREADSAFE:-NO}
THREADSAFE=$(check_valid THREADSAFE)
# Echo "YES" if the given path exists, else "NO".  Used below to auto-detect
# optional TPLs by probing for a signature header file.
function check_enable()
{
local candidate="$1"
[ -e "${candidate}" ] && echo "YES" || echo "NO"
}
# Auto-detect which TPLs are available by probing for a signature header
# under each TPL's configured install path.
HAVE_NETCDF=$(check_enable "${NETCDF_PATH}/include/netcdf.h")
HAVE_MATIO=$(check_enable "${MATIO_PATH}/include/matio.h")
HAVE_CGNS=$(check_enable "${CGNS_PATH}/include/cgnslib.h")
HAVE_FAODEL=$(check_enable "${FAODEL_PATH}/include/faodel/faodelConfig.h")
HAVE_ADIOS2=$(check_enable "${ADIOS2_PATH}/include/adios2.h")
HAVE_CATALYST2=$(check_enable "${CATALYST2_PATH}/include/catalyst-2.0/catalyst.h")
HAVE_GTEST=$(check_enable "${GTEST_PATH}/include/gtest/gtest.h")
HAVE_KOKKOS=$(check_enable "${KOKKOS_PATH}/include/Kokkos_Core.hpp")
HAVE_METIS=$(check_enable "${METIS_PATH}/include/metis.h")
# BUGFIX: probe parmetis.h under PARMETIS_PATH (was METIS_PATH, which made
# the separate PARMETIS_PATH setting ineffective for detection).
HAVE_PARMETIS=$(check_enable "${PARMETIS_PATH}/include/parmetis.h")
### DataWarp (Burst Buffer)
### I use the following for mutrino (10/16/2018):
### module load datawarp
### -D TPL_ENABLE_DataWarp:BOOL=ON \
### -D DataWarp_LIBRARY_DIRS:PATH=/opt/cray/datawarp/2.1.16-6.0.5.1_2.61__g238b34d.ari/lib \
### -D DataWarp_INCLUDE_DIRS:PATH=/opt/cray/datawarp/2.1.16-6.0.5.1_2.61__g238b34d.ari/include \
### Define to NO to *enable* exodus deprecated functions
OMIT_DEPRECATED=${OMIT_DEPRECATED:-NO}
NUMPROCS=${NUMPROCS:-4}
# BUG needs to work with cray too.
if [ "${MPI}" == "YES" ] && [ "${CRAY}" == "YES" ]
then
MPI_EXEC=$(which srun)
MPI_SYMBOLS="-D MPI_EXEC=${MPI_EXEC} -D MPI_EXEC_NUMPROCS_FLAG=-n -DMPI_EXEC_DEFAULT_NUMPROCS:STRING=${NUMPROCS} -DMPI_EXEC_MAX_NUMPROCS:STRING=${NUMPROCS}"
CXX=CC
CC=cc
FC=ftn
MPI_BIN=$(dirname $(which ${CC}))
elif [ "${MPI}" == "YES" ]
then
if [ "${USE_SRUN}" == "YES" ]
then
MPI_EXEC=$(which srun)
MPI_SYMBOLS="-D MPI_EXEC=${MPI_EXEC} -D MPI_EXEC_NUMPROCS_FLAG=-N -DMPI_EXEC_DEFAULT_NUMPROCS:STRING=${NUMPROCS} -DMPI_EXEC_MAX_NUMPROCS:STRING=${NUMPROCS}"
MPI_BIN=$(dirname "${MPI_EXEC}")
else
MPI_EXEC=$(which mpiexec)
MPI_SYMBOLS="-D MPI_EXEC=${MPI_EXEC} -DMPI_EXEC_DEFAULT_NUMPROCS:STRING=${NUMPROCS} -DMPI_EXEC_MAX_NUMPROCS:STRING=${NUMPROCS}"
MPI_BIN=$(dirname "${MPI_EXEC}")
fi
CXX=mpicxx
CC=mpicc
FC=mpif77
fi
OS=$(uname -s)
if [ "$SHARED" == "YES" ]
then
if [ "$OS" == "Darwin" ] ; then
LD_EXT="dylib"
else
LD_EXT="so"
fi
else
EXTRA_LIB="-DSeacas_EXTRA_LINK_FLAGS=z;dl -DSEACASExodus_ENABLE_SHARED:BOOL=OFF"
LD_EXT="a"
fi
if [ "${HAVE_KOKKOS}" == "YES" ]
then
KOKKOS_SYMBOLS="-DKOKKOS_SRC_PATH:PATH=${INSTALL_PATH}/TPL/kokkos/kokkos \
-DTPL_Kokkos_LIBRARY_DIRS:PATH=${KOKKOS_PATH}/lib \
-DTPL_Kokkos_INCLUDE_DIRS:PATH=${KOKKOS_PATH}/include \
-DTPL_Kokkos_LIBRARIES=${KOKKOS_PATH}/lib/libkokkoscore.${LD_EXT}"
fi
if [ "$HAVE_FAODEL" == "YES" ]
then
FAODEL_SYMBOLS=" -D TPL_ENABLE_Faodel:BOOL=${HAVE_FAODEL} \
-D Faodel_ROOT:PATH=${FAODEL_PATH} \
-D Faodel_INCLUDE_DIRS:PATH=${FAODEL_PATH}/include/faodel \
"
fi
if [ "$OS" == "Darwin" ] ; then
DARWIN_OPT="-D CMAKE_MACOSX_RPATH:BOOL=ON -D TPL_X11_INCLUDE_DIRS:PATH=/opt/X11/include"
else
DARWIN_OPT=""
fi
# Only run doxygen if me and on master branch...
DOXYGEN=${DOXYGEN:-NO}
DOXYGEN=$(check_valid DOXYGEN)
if [[ "$DOXYGEN" == "NO" && "$OS" == "Darwin" && "$MPI" == "NO" ]] ; then
branch=$(git branch |grep \* |cut -c3-)
USER=$(id -nu)
if [ "$USER" == "gdsjaar" ] && [ "$branch" == "master" ]; then
DOXYGEN=YES
fi
fi
FC=${FC:-gfortran}
EXTRA_WARNINGS=${EXTRA_WARNINGS:-NO}
EXTRA_WARNINGS=$(check_valid EXTRA_WARNINGS)
SANITIZER=${SANITIZER:-NO}
if [ "$SANITIZER" != "NO" ] ; then
### To use the clang sanitizers:
#sanitizer=address #: AddressSanitizer, a memory error detector.
#sanitizer=integer #: Enables checks for undefined or suspicious integer behavior.
#sanitizer=thread #: ThreadSanitizer, a data race detector.
#sanitizer=memory #: MemorySanitizer, experimental detector of uninitialized reads.
#sanitizer=undefined #: Fast and compatible undefined behavior checker.
#sanitizer=dataflow #: DataFlowSanitizer, a general data flow analysis.
#sanitizer=cfi #: control flow integrity checks. Requires -flto.
#sanitizer=safe-stack #: safe stack protection against stack-based memory corruption errors.
SANITIZE="-fsanitize=${SANITIZER} -fno-omit-frame-pointer -fPIC"
if [ "$SANITIZER" == "integer" ] ; then
SANITIZE="$SANITIZE -fno-sanitize=unsigned-integer-overflow"
fi
fi
### You can add these below if you want more verbosity...
#-D CMAKE_VERBOSE_MAKEFILE:BOOL=ON \
#-D Seacas_VERBOSE_CONFIGURE=ON \
### You can add these below to regenerate the flex and bison files for
### aprepro and aprepro_lib May have to touch aprepro.l aprepro.y
### aprepro.ll and aprepro.yy to have them regenerate
#-D GENERATE_FLEX_FILES=ON \
#-D GENERATE_BISON_FILES=ON \
if [ "${EXTRA_WARNINGS}" == "YES" ]; then
### Additional gcc warnings:
if [ "$COMPILER" == "gnu" ]
then
COMMON_WARNING_FLAGS="\
-Wshadow -Wabsolute-value -Waddress -Waliasing -Wpedantic\
"
C_WARNING_FLAGS="${COMMON_WARNING_FLAGS}"
CXX_WARNING_FLAGS="${COMMON_WARNING_FLAGS} -Wnull-dereference -Wzero-as-null-pointer-constant -Wuseless-cast -Weffc++ -Wsuggest-override"
# -Wuseless-cast
# -Wold-style-cast
# -Wdouble-promotion
fi
if [ "$COMPILER" == "clang" ]
then
C_WARNING_FLAGS="-Weverything -Wno-missing-prototypes -Wno-sign-conversion -Wno-reserved-id-macro"
CXX_WARNING_FLAGS="-Weverything -Wno-c++98-compat -Wno-old-style-cast -Wno-sign-conversion -Wno-reserved-id-macro"
fi
fi
rm -f CMakeCache.txt
###------------------------------------------------------------------------
cmake -G "${GENERATOR}" \
-D CMAKE_CXX_COMPILER:FILEPATH=${CXX} \
-D CMAKE_C_COMPILER:FILEPATH=${CC} \
-D CMAKE_Fortran_COMPILER:FILEPATH=${FC} \
-D CMAKE_CXX_FLAGS="${CXXFLAGS} ${CXX_WARNING_FLAGS} ${SANITIZE}" \
-D CMAKE_C_FLAGS="${CFLAGS} ${C_WARNING_FLAGS} ${SANITIZE}" \
-D CMAKE_Fortran_FLAGS="${FFLAGS} ${F77_WARNING_FLAGS} ${SANITIZE}" \
-D Seacas_ENABLE_STRONG_C_COMPILE_WARNINGS=${EXTRA_WARNINGS} \
-D Seacas_ENABLE_STRONG_CXX_COMPILE_WARNINGS=${EXTRA_WARNINGS} \
-D CMAKE_INSTALL_RPATH:PATH=${INSTALL_PATH}/lib \
-D BUILD_SHARED_LIBS:BOOL=${SHARED} \
-D CMAKE_BUILD_TYPE=${BUILD_TYPE} \
${SUBSET_OPTIONS} \
-D Seacas_ENABLE_Zoltan:BOOL=${ZOLTAN} \
-D Seacas_ENABLE_TESTS=ON \
-D CMAKE_INSTALL_PREFIX:PATH=${INSTALL_PATH} \
-D Seacas_SKIP_FORTRANCINTERFACE_VERIFY_TEST:BOOL=ON \
-D Seacas_HIDE_DEPRECATED_CODE:BOOL=${OMIT_DEPRECATED} \
-D Seacas_ENABLE_DOXYGEN:BOOL=${DOXYGEN} \
-D Seacas_ENABLE_Fortran=${FORTRAN} \
${EXTRA_LIB} \
${MODERN_ARG} \
\
-D TPL_ENABLE_Netcdf:BOOL=${HAVE_NETCDF} \
-D TPL_ENABLE_Matio:BOOL=${HAVE_MATIO} \
-D TPL_ENABLE_CGNS:BOOL=${HAVE_CGNS} \
-D TPL_ENABLE_ADIOS2:BOOL=${HAVE_ADIOS2} \
-D TPL_ENABLE_Catalyst2:BOOL=${HAVE_CATALYST2} \
-D TPL_ENABLE_GTest:BOOL=${HAVE_GTEST} \
-D TPL_ENABLE_Kokkos:BOOL=${HAVE_KOKKOS} \
-D TPL_ENABLE_METIS:BOOL=${HAVE_METIS} \
-D TPL_ENABLE_ParMETIS:BOOL=${HAVE_PARMETIS} \
-D TPL_ENABLE_MPI:BOOL=${MPI} \
-D TPL_ENABLE_Pamgen:BOOL=OFF \
-D TPL_ENABLE_fmt:BOOL=ON \
-D TPL_ENABLE_Pthread:BOOL=${THREADSAFE} \
${THREAD_SAFE_OPT} \
-D TPL_ENABLE_X11:BOOL=${HAVE_X11} \
\
-D SEACASExodus_ENABLE_THREADSAFE:BOOL=${THREADSAFE} \
-D SEACASIoss_ENABLE_THREADSAFE:BOOL=${THREADSAFE} \
\
${KOKKOS_SYMBOLS} \
${MPI_SYMBOLS} \
${DARWIN_OPT} \
${FAODEL_SYMBOLS} \
\
-D MPI_BIN_DIR:PATH=${MPI_BIN} \
-D NetCDF_ROOT:PATH=${NETCDF_PATH} \
-D netCDF_ROOT:PATH=${NETCDF_PATH} \
-D HDF5_ROOT:PATH=${HDF5_PATH} \
-D HDF5_DIR:PATH=${HDF5_PATH} \
-D CGNS_ROOT:PATH=${CGNS_PATH} \
-D CGNS_DIR:PATH=${CGNS_PATH} \
-D Matio_ROOT:PATH=${MATIO_PATH} \
-D Metis_ROOT:PATH=${METIS_PATH} \
-D ParMETIS_ROOT:PATH=${PARMETIS_PATH} \
-D PNetCDF_ROOT:PATH=${PNETCDF_PATH} \
-D fmt_LIBRARY_DIRS:PATH=${FMT_PATH}/lib \
-D fmt_INCLUDE_DIRS:PATH=${FMT_PATH}/include \
$EXTRA_ARGS \
${ACCESS}
echo ""
echo " OS: ${OS}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo " "
echo " CC: ${CC}"
echo " CXX: ${CXX}"
echo " FC: ${FC}"
echo " MPI: ${MPI}"
echo " SHARED: ${SHARED}"
echo " BUILD_TYPE: ${BUILD_TYPE}"
echo " THREADSAFE: ${THREADSAFE}"
echo " CRAY: ${CRAY}"
echo " "
echo " NETCDF: ${HAVE_NETCDF}"
echo " MATIO: ${HAVE_MATIO}"
echo " CGNS: ${HAVE_CGNS}"
echo " KOKKOS: ${HAVE_KOKKOS}"
echo " ZOLTAN: ${ZOLTAN}"
echo " ADIOS2: ${HAVE_ADIOS2}"
echo " CATALYST2: ${HAVE_CATALYST2}"
echo " METIS: ${HAVE_METIS}"
echo " PARMETIS: ${HAVE_PARMETIS}"
echo " FAODEL: ${HAVE_FAODEL}"
echo " GTEST: ${HAVE_GTEST}"
echo " DOXYGEN: ${DOXYGEN}"
echo ""
if [ "${TRAVIS}" == "true" ]
then
make -j2
cd ${ACCESS}
fi

@ -0,0 +1,90 @@
#!/bin/sh
# Configure SEACAS with Kokkos (OpenMP or CUDA) options enabled.
# NOTE: runs under /bin/sh, so all test expressions use the POSIX `=`
# operator; the bash-only `==` fails under dash and other strict shells.
EXTRA_ARGS=$@
### The following assumes you are building in a subdirectory of ACCESS Root
ACCESS=$(cd ..; pwd)
### The SEACAS code will install in ${INSTALL_PATH}/bin, ${INSTALL_PATH}/lib, and ${INSTALL_PATH}/include.
INSTALL_PATH=${ACCESS}
### Make sure these point to the locations to find the libraries and includes in lib and include
### subdirectories of the specified paths.
### For example, netcdf.h should be in ${NETCDF_PATH}/include
NETCDF_PATH=${ACCESS}
MATIO_PATH=${ACCESS}
HDF5_PATH=${ACCESS}
CGNS_PATH=${ACCESS}
CUDA_PATH=${CUDA_ROOT} # Set this to the appropriate path.
### Set to ON for parallel compile; otherwise OFF for serial (default)
MPI="ON"
### Set to ON for CUDA compile; otherwise OFF (default)
CUDA="OFF"
### Change this to point to the compilers you want to use
if [ "$MPI" = "ON" ]
then
CXX=mpicxx
CC=mpicc
FC=mpif77
else
CXX=clang++
CC=clang
FC=gfortran
fi
if [ "$CUDA" = "ON" ]
then
# Route MPI's C++ wrapper through the Kokkos nvcc_wrapper for CUDA builds.
export "OMPI_CXX=${SEACAS_SRC_DIR}/packages/kokkos/config/nvcc_wrapper"
export CUDA_MANAGED_FORCE_DEVICE_ALLOC=1
KOKKOS_SYMBOLS="-D TPL_ENABLE_CUDA:Bool=ON -D CUDA_TOOLKIT_ROOT_DIR:PATH=${CUDA_PATH} -D TPL_ENABLE_Pthread:Bool=OFF"
else
# NOTE(review): this points OpenMPI's C++ wrapper at gcc (the C driver);
# g++ may have been intended -- confirm before changing behavior.
export OMPI_CXX=$(which gcc)
unset CUDA_MANAGED_FORCE_DEVICE_ALLOC
KOKKOS_SYMBOLS="-D Seacas_ENABLE_OpenMP:Bool=ON -D TPL_ENABLE_Pthread:Bool=OFF"
fi
### Switch for Debug or Release build:
BUILD_TYPE=Release
#BUILD_TYPE=Debug
### If you do not have the X11 developer package on your system
### which provides X11/Xlib.h and the libX11, then you will need
### to add the line below. It will disable blot and fastq
#-D TPL_ENABLE_X11=OFF \
rm -f CMakeCache.txt
###------------------------------------------------------------------------
cmake \
-D CMAKE_MACOSX_RPATH:BOOL=ON \
-D CMAKE_INSTALL_RPATH:PATH=${INSTALL_PATH}/lib \
-D BUILD_SHARED_LIBS:BOOL=ON \
-D CMAKE_BUILD_TYPE=${BUILD_TYPE} \
-D Seacas_ENABLE_ALL_PACKAGES:BOOL=ON \
-D Seacas_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON \
-D Seacas_ENABLE_SECONDARY_TESTED_CODE:BOOL=ON \
-D Seacas_ENABLE_TESTS=ON \
-D CMAKE_INSTALL_PREFIX:PATH=${INSTALL_PATH} \
-D CMAKE_CXX_COMPILER:FILEPATH=${CXX} \
-D CMAKE_C_COMPILER:FILEPATH=${CC} \
-D CMAKE_Fortran_COMPILER:FILEPATH=${FC} \
-D Seacas_SKIP_FORTRANCINTERFACE_VERIFY_TEST:BOOL=ON \
\
-D TPL_ENABLE_Netcdf:BOOL=ON \
-D TPL_ENABLE_Matio:BOOL=ON \
-D TPL_ENABLE_MPI=${MPI} \
-D TPL_ENABLE_Pamgen=OFF \
-D TPL_ENABLE_CGNS:BOOL=OFF \
\
${KOKKOS_SYMBOLS} \
\
-D NetCDF_ROOT:PATH=${NETCDF_PATH} \
-D HDF5_ROOT:PATH=${HDF5_PATH} \
-D HDF5_NO_SYSTEM_PATHS=ON \
-D CGNS_ROOT:PATH=${CGNS_PATH} \
-D Matio_LIBRARY_DIRS:PATH=${MATIO_PATH}/lib \
-D TPL_Matio_INCLUDE_DIRS:PATH=${MATIO_PATH}/include \
\
$EXTRA_ARGS \
..

@ -0,0 +1,401 @@
#! /usr/bin/env bash
# CMake config file to build ONLY the exodus libraries (C, Fortran, Fortran-32, and Python interface)
# By default, Will build both static and shared version of the C API.
# If only want shared, then run with "sh STATIC=NO ../cmake-exodus"
EXTRA_ARGS=$@
### The following assumes you are building in a subdirectory of ACCESS Root
### If not, then define "ACCESS" to point to the root of the SEACAS source code.
if [ "$ACCESS" == "" ]
then
ACCESS=$(cd ..; pwd)
fi
### The SEACAS code will install in ${INSTALL_PATH}/bin, ${INSTALL_PATH}/lib, and ${INSTALL_PATH}/include.
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
# Validate a YES/NO-style option variable, passed by *name* (uses bash
# indirect expansion ${!1}).  Echoes the canonical "YES" or "NO" so callers
# can normalize with VAR=$(check_valid VAR).
# NOTE(review): callers invoke this in a command substitution, so the
# `exit 1` below terminates only the subshell; the caller's variable ends
# up holding the error text.  Kept as-is to preserve the call pattern.
function check_valid()
{
# BUGFIX: valid values previously did `return 1` (failure status); a
# direct call under `set -e` would abort.  Valid values now return 0.
if [ "${!1}" == "YES" ] || [ "${!1}" == "ON" ]; then
echo "YES"
return 0
fi
if [ "${!1}" == "NO" ] || [ "${!1}" == "OFF" ]; then
echo "NO"
return 0
fi
echo "Invalid value for $1 (${!1}) -- Must be ON, YES, NO, or OFF"
exit 1
}
FORTRAN=${FORTRAN:-YES}
FORTRAN=$(check_valid FORTRAN)
### TPLs --
### Make sure these point to the locations to find the libraries and includes in lib and include
### subdirectories of the specified paths.
### For example, netcdf.h should be in ${NETCDF_PATH}/include
NETCDF_PATH=${NETCDF_PATH:-${INSTALL_PATH}}
PNETCDF_PATH=${PNETCDF_PATH:-${INSTALL_PATH}}
HDF5_PATH=${HDF5_PATH:-${INSTALL_PATH}}
### Set to ON for parallel compile; otherwise OFF for serial (default)
# If MPI is unset, auto-detect from the installed netCDF's parallel support.
if [ "${MPI}" = "" ]
then
if [[ -f "$NETCDF_PATH/bin/nc-config" ]]; then
netcdf_parallel=$($NETCDF_PATH/bin/nc-config --has-parallel)
if [ "${netcdf_parallel}" == "yes" ]
then
MPI=YES
else
MPI=NO
fi
else
echo "Unable to determine whether netCDF is parallel or serial. Assuming serial"
# BUGFIX: backticks inside the double-quoted message were command
# substitutions that tried to *execute* NETCDF_PATH/MPI and garbled the
# output; emit literal quoted names instead (matches the sibling script).
echo "Set either \"NETCDF_PATH\" or \"MPI\" manually if the assumption is incorrect."
MPI=NO
fi
fi
MPI=$(check_valid MPI)
echo "${txtgrn}MPI set to ${MPI}${txtrst}"
if [ "${MPI}" == "NO" ]
then
### Change this to point to the compilers you want to use
## Some builds set this to EXTERNAL to set CXX, CC, and FC externally.
COMPILER=${COMPILER:-gnu}
if [ "$COMPILER" == "gnu" ]
then
CXX=g++
CC=gcc
FC=gfortran
CFLAGS="-Wall -Wunused -pedantic -std=c11"
CXXFLAGS="-Wall -Wunused -pedantic"
fi
if [ "$COMPILER" == "gnubrew" ]
then
VER=${VER:-10}
CXX=g++-${VER}
CC=gcc-${VER}
FC=gfortran-${VER}
CFLAGS="-Wall -Wunused -pedantic -std=c11"
CXXFLAGS="-Wall -Wunused -pedantic"
fi
if [ "$COMPILER" == "gnumacport" ]
then
VER=${VER:-10}
CXX=g++-mp-${VER}
CC=gcc-mp-${VER}
FC=gfortran-mp-${VER}
CFLAGS="-Wall -Wunused -pedantic -std=c11"
CXXFLAGS="-Wall -Wunused -pedantic"
fi
if [ "$COMPILER" == "clangmacport" ]
then
VER=${VER:-9}
CXX=clang++-mp-${VER}.0
CC=clang-mp-${VER}.0
FC=gfortran
CFLAGS="-Wall -Wunused -pedantic -std=c11"
CXXFLAGS="-Wall -Wunused -pedantic"
fi
if [ "$COMPILER" == "nvidia" ]
then
CXX="nvcc -x c++"
CC=nvcc
FC=gfortran
fi
if [ "$COMPILER" == "clang" ]
then
CXX=clang++
CC=clang
FC=${FC:-gfortran}
CFLAGS="-Wall -Wunused -pedantic"
CXXFLAGS="-Wall -Wunused -pedantic"
fi
if [ "$COMPILER" == "intel" ]
then
CXX=icpc
CC=icc
FC=ifort
CFLAGS="-Wall -Wunused"
CXXFLAGS="-Wall -Wunused"
fi
# When building: "scan-build make -j8"
if [ "$COMPILER" == "analyzer" ]
then
CXX=/opt/local/libexec/llvm-9.0/libexec/c++-analyzer
CC=/opt/local/libexec/llvm-9.0/libexec/ccc-analyzer
FC=gfortran
CFLAGS="-Wall -Wunused"
CXXFLAGS="-Wall -Wunused"
FORTRAN="NO"
fi
if [ "$COMPILER" == "ibm" ]
then
CXX=xlC
CC=xlc
FC=xlf
fi
fi
MODERN=${MODERN:-NO}
if [ "${MODERN}" == "YES" ]
then
MODERN_ARG="-D TPL_ENABLE_HDF5:BOOL=ON -D Netcdf_ALLOW_MODERN:BOOL=ON"
else
MODERN_ARG=""
fi
GENERATOR=${GENERATOR:-"Unix Makefiles"}
CRAY="${CRAY:-NO}"
CRAY=$(check_valid CRAY)
if [ "${CRAY}" == "YES" ]
then
SHARED="${SHARED:-NO}"
else
SHARED="${SHARED:-YES}"
fi
SHARED=$(check_valid SHARED)
if [ "${CRAY}" == "YES" ] && [ "${SHARED}" == "NO" ]
then
# Assumes we build our own static zlib with CRAY
EXTRA_LIB=-DSeacas_EXTRA_LINK_FLAGS=${INSTALL_PATH}/lib/libz.a
fi
### Switch for Debug or Release build:
### Check that both `DEBUG` and `BUILD_TYPE` are not set
if [ ! -z ${DEBUG+x} ] && [ ! -z ${BUILD_TYPE+x} ]
then
echo "ERROR: Both DEBUG and BUILD_TYPE are set. Only one is allowed."
exit
fi
BUILD_TYPE="${BUILD_TYPE:-RELEASE}"
if [ ! -z ${DEBUG+x} ]
then
if [ "${DEBUG}" == "ON" ] || [ "${DEBUG}" == "YES" ]
then
BUILD_TYPE="DEBUG"
elif [ "${DEBUG}" == "OFF" ] || [ "${DEBUG}" == "NO" ]
then
BUILD_TYPE="RELEASE"
else
echo "ERROR: Invalid value for DEBUG ('$DEBUG'). Must be 'ON', 'OFF', 'YES', 'NO'."
exit
fi
fi
### Set to YES to enable the building of a thread-safe version of the Exodus and IOSS libraries.
THREADSAFE=${THREADSAFE:-NO}
THREADSAFE=$(check_valid THREADSAFE)
if [ "$THREADSAFE" == "YES" ] ; then
THREAD_SAFE_OPT="-DTPL_Pthread_LIBRARIES=-lpthread"
fi
function check_enable()
{
# Emit "YES" on stdout when the filesystem path given as $1 exists,
# "NO" otherwise.  Used to auto-enable TPLs from header-file presence.
[ -e "$1" ] && echo "YES" || echo "NO"
}
# Enable the NetCDF TPL only when its header is actually installed.
HAVE_NETCDF=$(check_enable "${NETCDF_PATH}/include/netcdf.h")
### DataWarp (Burst Buffer)
### I use the following for mutrino (10/16/2018):
### module load datawarp
### -D TPL_ENABLE_DataWarp:BOOL=ON \
### -D DataWarp_LIBRARY_DIRS:PATH=/opt/cray/datawarp/2.1.16-6.0.5.1_2.61__g238b34d.ari/lib \
### -D DataWarp_INCLUDE_DIRS:PATH=/opt/cray/datawarp/2.1.16-6.0.5.1_2.61__g238b34d.ari/include \
### Define to NO to *enable* exodus deprecated functions
# Fix: the cmake invocation and summary below consume OMIT_DEPRECATED_CODE,
# but this line previously set OMIT_DEPRECATED, so the setting never took
# effect.  The old OMIT_DEPRECATED spelling is still honored as a fallback.
OMIT_DEPRECATED_CODE=${OMIT_DEPRECATED_CODE:-${OMIT_DEPRECATED:-YES}}
NUMPROCS=${NUMPROCS:-4}
# BUG needs to work with cray too.
# MPI launcher setup: selects the launcher (srun/mpiexec), the compiler
# wrappers, and the -DMPI_EXEC* cmake defines used by the test harness.
if [ "${MPI}" == "YES" ] && [ "${CRAY}" == "YES" ]
then
# Cray: launch through srun and use the Cray compiler drivers (CC/cc/ftn).
MPI_EXEC=$(which srun)
MPI_SYMBOLS="-D MPI_EXEC=${MPI_EXEC} -D MPI_EXEC_NUMPROCS_FLAG=-n -DMPI_EXEC_DEFAULT_NUMPROCS:STRING=${NUMPROCS} -DMPI_EXEC_MAX_NUMPROCS:STRING=${NUMPROCS}"
CXX=CC
CC=cc
FC=ftn
MPI_BIN=$(dirname $(which ${CC}))
elif [ "${MPI}" == "YES" ]
then
if [ "${USE_SRUN}" == "YES" ]
then
# Slurm without Cray wrappers: srun with its -N processes flag.
MPI_EXEC=$(which srun)
MPI_SYMBOLS="-D MPI_EXEC=${MPI_EXEC} -D MPI_EXEC_NUMPROCS_FLAG=-N -DMPI_EXEC_DEFAULT_NUMPROCS:STRING=${NUMPROCS} -DMPI_EXEC_MAX_NUMPROCS:STRING=${NUMPROCS}"
MPI_BIN=$(dirname "${MPI_EXEC}")
else
# Generic MPI installation: plain mpiexec.
MPI_EXEC=$(which mpiexec)
MPI_SYMBOLS="-D MPI_EXEC=${MPI_EXEC} -DMPI_EXEC_DEFAULT_NUMPROCS:STRING=${NUMPROCS} -DMPI_EXEC_MAX_NUMPROCS:STRING=${NUMPROCS}"
MPI_BIN=$(dirname "${MPI_EXEC}")
fi
# Non-Cray MPI builds use the conventional MPI compiler wrappers.
CXX=mpicxx
CC=mpicc
FC=mpif77
fi
OS=$(uname -s)
# Pick the shared-library extension for this platform; static builds also
# disable the Exodus shared library and add extra link flags.
if [ "$SHARED" == "YES" ]
then
if [ "$OS" == "Darwin" ] ; then
LD_EXT="dylib"
else
LD_EXT="so"
fi
else
# NOTE(review): this overwrites any EXTRA_LIB set above for the
# CRAY+static case (static libz link flag) -- confirm that is intended.
EXTRA_LIB="-DSeacas_EXTRA_LINK_FLAGS=z;dl -DSEACASExodus_ENABLE_SHARED:BOOL=OFF"
LD_EXT="a"
fi
if [ "$OS" == "Darwin" ] ; then
DARWIN_OPT="-D CMAKE_MACOSX_RPATH:BOOL=ON"
else
DARWIN_OPT=""
fi
FC=${FC:-gfortran}
EXTRA_WARNINGS=${EXTRA_WARNINGS:-NO}
EXTRA_WARNINGS=$(check_valid EXTRA_WARNINGS)
# SANITIZER selects a clang/gcc -fsanitize= mode by name; NO disables it.
SANITIZER=${SANITIZER:-NO}
if [ "$SANITIZER" != "NO" ] ; then
### To use the clang sanitizers:
#sanitizer=address #: AddressSanitizer, a memory error detector.
#sanitizer=integer #: Enables checks for undefined or suspicious integer behavior.
#sanitizer=thread #: ThreadSanitizer, a data race detector.
#sanitizer=memory #: MemorySanitizer, experimental detector of uninitialized reads.
#sanitizer=undefined #: Fast and compatible undefined behavior checker.
#sanitizer=dataflow #: DataFlowSanitizer, a general data flow analysis.
#sanitizer=cfi #: control flow integrity checks. Requires -flto.
#sanitizer=safe-stack #: safe stack protection against stack-based memory corruption errors.
SANITIZE="-fsanitize=${SANITIZER} -fno-omit-frame-pointer -fPIC"
if [ "$SANITIZER" == "integer" ] ; then
SANITIZE="$SANITIZE -fno-sanitize=unsigned-integer-overflow"
fi
fi
### You can add these below if you want more verbosity...
#-D CMAKE_VERBOSE_MAKEFILE:BOOL=ON \
#-D Seacas_VERBOSE_CONFIGURE=ON \
### You can add these below to regenerate the flex and bison files for
### aprepro and aprepro_lib May have to touch aprepro.l aprepro.y
### aprepro.ll and aprepro.yy to have them regenerate
#-D GENERATE_FLEX_FILES=ON \
#-D GENERATE_BISON_FILES=ON \
# Optional extra warning flags, per compiler family.
if [ "${EXTRA_WARNINGS}" == "YES" ]; then
### Additional gcc warnings:
if [ "$COMPILER" == "gnu" ]
then
COMMON_WARNING_FLAGS="\
-Wshadow -Wabsolute-value -Waddress -Waliasing -Wpedantic\
"
C_WARNING_FLAGS="${COMMON_WARNING_FLAGS}"
CXX_WARNING_FLAGS="${COMMON_WARNING_FLAGS} -Wnull-dereference -Wzero-as-null-pointer-constant -Wuseless-cast -Weffc++ -Wsuggest-override"
# -Wuseless-cast
# -Wold-style-cast
# -Wdouble-promotion
fi
if [ "$COMPILER" == "clang" ]
then
C_WARNING_FLAGS="-Weverything -Wno-missing-prototypes -Wno-sign-conversion -Wno-reserved-id-macro"
CXX_WARNING_FLAGS="-Weverything -Wno-c++98-compat -Wno-old-style-cast -Wno-sign-conversion -Wno-reserved-id-macro"
fi
fi
# Remove any stale cmake cache so changed environment variables take effect.
rm -f CMakeCache.txt
###------------------------------------------------------------------------
# Configure the Exodus-only build with everything assembled above.
# (No comments may appear inside the backslash-continued command below.)
cmake -G "${GENERATOR}" \
-D CMAKE_CXX_COMPILER:FILEPATH=${CXX} \
-D CMAKE_C_COMPILER:FILEPATH=${CC} \
-D CMAKE_Fortran_COMPILER:FILEPATH=${FC} \
-D CMAKE_CXX_FLAGS="${CXXFLAGS} ${CXX_WARNING_FLAGS} ${SANITIZE}" \
-D CMAKE_C_FLAGS="${CFLAGS} ${C_WARNING_FLAGS} ${SANITIZE}" \
-D CMAKE_Fortran_FLAGS="${FFLAGS} ${F77_WARNING_FLAGS} ${SANITIZE}" \
-D Seacas_ENABLE_STRONG_C_COMPILE_WARNINGS=${EXTRA_WARNINGS} \
-D Seacas_ENABLE_STRONG_CXX_COMPILE_WARNINGS=${EXTRA_WARNINGS} \
-D CMAKE_INSTALL_RPATH:PATH=${INSTALL_PATH}/lib \
-D BUILD_SHARED_LIBS:BOOL=${SHARED} \
-D CMAKE_BUILD_TYPE=${BUILD_TYPE} \
-D Seacas_ENABLE_SEACASExodus=YES \
-D Seacas_ENABLE_SEACASExodus_for=${FORTRAN} \
-D Seacas_ENABLE_SEACASExoIIv2for32=${FORTRAN} \
-D Seacas_ENABLE_TESTS=YES \
-D SEACASExodus_ENABLE_STATIC:BOOL=${STATIC} \
-D CMAKE_INSTALL_PREFIX:PATH=${INSTALL_PATH} \
-D Seacas_SKIP_FORTRANCINTERFACE_VERIFY_TEST:BOOL=YES \
-D Seacas_HIDE_DEPRECATED_CODE:BOOL=${OMIT_DEPRECATED_CODE} \
-D Seacas_ENABLE_Fortran=${FORTRAN} \
${EXTRA_LIB} \
${MODERN_ARG} \
\
-D TPL_ENABLE_Netcdf:BOOL=${HAVE_NETCDF} \
-D TPL_ENABLE_MPI:BOOL=${MPI} \
-D TPL_ENABLE_Pthread:BOOL=${THREADSAFE} \
${THREAD_SAFE_OPT} \
-D SEACASExodus_ENABLE_THREADSAFE:BOOL=${THREADSAFE} \
\
${MPI_SYMBOLS} \
${DARWIN_OPT} \
\
-D MPI_BIN_DIR:PATH=${MPI_BIN} \
-D NetCDF_ROOT:PATH=${NETCDF_PATH} \
-D HDF5_ROOT:PATH=${HDF5_PATH} \
-D HDF5_NO_SYSTEM_PATHS=YES \
-D PNetCDF_ROOT:PATH=${PNETCDF_PATH} \
\
$EXTRA_ARGS \
${ACCESS}
# Configuration summary for the build log.
echo ""
echo " OS: ${OS}"
echo " ACCESS: ${ACCESS}"
echo " INSTALL_PATH: ${INSTALL_PATH}"
echo " "
echo " CC: ${CC}"
echo " CXX: ${CXX}"
echo " FC: ${FC}"
echo " MPI: ${MPI}"
echo " SHARED: ${SHARED}"
echo " BUILD_TYPE: ${BUILD_TYPE}"
echo " THREADSAFE: ${THREADSAFE}"
echo "OMIT_DEPRECATED_CODE: ${OMIT_DEPRECATED_CODE}"
echo " CRAY: ${CRAY}"
echo " "
echo " HAVE_NETCDF: ${HAVE_NETCDF}"
echo ""

@ -0,0 +1,428 @@
#! /usr/bin/env bash
# CI/Travis-oriented SEACAS configure script: under Travis it creates and
# enters a build directory; otherwise extra arguments pass through to cmake.
if [ "${TRAVIS}" == "true" ]
then
BUILDDIR=${1:-build}
mkdir $BUILDDIR && cd $BUILDDIR
else
EXTRA_ARGS=$@
fi
### The following assumes you are building in a subdirectory of ACCESS Root
### If not, then define "ACCESS" to point to the root of the SEACAS source code.
if [ "$ACCESS" == "" ]
then
ACCESS=$(cd ..; pwd)
fi
### The SEACAS code will install in ${INSTALL_PATH}/bin, ${INSTALL_PATH}/lib, and ${INSTALL_PATH}/include.
INSTALL_PATH=${INSTALL_PATH:-${ACCESS}}
### TPLs could be installed separately from SEACAS. Default to INSTALL_PATH.
TPL_INSTALL=${TPL_INSTALL:-${INSTALL_PATH}}
### Possible subset of what is built ---
APPLICATIONS=${APPLICATIONS:-YES}
LEGACY=${LEGACY:-YES}
FORTRAN=${FORTRAN:-YES}
ZOLTAN=${ZOLTAN:-YES}
### Python Version...
PYTHON_VER=${PYTHON_VER:-"3.0"}
### Set to YES for parallel compile; otherwise NO for serial (default)
# MPI is inferred from the installed NetCDF's parallel support.
# NOTE(review): assumes NETCDF_ROOT is set and nc-config exists there --
# if not, nc-config fails and MPI silently defaults to NO.
netcdf_parallel=$($NETCDF_ROOT/bin/nc-config --has-parallel)
if [ "${netcdf_parallel}" == "yes" ]
then
MPI=YES
else
MPI=NO
fi
echo "MPI set to ${MPI}"
# Serial builds pick explicit compilers per the COMPILER family; MPI builds
# get their wrappers later.
if [ "${MPI}" == "NO" ]
then
### Change this to point to the compilers you want to use
## Travis build (and others) set this to EXTERNAL to set
## CXX, CC, and FC externally.
COMPILER=${COMPILER:-gnu}
if [ "$COMPILER" == "gnu" ]
then
CXX=g++
CC=gcc
FC=gfortran
CFLAGS="-Wall -Wunused -pedantic"
CXXFLAGS="-Wall -Wunused -pedantic"
fi
if [ "$COMPILER" == "clang" ]
then
CXX=clang++
CC=clang
FC=gfortran-mp-7
CFLAGS="-Wall -Wunused -pedantic"
CXXFLAGS="-Wall -Wunused -pedantic"
fi
if [ "$COMPILER" == "intel" ]
then
CXX=icpc
CC=icc
FC=ifort
CFLAGS="-Wall -Wunused"
CXXFLAGS="-Wall -Wunused"
fi
if [ "$COMPILER" == "ibm" ]
then
CXX=xlC
CC=xlc
FC=xlf
fi
fi
# Package-subset selection: everything when both APPLICATIONS and LEGACY
# are requested; otherwise an explicit package list is assembled.
if [ "${APPLICATIONS}" == "YES" ] && [ "${LEGACY}" == "YES" ]
then
SUBSET_OPTIONS="-DSeacas_ENABLE_ALL_PACKAGES:BOOL=ON \
-DSeacas_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON \
-DSeacas_ENABLE_SECONDARY_TESTED_CODE:BOOL=ON"
else
SUBSET_OPTIONS="-DSeacas_ENABLE_ALL_PACKAGES:BOOL=OFF \
-DSeacas_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=OFF \
-DSeacas_ENABLE_SECONDARY_TESTED_CODE:BOOL=OFF \
-DSeacas_ENABLE_SEACASIoss:BOOL=ON \
-DSeacas_ENABLE_SEACASExodus:BOOL=ON \
-DSeacas_ENABLE_SEACASExodus_for:BOOL=ON \
-DSeacas_ENABLE_SEACASExoIIv2for32:BOOL=ON"
if [ "${APPLICATIONS}" == "YES" ]
then
SUBSET_OPTIONS="${SUBSET_OPTIONS} \
-DSeacas_ENABLE_SEACASAprepro:BOOL=ON \
-DSeacas_ENABLE_SEACASAprepro_lib:BOOL=ON \
-DSeacas_ENABLE_SEACASConjoin:BOOL=ON \
-DSeacas_ENABLE_SEACASEjoin:BOOL=ON \
-DSeacas_ENABLE_SEACASEpu:BOOL=ON \
-DSeacas_ENABLE_SEACASExo2mat:BOOL=ON \
-DSeacas_ENABLE_SEACASExo_format:BOOL=ON \
-DSeacas_ENABLE_SEACASExodiff:BOOL=ON \
-DSeacas_ENABLE_SEACASMat2exo:BOOL=ON \
-DSeacas_ENABLE_SEACASNemslice:BOOL=ON \
-DSeacas_ENABLE_SEACASNemspread:BOOL=ON"
if [ "${FORTRAN}" == "YES" ]
then
SUBSET_OPTIONS="${SUBSET_OPTIONS} \
-DSeacas_ENABLE_SEACASExplore:BOOL=ON \
-DSeacas_ENABLE_SEACASGrepos:BOOL=ON"
fi
elif [ "${LEGACY}" == "YES" ] && [ "${FORTRAN}" == "YES" ]
then
SUBSET_OPTIONS="${SUBSET_OPTIONS} \
-DSeacas_ENABLE_SEACASAlgebra:BOOL=ON \
-DSeacas_ENABLE_SEACASBlot:BOOL=ON \
-DSeacas_ENABLE_SEACASEx1ex2v2:BOOL=ON \
-DSeacas_ENABLE_SEACASEx2ex1v2:BOOL=ON \
-DSeacas_ENABLE_SEACASExomatlab:BOOL=ON \
-DSeacas_ENABLE_SEACASExotec2:BOOL=ON \
-DSeacas_ENABLE_SEACASExotxt:BOOL=ON \
-DSeacas_ENABLE_SEACASFastq:BOOL=ON \
-DSeacas_ENABLE_SEACASGen3D:BOOL=ON \
-DSeacas_ENABLE_SEACASGenshell:BOOL=ON \
-DSeacas_ENABLE_SEACASGjoin:BOOL=ON \
-DSeacas_ENABLE_SEACASMapvar:BOOL=ON \
-DSeacas_ENABLE_SEACASMapvar-kd:BOOL=ON \
-DSeacas_ENABLE_SEACASNemesis:BOOL=ON \
-DSeacas_ENABLE_SEACASTxtexo:BOOL=ON"
fi
fi
GENERATOR=${GENERATOR:-"Unix Makefiles"}
# If using an XLF compiler on an IBM system, may need to add the following:
# -DCMAKE_Fortran_FLAGS="-qfixed=72" \
# -DCMAKE_EXE_LINKER_FLAGS:STRING="-lxl -lxlopt"
SHARED="${SHARED:-YES}"
### Switch for Debug or Release build:
### Check that both `DEBUG` and `BUILD_TYPE` are not set
if [ ! -z ${DEBUG+x} ] && [ ! -z ${BUILD_TYPE+x} ]
then
echo "ERROR: Both DEBUG and BUILD_TYPE are set. Only one is allowed."
# Fix: a bare `exit` returned the preceding echo's status (0), so the
# error path reported success to callers/CI.  Exit non-zero instead.
exit 1
fi
BUILD_TYPE="${BUILD_TYPE:-RELEASE}"
# DEBUG=YES/NO is a convenience alias that maps onto BUILD_TYPE.
if [ ! -z ${DEBUG+x} ]
then
if [ "${DEBUG}" == "YES" ]
then
BUILD_TYPE="DEBUG"
elif [ "${DEBUG}" == "NO" ]
then
BUILD_TYPE="RELEASE"
else
echo "ERROR: Invalid value for DEBUG ('$DEBUG'). Must be 'YES' or 'NO'."
# Fix: exit non-zero on invalid input (see note above).
exit 1
fi
fi
### If you do not have the X11 developer package on your system
### which provides X11/Xlib.h and the libX11, then change the "YES"
### below to "NO". It will disable blot and fastq
HAVE_X11=${X11:-YES}
### Set to YES to enable the building of a thread-safe version of the Exodus and IOSS libraries.
THREADSAFE=${THREADSAFE:-NO}
if [ "$THREADSAFE" == "YES" ] ; then
THREAD_SAFE_OPT="-DSeacas_EXTRA_LINK_FLAGS=-lpthread"
fi
# Locate a Faodel install: Faodel_DIR may point at either the cmake config
# directory or the install root; normalize to the install root.
if [ "${FAODEL_INSTALL}" == "" ]; then
# FAODEL_INSTALL isn't set. Try to derive it from Faodel_DIR.
if [ -e ${Faodel_DIR}/Faodel/FaodelConfig.cmake ]; then
# Faodel_DIR points to the cmake directory, so trim it.
FAODEL_INSTALL=${Faodel_DIR}/../..
elif [ -e ${Faodel_DIR}/lib/cmake/Faodel/FaodelConfig.cmake ]; then
# Faodel_DIR points to the install root, so just use it.
FAODEL_INSTALL=${Faodel_DIR}
fi
fi
### TPLs --
### Make sure these point to the locations to find the libraries and includes in lib and include
### subdirectories of the specified paths.
### For example, netcdf.h should be in ${NETCDF_PATH}/include
# Each TPL honors an explicit *_ROOT override, else defaults to TPL_INSTALL.
NETCDF_PATH=${NETCDF_ROOT:-${TPL_INSTALL}}
PNETCDF_PATH=${PNETCDF_ROOT:-${TPL_INSTALL}}
MATIO_PATH=${MATIO_ROOT:-${TPL_INSTALL}}
HDF5_PATH=${HDF5_ROOT:-${TPL_INSTALL}}
CGNS_PATH=${CGNS_ROOT:-${TPL_INSTALL}}
CEREAL_PATH=${CEREAL_ROOT:-${TPL_INSTALL}}
FAODEL_PATH=${FAODEL_ROOT:-${FAODEL_INSTALL}}
ADIOS2_PATH=${ADIOS2_ROOT:-${TPL_INSTALL}}
GTEST_PATH=${GTEST_ROOT:-${TPL_INSTALL}}
KOKKOS_PATH=${KOKKOS_ROOT:-${TPL_INSTALL}}
function check_enable()
{
# Emit "YES" on stdout when the filesystem path given as $1 exists,
# "NO" otherwise.  Used to auto-enable TPLs from header-file presence.
[ -e "$1" ] && echo "YES" || echo "NO"
}
# Probe for each TPL by checking for a characteristic installed file;
# the resulting YES/NO values feed the TPL_ENABLE_* cmake options below.
HAVE_NETCDF=$(check_enable "${NETCDF_PATH}/include/netcdf.h")
HAVE_MATIO=$(check_enable "${MATIO_PATH}/include/matio.h")
HAVE_CGNS=$(check_enable "${CGNS_PATH}/include/cgnslib.h")
HAVE_CEREAL=$(check_enable "${CEREAL_PATH}/include/cereal/cereal.hpp")
HAVE_FAODEL=$(check_enable "${FAODEL_PATH}/lib/cmake/Faodel/FaodelConfig.cmake")
HAVE_ADIOS2=$(check_enable "${ADIOS2_PATH}/include/adios2.h")
HAVE_GTEST=$(check_enable "${GTEST_PATH}/include/gtest/gtest.h")
HAVE_KOKKOS=$(check_enable "${KOKKOS_PATH}/include/Kokkos_Core.hpp")
### DataWarp (Burst Buffer)
### I use the following for mutrino (10/16/2018):
### module load datawarp
### -D TPL_ENABLE_DataWarp:BOOL=ON \
### -D DataWarp_LIBRARY_DIRS:PATH=/opt/cray/datawarp/2.1.16-6.0.5.1_2.61__g238b34d.ari/lib \
### -D DataWarp_INCLUDE_DIRS:PATH=/opt/cray/datawarp/2.1.16-6.0.5.1_2.61__g238b34d.ari/include \
### Define to NO to *enable* exodus deprecated functions
# Generalized: honor an externally supplied OMIT_DEPRECATED_CODE (as the
# sibling build scripts do) instead of unconditionally forcing "NO".
# The default (deprecated functions remain available) is unchanged.
OMIT_DEPRECATED_CODE="${OMIT_DEPRECATED_CODE:-NO}"
# BUG needs to work with cray too.
# MPI launcher / compiler-wrapper selection (Cray, srun, or plain mpiexec).
if [ "${MPI}" == "YES" ] && [ "${CRAY}" == "YES" ]
then
MPI_EXEC=$(which srun)
MPI_SYMBOLS="-D MPI_EXEC=${MPI_EXEC} -D MPI_EXEC_NUMPROCS_FLAG=-n"
CXX=CC
CC=cc
FC=ftn
MPI_BIN=$(dirname $(which ${CC}))
# Cray builds here are forced static.
SHARED=NO
unset CRAYPE_LINK_TYPE
elif [ "${MPI}" == "YES" ]
then
if [ "${USE_SRUN}" == "YES" ]
then
MPI_EXEC=$(which srun)
MPI_SYMBOLS="-D MPI_EXEC=${MPI_EXEC} -D MPI_EXEC_NUMPROCS_FLAG=-N"
MPI_BIN=$(dirname "${MPI_EXEC}")
else
MPI_EXEC=$(which mpiexec)
MPI_SYMBOLS="-D MPI_EXEC=${MPI_EXEC}"
MPI_BIN=$(dirname "${MPI_EXEC}")
fi
CXX=mpicxx
CC=mpicc
FC=mpif77
fi
OS=$(uname -s)
# Shared-library extension for this platform (used for the Kokkos library path).
if [ "$SHARED" == "YES" ]
then
if [ "$OS" == "Darwin" ] ; then
LD_EXT="dylib"
else
LD_EXT="so"
fi
else
LD_EXT="a"
fi
# Per-TPL cmake define bundles, only assembled when the TPL was detected.
if [ "${HAVE_KOKKOS}" == "YES" ]
then
KOKKOS_SYMBOLS="-DKOKKOS_SRC_PATH:PATH=${INSTALL_PATH}/TPL/kokkos/kokkos \
-DTPL_Kokkos_LIBRARY_DIRS:PATH=${KOKKOS_PATH}/lib \
-DTPL_Kokkos_INCLUDE_DIRS:PATH=${KOKKOS_PATH}/include \
-DTPL_Kokkos_LIBRARIES=${KOKKOS_PATH}/lib/libkokkos.${LD_EXT}"
fi
if [ "$HAVE_CEREAL" == "YES" ]
then
CEREAL_SYMBOLS=" -D TPL_ENABLE_Cereal:BOOL=${HAVE_CEREAL} \
-D Cereal_INCLUDE_DIRS:PATH=${CEREAL_PATH}/include \
"
fi
if [ "$HAVE_FAODEL" == "YES" ]
then
export Faodel_DIR
FAODEL_SYMBOLS=" -D TPL_ENABLE_Faodel:BOOL=${HAVE_FAODEL} \
-D Faodel_ROOT:PATH=${FAODEL_PATH} \
"
fi
#-D TRIBITS_TPL_FIND_INCLUDE_DIRS_AND_LIBRARIES_VERBOSE=TRUE \
#-D Faodel_FORCE_PRE_FIND_PACKAGE:BOOL=TRUE \
# -D Faodel_INCLUDE_DIRS:PATH=${FAODEL_PATH}/include/faodel \
# -D Faodel_LIBRARY_DIRS:PATH=${FAODEL_PATH}/lib
if [ "$OS" == "Darwin" ] ; then
DARWIN_OPT="-D CMAKE_MACOSX_RPATH:BOOL=ON -D TPL_X11_INCLUDE_DIRS:PATH=/opt/X11/include"
else
DARWIN_OPT=""
fi
# Only run doxygen if me and on master branch...
DOXYGEN=NO
if [[ "$OS" == "Darwin" && "$MPI" == "NO" ]] ; then
branch=$(git branch |grep \* |cut -c3-)
USER=$(id -nu)
if [ "$USER" == "gdsjaar" ] && [ "$branch" == "master" ]; then
DOXYGEN=YES
fi
fi
FC=${FC:-gfortran}
# Remove any stale cmake cache so changed environment variables take effect.
rm -f CMakeCache.txt
###------------------------------------------------------------------------
# Configure with everything assembled above.
# Fix: the CXX/C flags -D options must be quoted -- CXXFLAGS/CFLAGS contain
# spaces ("-Wall -Wunused ..."), and the previously unquoted expansion split
# the flags into separate (invalid) cmake arguments.  This matches the
# quoting used by the sibling build scripts.
cmake -G "${GENERATOR}" \
-D CMAKE_CXX_COMPILER:FILEPATH=${CXX} \
-D CMAKE_C_COMPILER:FILEPATH=${CC} \
-D CMAKE_Fortran_COMPILER:FILEPATH=${FC} \
-D CMAKE_CXX_FLAGS="${CXXFLAGS} ${SANITIZER}" \
-D CMAKE_C_FLAGS="${CFLAGS} ${SANITIZER}" \
-D CMAKE_INSTALL_RPATH:PATH=${INSTALL_PATH}/lib \
-D BUILD_SHARED_LIBS:BOOL=${SHARED} \
-D CMAKE_BUILD_TYPE=${BUILD_TYPE} \
${SUBSET_OPTIONS} \
-D Seacas_ENABLE_Zoltan:BOOL=${ZOLTAN} \
-D Seacas_ENABLE_TESTS=ON \
-D CMAKE_INSTALL_PREFIX:PATH=${INSTALL_PATH} \
-D Seacas_SKIP_FORTRANCINTERFACE_VERIFY_TEST:BOOL=ON \
-D Seacas_HIDE_DEPRECATED_CODE:BOOL=${OMIT_DEPRECATED_CODE} \
-D Seacas_ENABLE_DOXYGEN:BOOL=${DOXYGEN} \
-D Seacas_ENABLE_Fortran=${FORTRAN} \
\
-D TPL_ENABLE_Netcdf:BOOL=${HAVE_NETCDF} \
-D TPL_ENABLE_Matio:BOOL=${HAVE_MATIO} \
-D TPL_ENABLE_CGNS:BOOL=${HAVE_CGNS} \
-D TPL_ENABLE_ADIOS2:BOOL=${HAVE_ADIOS2} \
-D TPL_ENABLE_gtest:BOOL=${HAVE_GTEST} \
-D TPL_ENABLE_Kokkos:BOOL=${HAVE_KOKKOS} \
-D TPL_ENABLE_MPI:BOOL=${MPI} \
-D TPL_ENABLE_Pamgen:BOOL=OFF \
-D TPL_ENABLE_Pthread:BOOL=${THREADSAFE} \
${THREAD_SAFE_OPT} \
-D TPL_ENABLE_X11:BOOL=${HAVE_X11} \
\
-D SEACASExodus_ENABLE_THREADSAFE:BOOL=${THREADSAFE} \
-D SEACASIoss_ENABLE_THREADSAFE:BOOL=${THREADSAFE} \
\
${KOKKOS_SYMBOLS} \
${MPI_SYMBOLS} \
${DARWIN_OPT} \
${CEREAL_SYMBOLS} \
${FAODEL_SYMBOLS} \
\
-D MPI_BIN_DIR:PATH=${MPI_BIN} \
-D NetCDF_ROOT:PATH=${NETCDF_PATH} \
-D HDF5_ROOT:PATH=${HDF5_PATH} \
-D HDF5_NO_SYSTEM_PATHS=ON \
-D CGNS_ROOT:PATH=${CGNS_PATH} \
-D Matio_ROOT:PATH=${MATIO_PATH} \
-D PNetCDF_ROOT:PATH=${PNETCDF_PATH} \
-D PythonInterp_FIND_VERSION:STRING=${PYTHON_VER} \
\
$EXTRA_ARGS \
${ACCESS}
# Faodel's IOSS components serialize via Cereal; warn when that combination
# is incomplete.
if [[ "$HAVE_FAODEL" == "YES" && "$HAVE_CEREAL" == "NO" ]] ; then
echo "========================================================="
echo "WARNING: Faodel is enabled, but Cereal is not."
echo "WARNING: The Faodel components will fail to compile"
echo "WARNING: unless Cereal is found implicitly."
echo "========================================================="
fi
# Configuration summary for the build log.
echo ""
echo " OS: ${OS}"
echo " ACCESS: ${ACCESS}"
echo " TPL_INSTALL: ${TPL_INSTALL}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo " "
echo " CC: ${CC}"
echo " CXX: ${CXX}"
echo " FC: ${FC}"
echo " MPI: ${MPI}"
echo " SHARED: ${SHARED}"
echo " BUILD_TYPE: ${BUILD_TYPE}"
echo " THREADSAFE: ${THREADSAFE}"
echo " PYTHON_VER: ${PYTHON_VER}"
echo " "
echo " NETCDF: ${HAVE_NETCDF}"
echo " MATIO: ${HAVE_MATIO}"
echo " CGNS: ${HAVE_CGNS}"
echo " KOKKOS: ${HAVE_KOKKOS}"
echo " ZOLTAN: ${ZOLTAN}"
echo " ADIOS2: ${HAVE_ADIOS2}"
echo " CEREAL: ${HAVE_CEREAL}"
echo "CEREAL_PATH: ${CEREAL_PATH}"
echo " FAODEL: ${HAVE_FAODEL}"
echo "FAODEL_PATH: ${FAODEL_PATH}"
echo " Faodel_DIR: ${Faodel_DIR}"
echo " GTEST: ${HAVE_GTEST}"
echo " DOXYGEN: ${DOXYGEN}"
echo ""
# Under Travis, immediately build and return to the source root.
if [ "${TRAVIS}" == "true" ]
then
make -j2
cd ${ACCESS}
fi

@ -0,0 +1,221 @@
#! /usr/bin/env bash
# SEMS-module-based SEACAS configure script.
EXTRA_ARGS=$@
# Text color variables
# Fall back to a dumb terminal so the tput calls below do not fail when
# TERM is unset or exotic (e.g. in CI logs).
if [[ $TERM != *"xterm"* ]]; then
export TERM=dumb
fi
txtred=$(tput setaf 1) # Red
txtgrn=$(tput setaf 2) # Green
txtylw=$(tput setaf 3) # Yellow
#txtblu=$(tput setaf 4) # Blue
#txtpur=$(tput setaf 5) # Purple
txtcyn=$(tput setaf 6) # Cyan
#txtwht=$(tput setaf 7) # White
txtrst=$(tput sgr0) # Text reset
function check_valid()
{
# Validate an ON/OFF-style option.  $1 is the *name* of the variable to
# check (read through ${!1} indirection).  Echoes the canonical "YES"/"NO"
# spelling on stdout and returns 0 (fix: these paths previously returned 1,
# which made every successful call look like a failure to `&&`/`set -e`).
# An unrecognized value prints a diagnostic and exits; note that when
# invoked via command substitution (VAR=$(check_valid VAR)) the exit only
# terminates the subshell, so the caller receives the message as the value.
if [ "${!1}" == "YES" ] || [ "${!1}" == "ON" ]; then
echo "YES"
return 0
fi
if [ "${!1}" == "NO" ] || [ "${!1}" == "OFF" ]; then
echo "NO"
return 0
fi
echo "Invalid value for $1 (${!1}) -- Must be ON, YES, NO, or OFF"
exit 1
}
### Set to YES for parallel compile; otherwise NO for serial (default)
MPI="${MPI:-NO}"
MPI=$(check_valid MPI)
echo "${txtgrn}MPI set to ${MPI}${txtrst}"
### Switch for Debug or Release build:
BUILD_TYPE="${BUILD_TYPE:-Release}"
#BUILD_TYPE=Debug
SHARED="${SHARED:-YES}"
SHARED=$(check_valid SHARED)
### Define to YES to *disable* all exodus deprecated functions
OMIT_DEPRECATED_CODE="NO"
### The following assumes you are building in a subdirectory of ACCESS Root
### If not, then define "ACCESS" to point to the root of the SEACAS source code.
ACCESS=$(cd ..; pwd)
### If you do not have the X11 developer package on your system
### which provides X11/Xlib.h and the libX11, then change the "YES"
### below to "NO". It will disable blot and fastq
HAVE_X11="${HAVE_X11:-YES}"
HAVE_X11=$(check_valid HAVE_X11)
### Python Version...
PYTHON_VER=${PYTHON_VER:-"3.0"}
### Set to YES to enable the building of a thread-safe version of the Exodus and IOSS libraries.
THREADSAFE=${THREADSAFE:-NO}
THREADSAFE=$(check_valid THREADSAFE)
### The SEACAS code will install in ${INSTALL_PATH}/bin, ${INSTALL_PATH}/lib, and ${INSTALL_PATH}/include.
INSTALL_PATH=${ACCESS}
### TPLs --
### Using SEMS (Software Engineering Maintenance & Support) provided libraries...
### https://sems.sandia.gov/
# Load the SEMS toolchain and TPL modules (parallel or serial variants).
module purge
source /projects/sems/modulefiles/utils/sems-modules-init.sh
module load sems-gcc/10.1.0
module load sems-fmt/9.1.0
module load sems-cmake/3.24.3
module load sems-doxygen
module load sems-zlib
if [ "$MPI" == "YES" ]
then
module load sems-openmpi
module load sems-hdf5/1.12.2_parallel
module load sems-netcdf-c/4.8.1_parallel
module load sems-parallel-netcdf/1.12.1
module load sems-parmetis-int64/4.0.3
module load sems-cgns/4.3.0_parallel
else
module load sems-hdf5/1.12.2_serial
module load sems-netcdf-c/4.8.1_serial
module load sems-cgns/4.3.0_serial
module load sems-metis-int64/5.1.0
fi
HAVE_NETCDF=YES
HAVE_CGNS=YES
# NOTE: CGNS, MATIO, PNETCDF, and are currently not supported by SEMS.
# To use them you will need to install them following the directions in
# README.md, enable them below, and add the appropriate defines below:
#
# -D Matio_LIBRARY_DIRS:PATH=${MATIO_PATH}/lib \
# -D TPL_Matio_INCLUDE_DIRS:PATH=${MATIO_PATH}/include \
HAVE_MATIO=NO
# NOTE(review): HAVE_CGNS was set to YES above and the sems-cgns module is
# loaded; re-setting it to NO here disables CGNS -- confirm which is intended.
HAVE_CGNS=NO
NETCDF_PATH=${SEMS_NETCDF_ROOT}
HDF5_PATH=${SEMS_HDF5_ROOT}
CGNS_PATH=${SEMS_CGNS_ROOT}
### Define to NO to *enable* exodus deprecated functions
# NOTE(review): duplicate of the OMIT_DEPRECATED_CODE assignment above.
OMIT_DEPRECATED_CODE="NO"
### Set to YES to use Kokkos in the Ioss library; otherwise NO (default)
KOKKOS=NO
### Set to YES for CUDA compile; otherwise NO (default) (only used if KOKKOS=YES
CUDA=NO
### Change this to point to the compilers you want to use
COMPILER="gnu"
#COMPILER="clang"
# Choose MPI compiler wrappers for parallel builds, plain GNU otherwise.
if [ "$MPI" == "YES" ]
then
MPI_EXEC=$(which mpiexec)
MPI_BIN=$(dirname "${MPI_EXEC}")
CXX=mpicxx
CC=mpicc
FC=mpif77
else
CXX=g++
CC=gcc
FC=gfortran
fi
# Kokkos configuration: disabled entirely, CUDA-enabled (via the
# nvcc_wrapper compiler shim), or OpenMP-enabled.
if [ "$KOKKOS" != "YES" ]
then
KOKKOS_SYMBOLS="-D Seacas_ENABLE_Kokkos:BOOL=OFF"
elif [ "$CUDA" == "YES" ]
then
export "OMPI_CXX=${SEACAS_SRC_DIR}/packages/kokkos/config/nvcc_wrapper"
export CUDA_MANAGED_FORCE_DEVICE_ALLOC=1
KOKKOS_SYMBOLS="-D Seacas_ENABLE_Kokkos:BOOL=ON \
-D TPL_ENABLE_CUDA:Bool=ON \
-D CUDA_TOOLKIT_ROOT_DIR:PATH=${CUDA_PATH} \
-D Kokkos_ENABLE_Pthread:BOOL=OFF"
else
export OMPI_CXX=$(which gcc)
unset CUDA_MANAGED_FORCE_DEVICE_ALLOC
KOKKOS_SYMBOLS="-D Seacas_ENABLE_Kokkos:BOOL=ON \
-D Seacas_ENABLE_OpenMP:Bool=ON \
-D Kokkos_ENABLE_Pthread:BOOL=OFF"
fi
# Remove any stale cmake cache so changed environment variables take effect.
rm -f CMakeCache.txt
###------------------------------------------------------------------------
# Configure using the SEMS-provided TPL locations assembled above.
# NOTE(review): PNetCDF paths point into SEMS_NETCDF_ROOT -- presumably
# pnetcdf is installed into the netcdf module root; verify on the target
# system.  (No comments may appear inside the continued command below.)
cmake \
-D CMAKE_CXX_FLAGS="-Wall -Wunused -pedantic" \
-D CMAKE_C_FLAGS="-Wall -Wunused -pedantic" \
-D CMAKE_MACOSX_RPATH:BOOL=ON \
-D CMAKE_INSTALL_RPATH:PATH=${INSTALL_PATH}/lib:${LD_LIBRARY_PATH} \
-D BUILD_SHARED_LIBS:BOOL=${SHARED} \
-D CMAKE_BUILD_TYPE=${BUILD_TYPE} \
-D Seacas_ENABLE_ALL_PACKAGES:BOOL=ON \
-D Seacas_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON \
-D Seacas_ENABLE_SECONDARY_TESTED_CODE:BOOL=ON \
-D Seacas_ENABLE_TESTS=ON \
-D CMAKE_INSTALL_PREFIX:PATH=${INSTALL_PATH} \
-D CMAKE_CXX_COMPILER:FILEPATH=${CXX} \
-D CMAKE_C_COMPILER:FILEPATH=${CC} \
-D CMAKE_Fortran_COMPILER:FILEPATH=${FC} \
-D Seacas_SKIP_FORTRANCINTERFACE_VERIFY_TEST:BOOL=ON \
-D Seacas_HIDE_DEPRECATED_CODE:BOOL=${OMIT_DEPRECATED_CODE} \
\
-D TPL_ENABLE_Netcdf:BOOL=${HAVE_NETCDF} \
-D TPL_ENABLE_Matio:BOOL=${HAVE_MATIO} \
-D TPL_ENABLE_CGNS:BOOL=${HAVE_CGNS} \
-D TPL_ENABLE_MPI:BOOL=${MPI} \
-D TPL_ENABLE_Pamgen:BOOL=OFF \
-D TPL_ENABLE_Pthread:BOOL=${THREADSAFE} \
-D SEACASExodus_ENABLE_THREADSAFE:BOOL=${THREADSAFE} \
-D SEACASIoss_ENABLE_THREADSAFE:BOOL=${THREADSAFE} \
-D TPL_ENABLE_fmt:BOOL=ON \
-D TPL_ENABLE_X11:BOOL=${HAVE_X11} \
-D PythonInterp_FIND_VERSION:STRING=${PYTHON_VER} \
\
${KOKKOS_SYMBOLS} \
\
-D MPI_BIN_DIR:PATH=${MPI_BIN} \
-D NetCDF_ROOT:PATH=${NETCDF_PATH} \
-D HDF5_ROOT:PATH=${HDF5_PATH} \
-D HDF5_NO_SYSTEM_PATHS=ON \
-D CGNS_ROOT:PATH=${CGNS_PATH} \
-D PNetCDF_LIBRARIES=${SEMS_NETCDF_ROOT}/lib/libpnetcdf.a \
-D PNetCDF_INCLUDE_DIRS=${SEMS_NETCDF_ROOT}/include/ \
\
$EXTRA_ARGS \
..
# Configuration summary for the build log.
OS=$(uname -s)
CC_VER=$(${CC} --version)
echo ""
echo " OS: ${OS}"
echo " ACCESS: ${ACCESS}"
echo "INSTALL_PATH: ${INSTALL_PATH}"
echo " "
echo " CC: ${CC}, ${CC_VER}"
echo " CXX: ${CXX}"
echo " FC: ${FC}"
echo " MPI: ${MPI}"
echo " SHARED: ${SHARED}"
echo " BUILD_TYPE: ${BUILD_TYPE}"
echo " THREADSAFE: ${THREADSAFE}"
echo " PYTHON_VER: ${PYTHON_VER}"
echo " "
echo " NETCDF: ${HAVE_NETCDF}"
echo " MATIO: ${HAVE_MATIO}"
echo " CGNS: ${HAVE_CGNS}"

@ -0,0 +1,164 @@
#! /usr/bin/env bash
# SEACAS configure-and-build script (ON/OFF style options); creates a build
# directory (first argument, default "build") and configures inside it.
BUILDDIR=${1:-build}
mkdir $BUILDDIR && cd $BUILDDIR
### Set to ON for parallel compile; otherwise OFF for serial (default)
MPI="${MPI:-OFF}"
echo "MPI set to ${MPI}"
### Switch for Debug or Release build:
BUILD_TYPE="${BUILD_TYPE:-RELEASE}"
### The following assumes you are building in a subdirectory of ACCESS Root
### If not, then define "ACCESS" to point to the root of the SEACAS source code.
ACCESS=$(cd ..; pwd)
### If you do not have the X11 developer package on your system
### which provides X11/Xlib.h and the libX11, then change the "ON"
### below to "OFF". It will disable blot and fastq
HAVE_X11=ON
### Set to ON to enable the building of a thread-safe version of the Exodus and IOSS libraries.
THREADSAFE=${THREADSAFE:-OFF}
### The SEACAS code will install in ${INSTALL_PATH}/bin, ${INSTALL_PATH}/lib, and ${INSTALL_PATH}/include.
INSTALL_PATH=${ACCESS}
### TPLs --
### Make sure these point to the locations to find the libraries and includes in lib and include
### subdirectories of the specified paths.
### For example, netcdf.h should be in ${NETCDF_PATH}/include
# All TPLs are assumed installed under the ACCESS root here.
NETCDF_PATH=${ACCESS}
PNETCDF_PATH=${ACCESS}
MATIO_PATH=${ACCESS}
HDF5_PATH=${ACCESS}
CGNS_PATH=${ACCESS}
FAODEL_PATH=${ACCESS}
function check_enable()
{
# Emit "ON" on stdout when the filesystem path given as $1 exists,
# "OFF" otherwise (CMake boolean spellings used by this script).
[ -e "$1" ] && echo "ON" || echo "OFF"
}
# Probe for each TPL by checking for a characteristic installed header.
HAVE_NETCDF=$(check_enable "${NETCDF_PATH}/include/netcdf.h")
HAVE_MATIO=$(check_enable "${MATIO_PATH}/include/matio.h")
HAVE_CGNS=$(check_enable "${CGNS_PATH}/include/cgnslib.h")
HAVE_FAODEL=$(check_enable "${FAODEL_PATH}/include/faodel/faodelConfig.h")
### Define to NO to *enable* exodus deprecated functions
OMIT_DEPRECATED_CODE="NO"
### Set to ON to use Kokkos in the Ioss library; otherwise OFF (default)
KOKKOS=${KOKKOS:-OFF}
CUDA_PATH=${CUDA_ROOT} #Set this to the appropriate path
### Set to ON for CUDA compile; otherwise OFF (default) (only used if KOKKOS=ON
CUDA=${CUDA:-OFF}
# Parallel builds use the MPI compiler wrappers and mpiexec.
if [ "${MPI}" == "ON" ]
then
MPI_EXEC=$(which mpiexec)
MPI_BIN=$(dirname "${MPI_EXEC}")
CXX=mpicxx
CC=mpicc
FC=mpif77
fi
# Kokkos: CUDA via the nvcc_wrapper compiler shim, otherwise OpenMP.
if [ "$KOKKOS" == "ON" ]
then
if [ "$CUDA" == "ON" ]
then
export "OMPI_CXX=${SEACAS_SRC_DIR}/packages/kokkos/config/nvcc_wrapper"
export CUDA_MANAGED_FORCE_DEVICE_ALLOC=1
KOKKOS_SYMBOLS="-D Seacas_ENABLE_Kokkos:BOOL=ON \
-D TPL_ENABLE_CUDA:Bool=ON \
-D CUDA_TOOLKIT_ROOT_DIR:PATH=${CUDA_PATH} \
-D Kokkos_ENABLE_Pthread:BOOL=OFF"
else
export OMPI_CXX=$(which gcc)
unset CUDA_MANAGED_FORCE_DEVICE_ALLOC
KOKKOS_SYMBOLS="-D Seacas_ENABLE_Kokkos:BOOL=ON \
-D Seacas_ENABLE_OpenMP:Bool=ON \
-D Kokkos_ENABLE_Pthread:BOOL=OFF"
fi
else
KOKKOS_SYMBOLS="-D Seacas_ENABLE_Kokkos:BOOL=OFF"
fi
# Assemble the Faodel cmake defines when the TPL was detected.
# Fix: the test previously compared the literal string "HAVE_FAODEL"
# (missing the $) against "ON", so it was always false and DW_SYMBOLS
# was never set even when Faodel was installed.
if [ "${HAVE_FAODEL}" == "ON" ]
then
DW_SYMBOLS="-DTPL_ENABLE_FAODEL:BOOL=${HAVE_FAODEL} \
-DFAODEL_LIBRARY_DIRS:PATH=${FAODEL_PATH}/lib \
-DFAODEL_INCLUDE_DIRS:PATH=${FAODEL_PATH}/include"
fi
# Remove any stale cmake cache so changed environment variables take effect.
rm -f CMakeCache.txt
###------------------------------------------------------------------------
# Configure a static, all-packages build with the TPLs detected above.
# (No comments may appear inside the continued command below.)
cmake \
-D CMAKE_CXX_COMPILER:FILEPATH=${CXX} \
-D CMAKE_C_COMPILER:FILEPATH=${CC} \
-D CMAKE_Fortran_COMPILER:FILEPATH=${FC} \
-D CMAKE_CXX_FLAGS="-Wall -Wunused -pedantic" \
-D CMAKE_C_FLAGS="-Wall -Wunused -pedantic" \
-D CMAKE_MACOSX_RPATH:BOOL=ON \
-D CMAKE_INSTALL_RPATH:PATH=${INSTALL_PATH}/lib \
-D BUILD_SHARED_LIBS:BOOL=OFF \
-D CMAKE_BUILD_TYPE=${BUILD_TYPE} \
-D Seacas_ENABLE_ALL_PACKAGES:BOOL=ON \
-D Seacas_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON \
-D Seacas_ENABLE_SECONDARY_TESTED_CODE:BOOL=ON \
-D Seacas_ENABLE_TESTS=ON \
-D CMAKE_INSTALL_PREFIX:PATH=${INSTALL_PATH} \
-D Seacas_SKIP_FORTRANCINTERFACE_VERIFY_TEST:BOOL=ON \
-D Seacas_HIDE_DEPRECATED_CODE:BOOL=${OMIT_DEPRECATED_CODE} \
\
-D TPL_ENABLE_Netcdf:BOOL=${HAVE_NETCDF} \
-D TPL_ENABLE_Matio:BOOL=${HAVE_MATIO} \
-D TPL_ENABLE_CGNS:BOOL=${HAVE_CGNS} \
-D TPL_ENABLE_MPI:BOOL=${MPI} \
-D TPL_ENABLE_Pamgen:BOOL=OFF \
-D TPL_ENABLE_Pthread:BOOL=${THREADSAFE} \
-D SEACASExodus_ENABLE_THREADSAFE:BOOL=${THREADSAFE} \
-D SEACASIoss_ENABLE_THREADSAFE:BOOL=${THREADSAFE} \
-D TPL_X11_INCLUDE_DIRS:PATH=/opt/local/include \
-D TPL_ENABLE_X11:BOOL=${HAVE_X11} \
\
${KOKKOS_SYMBOLS} \
${DW_SYMBOLS} \
\
-D MPI_BIN_DIR:PATH=${MPI_BIN} \
-D NetCDF_ROOT:PATH=${NETCDF_PATH} \
-D HDF5_ROOT:PATH=${HDF5_PATH} \
-D HDF5_NO_SYSTEM_PATHS=ON \
-D CGNS_ROOT:PATH=${CGNS_PATH} \
-D Matio_ROOT:PATH=${MATIO_PATH} \
-D PNetCDF_ROOT:PATH=${PNETCDF_PATH} \
\
$EXTRA_ARGS \
..
# Configuration summary for the build log.
echo ""
echo " ACCESS: ${ACCESS}"
echo " CC: ${CC}"
echo " CXX: ${CXX}"
echo " FC: ${FC}"
echo " MPI: ${MPI}"
echo " THREADSAFE: ${THREADSAFE}"
echo "HAVE_NETCDF: ${HAVE_NETCDF}"
echo " HAVE_MATIO: ${HAVE_MATIO}"
echo " HAVE_CGNS: ${HAVE_CGNS}"
echo " KOKKOS: ${KOKKOS}"
echo ""
# Build immediately and return to the source root.
make -j2
cd ${ACCESS}

@ -0,0 +1,32 @@
# Minimal example project showing how to link against an installed SEACAS
# Exodus library (C and Fortran interfaces) via CMake config packages.
# Specifying a version range picks up the best policies from the newest
# tested CMake while still supporting the oldest.
#
# Generate a Makefile with:
#   mkdir build; cd build
#   CMAKE_PREFIX_PATH={path_to_root_of_seacas_install} ccmake ..
cmake_minimum_required(VERSION 3.1...3.26)
project(ExodusCMakeExample VERSION 1.0 LANGUAGES C Fortran)

#### C ####
find_package(SEACASExodus CONFIG)
add_executable(ExodusWriteC ExodusWrite.c)
target_link_libraries(ExodusWriteC PRIVATE SEACASExodus::all_libs)

#### FORTRAN #####
# Each compiler family spells the 8-byte default integer/real options
# differently; the fallback branch uses the common -r8/-i8 spelling.
if("${CMAKE_Fortran_COMPILER_ID}" MATCHES "GNU")
  set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -fcray-pointer -fdefault-real-8 -fdefault-integer-8 -fno-range-check")
elseif("${CMAKE_Fortran_COMPILER_ID}" MATCHES "XL")
  set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -qintsize=8 -qrealsize=8")
elseif("${CMAKE_Fortran_COMPILER_ID}" MATCHES "Cray")
  set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -sdefault64")
else()
  set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -r8 -i8")
endif()

find_package(SEACASExodus_for CONFIG)
add_executable(ExodusReadFor ExodusRead.f)
target_link_libraries(ExodusReadFor PRIVATE SEACASExodus_for::all_libs)

@ -0,0 +1,719 @@
C Copyright(C) 1999-2020, 2023 National Technology & Engineering Solutions
C of Sandia, LLC (NTESS). Under the terms of Contract DE-NA0003525 with
C NTESS, the U.S. Government retains certain rights in this software.
C
C See packages/seacas/LICENSE for details
program testrd
c This is a test program for the Fortran binding of the EXODUS
c database read routines
c
c It opens "test.exo" (written by the companion testwt program) and
c exercises, in order: file/database parameters, coordinates, maps,
c element blocks (params/properties/connectivity/attributes/names),
c node sets and side sets (individual and concatenated forms), QA and
c info records, result-variable metadata, the truth table, time steps,
c and global/nodal/element variable reads.  After each API call the
c returned error code is echoed; the test harness compares this output
c against a golden file, so the exact write statements and their order
c are significant.
implicit none
include 'exodusII.inc'
c NOTE(review): all work arrays below are fixed-size; this assumes the
c test database holds at most 100 nodes/elements per query and at most
c 10 blocks/sets -- confirm against the testwt writer if sizes change.
integer iin, iout, ierr
integer exoid, num_dim, num_nodes, num_elem, num_elem_blk
integer num_node_sets
integer num_side_sets
integer i, j, k, elem_map(5), connect(10), node_list(100)
integer elem_list(100), side_list(100), ids(10)
integer num_elem_per_set(10), num_nodes_per_set(10)
integer num_df_per_set(10)
integer num_df_in_set, num_sides_in_set
integer df_ind(10),node_ind(10),elem_ind(10),num_qa_rec,num_info
integer num_glo_vars, num_nod_vars, num_ele_vars
integer truth_tab(3,5)
integer num_time_steps
integer num_elem_in_block(10), num_nodes_per_elem(10)
integer num_attr(10), node_ctr_list(10), node_ctr
integer num_nodes_in_set, num_elem_in_set
integer df_list_len, list_len, elem_list_len, node_list_len
integer node_num, time_step, var_index, beg_time, end_time
integer elem_num
integer cpu_ws,io_ws, mod_sz
integer num_props, prop_value
integer mxalnmlen, mxusnmlen
real time_value, time_values(100), var_values(100)
real x(100), y(100), z(100)
real attrib(100), dist_fact(100)
real vers
character*(MXSTLN) coord_names(3), qa_record(4,2), var_names(3)
character*(MXSTLN) name
character*(MXSTLN) blk_names(5)
character*(MXSTLN) nset_names(2)
character*(MXSTLN) sset_names(5)
character*(MXLNLN) inform(3), titl
character typ*(MXSTLN)
character*(MXSTLN) prop_names(3)
character*(MXSTLN) attrib_names(100)
data iin /5/, iout /6/
c open EXODUS files
c cpu_ws/io_ws = 0 lets the library use its default float word sizes
cpu_ws = 0
io_ws = 0
c NOTE(review): the excre call uses EXNOCL (no-clobber); when "test.exo"
c already exists it fails and its exoid is simply overwritten by the
c exopen below, which is the handle actually used for all reads.
exoid = excre("test.exo", EXNOCL, cpu_ws, io_ws, ierr)
exoid = exopen ("test.exo", EXREAD, cpu_ws, io_ws, vers, ierr)
write (iout, '(/"after exopen, error = ",i3)')
1 ierr
write (iout, '("test.exo is an EXODUSII file; version ",
1 f4.2)') vers
write (iout, '(" I/O word size",i2)') io_ws
mod_sz = exlgmd(exoid)
write (iout, '(" Model Size",i2)') mod_sz
num_props = exinqi (exoid, EXNEBP)
mxalnmlen = exinqi (exoid, EXDBMXALNM)
mxusnmlen = exinqi (exoid, EXDBMXUSNM)
write (iout, '(" Maximum Allowed/Used DB Name Size ",i4,i4)')
* mxalnmlen, mxusnmlen
c read database parameters
call exgini (exoid, titl, num_dim, num_nodes, num_elem,
1 num_elem_blk, num_node_sets, num_side_sets, ierr)
write (iout, '(/"after exgini, error = ", i3)' ) ierr
write (iout, '("database parameters:"/
1 "title = ", a81 /
2 "num_dim = ", i3 /
3 "num_nodes = ", i3 /
4 "num_elem = ", i3 /
5 "num_elem_blk = ", i3 /
6 "num_node_sets = ", i3 /
7 "num_side_sets = ", i3)')
8 titl,num_dim, num_nodes, num_elem,
9 num_elem_blk,num_node_sets, num_side_sets
c read nodal coordinates values and names from database
call exgcor (exoid, x, y, z, ierr)
write (iout, '(/"after exgcor, error = ", i3)' ) ierr
write (iout, '("x, y, z coords = ")')
do 10 i = 1, num_nodes
write (iout, '(3(e12.5,3x))') x(i), y(i), z(i)
10 continue
call exgcon (exoid, coord_names, ierr)
write (iout, '(/"after exgcon, error = ", i3)' ) ierr
write (iout, '("x coord name = ", a32)') coord_names(1)
write (iout, '("y coord name = ", a32)') coord_names(2)
c NOTE(review): only the x and y coordinate names are echoed even though
c the model may be 3-d; the golden output depends on this.
c read element order map
call exgmap (exoid, elem_map, ierr)
write (iout, '(/"after exgmap, error = ", i3)' ) ierr
do 30 i = 1, num_elem
write (iout, '("elem_map(",i1,") = ", i1)') i, elem_map(i)
30 continue
c read element block parameters
call exgebi (exoid, ids, ierr)
write (iout, '(/"after exgebi, error = ", i3)' ) ierr
do 40 i = 1, num_elem_blk
call exgelb (exoid, ids(i), typ, num_elem_in_block(i),
1 num_nodes_per_elem(i), num_attr(i), ierr)
write (iout, '(/"after exgelb, error = ", i3)' ) ierr
call exgnam (exoid, EXEBLK, ids(i), name, ierr)
write (iout, '("element block id = ", i2,/
1 "element type = ", a32,/
* "block name = ", a32,/
2 "num_elem_in_block = ", i2,/
3 "num_nodes_per_elem = ", i2,/
4 "num_attr = ", i2)')
5 ids(i), typ, name,
* num_elem_in_block(i),
6 num_nodes_per_elem(i), num_attr(i)
40 continue
c read element block properties */
num_props = exinqi (exoid, EXNEBP)
write (iout,
1 '(/"There are ",i2," properties for each element block")')
2 num_props
call exgpn(exoid, EXEBLK, prop_names, ierr)
write (iout, '("after exgpn, error = ", i3)' ) ierr
do 47 i = 1, num_props
do 45 j = 1, num_elem_blk
call exgp(exoid, EXEBLK,ids(j),prop_names(i),prop_value,ierr)
if (ierr .eq. 0) then
write( iout,
1 '("elem block ",i2," property(",i2,"): ",a," = ",i5)' )
2 j, i, prop_names(i), prop_value
else
write (iout, '(/"after exgp, error = ", i3)' ) ierr
endif
45 continue
47 continue
c read element connectivity
do 60 i = 1, num_elem_blk
call exgelc (exoid, ids(i), connect, ierr)
write (iout, '(/"after exgelc, error = ", i3)' ) ierr
write (iout, '("connect array for elem block ", i2)') ids(i)
c NOTE(review): only the first element's connectivity is printed
c (loop runs over nodes-per-element, not over all entries)
do 50 j = 1, num_nodes_per_elem(i)
write (iout, '(i3)') connect(j)
50 continue
60 continue
c read element block names
call exgnams(exoid, EXEBLK, num_elem_blk, blk_names, ierr)
write (iout, '(/"after exgnams, error = ", i3)' ) ierr
do i=1, num_elem_blk
write (iout, '("element block ",i2," name: ",a)' )
2 i, blk_names(i)
end do
c read element block attributes
do 70 i = 1, num_elem_blk
call exgeat (exoid, ids(i), attrib, ierr)
write (iout, '(/"after exgeat, error = ", i3)' ) ierr
call exgean (exoid, ids(i), num_attr(i), attrib_names, ierr)
write (iout, '(/"after exgean, error = ", i3)' ) ierr
write (iout,
* '("element block ", i2, " has ",i2," attribute(s) and ",
* i2, " element(s):")')
* ids(i), num_attr(i), num_elem_in_block(i)
c attributes are stored interleaved per element, so stride by
c num_attr(i) to print all values of one attribute
do j=1, num_attr(i)
write (iout, 69) attrib_names(j),
* (attrib(k),k= j, num_attr(i)*num_elem_in_block(i),
* num_attr(i))
end do
69 format(A32," = ", 10(f6.4,2x))
70 continue
c read individual node sets
if (num_node_sets .gt. 0) then
call exgnsi (exoid, ids, ierr)
write (iout, '(/"after exgnsi, error = ", i3)' ) ierr
endif
do 100 i = 1, num_node_sets
call exgnp (exoid, ids(i), num_nodes_in_set,
1 num_df_in_set, ierr)
write (iout, '(/"after exgnp, error = ", i3)' ) ierr
write (iout, '(/"node set ", i2, " parameters: ",/
2 "num_nodes = ", i2)') ids(i), num_nodes_in_set
call exgns (exoid, ids(i), node_list, ierr)
write (iout, '(/"after exgns, error = ", i3)' ) ierr
if (num_df_in_set .gt. 0) then
call exgnsd (exoid, ids(i), dist_fact, ierr)
write (iout, '(/"after exgnsd, error = ", i3)' ) ierr
endif
write (iout, '(/"node list for node set ", i2)') ids(i)
do 80 j = 1, num_nodes_in_set
write (iout, '(i3)') node_list(j)
80 continue
if (num_df_in_set .gt. 0) then
write (iout, '("dist factors for node set ", i2)') ids(i)
do 90 j = 1, num_nodes_in_set
write (iout, '(f5.2)') dist_fact(j)
90 continue
else
write (iout, '("no dist factors for node set ", i2)') ids(i)
endif
100 continue
c read node set names
call exgnams(exoid, EXNSET, num_node_sets, nset_names, ierr)
write (iout, '(/"after exgnams, error = ", i3)' ) ierr
do i=1, num_node_sets
write (iout, '("node set ",i2," name: ",a)' )
2 i, nset_names(i)
end do
c read node set properties
num_props = exinqi (exoid, EXNNSP)
write (iout,
1 '(/"There are ",i2," properties for each node set")')
2 num_props
call exgpn(exoid, EXNSET, prop_names, ierr)
write (iout, '("after exgpn, error = ", i3)' ) ierr
do 107 i = 1, num_props
do 105 j = 1, num_node_sets
call exgp(exoid,EXNSET,ids(j),prop_names(i),prop_value,ierr)
if (ierr .eq. 0) then
write( iout,
1 '("node set ",i2," property(",i2,"): ",a," = ",i5)' )
2 j, i, prop_names(i), prop_value
else
write (iout, '(/"after exgp, error = ", i3)' ) ierr
endif
105 continue
107 continue
c read concatenated node sets; this produces the same information as
c the above code which reads individual node sets
num_node_sets = exinqi (exoid, EXNODS)
if (num_node_sets .gt. 0) then
list_len = exinqi (exoid, EXNSNL)
write(iout,'(/"after EXNSNL =",i3," exinq, error = ",i3)')
1 list_len,ierr
list_len = exinqi (exoid, EXNSDF)
write(iout,'(/"after EXNSDF =",i3," exinq, error = ",i3)')
1 list_len,ierr
call exgcns (exoid, ids, num_nodes_per_set, num_df_per_set,
1 node_ind, df_ind, node_list, dist_fact, ierr)
write (iout, '(/"after exgcns, error = ", i3)' ) ierr
write (iout, '(/"concatenated node set info")')
write (iout, '("ids = ")')
do 110 i = 1, num_node_sets
write (iout, '(i3)') ids(i)
110 continue
write (iout, '("num_nodes_per_set = ")')
do 120 i = 1, num_node_sets
write (iout, '(i3)') num_nodes_per_set(i)
120 continue
write (iout, '("node_ind = ")')
do 130 i = 1, num_node_sets
write (iout, '(i3)') node_ind(i)
130 continue
write (iout, '("node_list = ")')
do 140 i = 1, list_len
write (iout, '(i3)') node_list(i)
140 continue
write (iout, '("dist_fact = ")')
do 150 i = 1, list_len
write (iout, '(e12.5)') dist_fact(i)
150 continue
endif
c read individual side sets
if (num_side_sets .gt. 0) then
call exgssi (exoid, ids, ierr)
write (iout, '(/"after exgssi, error = ", i3)' ) ierr
endif
do 190 i = 1, num_side_sets
call exgsp (exoid, ids(i), num_sides_in_set, num_df_in_set,
1 ierr)
write (iout, '(/"after exgsp, error = ", i3)' ) ierr
write (iout, '("side set ", i2, " parameters:",/
2 "num_sides = ", i3,/
3 "num_dist_factors = ", i3)')
4 ids(i), num_sides_in_set, num_df_in_set
call exgss (exoid, ids(i), elem_list, side_list, ierr)
write (iout, '(/"after exgss, error = ", i3)' ) ierr
call exgssn (exoid, ids(i), node_ctr_list, node_list, ierr)
write (iout, '(/"after exgssn, error = ", i3)' ) ierr
if (num_df_in_set .gt. 0) then
call exgssd (exoid, ids(i), dist_fact, ierr)
write (iout, '(/"after exgssd, error = ", i3)' ) ierr
endif
write (iout, '(/"element list for side set ", i2)') ids(i)
c for side sets one "side" corresponds to one element entry
num_elem_in_set = num_sides_in_set
do 160 j = 1, num_elem_in_set
write (iout, '(i3)') elem_list(j)
160 continue
write (iout, '("side list for side set ", i2)') ids(i)
do 170 j = 1, num_sides_in_set
write (iout, '(i3)') side_list(j)
170 continue
c node_list is packed: node_ctr_list(k) nodes per side, printed
c sequentially using node_ctr as the running offset
node_ctr = 0
write (iout, '("node list for side set ", i2)') ids(i)
do 178 k=1, num_elem_in_set
do 175 j=1, node_ctr_list(k)
write (iout, '(i3)') node_list(j+node_ctr)
175 continue
node_ctr = node_ctr+node_ctr_list(k)
178 continue
if (num_df_in_set .gt. 0) then
write (iout, '("dist factors for side set ", i2)') ids(i)
do 180 j = 1, num_df_in_set
write (iout, '(f6.3)') dist_fact(j)
180 continue
else
write (iout, '("no dist factors for side set ", i2)') ids(i)
endif
190 continue
c read side set names
call exgnams(exoid, EXSSET, num_side_sets, sset_names, ierr)
write (iout, '(/"after exgnams, error = ", i3)' ) ierr
do i=1, num_side_sets
write (iout, '("side set ",i2," name: ",a)' )
2 i, sset_names(i)
end do
c read side set properties
num_props = exinqi (exoid, EXNSSP)
write (iout,
1 '(/"There are ",i2," properties for each side set")')
2 num_props
call exgpn(exoid, EXSSET, prop_names, ierr)
write (iout, '("after exgpn, error = ", i3)' ) ierr
do 197 i = 1, num_props
do 195 j = 1, num_side_sets
call exgp(exoid, EXSSET,ids(j),prop_names(i),prop_value,ierr)
if (ierr .eq. 0) then
write( iout,
1 '("side set ",i2," property(",i2,"): ",a," = ",i5)' )
2 j, i, prop_names(i), prop_value
else
write (iout, '(/"after exgp, error = ", i3)' ) ierr
endif
195 continue
197 continue
num_side_sets = exinqi (exoid, EXSIDS)
write (iout, '(/"after exinq: EXSIDS =",i3,", error = ",i3)')
1 num_side_sets,ierr
if (num_side_sets .gt. 0) then
elem_list_len = exinqi (exoid, EXSSEL)
write (iout, '(/"after exinq: EXSSEL =",i3,", error = ",i3)')
1 elem_list_len,ierr
node_list_len = exinqi (exoid, EXSSNL)
write (iout, '(/"after exinq: EXSSNL =",i3,", error = ",i3)')
1 node_list_len,ierr
df_list_len = exinqi (exoid, EXSSDF)
write (iout, '(/"after exinq: EXSSDF =",i3,", error = ",i3)')
1 df_list_len,ierr
c read concatenated side sets; this produces the same information as
c the above code which reads individual side sets
call exgcss (exoid, ids, num_elem_per_set, num_df_per_set,
1 elem_ind, df_ind, elem_list, side_list, dist_fact,
2 ierr)
write (iout, '(/"after exgcss, error = ", i3)' ) ierr
write (iout, '("concatenated side set info")')
write (iout, '("ids = ")')
do 200 i = 1, num_side_sets
write (iout, '(i3)') ids(i)
200 continue
write (iout, '("num_elem_per_set = ")')
do 210 i = 1, num_side_sets
write (iout, '(i3)') num_elem_per_set(i)
210 continue
write (iout, '("num_df_per_set = ")')
do 220 i = 1, num_side_sets
write (iout, '(i3)') num_df_per_set(i)
220 continue
write (iout, '("elem_ind = ")')
do 230 i = 1, num_side_sets
write (iout, '(i3)') elem_ind(i)
230 continue
write (iout, '("df_ind = ")')
do 240 i = 1, num_side_sets
write (iout, '(i3)') df_ind(i)
240 continue
write (iout, '("elem_list = ")')
do 250 i = 1, elem_list_len
write (iout, '(i3)') elem_list(i)
250 continue
write (iout, '("side_list = ")')
do 260 i = 1, elem_list_len
write (iout, '(i3)') side_list(i)
260 continue
write (iout, '("dist_fact = ")')
do 270 i = 1, df_list_len
write (iout, '(f6.3)') dist_fact(i)
270 continue
endif
c read QA records
num_qa_rec = exinqi (exoid, EXQA)
call exgqa (exoid, qa_record, ierr)
write (iout, '(/"after exgqa, error = ", i3)' ) ierr
write (iout, '("QA records = ")')
do 290 i = 1, num_qa_rec
do 280 j = 1, 4
write (iout, '(a)') qa_record(j,i)
280 continue
290 continue
c read information records
num_info = exinqi (exoid, EXINFO)
call exginf (exoid, inform, ierr)
write (iout, '(/"after exginf, error = ", i3)' ) ierr
write (iout, '("info records = ")')
do 300 i = 1, num_info
write (iout, '(a81)') inform(i)
300 continue
c read global variables parameters and names
call exgvp (exoid, "g", num_glo_vars, ierr)
write (iout, '(/"after exgvp, error = ", i3)' ) ierr
call exgvan (exoid, "g", num_glo_vars, var_names, ierr)
write (iout, '(/"after exgvan, error = ", i3)' ) ierr
write (iout, '("There are ",i2," global variables; their names ",
1 "are :")') num_glo_vars
do 320 i = 1, num_glo_vars
write (iout, '(a32)') var_names(i)
320 continue
c read nodal variables parameters and names
call exgvp (exoid, "n", num_nod_vars, ierr)
write (iout, '(/"after exgvp, error = ", i3)' ) ierr
call exgvan (exoid, "n", num_nod_vars, var_names, ierr)
write (iout, '(/"after exgvan, error = ", i3)' ) ierr
write (iout, '("There are ",i2," nodal variables; their names ",
1 "are :")') num_nod_vars
do 330 i = 1, num_nod_vars
write (iout, '(a32)') var_names(i)
330 continue
c read element variables parameters and names
call exgvp (exoid, "e", num_ele_vars, ierr)
write (iout, '(/"after exgvp, error = ", i3)' ) ierr
call exgvan (exoid, "e", num_ele_vars, var_names, ierr)
write (iout, '(/"after exgvan, error = ", i3)' ) ierr
write (iout, '("There are ",i2," element variables; their names ",
1 "are :")') num_ele_vars
do 340 i = 1, num_ele_vars
write (iout, '(a32)') var_names(i)
340 continue
c read element variable truth table
call exgvtt (exoid, num_elem_blk, num_ele_vars, truth_tab, ierr)
write (iout, '(/"after exgvtt, error = ", i3)' ) ierr
write (iout, '("This is the element variable truth table:")')
do 360 i = 1, num_elem_blk
do 350 j = 1, num_ele_vars
write (iout, '(i2)') truth_tab(j,i)
350 continue
360 continue
c determine how many time steps are stored
num_time_steps = exinqi (exoid, EXTIMS)
write (iout, '("There are ",i2," time steps in the database.")')
1 num_time_steps
c read time value at one time step
time_step = 3
call exgtim (exoid, time_step, time_value, ierr)
write (iout, '(/"after exgtim, error = ", i3)' ) ierr
write (iout, '("time value at time step ",i2," = ", e12.5)')
1 time_step, time_value
c read time values at all time steps
call exgatm (exoid, time_values, ierr)
write (iout, '(/"after exgatm, error = ", i3)' ) ierr
write (iout, '("time values at all time steps are:")')
do 370 i = 1, num_time_steps
write (iout, '(e12.5)') time_values(i)
370 continue
c an end_time of -1 requests values through the last time step on the
c database for the exggvt/exgnvt/exgevt through-time reads below
var_index = 1
beg_time = 1
end_time = -1
c read all global variables at one time step
call exggv (exoid, time_step, num_glo_vars, var_values, ierr)
write (iout, '(/"after exggv, error = ", i3)' ) ierr
write (iout, '("global variable values at time step ",i2)')
1 time_step
do 400 i = 1, num_glo_vars
write (iout, '(e12.5)') var_values(i)
400 continue
c read a single global variable through time
call exggvt (exoid, var_index, beg_time, end_time, var_values,
1 ierr)
write (iout, '(/"after exggvt, error = ", i3)' ) ierr
write (iout, '("global variable ",i2," values through time:")')
1 var_index
do 410 i = 1, num_time_steps
write (iout, '(e12.5)') var_values(i)
410 continue
c read a nodal variable at one time step
call exgnv (exoid, time_step, var_index, num_nodes, var_values,
1 ierr)
write (iout, '(/"after exgnv, error = ", i3)' ) ierr
write (iout, '("nodal variable ",i2," values at time step ",i2)')
1 var_index, time_step
do 420 i = 1, num_nodes
write (iout, '(e12.5)') var_values(i)
420 continue
c read a nodal variable through time
node_num = 1
call exgnvt (exoid, var_index, node_num, beg_time, end_time,
1 var_values, ierr)
write (iout, '(/"after exgnvt, error = ", i3)' ) ierr
write (iout, '("nodal variable ",i2," values for node ",i2,
1 " through time:")') var_index, node_num
do 430 i = 1, num_time_steps
write (iout, '(e12.5)') var_values(i)
430 continue
c read an element variable at one time step
call exgebi (exoid, ids, ierr)
write (iout, '(/"after exgebi, error = ", i3)' ) ierr
do 450 i = 1, num_elem_blk
call exgev (exoid, time_step, var_index, ids(i),
1 num_elem_in_block(i), var_values, ierr)
write (iout, '(/"after exgev, error = ", i3)' ) ierr
if (ierr .eq. 0) then
write (iout, '("element variable ",i2," values of element ",
1 "block ",i2," at time step ",i2)')
2 var_index, ids(i), time_step
endif
do 440 j = 1, num_elem_in_block(i)
write (iout, '(e12.5)') var_values(j)
440 continue
450 continue
c read an element variable through time
var_index = 2
elem_num = 2
call exgevt (exoid, var_index, elem_num, beg_time, end_time,
1 var_values, ierr)
write (iout, '(/"after exgevt, error = ", i3)' ) ierr
write (iout, '("element variable ",i2," values for element ",i2,
1 " through time:")') var_index, elem_num
do 460 i = 1, num_time_steps
write (iout, '(e12.5)') var_values(i)
460 continue
c close the file and finish
call exclos (exoid, ierr)
write (iout, '(/"after exclos, error = ", i3)' ) ierr
stop
end

@ -0,0 +1,587 @@
/*
* Copyright(C) 1999-2022 National Technology & Engineering Solutions
* of Sandia, LLC (NTESS). Under the terms of Contract DE-NA0003525 with
* NTESS, the U.S. Government retains certain rights in this software.
*
* See packages/seacas/LICENSE for details
*/
/*****************************************************************************
*
* testwt - test write an ExodusII database file
*
* This is a test program for the C binding of the EXODUS II
* database write routines.
*
*****************************************************************************/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "exodusII.h"
#include "exodusII_int.h"
/* Two-level stringification: TOSTRING expands its argument first so that
   the literal call text (not a macro name) is embedded in messages. */
#define STRINGIFY(x) #x
#define TOSTRING(x) STRINGIFY(x)
/* Execute an Exodus API call, echo its error code, and abort the program
   (closing the database first) on any status other than EX_NOERR/EX_WARN.
   NOTE(review): references `exoid` from the enclosing scope, so it may
   only be used where an `exoid` handle is in scope. */
#define EXCHECK(funcall) \
do { \
int f_error = (funcall); \
printf("after %s, error = %d\n", TOSTRING(funcall), f_error); \
if (f_error != EX_NOERR && f_error != EX_WARN) { \
fprintf(stderr, "Error calling %s\n", TOSTRING(funcall)); \
ex_close(exoid); \
exit(-1); \
} \
} while (0)
int main(int argc, char **argv) {
ex_opts(EX_VERBOSE);
/* Specify compute and i/o word size */
int CPU_word_size = 0; /* sizeof(float) */
int IO_word_size = 4; /* (4 bytes) */
/* create EXODUS II file */
int exoid = ex_create("test.exo", /* filename path */
EX_CLOBBER, /* create mode */
&CPU_word_size, /* CPU float word size in bytes */
&IO_word_size); /* I/O float word size in bytes */
printf("after ex_create for test.exo, exoid = %d\n", exoid);
printf(" cpu word size: %d io word size: %d\n", CPU_word_size, IO_word_size);
/* initialize file with parameters */
int num_dim = 3;
int num_nodes = 33;
int num_elem = 7;
int num_elem_blk = 7;
int num_node_sets = 2;
int num_side_sets = 5;
char *title = "This is a test";
EXCHECK(ex_put_init(exoid, title, num_dim, num_nodes, num_elem, num_elem_blk,
num_node_sets, num_side_sets));
/* clang-format off */
/* write nodal coordinates values and names to database */
/* Quad #1 */
float x[100], y[100], z[100];
x[0] = 0.0; y[0] = 0.0; z[0] = 0.0;
x[1] = 1.0; y[1] = 0.0; z[1] = 0.0;
x[2] = 1.0; y[2] = 1.0; z[2] = 0.0;
x[3] = 0.0; y[3] = 1.0; z[3] = 0.0;
/* Quad #2 */
x[4] = 1.0; y[4] = 0.0; z[4] = 0.0;
x[5] = 2.0; y[5] = 0.0; z[5] = 0.0;
x[6] = 2.0; y[6] = 1.0; z[6] = 0.0;
x[7] = 1.0; y[7] = 1.0; z[7] = 0.0;
/* Hex #1 */
x[8] = 0.0; y[8] = 0.0; z[8] = 0.0;
x[9] = 10.0; y[9] = 0.0; z[9] = 0.0;
x[10] = 10.0; y[10] = 0.0; z[10] = -10.0;
x[11] = 1.0; y[11] = 0.0; z[11] = -10.0;
x[12] = 1.0; y[12] = 10.0; z[12] = 0.0;
x[13] = 10.0; y[13] = 10.0; z[13] = 0.0;
x[14] = 10.0; y[14] = 10.0; z[14] = -10.0;
x[15] = 1.0; y[15] = 10.0; z[15] = -10.0;
/* Tetra #1 */
x[16] = 0.0; y[16] = 0.0; z[16] = 0.0;
x[17] = 1.0; y[17] = 0.0; z[17] = 5.0;
x[18] = 10.0; y[18] = 0.0; z[18] = 2.0;
x[19] = 7.0; y[19] = 5.0; z[19] = 3.0;
/* Wedge #1 */
x[20] = 3.0; y[20] = 0.0; z[20] = 6.0;
x[21] = 6.0; y[21] = 0.0; z[21] = 0.0;
x[22] = 0.0; y[22] = 0.0; z[22] = 0.0;
x[23] = 3.0; y[23] = 2.0; z[23] = 6.0;
x[24] = 6.0; y[24] = 2.0; z[24] = 2.0;
x[25] = 0.0; y[25] = 2.0; z[25] = 0.0;
/* Tetra #2 */
x[26] = 2.7; y[26] = 1.7; z[26] = 2.7;
x[27] = 6.0; y[27] = 1.7; z[27] = 3.3;
x[28] = 5.7; y[28] = 1.7; z[28] = 1.7;
x[29] = 3.7; y[29] = 0.0; z[29] = 2.3;
/* 3d Tri */
x[30] = 0.0; y[30] = 0.0; z[30] = 0.0;
x[31] = 10.0; y[31] = 0.0; z[31] = 0.0;
x[32] = 10.0; y[32] = 10.0; z[32] = 10.0;
/* clang-format on */
EXCHECK(ex_put_coord(exoid, x, y, z));
char *coord_names[] = {"xcoor", "ycoor", "zcoor"};
EXCHECK(ex_put_coord_names(exoid, coord_names));
/* Add nodal attributes */
EXCHECK(ex_put_attr_param(exoid, EX_NODAL, 0, 2));
EXCHECK(ex_put_one_attr(exoid, EX_NODAL, 0, 1, x));
EXCHECK(ex_put_one_attr(exoid, EX_NODAL, 0, 2, y));
{
char *attrib_names[] = {"Node_attr_1", "Node_attr_2"};
EXCHECK(ex_put_attr_names(exoid, EX_NODAL, 0, attrib_names));
}
/* write element id map */
int *elem_map = (int *)calloc(num_elem, sizeof(int));
for (int i = 1; i <= num_elem; i++) {
elem_map[i - 1] = i * 10;
}
EXCHECK(ex_put_id_map(exoid, EX_ELEM_MAP, elem_map));
free(elem_map);
/* write element block parameters */
struct ex_block blocks[10];
for (int i = 0; i < 10; i++) {
blocks[i].type = EX_ELEM_BLOCK;
blocks[i].id = 0;
blocks[i].num_entry = 0;
blocks[i].num_nodes_per_entry = 0;
blocks[i].num_edges_per_entry = 0;
blocks[i].num_faces_per_entry = 0;
blocks[i].num_attribute = 0;
}
char *block_names[10];
block_names[0] = "block_1";
block_names[1] = "block_2";
block_names[2] = "block_3";
block_names[3] = "block_4";
block_names[4] = "block_5";
block_names[5] = "block_6";
block_names[6] = "block_7";
ex_copy_string(blocks[0].topology, "quad", MAX_STR_LENGTH + 1);
ex_copy_string(blocks[1].topology, "quad", MAX_STR_LENGTH + 1);
ex_copy_string(blocks[2].topology, "hex", MAX_STR_LENGTH + 1);
ex_copy_string(blocks[3].topology, "tetra", MAX_STR_LENGTH + 1);
ex_copy_string(blocks[4].topology, "wedge", MAX_STR_LENGTH + 1);
ex_copy_string(blocks[5].topology, "tetra", MAX_STR_LENGTH + 1);
ex_copy_string(blocks[6].topology, "tri", MAX_STR_LENGTH + 1);
blocks[0].num_entry = 1;
blocks[1].num_entry = 1;
blocks[2].num_entry = 1;
blocks[3].num_entry = 1;
blocks[4].num_entry = 1;
blocks[5].num_entry = 1;
blocks[6].num_entry = 1;
blocks[0].num_attribute = 1;
blocks[1].num_attribute = 1;
blocks[2].num_attribute = 1;
blocks[3].num_attribute = 1;
blocks[4].num_attribute = 1;
blocks[5].num_attribute = 1;
blocks[6].num_attribute = 1;
blocks[0].num_nodes_per_entry = 4; /* elements in block #1 are 4-node quads */
blocks[1].num_nodes_per_entry = 4; /* elements in block #2 are 4-node quads */
blocks[2].num_nodes_per_entry = 8; /* elements in block #3 are 8-node hexes */
blocks[3].num_nodes_per_entry =
4; /* elements in block #4 are 4-node tetras */
blocks[4].num_nodes_per_entry =
6; /* elements in block #5 are 6-node wedges */
blocks[5].num_nodes_per_entry =
8; /* elements in block #6 are 8-node tetras */
blocks[6].num_nodes_per_entry = 3; /* elements in block #7 are 3-node tris */
blocks[0].id = 10;
blocks[1].id = 11;
blocks[2].id = 12;
blocks[3].id = 13;
blocks[4].id = 14;
blocks[5].id = 15;
blocks[6].id = 16;
/* Generate an error that name is not found since blocks have not
yet been defined
*/
int error = ex_put_name(exoid, EX_ELEM_BLOCK, blocks[0].id, block_names[0]);
printf("after ex_put_name, error = %d\n", error);
EXCHECK(ex_put_block_params(exoid, num_elem_blk, blocks));
/* Write element block names */
for (int i = 0; i < num_elem_blk; i++) {
EXCHECK(ex_put_name(exoid, EX_ELEM_BLOCK, blocks[i].id, block_names[i]));
}
/* write element block properties */
/* 12345678901234567890123456789012 */
char *prop_names[2];
prop_names[0] = "MATERIAL_PROPERTY_LONG_NAME_32CH";
prop_names[1] = "DENSITY";
EXCHECK(ex_put_prop_names(exoid, EX_ELEM_BLOCK, 2, prop_names));
EXCHECK(ex_put_prop(exoid, EX_ELEM_BLOCK, blocks[0].id, prop_names[0], 10));
EXCHECK(ex_put_prop(exoid, EX_ELEM_BLOCK, blocks[1].id, prop_names[0], 20));
EXCHECK(ex_put_prop(exoid, EX_ELEM_BLOCK, blocks[2].id, prop_names[0], 30));
EXCHECK(ex_put_prop(exoid, EX_ELEM_BLOCK, blocks[3].id, prop_names[0], 40));
EXCHECK(ex_put_prop(exoid, EX_ELEM_BLOCK, blocks[4].id, prop_names[0], 50));
EXCHECK(ex_put_prop(exoid, EX_ELEM_BLOCK, blocks[5].id, prop_names[0], 60));
EXCHECK(ex_put_prop(exoid, EX_ELEM_BLOCK, blocks[6].id, prop_names[0], 70));
/* write element connectivity */
{
int connect[] = {1, 2, 3, 4};
EXCHECK(
ex_put_conn(exoid, EX_ELEM_BLOCK, blocks[0].id, connect, NULL, NULL));
}
{
int connect[] = {5, 6, 7, 8};
EXCHECK(
ex_put_conn(exoid, EX_ELEM_BLOCK, blocks[1].id, connect, NULL, NULL));
}
{
int connect[] = {9, 10, 11, 12, 13, 14, 15, 16};
EXCHECK(
ex_put_conn(exoid, EX_ELEM_BLOCK, blocks[2].id, connect, NULL, NULL));
}
{
int connect[] = {17, 18, 19, 20};
EXCHECK(
ex_put_conn(exoid, EX_ELEM_BLOCK, blocks[3].id, connect, NULL, NULL));
}
{
int connect[] = {21, 22, 23, 24, 25, 26};
EXCHECK(
ex_put_conn(exoid, EX_ELEM_BLOCK, blocks[4].id, connect, NULL, NULL));
}
{
int connect[] = {17, 18, 19, 20, 27, 28, 30, 29};
EXCHECK(
ex_put_conn(exoid, EX_ELEM_BLOCK, blocks[5].id, connect, NULL, NULL));
}
{
int connect[] = {31, 32, 33};
EXCHECK(
ex_put_conn(exoid, EX_ELEM_BLOCK, blocks[6].id, connect, NULL, NULL));
}
/* write element block attributes */
float attrib[1];
attrib[0] = 3.14159;
EXCHECK(ex_put_attr(exoid, EX_ELEM_BLOCK, blocks[0].id, attrib));
EXCHECK(ex_put_attr(exoid, EX_ELEM_BLOCK, blocks[0].id, attrib));
attrib[0] = 6.14159;
EXCHECK(ex_put_attr(exoid, EX_ELEM_BLOCK, blocks[1].id, attrib));
EXCHECK(ex_put_attr(exoid, EX_ELEM_BLOCK, blocks[2].id, attrib));
EXCHECK(ex_put_attr(exoid, EX_ELEM_BLOCK, blocks[3].id, attrib));
EXCHECK(ex_put_attr(exoid, EX_ELEM_BLOCK, blocks[4].id, attrib));
EXCHECK(ex_put_attr(exoid, EX_ELEM_BLOCK, blocks[5].id, attrib));
EXCHECK(ex_put_attr(exoid, EX_ELEM_BLOCK, blocks[6].id, attrib));
{
char *attrib_names[] = {"THICKNESS"};
for (int i = 0; i < num_elem_blk; i++) {
EXCHECK(
ex_put_attr_names(exoid, EX_ELEM_BLOCK, blocks[i].id, attrib_names));
}
}
/* write individual node sets */
int num_nodes_in_nset[] = {5, 3};
int nsids[] = {20, 21};
{
EXCHECK(ex_put_set_param(exoid, EX_NODE_SET, nsids[0], 5, 5));
int node_list[] = {10, 11, 12, 13, 14};
float dist_fact[] = {1.0, 2.0, 3.0, 4.0, 5.0};
EXCHECK(ex_put_set(exoid, EX_NODE_SET, nsids[0], node_list, NULL));
EXCHECK(ex_put_set_dist_fact(exoid, EX_NODE_SET, nsids[0], dist_fact));
}
{
EXCHECK(ex_put_set_param(exoid, EX_NODE_SET, nsids[1], 3, 3));
int node_list[] = {20, 21, 22};
float dist_fact[] = {1.1, 2.1, 3.1};
EXCHECK(ex_put_set(exoid, EX_NODE_SET, nsids[1], node_list, NULL));
EXCHECK(ex_put_set_dist_fact(exoid, EX_NODE_SET, nsids[1], dist_fact));
}
/* Write node set names */
char *nset_names[] = {"nset_1", "nset_2"};
EXCHECK(ex_put_names(exoid, EX_NODE_SET, nset_names));
EXCHECK(ex_put_prop(exoid, EX_NODE_SET, nsids[0], "FACE", 4));
EXCHECK(ex_put_prop(exoid, EX_NODE_SET, nsids[1], "FACE", 5));
int prop_array[] = {1000, 2000};
EXCHECK(ex_put_prop_array(exoid, EX_NODE_SET, "VELOCITY", prop_array));
/* Add nodeset attributes */
EXCHECK(ex_put_attr_param(exoid, EX_NODE_SET, nsids[0], 1));
EXCHECK(ex_put_attr(exoid, EX_NODE_SET, nsids[0], x));
{
char *attrib_names[] = {"Nodeset_attribute"};
EXCHECK(ex_put_attr_names(exoid, EX_NODE_SET, nsids[0], attrib_names));
}
/* write individual side sets */
int num_face_in_sset[] = {2, 2, 7, 8, 10};
int ssids[] = {30, 31, 32, 33, 34};
{
/* side set #1 - quad */
EXCHECK(ex_put_set_param(exoid, EX_SIDE_SET, ssids[0], 2, 4));
int elem_list[] = {2, 2};
int side_list[] = {4, 2};
float dist_fact[] = {30.0, 30.1, 30.2, 30.3};
EXCHECK(ex_put_set(exoid, EX_SIDE_SET, 30, elem_list, side_list));
EXCHECK(ex_put_set_dist_fact(exoid, EX_SIDE_SET, 30, dist_fact));
}
{
/* side set #2 - quad, spanning 2 elements */
EXCHECK(ex_put_set_param(exoid, EX_SIDE_SET, 31, 2, 4));
int elem_list[] = {1, 2};
int side_list[] = {2, 3};
float dist_fact[] = {31.0, 31.1, 31.2, 31.3};
EXCHECK(ex_put_set(exoid, EX_SIDE_SET, 31, elem_list, side_list));
EXCHECK(ex_put_set_dist_fact(exoid, EX_SIDE_SET, 31, dist_fact));
}
{
/* side set #3 - hex */
EXCHECK(ex_put_set_param(exoid, EX_SIDE_SET, 32, 7, 0));
int elem_list[] = {3, 3, 3, 3, 3, 3, 3};
int side_list[] = {5, 3, 3, 2, 4, 1, 6};
EXCHECK(ex_put_set(exoid, EX_SIDE_SET, 32, elem_list, side_list));
}
{
/* side set #4 - tetras */
EXCHECK(ex_put_set_param(exoid, EX_SIDE_SET, 33, 8, 0));
int elem_list[] = {4, 4, 4, 4, 6, 6, 6, 6};
int side_list[] = {1, 2, 3, 4, 1, 2, 3, 4};
EXCHECK(ex_put_set(exoid, EX_SIDE_SET, 33, elem_list, side_list));
}
{
/* side set #5 - wedges and tris */
EXCHECK(ex_put_set_param(exoid, EX_SIDE_SET, 34, 10, 0));
int elem_list[] = {5, 5, 5, 5, 5, 7, 7, 7, 7, 7};
int side_list[] = {1, 2, 3, 4, 5, 1, 2, 3, 4, 5};
EXCHECK(ex_put_set(exoid, EX_SIDE_SET, 34, elem_list, side_list));
}
/* Write side set names */
char *sset_names[] = {"sset_1", "sset_2", "sset_3", "sset_4", "sset_5"};
EXCHECK(ex_put_names(exoid, EX_SIDE_SET, sset_names));
EXCHECK(ex_put_prop(exoid, EX_SIDE_SET, 30, "COLOR", 100));
EXCHECK(ex_put_prop(exoid, EX_SIDE_SET, 31, "COLOR", 101));
/* write QA records; test empty and just blank-filled records */
int num_qa_rec = 2;
char *qa_record[2][4];
qa_record[0][0] = "TESTWT";
qa_record[0][1] = "testwt";
qa_record[0][2] = "07/07/93";
qa_record[0][3] = "15:41:33";
qa_record[1][0] = "Thirty-Two character QA Record|";
qa_record[1][1] = " ";
qa_record[1][2] = "";
qa_record[1][3] = " ";
EXCHECK(ex_put_qa(exoid, num_qa_rec, qa_record));
/* write information records; test empty and just blank-filled records */
char *info[3];
info[0] = "This is the first information record.";
info[1] = "";
info[2] = "This info record is exactly 80 characters long. last character "
"should be pipe |";
int num_info = 3;
EXCHECK(ex_put_info(exoid, num_info, info));
/* write results variables parameters and names */
int num_glo_vars = 1;
{
char *var_names[] = {"glo_vars"};
EXCHECK(ex_put_variable_param(exoid, EX_GLOBAL, num_glo_vars));
EXCHECK(ex_put_variable_names(exoid, EX_GLOBAL, num_glo_vars, var_names));
}
int num_nod_vars = 2;
{
/* 12345678901234567890123456789012 */
char *var_names[] = {"node_variable_a_very_long_name_0", "nod_var1"};
EXCHECK(ex_put_variable_param(exoid, EX_NODAL, num_nod_vars));
EXCHECK(ex_put_variable_names(exoid, EX_NODAL, num_nod_vars, var_names));
}
int num_ele_vars = 3;
{
/* 0 1 2 3 */
/* 12345678901234567890123456789012 */
char *var_names[] = {"this_variable_name_is_short",
"this_variable_name_is_just_right",
"this_variable_name_is_tooooo_long"};
EXCHECK(ex_put_variable_param(exoid, EX_ELEM_BLOCK, num_ele_vars));
EXCHECK(
ex_put_variable_names(exoid, EX_ELEM_BLOCK, num_ele_vars, var_names));
}
int num_nset_vars = 3;
{
char *var_names[] = {"ns_var0", "ns_var1", "ns_var2"};
EXCHECK(ex_put_variable_param(exoid, EX_NODE_SET, num_nset_vars));
EXCHECK(
ex_put_variable_names(exoid, EX_NODE_SET, num_nset_vars, var_names));
}
int num_sset_vars = 3;
{
char *var_names[] = {"ss_var0", "ss_var1", "ss_var2"};
EXCHECK(ex_put_variable_param(exoid, EX_SIDE_SET, num_sset_vars));
EXCHECK(
ex_put_variable_names(exoid, EX_SIDE_SET, num_sset_vars, var_names));
}
/* write element variable truth table */
int *truth_tab = (int *)calloc((num_elem_blk * num_ele_vars), sizeof(int));
{
int k = 0;
for (int i = 0; i < num_elem_blk; i++) {
for (int j = 0; j < num_ele_vars; j++) {
truth_tab[k++] = 1;
}
}
}
EXCHECK(ex_put_truth_table(exoid, EX_ELEM_BLOCK, num_elem_blk, num_ele_vars,
truth_tab));
free(truth_tab);
/* for each time step, write the analysis results;
* the code below fills the arrays glob_var_vals,
* nodal_var_vals, and elem_var_vals with values for debugging purposes;
* obviously the analysis code will populate these arrays
*/
float *glob_var_vals = (float *)calloc(num_glo_vars, CPU_word_size);
float *nodal_var_vals = (float *)calloc(num_nodes, CPU_word_size);
float *elem_var_vals = (float *)calloc(num_ele_vars, CPU_word_size);
float *sset_var_vals =
(float *)calloc(10, CPU_word_size); /* max sides_in_sset */
float *nset_var_vals =
(float *)calloc(5, CPU_word_size); /* max nodes_in_nset */
int num_time_steps = 10;
for (int i = 0; i < num_time_steps; i++) {
int whole_time_step = i + 1;
float time_value = (float)(i + 1) / 100.;
/* write time value */
EXCHECK(ex_put_time(exoid, whole_time_step, &time_value));
/* write global variables */
for (int j = 0; j < num_glo_vars; j++) {
glob_var_vals[j] = (float)(j + 2) * time_value;
}
EXCHECK(ex_put_var(exoid, whole_time_step, EX_GLOBAL, 1, 1, num_glo_vars,
glob_var_vals));
/* write nodal variables */
for (int k = 1; k <= num_nod_vars; k++) {
for (int j = 0; j < num_nodes; j++) {
nodal_var_vals[j] = (float)k + ((float)(j + 1) * time_value);
}
EXCHECK(ex_put_var(exoid, whole_time_step, EX_NODAL, k, 1, num_nodes,
nodal_var_vals));
}
/* write element variables */
for (int k = 1; k <= num_ele_vars; k++) {
for (int j = 0; j < num_elem_blk; j++) {
for (int m = 0; m < blocks[j].num_entry; m++) {
elem_var_vals[m] =
(float)(k + 1) + (float)(j + 2) + ((float)(m + 1) * time_value);
/* printf("elem_var_vals[%d]: %f\n",m,elem_var_vals[m]); */
}
EXCHECK(ex_put_var(exoid, whole_time_step, EX_ELEM_BLOCK, k,
blocks[j].id, blocks[j].num_entry, elem_var_vals));
}
}
/* write sideset variables */
for (int k = 1; k <= num_sset_vars; k++) {
for (int j = 0; j < num_side_sets; j++) {
for (int m = 0; m < num_face_in_sset[j]; m++) {
sset_var_vals[m] =
(float)(k + 2) + (float)(j + 3) + ((float)(m + 1) * time_value);
/* printf("sset_var_vals[%d]: %f\n",m,sset_var_vals[m]); */
}
EXCHECK(ex_put_var(exoid, whole_time_step, EX_SIDE_SET, k, ssids[j],
num_face_in_sset[j], sset_var_vals));
}
}
/* write nodeset variables */
for (int k = 1; k <= num_nset_vars; k++) {
for (int j = 0; j < num_node_sets; j++) {
for (int m = 0; m < num_nodes_in_nset[j]; m++) {
nset_var_vals[m] =
(float)(k + 3) + (float)(j + 4) + ((float)(m + 1) * time_value);
/* printf("nset_var_vals[%d]: %f\n",m,nset_var_vals[m]); */
}
EXCHECK(ex_put_var(exoid, whole_time_step, EX_NODE_SET, k, nsids[j],
num_nodes_in_nset[j], nset_var_vals));
}
}
/* update the data file; this should be done at the end of every time step
* to ensure that no data is lost if the analysis dies
*/
EXCHECK(ex_update(exoid));
}
free(glob_var_vals);
free(nodal_var_vals);
free(elem_var_vals);
free(sset_var_vals);
free(nset_var_vals);
/* close the EXODUS files
*/
EXCHECK(ex_close(exoid));
return 0;
}

@ -0,0 +1,16 @@
# Configure a 64-bit Fortran build: 8-byte default REAL/INTEGER everywhere.
add_definitions(-DBuild64)

# Append "_" to C symbols called from Fortran when the name-mangling probe
# reported the trailing-underscore convention.
if ("${FC_FN_UNDERSCORE}" STREQUAL "UNDER")
  add_definitions(-DADDC_)
endif()

# Per-compiler flags that force default REAL and INTEGER kinds to 8 bytes.
if ("${CMAKE_Fortran_COMPILER_ID}" MATCHES "GNU")
  set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -fcray-pointer -fdefault-real-8 -fdefault-integer-8 -fno-range-check")
elseif ("${CMAKE_Fortran_COMPILER_ID}" MATCHES "XL")
  set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -qintsize=8 -qrealsize=8")
elseif ("${CMAKE_Fortran_COMPILER_ID}" MATCHES "Cray")
  set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -sdefault64")
else()
  # Default (Intel-style) spelling for other compilers.
  set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -r8 -i8")
endif()

@ -0,0 +1,115 @@
# Configure the Kokkos package for a Trilinos/SEACAS build: sets the debug,
# bounds-check, and profiling cache options, runs the Kokkos Makefile-based
# settings generator, and splices the resulting compiler flags into
# CMAKE_CXX_FLAGS when a specific KOKKOS_ARCH was requested.
IF (${Seacas_ENABLE_Kokkos})
PRINT_VAR(KOKKOS_ARCH)
# This is where to generate the gen_kokkos.cmake and KokkosCore_config.h
# that we will use in the configuration
set(Kokkos_GEN_DIR ${CMAKE_BINARY_DIR})
# Enable debug checking in Kokkos by default if
# ${PROJECT_NAME}_ENABLE_DEBUG=ON
set(KOKKOS_ENABLE_DEBUG ${${PROJECT_NAME}_ENABLE_DEBUG}
CACHE BOOL
"Enable debug checking in Kokkos.")
set(Kokkos_ENABLE_Debug_Bounds_Check ${KOKKOS_ENABLE_DEBUG}
CACHE BOOL
"Enable bounds checking in Kokkos array classes.")
# Profiling defaults ON but is turned off when the DLlib TPL (libdl) was
# explicitly disabled.  NOTE(review): this implies the profiling hooks
# depend on dlopen support from DLlib -- confirm before changing.
set(Kokkos_ENABLE_Profiling_DEFAULT ON)
if (DEFINED TPL_ENABLE_DLlib)
if (NOT TPL_ENABLE_DLlib)
message(STATUS "Setting Kokkos_ENABLE_Profiling_DEFAULT=OFF because TPL_ENABLE_DLlib=${TPL_ENABLE_DLlib}")
set(Kokkos_ENABLE_Profiling_DEFAULT OFF)
endif()
endif()
set(Kokkos_ENABLE_Profiling ${Kokkos_ENABLE_Profiling_DEFAULT}
CACHE BOOL
"Enable Kokkos profiling hooks.")
# Basic initialization (Used in KOKKOS_SETTINGS)
set(KOKKOS_SRC_PATH ${Kokkos_SOURCE_DIR})
set(KOKKOS_PATH ${KOKKOS_SRC_PATH})
#------------ COMPILER AND FEATURE CHECKS ------------------------------------
include(${KOKKOS_SRC_PATH}/cmake/kokkos_functions.cmake)
set_kokkos_cxx_compiler()
set_kokkos_cxx_standard()
#------------ GET OPTIONS ----------------------------------------------------
set(KOKKOS_CMAKE_VERBOSE True)
set(KOKKOS_HAS_TRILINOS True)
include(${KOKKOS_SRC_PATH}/cmake/kokkos_options.cmake)
#------------ COMPUTE KOKKOS_SETTINGS ----------------------------------------
include(${KOKKOS_SRC_PATH}/cmake/kokkos_settings.cmake)
#------------ GENERATE HEADER AND SOURCE FILES -------------------------------
# Run the Kokkos Makefile-based generator to produce
# kokkos_generated_settings.cmake in the build tree; stdout is captured in
# core_src_make.out for post-mortem debugging.
execute_process(
COMMAND ${KOKKOS_SETTINGS} make -f ${KOKKOS_SRC_PATH}/cmake/Makefile.generate_cmake_settings CXX=${CMAKE_CXX_COMPILER} generate_build_settings
WORKING_DIRECTORY "${Kokkos_GEN_DIR}"
OUTPUT_FILE ${Kokkos_GEN_DIR}/core_src_make.out
RESULT_VARIABLE GEN_SETTINGS_RESULT
)
# A non-zero exit status aborts the configure with the exact command line
# that failed, so the user can rerun it by hand.
if (GEN_SETTINGS_RESULT)
message(FATAL_ERROR "Kokkos settings generation failed:\n"
"${KOKKOS_SETTINGS} make -f ${KOKKOS_SRC_PATH}/cmake/Makefile.generate_cmake_settings CXX=${CMAKE_CXX_COMPILER} generate_build_settings")
endif()
include(${Kokkos_GEN_DIR}/kokkos_generated_settings.cmake)
# Resolve the library install directory, preferring the project-specific
# override, then the generic INSTALL_LIB_DIR, then plain "lib".
set(libdir lib)
if (${PROJECT_NAME}_INSTALL_LIB_DIR)
set(libdir ${${PROJECT_NAME}_INSTALL_LIB_DIR})
endif()
if (INSTALL_LIB_DIR)
set(libdir ${INSTALL_LIB_DIR})
endif()
install(FILES ${Kokkos_GEN_DIR}/kokkos_generated_settings.cmake DESTINATION ${libdir}/cmake/Kokkos)
IF (NOT KOKKOS_ARCH STREQUAL "None")
# Convert KOKKOS_CXX_FLAGS, which is a CMake list, into a string for CXX_FLAGS
set(KOKKOS_CXX_FLAGS_str "")
# When compiling CUDA with Clang, the flags "-x cuda" and "--cuda-gpu-arch=sm_??"
# cannot be passed to the link line, so we sneak these into the lesser-used
# add_compile_options() function, which only affects the compile line and not the link line
foreach(opt ${KOKKOS_CXX_FLAGS})
if (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
if (opt MATCHES "--cuda-gpu-arch")
# Furthermore, add_compile_options normally affects all languages, so
# we need a generator expression to prevent CUDA flags being passed to C or Fortran
add_compile_options($<$<COMPILE_LANGUAGE:CXX>:${opt}>)
else()
set(KOKKOS_CXX_FLAGS_str "${KOKKOS_CXX_FLAGS_str} ${opt}")
endif()
else()
set(KOKKOS_CXX_FLAGS_str "${KOKKOS_CXX_FLAGS_str} ${opt}")
endif()
endforeach()
if (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
# Since "-x cuda" shows up as two arguments, it's easier to filter out here:
if (KOKKOS_CXX_FLAGS_str MATCHES "-x cuda")
string(REPLACE "-x cuda" "" KOKKOS_CXX_FLAGS_str "${KOKKOS_CXX_FLAGS_str}")
add_compile_options($<$<COMPILE_LANGUAGE:CXX>:-x>)
add_compile_options($<$<COMPILE_LANGUAGE:CXX>:cuda>)
endif()
endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${KOKKOS_CXX_FLAGS_str}")
# TODO -- need to remove the -lkokkos. Check on LDFlags
#set(KOKKOS_LINK_DEPENDS libkokkos.a CACHE STRING "")
#set(KOKKOS_LIBS -lkokkos -ldl -lpthread CACHE STRING "")
#set(KOKKOS_LDFLAGS -L/scr_gabrielle/kruger/builds-ptsolve/trilinos/par2 --gcc-toolchain=/usr CACHE STRING "")
MESSAGE("-- " "Skip adding flags for C++11 because Kokkos flags does that ...")
SET(${PROJECT_NAME}_CXX11_FLAGS " ")
MESSAGE("-- " "Skip adding flags for OpenMP because Kokkos flags does that ...")
SET(OpenMP_CXX_FLAGS_OVERRIDE " ")
ENDIF()
# Above, it is important not to disturb the default configuration of
# Trilinos if KOKKOS_ARCH is not set. But the implementation of the new
# Kokkos TriBITS CMake files requires kokkos_generated_settings.cmake be
# included.
ENDIF()

@ -0,0 +1,6 @@
# Disable the SEACAS subpackages that need Fortran when Fortran support is
# not enabled; the actual logic lives in the helper module included below.
IF (SEACAS_SOURCE_DIR)
  INCLUDE("${SEACAS_SOURCE_DIR}/cmake/SeacasDisableSubpackagesDependingOnFortran.cmake")
  seacas_disable_subpackages_depending_on_fortran()
ENDIF()

@ -0,0 +1,2 @@
# Fallback FindTPL file: reaching this point means the requested TPL has no
# find module defined for this project, so abort configuration with a clear
# diagnostic naming the offending TPL and project.
message(FATAL_ERROR
"Error, the TPL ${TPL_NAME} is not defined for the CMake project ${PROJECT_NAME}")

@ -0,0 +1,59 @@
# @HEADER
# ************************************************************************
#
# Trilinos: An Object-Oriented Solver Framework
# Copyright (2001) Sandia Corporation
#
#
# Copyright (2001) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000, there is a non-exclusive license for use of this
# work by or on behalf of the U.S. Government. Export of this program
# may require a license from the United States Government.
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Corporation nor the names of the
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# NOTICE: The United States Government is granted for itself and others
# acting on its behalf a paid-up, nonexclusive, irrevocable worldwide
# license in this data to reproduce, prepare derivative works, and
# perform publicly and display publicly. Beginning five (5) years from
# July 25, 2001, the United States Government is granted for itself and
# others acting on its behalf a paid-up, nonexclusive, irrevocable
# worldwide license in this data to reproduce, prepare derivative works,
# distribute copies to the public, perform publicly and display
# publicly, and to permit others to do so.
#
# NEITHER THE UNITED STATES GOVERNMENT, NOR THE UNITED STATES DEPARTMENT
# OF ENERGY, NOR SANDIA CORPORATION, NOR ANY OF THEIR EMPLOYEES, MAKES
# ANY WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LEGAL LIABILITY OR
# RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS, OR USEFULNESS OF ANY
# INFORMATION, APPARATUS, PRODUCT, OR PROCESS DISCLOSED, OR REPRESENTS
# THAT ITS USE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS.
#
# ************************************************************************
# @HEADER
# Locate the CGNS mesh-I/O library and register its header and library
# with TriBITS as the CGNS TPL.
find_package(CGNS REQUIRED)
tribits_tpl_find_include_dirs_and_libraries(CGNS
  REQUIRED_HEADERS cgnslib.h
  REQUIRED_LIBS_NAMES "cgns"
)

@ -0,0 +1,129 @@
# @HEADER
# ************************************************************************
#
# Trilinos: An Object-Oriented Solver Framework
# Copyright (2001) Sandia Corporation
#
#
# Copyright (2001) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000, there is a non-exclusive license for use of this
# work by or on behalf of the U.S. Government. Export of this program
# may require a license from the United States Government.
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Corporation nor the names of the
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# NOTICE: The United States Government is granted for itself and others
# acting on its behalf a paid-up, nonexclusive, irrevocable worldwide
# license in this data to reproduce, prepare derivative works, and
# perform publicly and display publicly. Beginning five (5) years from
# July 25, 2001, the United States Government is granted for itself and
# others acting on its behalf a paid-up, nonexclusive, irrevocable
# worldwide license in this data to reproduce, prepare derivative works,
# distribute copies to the public, perform publicly and display
# publicly, and to permit others to do so.
#
# NEITHER THE UNITED STATES GOVERNMENT, NOR THE UNITED STATES DEPARTMENT
# OF ENERGY, NOR SANDIA CORPORATION, NOR ANY OF THEIR EMPLOYEES, MAKES
# ANY WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LEGAL LIABILITY OR
# RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS, OR USEFULNESS OF ANY
# INFORMATION, APPARATUS, PRODUCT, OR PROCESS DISCLOSED, OR REPRESENTS
# THAT ITS USE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS.
#
# ************************************************************************
# @HEADER
# Check for CUDA support.
#
# NOTE: FIND_PACKAGE(CUDA ...) is deliberately called WITHOUT the REQUIRED
# keyword.  With REQUIRED, find_package() aborts configuration by itself
# when CUDA is missing, which makes the _CUDA_FAILURE bookkeeping below --
# and its clearer FATAL_ERROR diagnostic plus the TPL_ENABLE_CUDA reset --
# unreachable dead code.  Letting the failure flow through _CUDA_FAILURE
# keeps the designed error path alive.
SET(_CUDA_FAILURE OFF)
# Have CMake find CUDA (minimum toolkit version 3.2)
IF(NOT _CUDA_FAILURE)
FIND_PACKAGE(CUDA 3.2)
IF (NOT CUDA_FOUND)
SET(_CUDA_FAILURE ON)
ENDIF()
ENDIF()
# # Test that CUDA compiler works
# IF(NOT _CUDA_FAILURE)
#   INCLUDE(TrilinosCUDASupport)
#   SET(SRC "
#     #include <cuda_runtime.h>
#     __global__ void vecAdd(const float* a, const float* b, float* c, int N)
#     {
#        int i = blockDim.x * blockIdx.x + threadIdx.x;
#        if (i < N) c[i] = a[i] + b[i];
#     }
#     __global__ void vecInit(float* x, float val, int N)
#     {
#        int i = blockDim.x * blockIdx.x + threadIdx.x;
#        if (i < N) x[i] = val;
#     }
#     int main() {
#       const int N = 2048;
#       const int threadsPerBlock = 256;
#       const int blocksPerGrid = 8;
#       float* a = NULL;
#       float* b = NULL;
#       float* c = NULL;
#       cudaMalloc((void**)&a, N);
#       cudaMalloc((void**)&b, N);
#       cudaMalloc((void**)&c, N);
#       // init
#       vecInit<<<blocksPerGrid, threadsPerBlock>>>(a,1.0f,N);
#       vecInit<<<blocksPerGrid, threadsPerBlock>>>(b,2.0f,N);
#       vecInit<<<blocksPerGrid, threadsPerBlock>>>(c,0.0f,N);
#       // run
#       vecAdd<<<blocksPerGrid, threadsPerBlock>>>(a, b, c, N);
#     }
#   ")
#   CHECK_CUDA_SOURCE_COMPILES(${SRC} _NVCC_SUCCESS)
#   IF(NOT _NVCC_SUCCESS)
#     SET(_CUDA_FAILURE ON)
#   ENDIF()
# ENDIF()
IF(NOT _CUDA_FAILURE)
# if we haven't met failure
######################################################
### Commented out code from original Trilinos file ###
######################################################
#macro(PACKAGE_ADD_CUDA_LIBRARY cuda_target)
#  TRIBITS_ADD_LIBRARY(${cuda_target} ${ARGN} CUDALIBRARY)
#endmacro()
#GLOBAL_SET(TPL_CUDA_LIBRARY_DIRS)
#GLOBAL_SET(TPL_CUDA_INCLUDE_DIRS ${CUDA_TOOLKIT_INCLUDE})
#GLOBAL_SET(TPL_CUDA_LIBRARIES ${CUDA_CUDART_LIBRARY} ${CUDA_cublas_LIBRARY}
#  ${CUDA_cufft_LIBRARY})
######################################################
# Register the CUDA runtime with TriBITS as the CUDA TPL.
TRIBITS_TPL_FIND_INCLUDE_DIRS_AND_LIBRARIES( CUDA
REQUIRED_HEADERS cuda.h
REQUIRED_LIBS_NAMES "cudart"
)
ELSE()
# Record the disablement for the enclosing scope, then abort with a clear
# message (the PARENT_SCOPE set executes before the fatal error fires).
SET(TPL_ENABLE_CUDA OFF PARENT_SCOPE)
MESSAGE(FATAL_ERROR "\nDid not find acceptable version of CUDA compiler")
ENDIF()

@ -0,0 +1,59 @@
# @HEADER
# ************************************************************************
#
# Trilinos: An Object-Oriented Solver Framework
# Copyright (2001) Sandia Corporation
#
#
# Copyright (2001) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000, there is a non-exclusive license for use of this
# work by or on behalf of the U.S. Government. Export of this program
# may require a license from the United States Government.
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Corporation nor the names of the
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# NOTICE: The United States Government is granted for itself and others
# acting on its behalf a paid-up, nonexclusive, irrevocable worldwide
# license in this data to reproduce, prepare derivative works, and
# perform publicly and display publicly. Beginning five (5) years from
# July 25, 2001, the United States Government is granted for itself and
# others acting on its behalf a paid-up, nonexclusive, irrevocable
# worldwide license in this data to reproduce, prepare derivative works,
# distribute copies to the public, perform publicly and display
# publicly, and to permit others to do so.
#
# NEITHER THE UNITED STATES GOVERNMENT, NOR THE UNITED STATES DEPARTMENT
# OF ENERGY, NOR SANDIA CORPORATION, NOR ANY OF THEIR EMPLOYEES, MAKES
# ANY WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LEGAL LIABILITY OR
# RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS, OR USEFULNESS OF ANY
# INFORMATION, APPARATUS, PRODUCT, OR PROCESS DISCLOSED, OR REPRESENTS
# THAT ITS USE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS.
#
# ************************************************************************
# @HEADER
# Register the dynamic-loading library (libdl) with TriBITS as the DLlib
# TPL; only the "dl" library is required, no headers are checked.
tribits_tpl_find_include_dirs_and_libraries(DLlib
  REQUIRED_LIBS_NAMES dl)

@ -0,0 +1,39 @@
# @HEADER
# Copyright(C) 2019 National Technology & Engineering Solutions of
# Sandia, LLC (NTESS). Under the terms of Contract DE-NA0003525 with
# NTESS, the U.S. Government retains certain rights in this software.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of NTESS nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# @HEADER
# Locate the Cray DataWarp burst-buffer library and register its header
# and library with TriBITS as the DataWarp TPL.
find_package(DataWarp REQUIRED)
tribits_tpl_find_include_dirs_and_libraries(DataWarp
  REQUIRED_HEADERS datawarp.h
  REQUIRED_LIBS_NAMES "datawarp"
)

@ -0,0 +1,59 @@
# @HEADER
# ************************************************************************
#
# Trilinos: An Object-Oriented Solver Framework
# Copyright (2001, 2022) Sandia Corporation
#
#
# Copyright (2001) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000, there is a non-exclusive license for use of this
# work by or on behalf of the U.S. Government. Export of this program
# may require a license from the United States Government.
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Corporation nor the names of the
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# NOTICE: The United States Government is granted for itself and others
# acting on its behalf a paid-up, nonexclusive, irrevocable worldwide
# license in this data to reproduce, prepare derivative works, and
# perform publicly and display publicly. Beginning five (5) years from
# July 25, 2001, the United States Government is granted for itself and
# others acting on its behalf a paid-up, nonexclusive, irrevocable
# worldwide license in this data to reproduce, prepare derivative works,
# distribute copies to the public, perform publicly and display
# publicly, and to permit others to do so.
#
# NEITHER THE UNITED STATES GOVERNMENT, NOR THE UNITED STATES DEPARTMENT
# OF ENERGY, NOR SANDIA CORPORATION, NOR ANY OF THEIR EMPLOYEES, MAKES
# ANY WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LEGAL LIABILITY OR
# RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS, OR USEFULNESS OF ANY
# INFORMATION, APPARATUS, PRODUCT, OR PROCESS DISCLOSED, OR REPRESENTS
# THAT ITS USE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS.
#
# ************************************************************************
# @HEADER
# Pull in GoogleTest via its CMake package-config file and expose the
# imported GTest::gtest target to TriBITS as the GTest TPL.
find_package(GTest CONFIG REQUIRED)
tribits_extpkg_create_imported_all_libs_target_and_config_file(GTest
  INNER_FIND_PACKAGE_NAME GTest
  IMPORTED_TARGETS_FOR_ALL_LIBS GTest::gtest
)

@ -0,0 +1,61 @@
# @HEADER
# ************************************************************************
#
# Trilinos: An Object-Oriented Solver Framework
# Copyright (2001) Sandia Corporation
#
#
# Copyright (2001) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000, there is a non-exclusive license for use of this
# work by or on behalf of the U.S. Government. Export of this program
# may require a license from the United States Government.
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Corporation nor the names of the
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# NOTICE: The United States Government is granted for itself and others
# acting on its behalf a paid-up, nonexclusive, irrevocable worldwide
# license in this data to reproduce, prepare derivative works, and
# perform publicly and display publicly. Beginning five (5) years from
# July 25, 2001, the United States Government is granted for itself and
# others acting on its behalf a paid-up, nonexclusive, irrevocable
# worldwide license in this data to reproduce, prepare derivative works,
# distribute copies to the public, perform publicly and display
# publicly, and to permit others to do so.
#
# NEITHER THE UNITED STATES GOVERNMENT, NOR THE UNITED STATES DEPARTMENT
# OF ENERGY, NOR SANDIA CORPORATION, NOR ANY OF THEIR EMPLOYEES, MAKES
# ANY WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LEGAL LIABILITY OR
# RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS, OR USEFULNESS OF ANY
# INFORMATION, APPARATUS, PRODUCT, OR PROCESS DISCLOSED, OR REPRESENTS
# THAT ITS USE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS.
#
# ************************************************************************
# @HEADER
# Locate HDF5 (C and high-level components) and register its header plus
# the hdf5_hl/hdf5/z link set with TriBITS as the HDF5 TPL.
find_package(HDF5 REQUIRED COMPONENTS C HL)
tribits_tpl_find_include_dirs_and_libraries(HDF5
  REQUIRED_HEADERS hdf5.h
  REQUIRED_LIBS_NAMES hdf5_hl hdf5 z
)

@ -0,0 +1,60 @@
# @HEADER
# ************************************************************************
#
# Trilinos: An Object-Oriented Solver Framework
# Copyright (2001) Sandia Corporation
#
#
# Copyright (2001) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000, there is a non-exclusive license for use of this
# work by or on behalf of the U.S. Government. Export of this program
# may require a license from the United States Government.
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Corporation nor the names of the
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# NOTICE: The United States Government is granted for itself and others
# acting on its behalf a paid-up, nonexclusive, irrevocable worldwide
# license in this data to reproduce, prepare derivative works, and
# perform publicly and display publicly. Beginning five (5) years from
# July 25, 2001, the United States Government is granted for itself and
# others acting on its behalf a paid-up, nonexclusive, irrevocable
# worldwide license in this data to reproduce, prepare derivative works,
# distribute copies to the public, perform publicly and display
# publicly, and to permit others to do so.
#
# NEITHER THE UNITED STATES GOVERNMENT, NOR THE UNITED STATES DEPARTMENT
# OF ENERGY, NOR SANDIA CORPORATION, NOR ANY OF THEIR EMPLOYEES, MAKES
# ANY WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LEGAL LIABILITY OR
# RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS, OR USEFULNESS OF ANY
# INFORMATION, APPARATUS, PRODUCT, OR PROCESS DISCLOSED, OR REPRESENTS
# THAT ITS USE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS.
#
# ************************************************************************
# @HEADER
# Register the METIS graph-partitioning library with TriBITS as the
# METIS TPL.
tribits_tpl_find_include_dirs_and_libraries(METIS
  REQUIRED_HEADERS metis.h
  REQUIRED_LIBS_NAMES "metis"
)

@@ -0,0 +1,62 @@
# @HEADER
# ************************************************************************
#
# Trilinos: An Object-Oriented Solver Framework
# Copyright (2001) Sandia Corporation
#
#
# Copyright (2001) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000, there is a non-exclusive license for use of this
# work by or on behalf of the U.S. Government. Export of this program
# may require a license from the United States Government.
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Corporation nor the names of the
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# NOTICE: The United States Government is granted for itself and others
# acting on its behalf a paid-up, nonexclusive, irrevocable worldwide
# license in this data to reproduce, prepare derivative works, and
# perform publicly and display publicly. Beginning five (5) years from
# July 25, 2001, the United States Government is granted for itself and
# others acting on its behalf a paid-up, nonexclusive, irrevocable
# worldwide license in this data to reproduce, prepare derivative works,
# distribute copies to the public, perform publicly and display
# publicly, and to permit others to do so.
#
# NEITHER THE UNITED STATES GOVERNMENT, NOR THE UNITED STATES DEPARTMENT
# OF ENERGY, NOR SANDIA CORPORATION, NOR ANY OF THEIR EMPLOYEES, MAKES
# ANY WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LEGAL LIABILITY OR
# RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS, OR USEFULNESS OF ANY
# INFORMATION, APPARATUS, PRODUCT, OR PROCESS DISCLOSED, OR REPRESENTS
# THAT ITS USE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS.
#
# ************************************************************************
# @HEADER
# Honor <Pkg>_ROOT variables in find_package() (policy CMP0074, new in
# CMake 3.12) so users can point at a Matio install via Matio_ROOT.
# NOTE: the previous check used `GREATER` on ${CMAKE_VERSION}; GREATER
# performs a *numeric* comparison and is not reliable for three-part
# version strings.  Testing for the policy itself is both correct and
# works on every CMake release that knows CMP0074.
if(POLICY CMP0074)
  cmake_policy(SET CMP0074 NEW)
endif()
# Locate Matio via the project's find module, then register the header
# (matio.h) and library with TriBITS for dependent packages.
find_package(Matio REQUIRED)
TRIBITS_TPL_FIND_INCLUDE_DIRS_AND_LIBRARIES( Matio
  REQUIRED_HEADERS matio.h
  REQUIRED_LIBS_NAMES "matio")

@@ -0,0 +1,61 @@
# @HEADER
# ************************************************************************
#
# Trilinos: An Object-Oriented Solver Framework
# Copyright (2001) Sandia Corporation
#
#
# Copyright (2001) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000, there is a non-exclusive license for use of this
# work by or on behalf of the U.S. Government. Export of this program
# may require a license from the United States Government.
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Corporation nor the names of the
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# NOTICE: The United States Government is granted for itself and others
# acting on its behalf a paid-up, nonexclusive, irrevocable worldwide
# license in this data to reproduce, prepare derivative works, and
# perform publicly and display publicly. Beginning five (5) years from
# July 25, 2001, the United States Government is granted for itself and
# others acting on its behalf a paid-up, nonexclusive, irrevocable
# worldwide license in this data to reproduce, prepare derivative works,
# distribute copies to the public, perform publicly and display
# publicly, and to permit others to do so.
#
# NEITHER THE UNITED STATES GOVERNMENT, NOR THE UNITED STATES DEPARTMENT
# OF ENERGY, NOR SANDIA CORPORATION, NOR ANY OF THEIR EMPLOYEES, MAKES
# ANY WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LEGAL LIABILITY OR
# RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS, OR USEFULNESS OF ANY
# INFORMATION, APPARATUS, PRODUCT, OR PROCESS DISCLOSED, OR REPRESENTS
# THAT ITS USE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS.
#
# ************************************************************************
# @HEADER
# Locate NetCDF via the project's find module, then register the
# netcdf.h header and netcdf library with TriBITS so dependent
# SEACAS/Trilinos packages can link against the Netcdf TPL.
find_package(NetCDF REQUIRED)
TRIBITS_TPL_FIND_INCLUDE_DIRS_AND_LIBRARIES(Netcdf
  REQUIRED_HEADERS    netcdf.h
  REQUIRED_LIBS_NAMES netcdf)

Some files were not shown because too many files have changed in this diff. Show More

Loading…
Cancel
Save