diff --git a/.gitmodules b/.gitmodules index b1f475b64..f72ba6d90 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,9 +1,7 @@ [submodule "spack"] path = spack - #url = https://github.com/spack/spack - #branch = develop url = https://github.com/jcsda/spack - branch = jcsda_emc_spack_stack + branch = spack-stack-dev [submodule "doc/CMakeModules"] path = doc/CMakeModules url = https://github.com/noaa-emc/cmakemodules diff --git a/configs/common/modules_lmod.yaml b/configs/common/modules_lmod.yaml index 6b2b9af75..44d608c85 100644 --- a/configs/common/modules_lmod.yaml +++ b/configs/common/modules_lmod.yaml @@ -189,7 +189,6 @@ modules: 'TOCGRIB': '{prefix}/bin/tocgrib' 'TOCGRIB2': '{prefix}/bin/tocgrib2' 'TOCGRIB2SUPER': '{prefix}/bin/tocgrib2super' - 'WGRIB2': '{prefix}/bin/wgrib2' landsfcutil: environment: set: diff --git a/configs/common/modules_tcl.yaml b/configs/common/modules_tcl.yaml index 8011b215f..5bafae524 100644 --- a/configs/common/modules_tcl.yaml +++ b/configs/common/modules_tcl.yaml @@ -191,7 +191,6 @@ modules: 'TOCGRIB': '{prefix}/bin/tocgrib' 'TOCGRIB2': '{prefix}/bin/tocgrib2' 'TOCGRIB2SUPER': '{prefix}/bin/tocgrib2super' - 'WGRIB2': '{prefix}/bin/wgrib2' landsfcutil: environment: set: diff --git a/configs/common/packages.yaml b/configs/common/packages.yaml index 31a4dc967..312ec407d 100644 --- a/configs/common/packages.yaml +++ b/configs/common/packages.yaml @@ -82,7 +82,7 @@ freetype: variants: +pic g2: - version: ['3.4.5'] + version: ['3.4.9'] g2c: version: ['1.6.4'] g2tmpl: @@ -100,7 +100,7 @@ # the container builds. 
#version: ['2.11.0'] grib-util: - version: ['1.3.0'] + version: ['1.4.0'] gsibec: version: ['1.2.1'] gsi-ncdiag: @@ -114,7 +114,7 @@ version: ['1.14.3'] variants: +hl +fortran +mpi ~threadsafe ~szip ip: - version: ['4.3.0'] + version: ['5.0.0'] variants: precision=4,d,8 ip2: version: ['1.1.2'] @@ -259,6 +259,8 @@ variants: precision=4,d,8 udunits: version: ['2.2.28'] + ufs-utils: + version: ['1.13.0'] # Note - we can remove upp from stack at some point? upp: version: ['10.0.10'] diff --git a/configs/containers/docker-ubuntu-clang-mpich.yaml b/configs/containers/docker-ubuntu-clang-mpich.yaml index 8e169cf12..04e5b18f6 100644 --- a/configs/containers/docker-ubuntu-clang-mpich.yaml +++ b/configs/containers/docker-ubuntu-clang-mpich.yaml @@ -115,7 +115,7 @@ spack: os: ubuntu:20.04 spack: url: https://github.com/jcsda/spack - ref: jcsda_emc_spack_stack + ref: spack-stack-dev resolve_sha: true # Whether or not to strip binaries diff --git a/configs/containers/docker-ubuntu-gcc-openmpi.yaml b/configs/containers/docker-ubuntu-gcc-openmpi.yaml index f5949a5f2..674f67c34 100644 --- a/configs/containers/docker-ubuntu-gcc-openmpi.yaml +++ b/configs/containers/docker-ubuntu-gcc-openmpi.yaml @@ -96,7 +96,7 @@ spack: os: ubuntu:20.04 spack: url: https://github.com/jcsda/spack - ref: jcsda_emc_spack_stack + ref: spack-stack-dev resolve_sha: true # Whether or not to strip binaries diff --git a/configs/containers/docker-ubuntu-intel-impi.yaml b/configs/containers/docker-ubuntu-intel-impi.yaml index a4f201536..ac8c160f1 100644 --- a/configs/containers/docker-ubuntu-intel-impi.yaml +++ b/configs/containers/docker-ubuntu-intel-impi.yaml @@ -115,7 +115,7 @@ spack: os: ubuntu:20.04 spack: url: https://github.com/jcsda/spack - ref: jcsda_emc_spack_stack + ref: spack-stack-dev resolve_sha: true # Whether or not to strip binaries diff --git a/configs/containers/specs/jedi-ci.yaml b/configs/containers/specs/jedi-ci.yaml index 86c5cb89a..38ebe9324 100644 --- 
a/configs/containers/specs/jedi-ci.yaml +++ b/configs/containers/specs/jedi-ci.yaml @@ -2,15 +2,15 @@ specs: [base-env@1.0.0, jedi-base-env@1.0.0, ewok-env@1.0.0, jedi-fv3-env@1.0.0, jedi-mpas-env@1.0.0, bacio@2.4.1, bison@3.8.2, bufr@12.0.1, ecbuild@3.7.2, eccodes@2.33.0, ecflow@5, eckit@1.24.5, ecmwf-atlas@0.36.0 +fckit +trans +tesselation +fftw, fiat@1.2.0, ectrans@1.2.0 +fftw, - eigen@3.4.0, fckit@0.11.0, fms@release-jcsda, g2@3.4.5, g2tmpl@1.10.2, gftl-shared@1.6.1, - gsibec@1.2.1, hdf@4.2.15, hdf5@1.14.3, ip@4.3.0, jasper@2.0.32, jedi-cmake@1.4.0, + eigen@3.4.0, fckit@0.11.0, fms@release-jcsda, g2@3.4.9, g2tmpl@1.10.2, gftl-shared@1.6.1, + gsibec@1.2.1, hdf@4.2.15, hdf5@1.14.3, ip@5.0.0, jasper@2.0.32, jedi-cmake@1.4.0, libpng@1.6.37, nccmp@1.9.0.1, netcdf-c@4.9.2, netcdf-cxx4@4.3.1, netcdf-fortran@4.6.1, nlohmann-json@3.10.5, nlohmann-json-schema-validator@2.1.0, parallelio@2.6.2, parallel-netcdf@1.12.3, py-eccodes@1.5.0, py-f90nml@1.4.3, py-gitpython@3.1.40, py-h5py@3.8.0, py-numpy@1.22.3, py-pandas@1.5.3, py-pip, py-pyyaml@6.0, py-scipy@1.11.4, py-shapely@1.8.0, py-xarray@2023.7.0, sp@2.5.0, udunits@2.2.28, w3emc@2.10.0, nco@5.1.6, esmf@8.6.0, mapl@2.40.3, - zlib@1.2.13, zstd@1.5.2, odc@1.4.6, shumlib@macos_clang_linux_intel_port, + zlib-ng@2.1.5, zstd@1.5.2, odc@1.4.6, shumlib@macos_clang_linux_intel_port, awscli-v2@2.13.22, py-globus-cli@3.16.0] # Notes: # 1. 
Don't build CRTM by default so that it gets built in the JEDI bundles diff --git a/configs/sites/derecho/compilers.yaml b/configs/sites/derecho/compilers.yaml index 897260905..553814786 100644 --- a/configs/sites/derecho/compilers.yaml +++ b/configs/sites/derecho/compilers.yaml @@ -25,7 +25,7 @@ compilers:: FI_CXI_RX_MATCH_MODE: 'hybrid' # https://github.com/JCSDA/spack-stack/issues/1012 I_MPI_EXTRA_FILESYSTEM: 'ON' - extra_rpaths: [] + extra_rpaths: [] - compiler: spec: gcc@12.2.0 paths: diff --git a/configs/sites/discover-scu16/compilers.yaml b/configs/sites/discover-scu16/compilers.yaml index f763d7ccd..84cfc9b9a 100644 --- a/configs/sites/discover-scu16/compilers.yaml +++ b/configs/sites/discover-scu16/compilers.yaml @@ -1,24 +1,24 @@ compilers: - compiler: - spec: intel@=2021.5.0 + spec: intel@=2021.6.0 paths: - cc: /usr/local/intel/oneapi/2021/compiler/2022.0.1/linux/bin/intel64/icc - cxx: /usr/local/intel/oneapi/2021/compiler/2022.0.1/linux/bin/intel64/icpc - f77: /usr/local/intel/oneapi/2021/compiler/2022.0.1/linux/bin/intel64/ifort - fc: /usr/local/intel/oneapi/2021/compiler/2022.0.1/linux/bin/intel64/ifort + cc: /usr/local/intel/oneapi/2021/compiler/2022.1.0/linux/bin/intel64/icc + cxx: /usr/local/intel/oneapi/2021/compiler/2022.1.0/linux/bin/intel64/icpc + f77: /usr/local/intel/oneapi/2021/compiler/2022.1.0/linux/bin/intel64/ifort + fc: /usr/local/intel/oneapi/2021/compiler/2022.1.0/linux/bin/intel64/ifort flags: ldflags: '-L/usr/local/other/gcc/11.2.0/lib64' operating_system: sles12 target: x86_64 modules: - - comp/intel/2021.5.0 + - comp/intel/2021.6.0 environment: prepend_path: PATH: '/usr/local/other/gcc/11.2.0/bin' CPATH: '/usr/local/other/gcc/11.2.0/include' - LD_LIBRARY_PATH: '/usr/local/intel/oneapi/2021/compiler/2022.0.1/linux/compiler/lib/intel64_lin:/usr/local/other/gcc/11.2.0/lib64' + LD_LIBRARY_PATH: '/usr/local/intel/oneapi/2021/compiler/2022.1.0/linux/compiler/lib/intel64_lin:/usr/local/other/gcc/11.2.0/lib64' set: - I_MPI_ROOT: 
'/usr/local/intel/oneapi/2021/mpi/2021.5.0' + I_MPI_ROOT: '/usr/local/intel/oneapi/2021/mpi/2021.6.0' extra_rpaths: [] - compiler: spec: gcc@=12.1.0 diff --git a/configs/sites/discover-scu16/packages.yaml b/configs/sites/discover-scu16/packages.yaml index 113218cee..83532853f 100644 --- a/configs/sites/discover-scu16/packages.yaml +++ b/configs/sites/discover-scu16/packages.yaml @@ -1,18 +1,18 @@ packages: all: - compiler:: [intel@2021.5.0, gcc@12.1.0] + compiler:: [intel@2021.6.0, gcc@12.1.0] providers: - mpi:: [intel-oneapi-mpi@2021.5.0, openmpi@4.1.3] + mpi:: [intel-oneapi-mpi@2021.6.0, openmpi@4.1.3] ### MPI, Python, MKL mpi: buildable: False intel-oneapi-mpi: externals: - - spec: intel-oneapi-mpi@2021.5.0%intel@2021.5.0 + - spec: intel-oneapi-mpi@2021.6.0%intel@2021.6.0 prefix: /usr/local/intel/oneapi/2021 modules: - - mpi/impi/2021.5.0 + - mpi/impi/2021.6.0 openmpi:: externals: - spec: openmpi@4.1.3%gcc@12.1.0 ~cuda~cxx~cxx_exceptions~java~memchecker+pmi~static~wrapper-rpath @@ -26,8 +26,12 @@ packages: # Problems building shared hdf-eos2 with Intel, not needed hdf-eos2: variants: ~shared + # Explicitly requested by GMAO collaborators met: variants: +python +grib2 +graphics +lidar2nc +modis + # xnnpack option doesn't build on this system + py-torch: + variants: ~xnnpack ### All other external packages listed alphabetically autoconf: diff --git a/configs/sites/discover-scu17/compilers.yaml b/configs/sites/discover-scu17/compilers.yaml index d24bde1ba..873d9a693 100644 --- a/configs/sites/discover-scu17/compilers.yaml +++ b/configs/sites/discover-scu17/compilers.yaml @@ -20,11 +20,10 @@ compilers: CPATH: '/usr/local/other/gcc/11.4.0/include' LD_LIBRARY_PATH: '/usr/local/intel/oneapi/2021/compiler/2023.2.1/linux/compiler/lib/intel64_lin:/usr/local/other/gcc/11.4.0/lib64' set: - # https://github.com/JCSDA/spack-stack/issues/1012 - I_MPI_EXTRA_FILESYSTEM: 'ON' # https://github.com/JCSDA/spack-stack/issues/1011 I_MPI_SHM_HEAP_VSIZE: '512' PSM2_MEMORY: 'large' 
+ # https://github.com/JCSDA/spack-stack/issues/1012 I_MPI_EXTRA_FILESYSTEM: '1' I_MPI_EXTRA_FILESYSTEM_FORCE: 'gpfs' I_MPI_FABRICS: 'ofi' diff --git a/configs/sites/hera/compilers.yaml b/configs/sites/hera/compilers.yaml index 6f8f2fb05..8150d7afc 100644 --- a/configs/sites/hera/compilers.yaml +++ b/configs/sites/hera/compilers.yaml @@ -7,7 +7,7 @@ compilers: f77: /apps/oneapi/compiler/2022.0.2/linux/bin/intel64/ifort fc: /apps/oneapi/compiler/2022.0.2/linux/bin/intel64/ifort flags: {} - operating_system: centos7 + operating_system: rocky8 modules: - intel/2022.1.2 environment: @@ -24,7 +24,7 @@ compilers: f77: /apps/intel/parallel_studio_xe_2018.4.057/compilers_and_libraries_2018/linux/bin/intel64/ifort fc: /apps/intel/parallel_studio_xe_2018.4.057/compilers_and_libraries_2018/linux/bin/intel64/ifort flags: {} - operating_system: centos7 + operating_system: rocky8 modules: - intel/18.0.5.274 - compiler: @@ -35,7 +35,7 @@ compilers: f77: /apps/gnu/gcc-9.2.0/bin/gfortran fc: /apps/gnu/gcc-9.2.0/bin/gfortran flags: {} - operating_system: centos7 + operating_system: rocky8 modules: - gnu/9.2.0 environment: {} diff --git a/configs/sites/hera/packages.yaml b/configs/sites/hera/packages.yaml index 95c8e4e42..a1a0b3817 100644 --- a/configs/sites/hera/packages.yaml +++ b/configs/sites/hera/packages.yaml @@ -3,7 +3,7 @@ packages: compiler:: [intel@2021.5.0, gcc@9.2.0] #compiler:: [intel@18.0.5.274] providers: - mpi:: [intel-oneapi-mpi@2021.5.1, openmpi@4.1.5] + mpi:: [intel-oneapi-mpi@2021.5.1, openmpi@4.1.6] #mpi:: [intel-mpi@2018.0.4] # To support hecflow01 target: [haswell] @@ -22,11 +22,14 @@ packages: prefix: /apps/oneapi openmpi: externals: - - spec: openmpi@4.1.5~cuda~cxx~cxx_exceptions~java+lustre~memchecker+pmi~static~wrapper-rpath schedulers=slurm - prefix: /scratch1/NCEPDEV/jcsda/jedipara/spack-stack/openmpi-4.1.5 + - spec: 
openmpi@4.1.6~atomics~cuda~cxx~cxx_exceptions~gpfs~internal-hwloc~internal-libevent~internal-pmix~java+legacylaunchers~lustre~memchecker~openshmem~orterunprefix+pmi+romio+rsh~singularity+static+vt+wrapper-rpath fabrics=ucx schedulers=slurm modules: - gnu/9.2.0 - - openmpi/4.1.5 + - openmpi/4.1.6_gnu9.2.0 + zlib-ng: + require: + - any_of: ['~shared'] + when: "%intel" ### All other external packages listed alphabetically autoconf: diff --git a/configs/sites/jet/compilers.yaml b/configs/sites/jet/compilers.yaml index 6f8f2fb05..3105b2da7 100644 --- a/configs/sites/jet/compilers.yaml +++ b/configs/sites/jet/compilers.yaml @@ -7,14 +7,14 @@ compilers: f77: /apps/oneapi/compiler/2022.0.2/linux/bin/intel64/ifort fc: /apps/oneapi/compiler/2022.0.2/linux/bin/intel64/ifort flags: {} - operating_system: centos7 + operating_system: rocky8 modules: - intel/2022.1.2 environment: prepend_path: - PATH: '/apps/gnu/gcc-9.2.0/bin' - LD_LIBRARY_PATH: '/apps/gnu/gcc-9.2.0/lib64' - CPATH: '/apps/gnu/gcc-9.2.0/include' + PATH: '/apps/gnu/gcc-9.2.0b/bin' + LD_LIBRARY_PATH: '/apps/gnu/gcc-9.2.0b/lib64' + CPATH: '/apps/gnu/gcc-9.2.0b/include' extra_rpaths: [] - compiler: spec: intel@18.0.5.274 @@ -24,19 +24,19 @@ compilers: f77: /apps/intel/parallel_studio_xe_2018.4.057/compilers_and_libraries_2018/linux/bin/intel64/ifort fc: /apps/intel/parallel_studio_xe_2018.4.057/compilers_and_libraries_2018/linux/bin/intel64/ifort flags: {} - operating_system: centos7 + operating_system: rocky8 modules: - intel/18.0.5.274 - compiler: spec: gcc@9.2.0 paths: - cc: /apps/gnu/gcc-9.2.0/bin/gcc - cxx: /apps/gnu/gcc-9.2.0/bin/g++ - f77: /apps/gnu/gcc-9.2.0/bin/gfortran - fc: /apps/gnu/gcc-9.2.0/bin/gfortran + cc: /apps/gnu/gcc-9.2.0b/bin/gcc + cxx: /apps/gnu/gcc-9.2.0b/bin/g++ + f77: /apps/gnu/gcc-9.2.0b/bin/gfortran + fc: /apps/gnu/gcc-9.2.0b/bin/gfortran flags: {} - operating_system: centos7 + operating_system: rocky8 modules: - - gnu/9.2.0 + - gnu/9.2.0b environment: {} extra_rpaths: [] diff --git 
a/configs/sites/jet/packages.yaml b/configs/sites/jet/packages.yaml index 5ae9b49a6..549546be2 100644 --- a/configs/sites/jet/packages.yaml +++ b/configs/sites/jet/packages.yaml @@ -3,7 +3,7 @@ packages: compiler:: [intel@2021.5.0, gcc@9.2.0] #compiler:: [intel@18.0.5.274] providers: - mpi:: [intel-oneapi-mpi@2021.5.1, openmpi@3.1.4] + mpi:: [intel-oneapi-mpi@2021.5.1, openmpi@4.1.6] #mpi:: [intel-mpi@2018.4.274] # To support all generations of jet target: [core2] @@ -23,11 +23,15 @@ packages: - impi/2022.1.2 openmpi: externals: - - spec: openmpi@3.1.4%gcc@9.2.0~cuda+cxx+cxx_exceptions~java~memchecker+pmi+static~wrapper-rpath schedulers=slurm - prefix: /apps/openmpi/3.1.4/gnu/gcc-9.2.0 + - spec: openmpi@4.1.6%gcc@9.2.0 + prefix: /apps/openmpi/4.1.6/gnu9.2.0/ modules: - - gnu/9.2.0 - - openmpi/3.1.4 + - gnu/9.2.0b + - openmpi/4.1.6_gnu9.2.0 + zlib-ng: + require: + - any_of: ['~shared'] + when: "%intel" ### All other external packages listed alphabetically autoconf: @@ -148,12 +152,6 @@ packages: prefix: /lfs4/HFIP/hfv3gfs/role.epic/apps/mysql-8.0.31 modules: - mysql/8.0.31 - ncurses: - externals: - - spec: ncurses@6.3.20211021+termlib abi=6 - prefix: /lfs4/HFIP/hfv3gfs/Kyle.Gerheiser/miniconda/miniconda-3.9.7 - - spec: ncurses@5.9.20130511+termlib abi=5 - prefix: /usr perl: externals: - spec: perl@5.16.3~cpanm+shared+threads @@ -186,10 +184,6 @@ packages: externals: - spec: wget@1.14 prefix: /usr - xz: - externals: - - spec: xz@5.2.5 - prefix: /lfs4/HFIP/hfv3gfs/Kyle.Gerheiser/miniconda/miniconda-3.9.7 zip: externals: - spec: zip@3.0 diff --git a/configs/sites/noaa-gcloud/README.md b/configs/sites/noaa-gcloud/README.md index a7399c401..d60a111d2 100644 --- a/configs/sites/noaa-gcloud/README.md +++ b/configs/sites/noaa-gcloud/README.md @@ -26,6 +26,9 @@ mkdir -p /contrib/admin cat < /contrib/admin/basic_setup.sh #!/bin/bash +yum-config-manager --disable intel-clck-2019-repo +yum-config-manager --disable intel-hpc-platform +yum-config-manager --disable intelpython 
chmod 777 /contrib yum install -y qt5-qtbase-devel yum install -y qt5-qtsvg-devel diff --git a/configs/sites/orion/compilers.yaml b/configs/sites/orion/compilers.yaml index 36d18c38b..39f646399 100644 --- a/configs/sites/orion/compilers.yaml +++ b/configs/sites/orion/compilers.yaml @@ -17,24 +17,6 @@ compilers: LD_LIBRARY_PATH: '/apps/gcc-10.2.0/gcc-10.2.0/lib64:/apps/gcc-10.2.0/gcc-10.2.0/contrib/lib' CPATH: '/apps/gcc-10.2.0/gcc-10.2.0/include' extra_rpaths: [] -- compiler: - spec: intel@18.0.5 - paths: - cc: /apps/intel-2018/intel-2018.u4/compilers_and_libraries_2018.5.274/linux/bin/intel64/icc - cxx: /apps/intel-2018/intel-2018.u4/compilers_and_libraries_2018.5.274/linux/bin/intel64/icpc - f77: /apps/intel-2018/intel-2018.u4/compilers_and_libraries_2018.5.274/linux/bin/intel64/ifort - fc: /apps/intel-2018/intel-2018.u4/compilers_and_libraries_2018.5.274/linux/bin/intel64/ifort - flags: {} - operating_system: centos7 - target: x86_64 - modules: - - intel/2018.4 - environment: - prepend_path: - PATH: '/apps/gcc-8/gcc-8.3.0/bin' - LD_LIBRARY_PATH: '/apps/gcc-8/gcc-8.3.0/lib64:/apps/gcc-8/gcc-8.3.0//lib:/apps/gcc-8/gcc-8.3.0/mpfr-4.0.2/lib:/apps/gcc-8/gcc-8.3.0/mpc-1.1.0/lib:/apps/gcc-8/gcc-8.3.0/gmp-6.1.2/lib' - CPATH: '/apps/gcc-8/gcc-8.3.0/include' - extra_rpaths: [] - compiler: spec: gcc@10.2.0 paths: diff --git a/configs/sites/orion/packages.yaml b/configs/sites/orion/packages.yaml index e5b3f50be..f53d23021 100644 --- a/configs/sites/orion/packages.yaml +++ b/configs/sites/orion/packages.yaml @@ -1,10 +1,8 @@ packages: all: compiler:: [intel@2022.0.2, gcc@10.2.0] - #compiler:: [intel@18.0.5] providers: mpi:: [intel-oneapi-mpi@2021.5.1, openmpi@4.0.4] - #mpi:: [intel-mpi@2018.5.274] ### MPI, Python, MKL mpi: @@ -15,12 +13,6 @@ packages: prefix: /apps/intel-2022.1.2/intel-2022.1.2 modules: - impi/2022.1.2 - intel-mpi: - externals: - - spec: intel-mpi@2018.5.274%intel@18.0.5 - prefix: /apps/intel-2018/intel-2018.u4/compilers_and_libraries_2018.5.274/linux/mpi - 
modules: - - impi/2018.4 openmpi: externals: - spec: openmpi@4.0.4%gcc@10.2.0~cuda~cxx~cxx_exceptions~java~memchecker+pmi~static~wrapper-rpath diff --git a/configs/sites/s4/packages.yaml b/configs/sites/s4/packages.yaml index 20ad01b68..e13d63cb2 100644 --- a/configs/sites/s4/packages.yaml +++ b/configs/sites/s4/packages.yaml @@ -7,6 +7,8 @@ packages: compiler:: [intel@2021.5.0] providers: mpi:: [intel-oneapi-mpi@2021.5.0] + # https://github.com/JCSDA/spack-stack/issues/1055 + zlib-api:: [zlib] ### MPI, Python, MKL mpi: diff --git a/doc/source/KnownIssues.rst b/doc/source/KnownIssues.rst index b168084b6..2c5a9f507 100644 --- a/doc/source/KnownIssues.rst +++ b/doc/source/KnownIssues.rst @@ -9,7 +9,7 @@ General 1. ``gcc@13`` (``gcc``, ``g++``, ``gfortran``) and ``apple-clang@15`` (``clang``, ``clang++``) not yet supported - Our software stack doesn't build with ``gcc@13`` yet. This is also true when combining the LLVM or Apple ``clang`` compiler with ``gfortran@13``. We also don't support the latest release of ``apple-clang@15`` yet. + Our software stack doesn't build with ``gcc@13`` yet. This is also true when combining the LLVM or Apple ``clang`` compiler with ``gfortran@13``. We also don't support the latest release of ``apple-clang@15`` with Xcode 15.3 yet, and with Xcode 15.0 a workaround is described in :numref:`Section %s `. 2. Build errors for ``mapl@2.35.2`` with ``mpich@4.1.1`` @@ -53,6 +53,10 @@ NASA Discover This can happen if a spack install is started in a ``screen`` session, because Discover puts the temporary data in directories like ``/gpfsm/dnb33/tdirs/login/discover13.29716.dheinzel``, which get wiped out after some time. Without ``screen``, this problem doesn't occur. +3. Insufficient diskspace when building ``py-pytorch`` + + This is because ``py-pytorch`` uses directory ``~/.ccache`` during the build, and the user's home directories have small quotas set. 
This problem can be avoided by creating a symbolic link from the home directory to a different place with sufficient quota: ``rm -fr ~/.ccache && ln -sf /path/to/dot_ccache_pytorch/ ~/.ccache``. It's probably a good idea to revert this hack after a successful installation. + ============================== NOAA Parallel Works ============================== diff --git a/doc/source/NewSiteConfigs.rst b/doc/source/NewSiteConfigs.rst index 6eac881af..a0ecb388b 100644 --- a/doc/source/NewSiteConfigs.rst +++ b/doc/source/NewSiteConfigs.rst @@ -13,13 +13,13 @@ It is also instructive to peruse the GitHub actions scripts in ``.github/workflo +-------------------------------------------+----------------------------------------------------------------------+---------------------------+ | Compiler | Versions tested/in use in one or more site configs | Spack compiler identifier | +===========================================+======================================================================+===========================+ -| Intel classic (icc, icpc, ifort) | 2021.3.0 to the latest available version in oneAPI 2023.1.0 | ``intel@`` | +| Intel classic (icc, icpc, ifort) | 2021.3.0 to the latest available version in oneAPI 2023.1.0 [#fn1]_ | ``intel@`` | +-------------------------------------------+----------------------------------------------------------------------+---------------------------+ | Intel mixed (icx, icpx, ifort) | all versions up to latest available version in oneAPI 2023.1.0 | ``intel@`` | +-------------------------------------------+----------------------------------------------------------------------+---------------------------+ | GNU (gcc, g++, gfortran) | 9.2.0 to 12.2.0 (note: 13.x.y is **not** yet supported) | ``gcc@`` | +-------------------------------------------+----------------------------------------------------------------------+---------------------------+ -| Apple clang (clang, clang++, w/ gfortran) | 13.1.6 to 15.0.0 [#fn1]_ | ``apple-clang@`` | +| 
Apple clang (clang, clang++, w/ gfortran) | 13.1.6 to 15.0.0 [#fn2]_ | ``apple-clang@`` | +-------------------------------------------+----------------------------------------------------------------------+---------------------------+ | LLVM clang (clang, clang++, w/ gfortran) | 10.0.0 to 14.0.3 | ``clang@`` | +-------------------------------------------+----------------------------------------------------------------------+---------------------------+ @@ -27,8 +27,11 @@ It is also instructive to peruse the GitHub actions scripts in ``.github/workflo .. rubric:: Footnotes .. [#fn1] - Note that apple-clang@14.x compiler versions are fully supported, and apple-clang@15.0.0 will work but requires the :ref:`workaround noted below`. - Also, when using apple-clang@15.0.0 you must use Command Line Tools version 15.1, and the Command Line Tools versions 15.3 and newer are not yet supported. + We have noted problems on some - not all - platforms with ``intel@2021.5.0`` when we switched from ``zlib`` to ``zlib-ng`` in spack-stack-1.7.0. These issues went away when using a different version of the compiler (anything between 2021.3.0 and 2021.11.0). It is therefore recommended to avoid using ``intel@2021.5.0`` unless it is the only option. + +.. [#fn2] + Note that ``apple-clang@14.x`` compiler versions are fully supported, and ``apple-clang@15.0.0`` will work but requires the :ref:`workaround noted below`. + Also, when using ``apple-clang@15.0.0`` you must use Command Line Tools version 15.1, and the Command Line Tools versions 15.3 and newer are not yet supported. .. 
_NewSiteConfigs_macOS: @@ -384,6 +387,8 @@ The following instructions were used to prepare a basic Red Hat 8 system as it i yum -y install automake yum -y install xorg-x11-xauth yum -y install xterm + yum -y install perl-IPC-Cmd + yum -y install gettext-devel yum -y install texlive # Do not install qt@5 for now @@ -511,8 +516,8 @@ It is recommended to increase the stacksize limit by using ``ulimit -S -s unlimi # JEDI-Skylab system (using R2D2 localhost) spack external find --scope system mysql + # Note - only needed for generating documentation spack external find --scope system texlive - spack external find --scope system sed 5. Find compilers, add to site config's ``compilers.yaml`` @@ -535,10 +540,10 @@ It is recommended to increase the stacksize limit by using ``ulimit -S -s unlimi spack config add "packages:all:compiler:[gcc@YOUR-VERSION]" # Example for Red Hat 8 following the above instructions - spack config add "packages:all:providers:mpi:[openmpi@4.1.6]" + spack config add "packages:all:providers:mpi:[openmpi@5.0.1]" # Example for Ubuntu 20.04 or 22.04 following the above instructions - spack config add "packages:all:providers:mpi:[mpich@4.1.1]" + spack config add "packages:all:providers:mpi:[mpich@4.1.2]" .. warning:: On some systems, the default compiler (e.g., ``gcc`` on Ubuntu 20) may not get used by spack if a newer version is found. Compare your entry to the output of the concretization step later and adjust the entry, if necessary. 
diff --git a/doc/source/PreConfiguredSites.rst b/doc/source/PreConfiguredSites.rst index ffb5e788e..b7aec5771 100644 --- a/doc/source/PreConfiguredSites.rst +++ b/doc/source/PreConfiguredSites.rst @@ -8,63 +8,73 @@ Directory ``configs/sites`` contains site configurations for several HPC systems Pre-configured sites are split into two categories: Tier 1 with officially supported spack-stack installations (see :numref:`Section %s `), and Tier 2 (sites with configuration files that were tested or contributed by others in the past, but that are not officially supported by the spack-stack team; see :numref:`Section %s `). ============================================================= -Officially supported spack-stack 1.6.0 installations (tier 1) +Officially supported spack-stack 1.7.0 installations (tier 1) ============================================================= -Ready-to-use spack-stack 1.6.0 installations are available on the following, fully supported platforms. This version supports JEDI-Skylab and various UFS Applications (UFS Weather Model, EMC Global Workflow, GSI, UFS Short Range Weather Application). Amazon Web Services AMI are available in the US East 1 or 2 regions. - -On selected systems, developmental versions / release candidates are installed that are newer than spack-stack 1.6.0 (see following table). For information on the spack-stack 1.6.0 releases on this platforms, please revert to version 1.6.0 of the documentation (https://spack-stack.readthedocs.io/en/1.6.0/PreConfiguredSites.html#pre-configured-sites-tier-1). 
- -+---------------------+----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| Organization | System | Compilers | Location | Maintainers (principal/backup)| -+=====================+==================================+=================+=========================================================================================================+===============================+ -| **HPC platforms** | -+---------------------+----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| MSU | Hercules GCC+OpenMPI recommended | GCC | ``/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/ue-gcc12-openmpi416`` | EPIC / JCSDA | -+---------------------+----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| | Hercules | (GCC), Intel | ``/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/unified-env`` | EPIC / JCSDA | -| MSU +----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| | Orion | GCC, Intel | ``/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/unified-env`` | EPIC / JCSDA | -+---------------------+----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| | Discover SCU16 | GCC, Intel | ``/gpfsm/dswdev/jcsda/spack-stack/scu16/spack-stack-20240228/envs/unified-env-*`` | JCSDA | -| NASA 
+----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| | Discover SCU17 | GCC, Intel | ``/gpfsm/dswdev/jcsda/spack-stack/scu17/spack-stack-20240228/envs/unified-env-*`` | JCSDA | -+---------------------+----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| | Casper | GCC | ``/glade/work/epicufsrt/contrib/spack-stack/casper/spack-stack-1.6.0/envs/unified-env`` | JCSDA / EPIC | -| NCAR-Wyoming +----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| | Derecho | GCC, Intel | ``/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.6.0/envs/unified-env`` | EPIC / JCSDA | -+---------------------+----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| NOAA (NCEP) | Acorn | Intel | ``/lfs/h1/emc/nceplibs/noscrub/spack-stack/spack-stack-1.6.0/envs/unified-env-intel{19,2022}`` | NOAA-EMC | -+---------------------+----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| | Gaea C5 | Intel | ``/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env`` | EPIC / NOAA-EMC | -| +----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| NOAA (RDHPCS) | Hera | GCC, Intel | 
``/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/unified-env`` | EPIC / NOAA-EMC | -| +----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| | Jet | GCC, Intel | ``/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.6.0/envs/unified-env`` | EPIC / NOAA-EMC | -+---------------------+----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| | Narwhal | GCC, Intel | ``/p/app/projects/NEPTUNE/spack-stack/spack-stack-1.6.0/envs/unified-env-*`` | JCSDA / NRL | -| +----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| U.S. Navy (HPCMP) | Nautilus | Intel | ``/p/app/projects/NEPTUNE/spack-stack/spack-stack-1.6.0/envs/unified-env`` | JCSDA / NRL | -| +----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| | Nautilus | AOCC | *currently not supported* | JCSDA / NRL | -+---------------------+----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| | S4 | Intel | ``/data/prod/jedi/spack-stack/spack-stack-1.6.0/envs/unified-env`` | JCSDA | -| Univ. 
of Wisconsin +----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| | S4 | GCC | *currently not supported* | JCSDA | -+---------------------+----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| **Cloud platforms** | -+---------------------+----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| | AMI Red Hat 8 | GCC | ``/home/ec2-user/spack-stack/spack-stack-1.6.0/envs/unified-env`` | JCSDA | -+ Amazon Web Services +----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| | Parallelcluster JCSDA R&D | Intel | ``/mnt/experiments-efs/skylab-v8/spack-stack-20240207/envs/unified-env-*`` | JCSDA | -+---------------------+----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ -| NOAA (RDHPCS) | RDHPCS Cloud (Parallel Works) | Intel | ``/contrib/spack-stack/spack-stack-1.6.0/envs/unified-env`` | EPIC / JCSDA | -+---------------------+----------------------------------+-----------------+---------------------------------------------------------------------------------------------------------+-------------------------------+ +Ready-to-use spack-stack 1.7.0 installations are available on the following, fully supported platforms. This version supports JEDI-Skylab and various UFS Applications (UFS Weather Model, EMC Global Workflow, GSI, UFS Short Range Weather Application). 
Amazon Web Services AMI are available in the US East 1 or 2 regions. + ++---------------------+----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| Organization | System | Compilers | Location | Maintainers (principal/backup)| ++=====================+==================================+=================+==============================================================================+===============================+ +| **HPC platforms** | ++---------------------+----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| | Hercules | GCC, Intel | ``/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.7.0/envs`` | EPIC / JCSDA | +| MSU +----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| | Orion | GCC, Intel | ``/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.7.0/envs`` | EPIC / JCSDA | ++---------------------+----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| | Discover SCU16 | GCC, Intel | ``/gpfsm/dswdev/jcsda/spack-stack/scu16/spack-stack-1.7.0/envs`` | JCSDA | +| NASA +----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| | Discover SCU17 | GCC, Intel | ``/gpfsm/dswdev/jcsda/spack-stack/scu17/spack-stack-1.7.0/envs`` | JCSDA | ++---------------------+----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| | Casper | GCC | 
``/glade/work/epicufsrt/contrib/spack-stack/casper/spack-stack-1.7.0/envs`` | JCSDA / EPIC | +| NCAR-Wyoming +----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| | Derecho | GCC, Intel | ``/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.7.0/envs`` | EPIC / JCSDA | ++---------------------+----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| NOAA (NCEP) | Acorn | Intel | ``/lfs/h1/emc/nceplibs/noscrub/spack-stack/spack-stack-1.7.0/envs`` | NOAA-EMC | ++---------------------+----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| | Gaea C5 | Intel | ``/ncrc/proj/epic/spack-stack/spack-stack-1.7.0/envs`` | EPIC / NOAA-EMC | +| +----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| NOAA (RDHPCS) | Hera | GCC, Intel | ``/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.7.0/envs`` | EPIC / NOAA-EMC | +| +----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| | Jet | GCC, Intel | ``/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.7.0/envs`` | EPIC / NOAA-EMC | ++---------------------+----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| | Narwhal | GCC, Intel | ``/p/app/projects/NEPTUNE/spack-stack/spack-stack-1.7.0/envs`` | JCSDA / NRL | +| U.S. 
Navy (HPCMP) +----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| | Nautilus | Intel | ``/p/app/projects/NEPTUNE/spack-stack/spack-stack-1.7.0/envs`` | JCSDA / NRL | ++---------------------+----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| Univ. of Wisconsin | S4 | Intel | ``/data/prod/jedi/spack-stack/spack-stack-1.7.0/envs`` | JCSDA | ++---------------------+----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| **Cloud platforms** | ++---------------------+----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| | AMI Red Hat 8 | GCC | ``/home/ec2-user/spack-stack/spack-stack-1.7.0/envs`` | JCSDA | ++ Amazon Web Services +----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| | Parallelcluster JCSDA R&D | GCC, Intel | *currently unavailable* | JCSDA | ++---------------------+----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ +| NOAA (RDHPCS) | RDHPCS Cloud (Parallel Works) | Intel | ``/contrib/spack-stack/spack-stack-1.7.0/envs`` | EPIC / JCSDA | ++---------------------+----------------------------------+-----------------+------------------------------------------------------------------------------+-------------------------------+ For more information about a specific platform, please see the individual sections below. 
For questions or problems, please consult the known issues in :numref:`Section %s `, the currently open GitHub `issues `_ and `discussions `_ first. +.. note:: + This release of spack-stack uses different versions of ``mapl`` with different variants, depending on the version of the compiler and whether the system is used for UFS or GEOS. Please see the following table. + ++----------------------------+--------------------------------------+-----------------------------------------------------------------------+ +| Compiler | mapl configuration | Affected systems | ++============================+======================================+=======================================================================+ +| gcc (any) | ``mapl@2.40.3 +pflogger +extdata2g`` | All systems with GCC stacks | ++----------------------------+--------------------------------------+-----------------------------------------------------------------------+ +| intel@2021.6.0 and earlier | ``mapl@2.40.3 +pflogger +extdata2g`` | Discover SCU16, Acorn, Hera, Jet, Narwhal, Nautilus, S4, RDHPCS Cloud | ++----------------------------+--------------------------------------+-----------------------------------------------------------------------+ +| intel@2021.7.0 and later | ``mapl@2.40.3 ~pflogger ~extdata2g`` | Hercules, Orion, Acorn, Gaea and Derecho | ++----------------------------+--------------------------------------+-----------------------------------------------------------------------+ +| intel@2021.7.0 and later | ``mapl@2.43.0 +pflogger +extdata2g`` | Discover SCU17 | ++----------------------------+--------------------------------------+-----------------------------------------------------------------------+ + +.. note:: + We have noted problems on some - not all - platforms with ``intel@2021.5.0`` when we switched from ``zlib`` to ``zlib-ng`` in spack-stack-1.7.0. These issues went away when using a different version of the compiler (anything between 2021.3.0 and 2021.11.0). 
It is therefore recommended to avoid using ``intel@2021.5.0`` unless it is the only option. + .. _Preconfigured_Sites_Tier1: ============================================================= @@ -86,31 +96,29 @@ The following is required for building new spack environments and for using spac module load python/3.9.2 module load ecflow/5.8.4 -For ``spack-stack-1.6.0`` with Intel, load the following modules after loading miniconda and ecflow: +For ``spack-stack-1.7.0`` with Intel, load the following modules after loading miniconda and ecflow: .. code-block:: console - module use /work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core + module use /work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.7.0/envs/ue-intel-centos/install/modulefiles/Core module load stack-intel/2022.0.2 module load stack-intel-oneapi-mpi/2021.5.1 module load stack-python/3.10.13 - module available -For ``spack-stack-1.6.0`` with GNU, load the following modules after loading miniconda and ecflow: +For ``spack-stack-1.7.0`` with GNU, load the following modules after loading miniconda and ecflow: .. code-block:: console - module use /work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core + module use /work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.7.0/envs/ue-gcc-centos/install/modulefiles/Core module load stack-gcc/10.2.0 module load stack-openmpi/4.0.4 module load stack-python/3.10.13 - module available .. note:: - The unified environment on Orion uses ``cdo@2.0.5`` instead of the default ``cdo@2.2.0`` because of a bug in the ``cdo`` package recipe that affects systems that don't have a ``python3`` interpreter in the default search paths (see https://github.com/spack/spack/issues/41947) for more information. This is a temporary change on Orion for the spack-stack-1.6.0 release and will be reverted once the ``cdo`` package is updated in the upstream spack develop code. 
+ The unified environment on Orion uses ``cdo@2.0.5`` instead of the default ``cdo@2.2.0`` because of a bug in the ``cdo`` package recipe that affects systems that don't have a ``python3`` interpreter in the default search paths (see https://github.com/spack/spack/issues/41947) for more information. This is a temporary change on Orion for the spack-stack-1.7.0 release and will be reverted once the ``cdo`` package is updated in the upstream spack develop code. .. note:: - spack-stack-1.6.0 on Orion provides a chained environment `gsi-addon-env` for GSI with Intel and GNU. To use this environment, replace `unified-env` in the above `module use` statements with `gsi-addon-env`, and load module `stack-python/3.11.6` instead of `stack-python/3.10.13`. + spack-stack-1.7.0 on Orion provides a chained environment `gsi-addon-env` for GSI with Intel and GNU. To use this environment, replace `ue` in the above `module use` statements with `gsi-addon`, and load module `stack-python/3.11.6` instead of `stack-python/3.10.13`. ------------------------------ MSU Hercules @@ -125,41 +133,26 @@ The following is required for building new spack environments and for using spac module load ecflow/5.8.4 module load git-lfs/3.1.2 -For ``spack-stack-1.6.0`` with Intel, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with Intel, proceed with loading the following modules: .. code-block:: console - module use /work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core + module use /work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.7.0/envs/ue-intel/install/modulefiles/Core module load stack-intel/2021.9.0 module load stack-intel-oneapi-mpi/2021.9.0 module load stack-python/3.10.13 - module available -For ``spack-stack-1.6.0`` with GNU, proceed with loading the following modules. Note that this environment is not recommended for GNU, an alternative installation using GNU+OpenMPI is available (see below). 
+For ``spack-stack-1.7.0`` with GNU, proceed with loading the following modules: .. code-block:: console - module use /work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core - module load stack-gcc/12.2.0 - module load stack-mvapich2/2.3.7 - module load stack-python/3.10.13 - module available - -For ``spack-stack-1.6.0`` with GNU+OpenMPI, an alternative and recommended version is available. Load the following modules: - -.. code-block:: console - - module use /work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/ue-gcc12-openmpi416/install/modulefiles/Core + module use /work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.7.0/envs/ue-gcc/install/modulefiles/Core module load stack-gcc/12.2.0 module load stack-openmpi/4.1.6 module load stack-python/3.10.13 - module available .. note:: - spack-stack-1.6.0 on Hercules provides a chained environment `gsi-addon-env` for GSI with Intel and GNU. To use this environment, replace `unified-env` in the above `module use` statements with `gsi-addon-env`, and load module `stack-python/3.11.6` instead of `stack-python/3.10.13`. - -.. note:: - spack-stack-1.6.0 on Hercules has additional packages `fms@2023.02.01`, `sp@2.3.0`, and `crtm@2.4.0` installed in the unified environment, in addition to the two default versions `fms@2023.04` and `fms@release-jcsda`. + spack-stack-1.7.0 on Hercules provides a chained environment `gsi-addon-env` for GSI with Intel and GNU. To use this environment, replace `ue` in the above `module use` statements with `gsi-addon`, and load module `stack-python/3.11.6` instead of `stack-python/3.10.13`. .. 
_Preconfigured_Sites_Discover_SCU16: @@ -177,25 +170,23 @@ The following is required for building new spack environments and for using spac module load miniconda/3.9.7 module load ecflow/5.8.4 -For ``spack-stack-20240228`` with Intel, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with Intel, proceed with loading the following modules: .. code-block:: console - module use /gpfsm/dswdev/jcsda/spack-stack/scu16/spack-stack-20240228/envs/unified-env-intel-2021.5.0/install/modulefiles/Core - module load stack-intel/2021.5.0 - module load stack-intel-oneapi-mpi/2021.5.0 + module use /gpfsm/dswdev/jcsda/spack-stack/scu16/spack-stack-1.7.0/envs/ue-intel-2021.6.0/install/modulefiles/Core + module load stack-intel/2021.6.0 + module load stack-intel-oneapi-mpi/2021.6.0 module load stack-python/3.10.13 - module available -For ``spack-stack-20240228`` with GNU, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with GNU, proceed with loading the following modules: .. code-block:: console - module use /gpfsm/dswdev/jcsda/spack-stack/scu16/spack-stack-20240228/envs/unified-env-gcc-12.1.0/install/modulefiles/Core + module use /gpfsm/dswdev/jcsda/spack-stack/scu16/spack-stack-1.7.0/envs/ue-gcc-12.1.0/install/modulefiles/Core module load stack-gcc/12.1.0 module load stack-openmpi/4.1.3 module load stack-python/3.10.13 - module available ------------------------------ NASA Discover SCU17 @@ -210,25 +201,23 @@ The following is required for building new spack environments and for using spac module use /discover/swdev/jcsda/spack-stack/scu17/modulefiles module load ecflow/5.11.4 -For ``spack-stack-20240228`` with Intel, load the following modules after loading ecflow: +For ``spack-stack-1.7.0`` with Intel, load the following modules after loading ecflow: .. 
code-block:: console - module use /gpfsm/dswdev/jcsda/spack-stack/scu17/spack-stack-20240228/envs/unified-env-intel-2021.10.0/install/modulefiles/Core + module use /gpfsm/dswdev/jcsda/spack-stack/scu17/spack-stack-1.7.0/envs/ue-intel-2021.10.0/install/modulefiles/Core module load stack-intel/2021.10.0 module load stack-intel-oneapi-mpi/2021.10.0 module load stack-python/3.10.13 - module available -For ``spack-stack-20240228`` with GNU, load the following modules after loading ecflow: +For ``spack-stack-1.7.0`` with GNU, load the following modules after loading ecflow: .. code-block:: console - module use /gpfsm/dswdev/jcsda/spack-stack/scu17/spack-stack-20240228/envs/unified-env-gcc-12.3.0/install/modulefiles/Core + module use /gpfsm/dswdev/jcsda/spack-stack/scu17/spack-stack-1.7.0/envs/ue-gcc-12.3.0/install/modulefiles/Core module load stack-gcc/12.3.0 module load stack-openmpi/4.1.6 module load stack-python/3.10.13 - module available .. _Preconfigured_Sites_Narwhal: @@ -255,7 +244,7 @@ With Intel, the following is required for building new spack environments and fo module use /p/app/projects/NEPTUNE/spack-stack/modulefiles module load ecflow/5.8.4 -For ``spack-stack-1.6.0`` with Intel, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with Intel, proceed with loading the following modules: .. 
code-block:: console @@ -263,7 +252,7 @@ For ``spack-stack-1.6.0`` with Intel, proceed with loading the following modules # Note we can't load craype-network-ucx for building spack-stack environments, must do here module unload craype-network-ofi module load craype-network-ucx - module use /p/app/projects/NEPTUNE/spack-stack/spack-stack-1.6.0/envs/unified-env-intel-2021.4.0/install/modulefiles/Core + module use /p/app/projects/NEPTUNE/spack-stack/spack-stack-1.7.0/envs/ue-intel-2021.4.0/install/modulefiles/Core module load stack-intel/2021.4.0 module load stack-cray-mpich/8.1.14 module load stack-python/3.10.13 @@ -287,7 +276,7 @@ With GNU, the following is required for building new spack environments and for module use /p/app/projects/NEPTUNE/spack-stack/modulefiles module load ecflow/5.8.4 -For ``spack-stack-1.6.0`` with GNU, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with GNU, proceed with loading the following modules: .. code-block:: console @@ -295,7 +284,7 @@ For ``spack-stack-1.6.0`` with GNU, proceed with loading the following modules: # Note we can't load craype-network-ucx for building spack-stack environments, must do here module unload craype-network-ofi module load craype-network-ucx - module use /p/app/projects/NEPTUNE/spack-stack/spack-stack-1.6.0/envs/unified-env-gcc-10.3.0/install/modulefiles/Core + module use /p/app/projects/NEPTUNE/spack-stack/spack-stack-1.7.0/envs/ue-gcc-10.3.0/install/modulefiles/Core module load stack-gcc/10.3.0 module load stack-cray-mpich/8.1.14 module load stack-python/3.10.13 @@ -320,11 +309,11 @@ With Intel, the following is required for building new spack environments and fo module use /p/app/projects/NEPTUNE/spack-stack/modulefiles module load ecflow/5.8.4 -For ``spack-stack-1.6.0`` with Intel, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with Intel, proceed with loading the following modules: .. 
code-block:: console - module use /p/app/projects/NEPTUNE/spack-stack/spack-stack-1.6.0/envs/ue-openmpi416/install/modulefiles/Core + module use /p/app/projects/NEPTUNE/spack-stack/spack-stack-1.7.0/envs/ue-intel-2021.5.0/install/modulefiles/Core module load stack-intel/2021.5.0 module load stack-openmpi/4.1.6 module load stack-python/3.10.13 @@ -346,16 +335,12 @@ With AMD clang/flang (aocc), the following is required for building new spack en .. note:: - ``spack-stack-1.6.0`` is not yet supported with the Arm clang/flang compilers. Use Intel instead. + ``spack-stack-1.7.0`` is not yet supported with the Arm clang/flang compilers. Use Intel instead. .. note:: `wgrib2@2.0.8` does not build on Nautilus, therefore we are using `wgrib2@3.1.1` on this system. -.. note:: - - There are still problems launching the ecflow GUI, although the package is installed. - .. _Preconfigured_Sites_Casper: ------------------------------ @@ -373,16 +358,14 @@ The following is required for building new spack environments and for using spac module use /glade/work/epicufsrt/contrib/spack-stack/casper/modulefiles module load ecflow/5.8.4 -For ``spack-stack-1.6.0`` with GNU, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with GNU, proceed with loading the following modules: .. code-block:: console - module use /glade/work/epicufsrt/contrib/spack-stack/casper/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core - + module use /glade/work/epicufsrt/contrib/spack-stack/casper/spack-stack-1.7.0/envs/ue-gcc-12.2.0/install/modulefiles/Core module load stack-gcc/12.2.0 module load stack-openmpi/4.1.6 module load stack-python/3.10.13 - module available .. 
_Preconfigured_Sites_Derecho: @@ -401,25 +384,23 @@ The following is required for building new spack environments and for using spac module use /glade/work/epicufsrt/contrib/spack-stack/derecho/modulefiles module load ecflow/5.8.4 -For ``spack-stack-1.6.0`` with Intel, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with Intel, proceed with loading the following modules: .. code-block:: console - module use /glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core + module use /glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.7.0/envs/ue-intel/install/modulefiles/Core module load stack-intel/2021.10.0 module load stack-cray-mpich/8.1.25 module load stack-python/3.10.13 - module available -For ``spack-stack-1.6.0`` with GNU, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with GNU, proceed with loading the following modules: .. code-block:: console - module use /glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core + module use /glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.7.0/envs/ue-gcc/install/modulefiles/Core module load stack-gcc/12.2.0 module load stack-cray-mpich/8.1.25 module load stack-python/3.10.13 - module available .. note:: CISL restricts the amount of memory available for processes on the login nodes. For example, it is impossible to compile JEDI with even one task (``make -j1``) with the Intel compiles in release mode (``-O2``). We therefore recommend compiling on compute nodes using interactive jobs, if possible. @@ -430,7 +411,7 @@ For ``spack-stack-1.6.0`` with GNU, proceed with loading the following modules: NOAA Acorn (WCOSS2 test system) ------------------------------- -For spack-stack-1.6.0, the meta modules are in ``/lfs/h1/emc/nceplibs/noscrub/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core``. 
+For spack-stack-1.7.0, the meta modules are in ``/lfs/h1/emc/nceplibs/noscrub/spack-stack/spack-stack-1.7.0/envs/ue-intel{19,2022}/modulefiles/Core``. On WCOSS2 OpenSUSE sets ``CONFIG_SITE`` which causes libraries to be installed in ``lib64``, breaking the ``lib`` assumption made by some packages. Therefore, ``CONFIG_SITE`` should be set to empty in ``compilers.yaml``. Also, don't use ``module purge`` on Acorn! @@ -463,15 +444,14 @@ The following is required for building new spack environments and for using spac module load ecflow/5.8.4 module load git-lfs/2.4.1 -For ``spack-stack-1.6.0`` with Intel, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with Intel, proceed with loading the following modules: .. code-block:: console - module use /contrib/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core + module use /contrib/spack-stack/spack-stack-1.7.0/envs/ue-intel-2021.3.0/install/modulefiles/Core module load stack-intel/2021.3.0 module load stack-intel-oneapi-mpi/2021.3.0 module load stack-python/3.10.13 - module available .. _Preconfigured_Sites_Gaea_C5: @@ -491,16 +471,19 @@ The following is required for building new spack environments and for using spac module use /ncrc/proj/epic/spack-stack/modulefiles module load ecflow/5.8.4 -For ``spack-stack-1.6.0`` with Intel, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with Intel, proceed with loading the following modules: .. code-block:: console - module use /ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core + module use /ncrc/proj/epic/spack-stack/spack-stack-1.7.0/envs/ue-intel/install/modulefiles/Core module load stack-intel/2023.1.0 module load stack-cray-mpich/8.1.25 module load stack-python/3.10.13 module -t available +.. note:: + spack-stack-1.7.0 on Gaea provides a chained environment `gsi-addon-env` for GSI with Intel. 
To use this environment, replace `ue` in the above `module use` statements with `gsi-addon`, and load module `stack-python/3.11.6` instead of `stack-python/3.10.13`. + .. note:: On Gaea C5, running ``module available`` without the option ``-t`` leads to an error: ``/usr/bin/lua5.3: /opt/cray/pe/lmod/lmod/libexec/Spider.lua:568: stack overflow`` @@ -526,31 +509,28 @@ The following is required for building new spack environments and for using spac module load miniconda/3.9.12 module load ecflow/5.5.3 -For ``spack-stack-1.6.0`` with Intel, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with Intel, proceed with loading the following modules: .. code-block:: console - module use /scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core + module use /scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.7.0/envs/ue-intel/install/modulefiles/Core module load stack-intel/2021.5.0 module load stack-intel-oneapi-mpi/2021.5.1 module load stack-python/3.10.13 - module available -For ``spack-stack-1.6.0`` with GNU, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with GNU, proceed with loading the following modules: .. code-block:: console - module use /scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core + module use /scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.7.0/envs/ue-gcc/install/modulefiles/Core module load stack-gcc/9.2.0 module load stack-openmpi/4.1.5 module load stack-python/3.10.13 - module available Note that on Hera, a dedicated node exists for ``ecflow`` server jobs (``hecflow01``). Users starting ``ecflow_server`` on the regular login nodes will see their servers being killed every few minutes, and may be barred from accessing the system. .. note:: - - spack-stack-1.6.0 on Hera provides a chained environment `gsi-addon-env` for GSI with Intel and GNU. 
To use this environment, replace `unified-env` in the above `module use` statements with `gsi-addon-env`, and load module `stack-python/3.11.6` instead of `stack-python/3.10.13`. + spack-stack-1.7.0 on Hera provides a chained environment `gsi-addon-env` for GSI with Intel and GNU. To use this environment, replace `ue` in the above `module use` statements with `gsi-addon`, and load module `stack-python/3.11.6` instead of `stack-python/3.10.13`. .. _Preconfigured_Sites_Jet: @@ -568,29 +548,26 @@ The following is required for building new spack environments and for using spac module load ecflow/5.5.3 module use /lfs4/HFIP/hfv3gfs/role.epic/modulefiles -For ``spack-stack-1.6.0`` with Intel, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with Intel, proceed with loading the following modules: .. code-block:: console - module use /mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core + module use /mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.7.0/envs/ue-intel/install/modulefiles/Core module load stack-intel/2021.5.0 module load stack-intel-oneapi-mpi/2021.5.1 module load stack-python/3.10.8 - module available -For ``spack-stack-1.6.0`` with GNU, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with GNU, proceed with loading the following modules: .. code-block:: console - module use /mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core + module use /mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.7.0/envs/ue-gcc/install/modulefiles/Core module load stack-gcc/9.2.0 module load stack-openmpi/3.1.4 module load stack-python/3.10.8 - module available .. note:: - - spack-stack-1.6.0 on Jet provides a chained environment `gsi-addon-env` for GSI with Intel and GNU. 
To use this environment, replace `unified-env` in the above `module use` statements with `gsi-addon-env`, and load module `stack-python/3.11.6` instead of `stack-python/3.10.13`. + spack-stack-1.7.0 on Jet provides a chained environment `gsi-addon-env` for GSI with Intel and GNU. To use this environment, replace `ue` in the above `module use` statements with `gsi-addon`, and load module `stack-python/3.11.6` instead of `stack-python/3.10.13`. ------------------------------ UW (Univ. of Wisconsin) S4 ------------------------------ The following is required for building new spack environments and for using spack to build and run software. .. code-block:: console module load miniconda/3.9.12 module load ecflow/5.8.4 -For ``spack-stack-1.6.0`` with Intel, proceed with loading the following modules: +For ``spack-stack-1.7.0`` with Intel, proceed with loading the following modules: .. code-block:: console - module use /data/prod/jedi/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core + module use /data/prod/jedi/spack-stack/spack-stack-1.7.0/envs/ue-intel-2021.5.0/install/modulefiles/Core module load stack-intel/2021.5.0 module load stack-intel-oneapi-mpi/2021.5.0 module load stack-python/3.10.13 module unuse /opt/apps/modulefiles/Compiler/intel/non-default/22 module unuse /opt/apps/modulefiles/Compiler/intel/22 - module available Note the two `module unuse` commands, that need to be run after the stack metamodules are loaded. Loading the Intel compiler meta module loads the Intel compiler module provided by the sysadmins, which adds those two directories to the module path. These contain duplicate libraries that are not compatible with our stack, such as ``hdf4``. .. note:: - spack-stack-1.6.0 on S4 provides a chained environment `gsi-addon-env` for GSI with Intel. To use this environment, replace `unified-env` in the above `module use` statements with `gsi-addon-env`, and load module `stack-python/3.11.6` instead of `stack-python/3.10.13`. - -..
note:: - - There is currently no support for GNU on S4, because recent updates to ``hdf5`` require a newer version of ``mpich`` (or other MPI library) than available on the system. + There is currently no support for GNU on S4, because recent updates to ``hdf5`` require a newer version of ``mpich`` (or other MPI library) than available on the system. Also, for spack-stack-1.7.0, S4 is the only system that uses ``zlib`` instead of ``zlib-ng`` due to the issues described in https://github.com/JCSDA/spack-stack/issues/1055. ------------------------------------------------ Amazon Web Services Parallelcluster Ubuntu 20.04 ------------------------------------------------ -Access to the JCSDA-managed AWS Parallel Clusters is not available to the public. The following instructions are for JCSDA core staff and in-kind contributors. - -For ``spack-stack-20240207`` with Intel on the JCSDA R&D cluster (``hpc6a.48xlarge`` instances), run the following commands/load the following modules: - -.. code-block:: console - - module purge - ulimit -s unlimited - source /opt/intel/oneapi/compiler/2022.1.0/env/vars.sh - module use /mnt/experiments-efs/skylab-v8/spack-stack-20240207/envs/unified-env-intel-2021.6.0/install/modulefiles/Core - module load stack-intel/2021.6.0 - module load stack-intel-oneapi-mpi/2021.6.0 - module load stack-python/3.10.13 - module available - -For ``spack-stack-20240207`` with GNU on the JCSDA R&D cluster (``hpc6a.48xlarge`` instances), run the following commands/load the following modules: - - module purge - ulimit -s unlimited - module use /mnt/experiments-efs/skylab-v8/spack-stack-20240207/envs/unified-env-gcc-9.4.0/install/modulefiles/Core - module load stack-gcc/9.4.0 - module load stack-openmpi/4.1.4 - module load stack-python/3.10.13 - module available +The JCSDA-managed AWS Parallel Cluster is currently unavailable. 
----------------------------- Amazon Web Services Red Hat 8 ----------------------------- -Use a c6i.4xlarge instance or larger if running out of memory with AMI "skylab-7.1.0-redhat8" (see JEDI documentation at https://jointcenterforsatellitedataassimilation-jedi-docs.readthedocs-hosted.com/en/latest for more information). +Use a c6i.4xlarge instance or larger if running out of memory with AMI "skylab-8.0.0-redhat8" (see JEDI documentation at https://jointcenterforsatellitedataassimilation-jedi-docs.readthedocs-hosted.com/en/latest for more information). -For ``spack-stack-1.6.0``, run: +For ``spack-stack-1.7.0``, run: .. code-block:: console ulimit -s unlimited scl_source enable gcc-toolset-11 - module use /home/ec2-user/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core + module use /home/ec2-user/spack-stack/spack-stack-1.7.0/envs/unified-env-gcc-11.2.1/install/modulefiles/Core module load stack-gcc/11.2.1 - module load stack-openmpi/4.1.5 + module load stack-openmpi/5.0.1 module load stack-python/3.10.13 - module available .. _Configurable_Sites_CreateEnv: diff --git a/doc/source/conf.py b/doc/source/conf.py index ca4ccb5ff..2462bbb56 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -143,7 +143,7 @@ # Latex figure (float) alignment # # 'figure_align': 'htbp', - 'maketitle': r'\newcommand\sphinxbackoftitlepage{For referencing this document please use: \newline \break Heinzeller, D., A. Richert, C. Book, 2024. spack-stack documentation release v1.6.0. Available at https://spack-stack.readthedocs.io/\textunderscore/downloads/en/v1.6.0/pdf/.}\sphinxmaketitle' + 'maketitle': r'\newcommand\sphinxbackoftitlepage{For referencing this document please use: \newline \break Heinzeller, D., A. Richert, C. Book, E. Hartnett, H. Lei, N. Perlin, R. Vasic, S. Herbener, 2024. spack-stack documentation develop. 
Available at https://spack-stack.readthedocs.io/\textunderscore/downloads/en/latest/pdf/.}\sphinxmaketitle' } # Grouping the document tree into LaTeX files. List of tuples diff --git a/project_charter.md b/project_charter.md index ca9bbc235..4d39f7e66 100644 --- a/project_charter.md +++ b/project_charter.md @@ -116,24 +116,24 @@ which case the work will be shared by the code managers. #### Directory structure of spack-stack installs -/path/to/spack-stack/spack-stack-x.y.z/envs/unified-env-compiler-name-compiler-version/install/compiler-name/compiler-version/package-name-version-hash +/path/to/spack-stack/spack-stack-x.y.z/envs/ue-compiler-name-compiler-version/install/compiler-name/compiler-version/package-name-version-hash _Example_ -/Users/heinzell/prod/spack-stack-1.4.0/envs/unified-env-apple-clang-13.1.6/install/apple-clang/13.1.6/netcdf-c-4.9.2-vrrvi2u +/Users/heinzell/prod/spack-stack-1.4.0/envs/ue-apple-clang-13.1.6/install/apple-clang/13.1.6/netcdf-c-4.9.2-vrrvi2u #### Auto-generated modules structure (no MPI dependency) -/path/to/spack-stack/spack-stack-x.y.z/envs/unified-env/install/modulefiles/compiler-name/compiler-version/package-name/package-version[.lua] +/path/to/spack-stack/spack-stack-x.y.z/envs/ue-compiler-name-compiler-version/install/modulefiles/compiler-name/compiler-version/package-name/package-version[.lua] _Example_ -/Users/heinzell/prod/spack-stack-1.4.0/envs/unified-env/install/modulefiles/apple-clang/13.1.6/sfcio/1.4.1.lua +/Users/heinzell/prod/spack-stack-1.4.0/envs/ue-apple-clang-13.1.6/install/modulefiles/apple-clang/13.1.6/sfcio/1.4.1.lua #### Auto-generated modules structure (MPI dependency) -/path/to/spack-stack/spack-stack-x.y.z/envs/unified-env/install/modulefiles/mpi-name/mpi-version/compiler-name/compiler-version/package-name/package-version[.lua] 
+/path/to/spack-stack/spack-stack-x.y.z/envs/ue-compiler-name-compiler-version/install/modulefiles/mpi-name/mpi-version/compiler-name/compiler-version/package-name/package-version[.lua] _Example_ -/Users/heinzell/prod/spack-stack-1.4.0/envs/unified-env/install/modulefiles/openmpi/4.1.5/apple-clang/13.1.6/hdf5/1.14.0.lua +/Users/heinzell/prod/spack-stack-1.4.0/envs/ue-apple-clang-13.1.6/install/modulefiles/openmpi/4.1.5/apple-clang/13.1.6/hdf5/1.14.0.lua #### Important points to remember diff --git a/spack b/spack index 4fafe3ead..1daf6f1d5 160000 --- a/spack +++ b/spack @@ -1 +1 @@ -Subproject commit 4fafe3eadbe330e64423b8ee16c88f8309f48d5b +Subproject commit 1daf6f1d5df35d80d68ac742c9395dfb76997872 diff --git a/spack-ext/repos/spack-stack/packages/emc-gfs-wafs-env/package.py b/spack-ext/repos/spack-stack/packages/emc-gfs-wafs-env/package.py index 6e45dece4..f21738ec6 100644 --- a/spack-ext/repos/spack-stack/packages/emc-gfs-wafs-env/package.py +++ b/spack-ext/repos/spack-stack/packages/emc-gfs-wafs-env/package.py @@ -21,7 +21,7 @@ class EmcGfsWafsEnv(BundlePackage): depends_on("bacio") depends_on("w3emc") depends_on("w3nco") - depends_on("sp") + depends_on("sp", when="^ip@:4") depends_on("ip") depends_on("g2") depends_on("bufr") diff --git a/spack-ext/repos/spack-stack/packages/gldas-env/package.py b/spack-ext/repos/spack-stack/packages/gldas-env/package.py index 29dd2531c..1f41ede5e 100644 --- a/spack-ext/repos/spack-stack/packages/gldas-env/package.py +++ b/spack-ext/repos/spack-stack/packages/gldas-env/package.py @@ -23,6 +23,6 @@ class GldasEnv(BundlePackage): depends_on("w3emc") depends_on("nemsio") depends_on("bacio") - depends_on("sp") + depends_on("sp", when="^ip@:4") # There is no need for install() since there is no code. 
diff --git a/spack-ext/repos/spack-stack/packages/global-workflow-env/package.py b/spack-ext/repos/spack-stack/packages/global-workflow-env/package.py index 69d71286c..6360ca1e1 100644 --- a/spack-ext/repos/spack-stack/packages/global-workflow-env/package.py +++ b/spack-ext/repos/spack-stack/packages/global-workflow-env/package.py @@ -30,7 +30,7 @@ class GlobalWorkflowEnv(BundlePackage): depends_on("g2tmpl") depends_on("w3nco") depends_on("w3emc") - depends_on("sp") + depends_on("sp", when="^ip@:4") depends_on("ip") depends_on("nemsio") depends_on("nemsiogfs") diff --git a/spack-ext/repos/spack-stack/packages/gsi-env/package.py b/spack-ext/repos/spack-stack/packages/gsi-env/package.py index 948fa5150..71994ad39 100644 --- a/spack-ext/repos/spack-stack/packages/gsi-env/package.py +++ b/spack-ext/repos/spack-stack/packages/gsi-env/package.py @@ -23,7 +23,7 @@ class GsiEnv(BundlePackage): depends_on("bufr") depends_on("bacio") depends_on("w3emc") - depends_on("sp") + depends_on("sp", when="^ip@:4") depends_on("ip") depends_on("sigio") depends_on("sfcio") diff --git a/spack-ext/repos/spack-stack/packages/jedi-base-env/package.py b/spack-ext/repos/spack-stack/packages/jedi-base-env/package.py index 01774834c..a1f8726e9 100644 --- a/spack-ext/repos/spack-stack/packages/jedi-base-env/package.py +++ b/spack-ext/repos/spack-stack/packages/jedi-base-env/package.py @@ -46,7 +46,7 @@ class JediBaseEnv(BundlePackage): depends_on("nlohmann-json", type="run") depends_on("nlohmann-json-schema-validator", type="run") depends_on("odc", type="run") - depends_on("sp", type="run") + depends_on("sp", type="run", when="^ip@:4") depends_on("udunits", type="run") # Python packages diff --git a/spack-ext/repos/spack-stack/packages/nceplibs-env/package.py b/spack-ext/repos/spack-stack/packages/nceplibs-env/package.py index ea534c618..e5c25bfcb 100644 --- a/spack-ext/repos/spack-stack/packages/nceplibs-env/package.py +++ b/spack-ext/repos/spack-stack/packages/nceplibs-env/package.py @@ -32,7 
+32,7 @@ class NceplibsEnv(BundlePackage): depends_on("nemsio") depends_on("sfcio") depends_on("sigio") - depends_on("sp") + depends_on("sp", when="^ip@:4") depends_on("w3emc") depends_on("w3nco") depends_on("wrf-io") diff --git a/spack-ext/repos/spack-stack/packages/ufs-srw-app-env/package.py b/spack-ext/repos/spack-stack/packages/ufs-srw-app-env/package.py index ae5501f1d..80ae52b71 100644 --- a/spack-ext/repos/spack-stack/packages/ufs-srw-app-env/package.py +++ b/spack-ext/repos/spack-stack/packages/ufs-srw-app-env/package.py @@ -31,7 +31,7 @@ class UfsSrwAppEnv(BundlePackage): depends_on("g2") depends_on("g2tmpl") depends_on("ip") - depends_on("sp") + depends_on("sp", when="^ip@:4") depends_on("w3nco") depends_on("gfsio") depends_on("landsfcutil") diff --git a/spack-ext/repos/spack-stack/packages/ufs-utils-env/package.py b/spack-ext/repos/spack-stack/packages/ufs-utils-env/package.py index ea9129907..26f02c38e 100644 --- a/spack-ext/repos/spack-stack/packages/ufs-utils-env/package.py +++ b/spack-ext/repos/spack-stack/packages/ufs-utils-env/package.py @@ -26,7 +26,7 @@ class UfsUtilsEnv(BundlePackage): depends_on("ip2") depends_on("nemsio") depends_on("nemsiogfs") - depends_on("sp") + depends_on("sp", when="^ip@:4") depends_on("w3emc") depends_on("sigio") depends_on("sfcio") diff --git a/spack-ext/repos/spack-stack/packages/ufs-weather-model-env/package.py b/spack-ext/repos/spack-stack/packages/ufs-weather-model-env/package.py index 342ef2dbb..d8354989f 100644 --- a/spack-ext/repos/spack-stack/packages/ufs-weather-model-env/package.py +++ b/spack-ext/repos/spack-stack/packages/ufs-weather-model-env/package.py @@ -32,7 +32,7 @@ class UfsWeatherModelEnv(BundlePackage): depends_on("g2", type="run") depends_on("g2tmpl", type="run") depends_on("ip", type="run") - depends_on("sp", type="run") + depends_on("sp", type="run", when="^ip@:4") depends_on("w3emc", type="run") depends_on("scotch", type="run") depends_on("cprnc", type="run") diff --git 
a/spack-ext/repos/spack-stack/packages/upp-env/package.py b/spack-ext/repos/spack-stack/packages/upp-env/package.py index 3d524ed02..eb051622d 100644 --- a/spack-ext/repos/spack-stack/packages/upp-env/package.py +++ b/spack-ext/repos/spack-stack/packages/upp-env/package.py @@ -24,7 +24,7 @@ class UppEnv(BundlePackage): depends_on("nemsio") depends_on("sfcio") depends_on("sigio") - depends_on("sp") + depends_on("sp", when="^ip@:4") depends_on("w3nco") depends_on("w3emc") depends_on("wrf-io")