diff --git a/mache/cime_machine_config/config_machines.xml b/mache/cime_machine_config/config_machines.xml
index f41ced73..0e5bf4bf 100644
--- a/mache/cime_machine_config/config_machines.xml
+++ b/mache/cime_machine_config/config_machines.xml
@@ -136,10 +136,10 @@
/opt/cray/pe/netcdf-hdf5parallel/4.8.1.3/gnu/9.1/
$SHELL{dirname $(dirname $(which pnetcdf_version))}
-
+
128M
-
+
cores
@@ -148,7 +148,7 @@
Perlmutter CPU-only nodes at NERSC. Phase2 only: Each node has 2 AMD EPYC 7713 64-Core (Milan) 512GB
$ENV{NERSC_HOST}:perlmutter
Linux
- gnu,intel,nvidia,amdclang
+ intel,gnu,nvidia,amdclang
mpich
e3sm
/global/cfs/cdirs/e3sm
@@ -160,7 +160,7 @@
/global/cfs/cdirs/e3sm/inputdata/atm/datm7
$CIME_OUTPUT_ROOT/archive/$CASE
/global/cfs/cdirs/e3sm/baselines/$COMPILER
- /global/cfs/cdirs/e3sm/tools/cprnc.cori/cprnc
+ /global/cfs/cdirs/e3sm/tools/cprnc/cprnc
10
e3sm_developer
4
@@ -193,6 +193,8 @@
cray-hdf5-parallel
cray-netcdf-hdf5parallel
cray-parallel-netcdf
+ cray-netcdf
+ cray-hdf5
PrgEnv-gnu
PrgEnv-intel
PrgEnv-nvidia
@@ -200,7 +202,10 @@
PrgEnv-aocc
intel
intel-oneapi
+ nvidia
+ aocc
cudatoolkit
+ climate-utils
craype-accel-nvidia80
craype-accel-host
perftools-base
@@ -227,14 +232,14 @@
PrgEnv-aocc
- aocc/3.2.0
+ aocc/4.0.0
cray-libsci/23.02.1.1
craype-accel-host
- craype/2.7.19
- cray-mpich/8.1.24
+ craype/2.7.20
+ cray-mpich/8.1.25
cray-hdf5-parallel/1.12.2.3
cray-netcdf-hdf5parallel/4.9.0.3
cray-parallel-netcdf/1.12.3.3
@@ -257,15 +262,31 @@
/global/cfs/cdirs/e3sm/perl/lib/perl5-only-switch
software
MPI_Bcast
+ $SHELL{if [ -z "$Albany_ROOT" ]; then echo /global/common/software/e3sm/mali_tpls/albany-e3sm-serial-release-gcc; else echo "$Albany_ROOT"; fi}
+ $SHELL{if [ -z "$Trilinos_ROOT" ]; then echo /global/common/software/e3sm/mali_tpls/trilinos-e3sm-serial-release-gcc; else echo "$Trilinos_ROOT"; fi}
+ $ENV{CRAY_NETCDF_HDF5PARALLEL_PREFIX}
+ $ENV{CRAY_PARALLEL_NETCDF_PREFIX}
+
+
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /global/cfs/cdirs/e3sm/3rdparty/adios2/2.9.1/cray-mpich-8.1.25/intel-2023.1.0; else echo "$ADIOS2_ROOT"; fi}
- /global/cfs/cdirs/e3sm/3rdparty/adios2/2.8.3.patch/cray-mpich-8.1.15/gcc-11.2.0
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /global/cfs/cdirs/e3sm/3rdparty/adios2/2.9.1/cray-mpich-8.1.25/gcc-11.2.0; else echo "$ADIOS2_ROOT"; fi}
+ Generic
- /global/cfs/cdirs/e3sm/3rdparty/adios2/2.8.3.patch/cray-mpich-8.1.15/nvidia-21.11
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /global/cfs/cdirs/e3sm/3rdparty/adios2/2.9.1/cray-mpich-8.1.25/nvidia-22.7; else echo "$ADIOS2_ROOT"; fi}
+
+
+ $SHELL{if [ -z "$BLAS_ROOT" ]; then echo /opt/nvidia/hpc_sdk/Linux_x86_64/22.7/compilers; else echo "$BLAS_ROOT"; fi}
+ $SHELL{if [ -z "$LAPACK_ROOT" ]; then echo /opt/nvidia/hpc_sdk/Linux_x86_64/22.7/compilers; else echo "$LAPACK_ROOT"; fi}
+ NVHPC
+
+
+ Intel10_64_dyn
- /global/cfs/cdirs/e3sm/3rdparty/adios2/2.8.3.patch/cray-mpich-8.1.15/aocc-3.2.0
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /global/cfs/cdirs/e3sm/3rdparty/adios2/2.9.1/cray-mpich-8.1.25/aocc-4.0.0; else echo "$ADIOS2_ROOT"; fi}
-1
@@ -288,7 +309,152 @@
/global/cfs/cdirs/e3sm/inputdata/atm/datm7
$CIME_OUTPUT_ROOT/archive/$CASE
/global/cfs/cdirs/e3sm/baselines/$COMPILER
- /global/cfs/cdirs/e3sm/tools/cprnc.cori/cprnc
+ /global/cfs/cdirs/e3sm/tools/cprnc/cprnc
+ 10
+ e3sm_developer
+ 4
+ nersc_slurm
+ e3sm
+ 128
+ 256
+ 256
+ 4
+ 64
+ 64
+ TRUE
+
+ srun
+
+ --label
+ -n {{ total_tasks }} -N {{ num_nodes }}
+ -c $SHELL{echo 128/`./xmlquery --value MAX_MPITASKS_PER_NODE`|bc}
+ $SHELL{if [ 64 -ge `./xmlquery --value MAX_MPITASKS_PER_NODE` ]; then echo "--cpu_bind=cores"; else echo "--cpu_bind=threads";fi;}
+ -m plane=$SHELL{echo `./xmlquery --value MAX_MPITASKS_PER_NODE`}
+
+
+
+ /usr/share/lmod/8.3.1/init/perl
+ /usr/share/lmod/8.3.1/init/python
+ /usr/share/lmod/8.3.1/init/sh
+ /usr/share/lmod/8.3.1/init/csh
+ /usr/share/lmod/lmod/libexec/lmod perl
+ /usr/share/lmod/lmod/libexec/lmod python
+ module
+ module
+
+
+ cray-hdf5-parallel
+ cray-netcdf-hdf5parallel
+ cray-parallel-netcdf
+ cray-netcdf
+ cray-hdf5
+ PrgEnv-gnu
+ PrgEnv-intel
+ PrgEnv-nvidia
+ PrgEnv-cray
+ PrgEnv-aocc
+ intel
+ intel-oneapi
+ nvidia
+ aocc
+ cudatoolkit
+ climate-utils
+ craype-accel-nvidia80
+ craype-accel-host
+ perftools-base
+ perftools
+ darshan
+
+
+
+ PrgEnv-gnu/8.3.3
+ gcc/11.2.0
+
+
+
+ PrgEnv-nvidia
+ nvidia/22.7
+
+
+
+ cudatoolkit/11.7
+ craype-accel-nvidia80
+
+
+
+ cudatoolkit/11.7
+ craype-accel-nvidia80
+
+
+
+ craype-accel-host
+
+
+
+ craype-accel-host
+
+
+
+ cray-libsci/23.02.1.1
+ craype/2.7.20
+ cray-mpich/8.1.25
+ cray-hdf5-parallel/1.12.2.3
+ cray-netcdf-hdf5parallel/4.9.0.3
+ cray-parallel-netcdf/1.12.3.3
+ cmake/3.24.3
+
+
+
+ $CIME_OUTPUT_ROOT/$CASE/run
+ $CIME_OUTPUT_ROOT/$CASE/bld
+ 0.1
+
+
+ 1
+ 1
+ 128M
+ spread
+ threads
+ FALSE
+ /global/cfs/cdirs/e3sm/perl/lib/perl5-only-switch
+ MPI_Bcast
+ $ENV{CRAY_NETCDF_HDF5PARALLEL_PREFIX}
+ $ENV{CRAY_PARALLEL_NETCDF_PREFIX}
+
+
+ 1
+
+
+ 1
+
+
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /global/cfs/cdirs/e3sm/3rdparty/adios2/2.9.1/cray-mpich-8.1.25/gcc-11.2.0; else echo "$ADIOS2_ROOT"; fi}
+
+
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /global/cfs/cdirs/e3sm/3rdparty/adios2/2.9.1/cray-mpich-8.1.25/nvidia-22.7; else echo "$ADIOS2_ROOT"; fi}
+
+
+ -1
+
+
+
+
+ Muller small internal machine at NERSC with GPU nodes similar to pm-gpu
+ $ENV{NERSC_HOST}:muller
+ Linux
+ gnugpu,gnu,nvidiagpu,nvidia
+ mpich
+ e3sm_g
+ /global/cfs/cdirs/e3sm
+ e3sm,m3411,m3412
+ $ENV{SCRATCH}/e3sm_scratch/muller
+ /global/cfs/cdirs/e3sm/www/$ENV{USER}
+ http://portal.nersc.gov/project/e3sm/$ENV{USER}
+ /global/cfs/cdirs/e3sm/inputdata
+ /global/cfs/cdirs/e3sm/inputdata/atm/datm7
+ $CIME_OUTPUT_ROOT/archive/$CASE
+ /global/cfs/cdirs/e3sm/baselines/$COMPILER
+ /global/cfs/cdirs/e3sm/tools/cprnc/cprnc
10
e3sm_developer
4
@@ -325,11 +491,19 @@
cray-hdf5-parallel
cray-netcdf-hdf5parallel
cray-parallel-netcdf
+ cray-netcdf
+ cray-hdf5
PrgEnv-gnu
+ PrgEnv-intel
PrgEnv-nvidia
PrgEnv-cray
PrgEnv-aocc
+ intel
+ intel-oneapi
+ nvidia
+ aocc
cudatoolkit
+ climate-utils
craype-accel-nvidia80
craype-accel-host
perftools-base
@@ -348,12 +522,12 @@
- cudatoolkit/11.5
+ cudatoolkit/11.7
craype-accel-nvidia80
- cudatoolkit/11.5
+ cudatoolkit/11.7
craype-accel-nvidia80
@@ -367,8 +541,8 @@
cray-libsci/23.02.1.1
- craype/2.7.19
- cray-mpich/8.1.24
+ craype/2.7.20
+ cray-mpich/8.1.25
cray-hdf5-parallel/1.12.2.3
cray-netcdf-hdf5parallel/4.9.0.3
cray-parallel-netcdf/1.12.3.3
@@ -389,6 +563,8 @@
FALSE
/global/cfs/cdirs/e3sm/perl/lib/perl5-only-switch
MPI_Bcast
+ $ENV{CRAY_NETCDF_HDF5PARALLEL_PREFIX}
+ $ENV{CRAY_PARALLEL_NETCDF_PREFIX}
1
@@ -397,10 +573,10 @@
1
- /global/cfs/cdirs/e3sm/3rdparty/adios2/2.8.3.patch/cray-mpich-8.1.15/gcc-11.2.0
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /global/cfs/cdirs/e3sm/3rdparty/adios2/2.9.1/cray-mpich-8.1.25/gcc-11.2.0; else echo "$ADIOS2_ROOT"; fi}
- /global/cfs/cdirs/e3sm/3rdparty/adios2/2.8.3.patch/cray-mpich-8.1.15/nvidia-21.11
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /global/cfs/cdirs/e3sm/3rdparty/adios2/2.9.1/cray-mpich-8.1.25/nvidia-22.7; else echo "$ADIOS2_ROOT"; fi}
-1
@@ -411,19 +587,19 @@
test machine at NERSC, very similar to pm-cpu. each node has 2 AMD EPYC 7713 64-Core (Milan) 512GB
$ENV{NERSC_HOST}:alvarez
Linux
- gnu,nvidia,amdclang
+ intel,gnu,nvidia,amdclang
mpich
e3sm
/global/cfs/cdirs/e3sm
e3sm,m3411,m3412
- $ENV{PSCRATCH}/e3sm_scratch/alvarez
+ $ENV{SCRATCH}/e3sm_scratch/alvarez
/global/cfs/cdirs/e3sm/www/$ENV{USER}
http://portal.nersc.gov/project/e3sm/$ENV{USER}
/global/cfs/cdirs/e3sm/inputdata
/global/cfs/cdirs/e3sm/inputdata/atm/datm7
$CIME_OUTPUT_ROOT/archive/$CASE
/global/cfs/cdirs/e3sm/baselines/$COMPILER
- /global/cfs/cdirs/e3sm/tools/cprnc.cori/cprnc
+ /global/cfs/cdirs/e3sm/tools/cprnc/cprnc
10
e3sm_developer
4
@@ -443,12 +619,12 @@
- /usr/share/lmod/8.3.1/init/perl
- /usr/share/lmod/8.3.1/init/python
- /usr/share/lmod/8.3.1/init/sh
- /usr/share/lmod/8.3.1/init/csh
- /usr/share/lmod/lmod/libexec/lmod perl
- /usr/share/lmod/lmod/libexec/lmod python
+ /opt/cray/pe/lmod/8.7.19/init/perl
+ /opt/cray/pe/lmod/8.7.19/init/python
+ /opt/cray/pe/lmod/8.7.19/init/sh
+ /opt/cray/pe/lmod/8.7.19/init/csh
+ /opt/cray/pe/lmod/lmod/libexec/lmod perl
+ /opt/cray/pe/lmod/lmod/libexec/lmod python
module
module
@@ -456,11 +632,19 @@
cray-hdf5-parallel
cray-netcdf-hdf5parallel
cray-parallel-netcdf
+ cray-netcdf
+ cray-hdf5
PrgEnv-gnu
+ PrgEnv-intel
PrgEnv-nvidia
PrgEnv-cray
PrgEnv-aocc
+ intel
+ intel-oneapi
+ nvidia
+ aocc
cudatoolkit
+ climate-utils
craype-accel-nvidia80
craype-accel-host
perftools-base
@@ -469,25 +653,33 @@
- PrgEnv-gnu/8.3.3
- gcc/12.1.0
+ PrgEnv-gnu
+ gcc
+ cray-libsci
+
+
+
+ PrgEnv-intel
+ intel
PrgEnv-nvidia
nvidia/22.7
+ cray-libsci
PrgEnv-aocc
- aocc/3.2.0
+ aocc/4.0.0
+ cray-libsci
craype-accel-host
- cray-libsci/23.02.1.1
- craype/2.7.19
- cray-mpich/8.1.24
+ cray-libsci
+ craype/2.7.21
+ cray-mpich/8.1.26
cray-hdf5-parallel/1.12.2.3
cray-netcdf-hdf5parallel/4.9.0.3
cray-parallel-netcdf/1.12.3.3
@@ -509,6 +701,8 @@
/global/cfs/cdirs/e3sm/perl/lib/perl5-only-switch
software
MPI_Bcast
+ $ENV{CRAY_NETCDF_HDF5PARALLEL_PREFIX}
+ $ENV{CRAY_PARALLEL_NETCDF_PREFIX}
-1
@@ -589,7 +783,7 @@
$SHELL{dirname $(dirname $(which nc-config))}
$SHELL{dirname $(dirname $(which pnetcdf_version))}
-
+
128M
spread
threads
@@ -704,19 +898,19 @@
10
3
-
+
128M
spread
threads
- /lustre/orion/cli133/world-shared/3rdparty/adios2/2.8.3.patch/cray-mpich-8.1.17/gcc-11.2.0
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /lustre/orion/cli115/world-shared/frontier/3rdparty/adios2/2.9.1/cray-mpich-8.1.23/gcc-11.2.0; else echo "$ADIOS2_ROOT"; fi}
- /lustre/orion/cli133/world-shared/3rdparty/adios2/2.8.3.patch/cray-mpich-8.1.17/crayclang-14.0.2
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /lustre/orion/cli115/world-shared/frontier/3rdparty/adios2/2.9.1/cray-mpich-8.1.23/crayclang-15.0.1; else echo "$ADIOS2_ROOT"; fi}
- /lustre/orion/cli133/world-shared/3rdparty/adios2/2.8.3.patch/cray-mpich-8.1.17/amdclang-15.0.0
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /lustre/orion/cli115/world-shared/frontier/3rdparty/adios2/2.9.1/cray-mpich-8.1.23/amdclang-15.0.0; else echo "$ADIOS2_ROOT"; fi}
@@ -827,20 +1021,11 @@
10
3
-
+
128M
spread
threads
-
- /lustre/orion/cli133/world-shared/3rdparty/adios2/2.8.3.patch/cray-mpich-8.1.17/gcc-11.2.0
-
-
- /lustre/orion/cli133/world-shared/3rdparty/adios2/2.8.3.patch/cray-mpich-8.1.17/crayclang-14.0.2
-
-
- /lustre/orion/cli133/world-shared/3rdparty/adios2/2.8.3.patch/cray-mpich-8.1.17/amdclang-15.0.0
-
@@ -932,14 +1117,11 @@
0
-
+
128M
spread
threads
-
- /lustre/orion/cli133/world-shared/3rdparty/adios2/2.8.3.patch/cray-mpich-8.1.17/crayclang-14.0.2
-
@@ -1018,14 +1200,11 @@
romio_cb_read=disable
-
+
128M
spread
threads
-
- /lustre/orion/cli133/world-shared/3rdparty/adios2/2.8.3.patch/cray-mpich-8.1.17/crayclang-14.0.2
-
@@ -1042,7 +1221,7 @@
$ENV{SCRATCH}/inputdata/atm/datm7
$CIME_OUTPUT_ROOT/archive/$CASE
$ENV{SCRATCH}/baselines/$COMPILER
- $ENV{SCRATCH}/tools/cprnc.cori/cprnc
+ $ENV{SCRATCH}/tools/cprnc/cprnc
8
e3sm_developer
slurm
@@ -1111,6 +1290,8 @@
threads
1
-l
+ $ENV{TACC_NETCDF_DIR}
+ $ENV{TACC_PNETCDF_DIR}
@@ -1202,6 +1383,9 @@
$ENV{HOME}/e3sm_scratch/$CASE/run
$ENV{HOME}/e3sm_scratch/$CASE/bld
+
+ $ENV{BLASLAPACK_LIBDIR}
+
@@ -1243,7 +1427,7 @@
/usr/local/packages/netcdf-parallel
/usr/local/packages/pnetcdf
- /usr/local/packages/hdf5-parallel
+ /usr/local/packages/hdf5-parallel
/usr/local/packages/cmake/bin:/usr/local/packages/mpich/bin:/usr/local/packages/hdf5-parallel/bin:/usr/local/packages/netcdf-parallel/bin:/usr/local/packages/pnetcdf/bin:$ENV{PATH}
/usr/local/packages/mpich/lib:/usr/local/packages/szip/lib:/usr/local/packages/hdf5-parallel/lib:/usr/local/packages/netcdf-parallel/lib:/usr/local/packages/pnetcdf/lib
@@ -1393,10 +1577,11 @@
0
- $ENV{SEMS_NETCDF_ROOT}
+ $ENV{SEMS_NETCDF_ROOT}
64M
spread
threads
+ Generic
@@ -1451,6 +1636,9 @@
/ascldap/users/projects/e3sm/scream/libs/gcc/install/weaver/gcc/8.5.0/bin:/ascldap/users/projects/e3sm/scream/libs/gcc/install/weaver/gcc/8.5.0/libexec/gcc/powerpc64le-unknown-linux-gnu/8.5.0:/ascldap/users/projects/e3sm/scream/libs/openmpi/install/weaver/gcc/8.5.0/cuda/10.1.105/bin:/ascldap/users/projects/e3sm/scream/libs/pnetcdf/install/weaver/gcc/8.5.0/cuda/10.1.105/bin:/ascldap/users/projects/e3sm/scream/libs/netcdf-c/install/weaver/gcc/8.5.0/cuda/10.1.105/bin:/ascldap/users/projects/e3sm/scream/libs/netcdf-fortran/install/weaver/gcc/8.5.0/cuda/10.1.105/bin:/ascldap/users/projects/e3sm/scream/libs/wget/bin:/ascldap/users/jgfouca/perl5/bin:$ENV{PATH}
/ascldap/users/jgfouca/perl5/lib/perl5
/ascldap/users/jgfouca/perl5
+ /ascldap/users/projects/e3sm/scream/libs/netcdf-c/install/weaver/gcc/8.5.0/cuda/10.1.105
+ /ascldap/users/projects/e3sm/scream/libs/netcdf-fortran/install/weaver/gcc/8.5.0/cuda/10.1.105
+ /ascldap/users/projects/e3sm/scream/libs/pnetcdf/install/weaver/gcc/8.5.0/cuda/10.1.105
@@ -1592,7 +1780,7 @@
/soft/apps/packages/climate/mpich/3.3.2/gcc-8.2.0/bin:/soft/apps/packages/climate/cmake/3.18.4/bin:/soft/apps/packages/climate/gmake/bin:$ENV{PATH}
- /soft/apps/packages/climate/hdf5/1.8.16-parallel/mpich-3.3.2/gcc-8.2.0
+ /soft/apps/packages/climate/hdf5/1.8.16-parallel/mpich-3.3.2/gcc-8.2.0
/soft/apps/packages/climate/netcdf/4.4.1c-4.2cxx-4.4.4f-parallel/mpich-3.3.2/gcc-8.2.0
@@ -1601,28 +1789,22 @@
/soft/apps/packages/climate/openmpi/2.1.5/gcc-8.2.0/bin:/soft/apps/packages/climate/cmake/3.18.4/bin:/soft/apps/packages/climate/gmake/bin:$ENV{PATH}
- /soft/apps/packages/climate/zlib/1.2.11/gcc-8.2.0-static
- /soft/apps/packages/climate/szip/2.1/gcc-8.2.0-static
- /soft/apps/packages/climate/hdf5/1.8.12-parallel/openmpi-2.1.5/gcc-8.2.0-static
+ /soft/apps/packages/climate/zlib/1.2.11/gcc-8.2.0-static
+ /soft/apps/packages/climate/szip/2.1/gcc-8.2.0-static
+ /soft/apps/packages/climate/hdf5/1.8.12-parallel/openmpi-2.1.5/gcc-8.2.0-static
/soft/apps/packages/climate/netcdf/4.7.4c-4.3.1cxx-4.4.4f-parallel/openmpi-2.1.5/gcc-8.2.0-static-hdf5-1.8.12-pnetcdf-1.12.0
/soft/apps/packages/climate/pnetcdf/1.12.0/openmpi-2.1.5/gcc-8.2.0
-
+
64M
/soft/apps/packages/climate/perl5/lib/perl5
-
- /soft/apps/packages/climate/adios2/2.7.0/mpich-3.3.2/gcc-8.2.0
-
-
- /soft/apps/packages/climate/adios2/2.7.0/openmpi-2.1.5/gcc-8.2.0
-
-
- ANL CELS General Computing Environment (Linux) workstation (Ubuntu 18.04)
+
+ ANL CELS General Computing Environment (Linux) workstation (Ubuntu 22.04)
compute-386-01|compute-386-02
LINUX
gnu
@@ -1655,59 +1837,53 @@
- /nfs/gce/software/spack/opt/spack/linux-ubuntu18.04-x86_64/gcc-7.3.0/lmod-7.7.29-zg24dcc/lmod/lmod/init/env_modules_python.py
- /nfs/gce/software/spack/opt/spack/linux-ubuntu18.04-x86_64/gcc-7.3.0/lmod-7.7.29-zg24dcc/lmod/lmod/init/perl
- /nfs/gce/software/spack/opt/spack/linux-ubuntu18.04-x86_64/gcc-7.3.0/lmod-7.7.29-zg24dcc/lmod/lmod/init/bash
- /nfs/gce/software/spack/opt/spack/linux-ubuntu18.04-x86_64/gcc-7.3.0/lmod-7.7.29-zg24dcc/lmod/lmod/init/sh
- /nfs/gce/software/spack/opt/spack/linux-ubuntu18.04-x86_64/gcc-7.3.0/lmod-7.7.29-zg24dcc/lmod/lmod/init/csh
- /nfs/gce/software/spack/opt/spack/linux-ubuntu18.04-x86_64/gcc-7.3.0/lmod-7.7.29-zg24dcc/lmod/lmod/libexec/lmod python
+ /nfs/gce/software/custom/linux-ubuntu22.04-x86_64/spack/opt/spack/linux-ubuntu22.04-x86_64/gcc-11.2.0/lmod-8.5.6-hkjjxhp/lmod/8.5.6/init/env_modules_python.py
+ /nfs/gce/software/custom/linux-ubuntu22.04-x86_64/spack/opt/spack/linux-ubuntu22.04-x86_64/gcc-11.2.0/lmod-8.5.6-hkjjxhp/lmod/lmod/init/perl
+ /nfs/gce/software/custom/linux-ubuntu22.04-x86_64/spack/opt/spack/linux-ubuntu22.04-x86_64/gcc-11.2.0/lmod-8.5.6-hkjjxhp/lmod/lmod/init/bash
+ /nfs/gce/software/custom/linux-ubuntu22.04-x86_64/spack/opt/spack/linux-ubuntu22.04-x86_64/gcc-11.2.0/lmod-8.5.6-hkjjxhp/lmod/lmod/init/sh
+ /nfs/gce/software/custom/linux-ubuntu22.04-x86_64/spack/opt/spack/linux-ubuntu22.04-x86_64/gcc-11.2.0/lmod-8.5.6-hkjjxhp/lmod/lmod/init/csh
+ /nfs/gce/software/custom/linux-ubuntu22.04-x86_64/spack/opt/spack/linux-ubuntu22.04-x86_64/gcc-11.2.0/lmod-8.5.6-hkjjxhp/lmod/lmod/libexec/lmod python
module
module
module
module
- autoconf/2.69-tz6eue5
- automake/1.16.3-fm5m6qc
- libtool/2.4.6-jdxbjft
- m4/1.4.19-wq3bm42
- cmake/3.20.5-yjp2hz6
- gcc/11.1.0-5ikoznk
- zlib/1.2.11-smoyzzo
+ gcc/12.1.0
$CIME_OUTPUT_ROOT/$CASE/run
$CIME_OUTPUT_ROOT/$CASE/bld
- /nfs/gce/projects/climate/software/netcdf/4.8.0c-4.3.1cxx-4.5.3f-serial/gcc-11.1.0
+ /nfs/gce/projects/climate/software/linux-ubuntu22.04-x86_64/netcdf/4.8.0c-4.3.1cxx-4.5.3f-serial/gcc-12.1.0
- /nfs/gce/projects/climate/software/mpich/3.4.2/gcc-11.1.0/lib:$ENV{LD_LIBRARY_PATH}
- /nfs/gce/projects/climate/software/mpich/3.4.2/gcc-11.1.0/bin:$ENV{PATH}
- /nfs/gce/software/spack/opt/spack/linux-ubuntu18.04-x86_64/gcc-7.5.0/zlib-1.2.11-smoyzzo
- /nfs/gce/projects/climate/software/hdf5/1.12.1/mpich-3.4.2/gcc-11.1.0
- /nfs/gce/projects/climate/software/netcdf/4.8.0c-4.3.1cxx-4.5.3f-parallel/mpich-3.4.2/gcc-11.1.0
- /nfs/gce/projects/climate/software/pnetcdf/1.12.2/mpich-3.4.2/gcc-11.1.0
+ /nfs/gce/projects/climate/software/linux-ubuntu22.04-x86_64/mpich/4.1.2/gcc-12.1.0/lib:$ENV{LD_LIBRARY_PATH}
+ /nfs/gce/projects/climate/software/linux-ubuntu22.04-x86_64/mpich/4.1.2/gcc-12.1.0/bin:$ENV{PATH}
+ /nfs/gce/software/spack/opt/spack/linux-ubuntu20.04-x86_64/gcc-9.3.0/zlib-1.2.11-p7dmb5p
+ /nfs/gce/projects/climate/software/linux-ubuntu22.04-x86_64/hdf5/1.12.2/mpich-4.1.2/gcc-12.1.0
+ /nfs/gce/projects/climate/software/linux-ubuntu22.04-x86_64/netcdf/4.8.0c-4.3.1cxx-4.5.3f-parallel/mpich-4.1.2/gcc-12.1.0
+ /nfs/gce/projects/climate/software/linux-ubuntu22.04-x86_64/pnetcdf/1.12.3/mpich-4.1.2/gcc-12.1.0
- /nfs/gce/projects/climate/software/openmpi/4.1.3/gcc-11.1.0/lib:$ENV{LD_LIBRARY_PATH}
- /nfs/gce/projects/climate/software/openmpi/4.1.3/gcc-11.1.0/bin:$ENV{PATH}
- /nfs/gce/software/spack/opt/spack/linux-ubuntu18.04-x86_64/gcc-7.5.0/zlib-1.2.11-smoyzzo
- /nfs/gce/projects/climate/software/hdf5/1.12.1/openmpi-4.1.3/gcc-11.1.0
- /nfs/gce/projects/climate/software/netcdf/4.8.0c-4.3.1cxx-4.5.3f-parallel/openmpi-4.1.3/gcc-11.1.0
- /nfs/gce/projects/climate/software/pnetcdf/1.12.2/openmpi-4.1.3/gcc-11.1.0
-
-
+ /nfs/gce/projects/climate/software/linux-ubuntu22.04-x86_64/openmpi/4.1.6/gcc-12.1.0/lib:$ENV{LD_LIBRARY_PATH}
+ /nfs/gce/projects/climate/software/linux-ubuntu22.04-x86_64/openmpi/4.1.6/gcc-12.1.0/bin:$ENV{PATH}
+ /nfs/gce/software/spack/opt/spack/linux-ubuntu20.04-x86_64/gcc-9.3.0/zlib-1.2.11-p7dmb5p
+ /nfs/gce/projects/climate/software/linux-ubuntu22.04-x86_64/hdf5/1.12.2/openmpi-4.1.6/gcc-12.1.0
+ /nfs/gce/projects/climate/software/linux-ubuntu22.04-x86_64/netcdf/4.8.0c-4.3.1cxx-4.5.3f-parallel/openmpi-4.1.6/gcc-12.1.0
+ /nfs/gce/projects/climate/software/linux-ubuntu22.04-x86_64/pnetcdf/1.12.3/openmpi-4.1.6/gcc-12.1.0
+
+
64M
/nfs/gce/projects/climate/software/perl5/lib/perl5
- /nfs/gce/projects/climate/software/adios2/2.8.3.patch/mpich-3.4.2/gcc-11.1.0
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /nfs/gce/projects/climate/software/linux-ubuntu22.04-x86_64/adios2/2.9.1/mpich-4.1.2/gcc-12.1.0; else echo "$ADIOS2_ROOT"; fi}
@@ -1776,28 +1952,30 @@
/nfs/gce/projects/climate/software/linux-ubuntu20.04-x86_64/mpich/4.0/gcc-11.1.0/lib:$ENV{LD_LIBRARY_PATH}
/nfs/gce/projects/climate/software/linux-ubuntu20.04-x86_64/mpich/4.0/gcc-11.1.0/bin:$ENV{PATH}
- /nfs/gce/software/spack/opt/spack/linux-ubuntu20.04-x86_64/gcc-9.3.0/zlib-1.2.11-p7dmb5p
- /nfs/gce/projects/climate/software/linux-ubuntu20.04-x86_64/hdf5/1.12.1/mpich-4.0/gcc-11.1.0
+ /nfs/gce/software/spack/opt/spack/linux-ubuntu20.04-x86_64/gcc-9.3.0/zlib-1.2.11-p7dmb5p
+ /nfs/gce/projects/climate/software/linux-ubuntu20.04-x86_64/hdf5/1.12.1/mpich-4.0/gcc-11.1.0
/nfs/gce/projects/climate/software/linux-ubuntu20.04-x86_64/netcdf/4.8.0c-4.3.1cxx-4.5.3f-parallel/mpich-4.0/gcc-11.1.0
/nfs/gce/projects/climate/software/linux-ubuntu20.04-x86_64/pnetcdf/1.12.2/mpich-4.0/gcc-11.1.0
+ $SHELL{if [ -z "$MOAB_ROOT" ]; then echo /nfs/gce/projects/climate/software/moab/devel/mpich-4.0/gcc-11.1.0; else echo "$MOAB_ROOT"; fi}
+ /nfs/gce/projects/climate/software/moab/devel/mpich-4.0/gcc-11.1.0
/nfs/gce/projects/climate/software/linux-ubuntu20.04-x86_64/openmpi/4.1.3/gcc-11.1.0/lib:$ENV{LD_LIBRARY_PATH}
/nfs/gce/projects/climate/software/linux-ubuntu20.04-x86_64/openmpi/4.1.3/gcc-11.1.0/bin:$ENV{PATH}
- /nfs/gce/software/spack/opt/spack/linux-ubuntu20.04-x86_64/gcc-9.3.0/zlib-1.2.11-p7dmb5p
- /nfs/gce/projects/climate/software/linux-ubuntu20.04-x86_64/hdf5/1.12.1/openmpi-4.1.3/gcc-11.1.0
+ /nfs/gce/software/spack/opt/spack/linux-ubuntu20.04-x86_64/gcc-9.3.0/zlib-1.2.11-p7dmb5p
+ /nfs/gce/projects/climate/software/linux-ubuntu20.04-x86_64/hdf5/1.12.1/openmpi-4.1.3/gcc-11.1.0
/nfs/gce/projects/climate/software/linux-ubuntu20.04-x86_64/netcdf/4.8.0c-4.3.1cxx-4.5.3f-parallel/openmpi-4.1.3/gcc-11.1.0
/nfs/gce/projects/climate/software/linux-ubuntu20.04-x86_64/pnetcdf/1.12.2/openmpi-4.1.3/gcc-11.1.0
-
+
64M
/nfs/gce/projects/climate/software/perl5/lib/perl5
- /nfs/gce/projects/climate/software/linux-ubuntu20.04-x86_64/adios2/2.8.3.patch/mpich-4.0/gcc-11.1.0
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /nfs/gce/projects/climate/software/linux-ubuntu20.04-x86_64/adios2/2.9.1/mpich-4.0/gcc-11.1.0; else echo "$ADIOS2_ROOT"; fi}
@@ -1867,13 +2045,11 @@
0.1
- $ENV{SEMS_NETCDF_ROOT}
- $ENV{SEMS_NETCDF_ROOT}/include
- $ENV{SEMS_NETCDF_ROOT}/lib
+ $ENV{SEMS_NETCDF_ROOT}
64M
- $ENV{SEMS_NETCDF_ROOT}
+ $ENV{SEMS_NETCDF_ROOT}
@@ -1938,13 +2114,11 @@
- $ENV{SEMS_NETCDF_ROOT}
- $ENV{SEMS_NETCDF_ROOT}/include
- $ENV{SEMS_NETCDF_ROOT}/lib
+ $ENV{SEMS_NETCDF_ROOT}
64M
- $ENV{SEMS_NETCDF_ROOT}
+ $ENV{SEMS_NETCDF_ROOT}
@@ -1990,7 +2164,7 @@
module
- cmake/3.20.3-vedypwm
+ cmake/3.26.3-nszudya
gcc/7.4.0
@@ -2047,7 +2221,7 @@
$CIME_OUTPUT_ROOT/$CASE/run
$CIME_OUTPUT_ROOT/$CASE/bld
0.1
- 1000
+ 0
$SHELL{dirname $(dirname $(which nc-config))}
$SHELL{dirname $(dirname $(which nf-config))}
@@ -2066,34 +2240,16 @@
10
-
+
64M
-
+
granularity=core,balanced
1
-
+
cores
-
- /lcrc/group/e3sm/3rdparty/anvil/adios2/2.8.3.patch/intel-mpi-2019.9.304/intel-20.0.4
-
-
- /lcrc/group/e3sm/3rdparty/anvil/adios2/2.8.3.patch/intel-mpi-2019.9.304/gcc-8.2.0
-
-
- /lcrc/group/e3sm/3rdparty/anvil/adios2/2.8.3.patch/openmpi-4.1.1/intel-20.0.4
-
-
- /lcrc/group/e3sm/3rdparty/anvil/adios2/2.8.3.patch/openmpi-4.1.1/gcc-8.2.0
-
-
- /lcrc/group/e3sm/3rdparty/anvil/adios2/2.8.3.patch/mvapich2-2.3.6/intel-20.0.4
-
-
- /lcrc/group/e3sm/3rdparty/anvil/adios2/2.8.3.patch/mvapich2-2.2/gcc-8.2.0
-
@@ -2206,29 +2362,23 @@
$SHELL{dirname $(dirname $(which nf-config))}
$SHELL{dirname $(dirname $(which pnetcdf_version))}
-
+
128M
-
+
granularity=core,balanced
-
+
granularity=thread,balanced
-
+
cores
- /lcrc/group/e3sm/3rdparty/chrysalis/adios2/2.8.3.patch/openmpi-4.1.3/intel-20.0.4
+ $SHELL{if [ -z "$MOAB_ROOT" ]; then echo /lcrc/soft/climate/moab/chrysalis/intel; else echo "$MOAB_ROOT"; fi}
- /lcrc/group/e3sm/3rdparty/chrysalis/adios2/2.8.3.patch/openmpi-4.1.3/gcc-9.2.0
-
-
- /lcrc/group/e3sm/3rdparty/chrysalis/adios2/2.8.3.patch/intel-mpi-2019.9.304/intel-20.0.4
-
-
- /lcrc/group/e3sm/3rdparty/chrysalis/adios2/2.8.3.patch/intel-mpi-2019.9.304/gcc-9.2.0
+ $SHELL{if [ -z "$MOAB_ROOT" ]; then echo /lcrc/soft/climate/moab/chrysalis/gnu; else echo "$MOAB_ROOT"; fi}
@@ -2302,7 +2452,7 @@
1
2
-
+
64M
cores
@@ -2373,7 +2523,7 @@
$SHELL{dirname $(dirname $(which pnetcdf_version))}
/lcrc/group/e3sm/soft/perl/5.26.0/bin:$ENV{PATH}
-
+
64M
cores
@@ -2464,6 +2614,7 @@
$CIME_OUTPUT_ROOT/$CASE/run
$CIME_OUTPUT_ROOT/$CASE/bld
0.1
+ 0
$SHELL{dirname $(dirname $(which nc-config))}
$SHELL{dirname $(dirname $(which nf-config))}
@@ -2473,9 +2624,9 @@
$SHELL{dirname $(dirname $(which pnetcdf_version))}
- $SHELL{which h5dump | xargs dirname | xargs dirname}
+ $SHELL{which h5dump | xargs dirname | xargs dirname}
-
+
128M
spread
threads
@@ -2483,71 +2634,6 @@
shm:tmi
-
- /lcrc/group/e3sm/3rdparty/bebop/adios2/2.8.3.patch/intel-mpi-2018.4.274/intel-18.0.4
-
-
- /lcrc/group/e3sm/3rdparty/bebop/adios2/2.8.3.patch/intel-mpi-2018.4.274/gcc-8.2.0
-
-
- /lcrc/group/e3sm/3rdparty/bebop/adios2/2.8.3.patch/mvapich2-2.3.1/intel-18.0.4
-
-
- /lcrc/group/e3sm/3rdparty/bebop/adios2/2.8.3.patch/mvapich2-2.3/gcc-8.2.0
-
-
-
-
- LLNL Linux Cluster, Linux (pgi), 16 pes/node, batch system is Slurm
- LINUX
- intel
- mpich
- cbronze
- /p/lustre2/$USER/e3sm_scratch/syrah
- /usr/gdata/climdat/ccsm3data/inputdata
- /usr/gdata/climdat/ccsm3data/inputdata/atm/datm7
- /p/lustre2/$USER/archive/$CASE
- /p/lustre2/$USER/ccsm_baselines/$COMPILER
- /usr/gdata/climdat/tools/cprnc
- 8
- lc_slurm
- donahue5 -at- llnl.gov
- 32
- 16
-
-
-
-
- srun
-
-
- /usr/share/lmod/lmod/init/env_modules_python.py
- /usr/share/lmod/lmod/init/perl
- /usr/share/lmod/lmod/init/sh
- /usr/share/lmod/lmod/init/csh
- module
- module
- /usr/share/lmod/lmod/libexec/lmod python
- /usr/share/lmod/lmod/libexec/lmod perl
-
- python
- git
- intel/19.0.4
- mvapich2/2.3
- cmake/3.18.0
- netcdf-fortran/4.4.4
- pnetcdf/1.9.0
-
-
- $CIME_OUTPUT_ROOT/$CASE/run
- $CIME_OUTPUT_ROOT/$CASE/bld
-
- /usr/tce/packages/netcdf-fortran/netcdf-fortran-4.4.4-intel-19.0.4/
- /usr/tce/packages/netcdf-fortran/netcdf-fortran-4.4.4-intel-19.0.4/
-
-
- /usr/tce/packages/pnetcdf/pnetcdf-1.9.0-intel-19.0.4-mvapich2-2.3/
-
@@ -2583,24 +2669,23 @@
/usr/share/lmod/lmod/libexec/lmod python
/usr/share/lmod/lmod/libexec/lmod perl
- python/3.8.2
+ python/3.9.12
git
- intel/19.0.4
- mvapich2/2.3
- cmake/3.18.0
- netcdf-fortran/4.4.4
- pnetcdf/1.9.0
+ mkl/2022.1.0
+ intel-classic/2021.6.0-magic
+ mvapich2/2.3.7
+ cmake/3.19.2
+ netcdf-fortran-parallel/4.6.0
+ netcdf-c-parallel/4.9.0
+ parallel-netcdf/1.12.3
$CIME_OUTPUT_ROOT/$CASE/run
$CIME_OUTPUT_ROOT/$CASE/bld
- /usr/tce/packages/netcdf-fortran/netcdf-fortran-4.4.4-intel-19.0.4/
- /usr/tce/packages/netcdf-fortran/netcdf-fortran-4.4.4-intel-19.0.4/
-
-
- /usr/tce/packages/pnetcdf/pnetcdf-1.9.0-intel-19.0.4-mvapich2-2.3/
-
+ /usr/tce/packages/netcdf-fortran/netcdf-fortran-4.6.0-mvapich2-2.3.7-intel-classic-2021.6.0/
+ /usr/tce/packages/parallel-netcdf/parallel-netcdf-1.12.3-mvapich2-2.3.7-intel-classic-2021.6.0/
+
@@ -2640,17 +2725,21 @@
git
mkl/2022.1.0
intel-classic/2021.6.0-magic
- mvapich2/2.3.6
+ mvapich2/2.3.7
cmake/3.19.2
- netcdf-fortran-parallel/4.6.0
- netcdf-c-parallel/4.9.0
+ /usr/gdata/climdat/install/quartz/modulefiles
+ hdf5/1.12.2
+ netcdf-c/4.9.0
+ netcdf-fortran/4.6.0
+ parallel-netcdf/1.12.3
+ screamML-venv/0.0.1
$CIME_OUTPUT_ROOT/$CASE/run
$CIME_OUTPUT_ROOT/$CASE/bld
- /usr/tce/packages/netcdf-fortran/netcdf-fortran-4.6.0-mvapich2-2.3.6-intel-classic-2021.6.0/
- /usr/tce/packages/netcdf-fortran/netcdf-fortran-4.6.0-mvapich2-2.3.6-intel-classic-2021.6.0/
+ /usr/gdata/climdat/install/quartz/netcdf-fortran/
+ /usr/tce/packages/parallel-netcdf/parallel-netcdf-1.12.3-mvapich2-2.3.7-intel-classic-2021.6.0
@@ -2744,26 +2833,21 @@
1
-e PMI_LABEL_ERROUT=1
+ $ENV{NETCDF_DIR}
-
+
-e OMP_NUM_THREADS=$ENV{OMP_NUM_THREADS} -e OMP_STACKSIZE=128M -e KMP_AFFINITY=granularity=thread,scatter
-
+
-e OMP_NUM_THREADS=$ENV{OMP_NUM_THREADS} -e OMP_STACKSIZE=128M -e OMP_PROC_BIND=spread -e OMP_PLACES=threads
-
- /projects/ClimateEnergy_4/3rdparty/adios2/2.7.0/cray-mpich-7.7.14/intel-19.1.0
-
-
- /projects/ClimateEnergy_4/3rdparty/adios2/2.7.0/cray-mpich-7.7.14/gcc-9.3.0
-
ANL experimental/evaluation cluster, batch system is cobalt
jlse.*
LINUX
- oneapi-ifx,oneapi-ifxgpu,oneapi-ifort,gnu
+ oneapi-ifx,oneapi-ifxgpu,gnu
mpich,impi,openmpi
/gpfs/jlse-fs0/projects/climate/$USER/scratch
/gpfs/jlse-fs0/projects/climate/inputdata
@@ -2823,6 +2907,7 @@
/home/azamat/soft/perl/5.32.0/bin:$ENV{PATH}
/home/azamat/soft/netcdf/4.4.1c-4.2cxx-4.4.4f/oneapi-2020.12.15.004-intel_mpi-2019.4.243
/home/azamat/soft/pnetcdf/1.12.1/oneapi-2020.12.15.004-intel_mpi-2019.4.243
+ /home/azamat/soft/libs
1
@@ -2865,11 +2950,11 @@
0
DISABLED
-
+
verbose,granularity=thread,balanced
128M
-
+
threads
128M
@@ -2882,7 +2967,7 @@
ANL Sunspot Test and Development System (TDS), batch system is pbspro
uan-.*
LINUX
- oneapi-ifx,oneapi-ifxgpu,oneapi-ifort,gnu
+ oneapi-ifx,oneapi-ifxgpu,gnu
mpich,impi,openmpi
CSC249ADSE15_CNDA
/gila/CSC249ADSE15_CNDA/performance_archive
@@ -2933,33 +3018,25 @@
/soft/modulefiles
- spack cmake
- /soft/restricted/CNDA/updates/modulefiles
+ spack cmake/3.26.3-gcc-11.2.0-vnn7ncx
+ prepend-deps/default
- oneapi/eng-compiler/2022.12.30.003
- mpich/52.2/icc-all-pmix-gpu
-
-
-
-
-
+ gcc
+ oneapi/eng-compiler/2023.05.15.007
spack cmake
gcc/10.3.0
-
- cray-pals
- append-deps/default
- libfabric/1.15.2.0
-
$CIME_OUTPUT_ROOT/$CASE/run
$CIME_OUTPUT_ROOT/$CASE/bld
/lus/gila/projects/CSC249ADSE15_CNDA/software/oneAPI.2022.12.30.003/netcdf
/lus/gila/projects/CSC249ADSE15_CNDA/software/oneAPI.2022.12.30.003/pnetcdf
+ /lus/gila/projects/CSC249ADSE15_CNDA/software/oneAPI.2022.12.30.003/netcdf/lib:$ENV{LD_LIBRARY_PATH}
+ /lus/gila/projects/CSC249ADSE15_CNDA/software/oneAPI.2022.12.30.003/netcdf/bin:$ENV{PATH}
1
@@ -2991,11 +3068,120 @@
0
-
+
+ verbose,granularity=thread,balanced
+ 128M
+
+
+ threads
+ 128M
+
+
+ -1
+
+
+
+
+ ALCF Aurora, 10624 nodes, 2x52c SPR, 6x2s PVC, 2x512GB DDR5, 2x64GB CPU-HBM, 6x128GB GPU-HBM, Slingshot 11, PBSPro
+ aurora-uan-.*
+ LINUX
+ oneapi-ifx,oneapi-ifxgpu,gnu
+ mpich
+ CSC249ADSE15_CNDA
+ /lus/gecko/projects/CSC249ADSE15_CNDA/performance_archive
+ .*
+ /lus/gecko/projects/CSC249ADSE15_CNDA/$USER/scratch
+ /lus/gecko/projects/CSC249ADSE15_CNDA/inputdata
+ /lus/gecko/projects/CSC249ADSE15_CNDA/inputdata/atm/datm7
+ $CIME_OUTPUT_ROOT/archive/$CASE
+ /lus/gecko/projects/CSC249ADSE15_CNDA/baselines/$COMPILER
+ /lus/gecko/projects/CSC249ADSE15_CNDA/tools/cprnc/cprnc
+ 16
+ e3sm_developer
+ 4
+ pbspro
+ e3sm
+ 208
+ 104
+ 104
+ 12
+ FALSE
+
+ mpiexec
+
+
+ -np {{ total_tasks }} --label
+ -ppn {{ tasks_per_node }}
+ --cpu-bind $ENV{RANKS_BIND} -envall
+ -d $ENV{OMP_NUM_THREADS}
+ $ENV{GPU_TILE_COMPACT}
+
+
+
+ /lus/gecko/projects/CSC249ADSE15_CNDA/modules/lmod.sh
+ /soft/sunspot_migrate/soft/packaging/lmod/lmod/init/csh
+ /soft/sunspot_migrate/soft/packaging/lmod/lmod/init/env_modules_python.py
+ module
+ module
+ /soft/sunspot_migrate/soft/packaging/lmod/lmod/libexec/lmod python
+
+
+ /soft/modulefiles
+ /soft/restricted/CNDA/updates/modulefiles
+ spack-pe-gcc cmake
+
+
+ oneapi/eng-compiler/2023.05.15.007
+
+
+ spack-pe-gcc cmake
+ gcc/10.3.0
+
+
+ cray-pals
+ libfabric/1.15.2.0
+ cray-libpals/1.3.2
+
+
+ $CIME_OUTPUT_ROOT/$CASE/run
+ $CIME_OUTPUT_ROOT/$CASE/bld
+
+ /lus/gecko/projects/CSC249ADSE15_CNDA/software/netcdf-c/4.9.2/oneapi.eng.2023.05.15.007
+ /lus/gecko/projects/CSC249ADSE15_CNDA/software/netcdf-fortran/4.6.1/oneapi.eng.2023.05.15.007
+ /lus/gecko/projects/CSC249ADSE15_CNDA/software/pnetcdf/1.12.3/oneapi.eng.2023.05.15.007
+ /lus/gecko/projects/CSC249ADSE15_CNDA/software/pnetcdf/1.12.3/oneapi.eng.2023.05.15.007/lib:/lus/gecko/projects/CSC249ADSE15_CNDA/software/netcdf-fortran/4.6.1/oneapi.eng.2023.05.15.007/lib:/lus/gecko/projects/CSC249ADSE15_CNDA/software/netcdf-c/4.9.2/oneapi.eng.2023.05.15.007/lib:$ENV{LD_LIBRARY_PATH}
+ /lus/gecko/projects/CSC249ADSE15_CNDA/software/pnetcdf/1.12.3/oneapi.eng.2023.05.15.007/bin:/lus/gecko/projects/CSC249ADSE15_CNDA/software/netcdf-fortran/4.6.1/oneapi.eng.2023.05.15.007/bin:/lus/gecko/projects/CSC249ADSE15_CNDA/software/netcdf-c/4.9.2/oneapi.eng.2023.05.15.007/bin:$ENV{PATH}
+ list:0-7,104-111:8-15,112-119:16-23,120-127:24-31,128-135:32-39,136-143:40-47,144-151:52-59,156-163:60-67,164-171:68-75,172-179:76-83,180-187:84-91,188-195:92-99,196-203
+
+
+ 1
+
+
+ level_zero:gpu
+ NO_GPU
+ 0
+ disable
+ disable
+ 1
+ 4000MB
+ 0
+ /soft/tools/mpi_wrapper_utils/gpu_tile_compact.sh
+ 131072
+ 20
+
+
+ 0
+ DISABLED
+ 131072
+ 20
+ 0
+
+
+
verbose,granularity=thread,balanced
128M
-
+
threads
128M
@@ -3149,7 +3335,7 @@
$CIME_OUTPUT_ROOT/csmruns/$CASE/bld
64M
- $ENV{NETCDF_ROOT}
+ $ENV{NETCDF_ROOT}
$ENV{MLIBHOME}
@@ -3271,7 +3457,7 @@
64M
- $ENV{NETCDF_LIB}/../
+ $ENV{NETCDF_LIB}/../
$ENV{MKLROOT}
@@ -3366,7 +3552,8 @@
0.05
0
- $ENV{NETCDF_ROOT}/
+ $ENV{NETCDF_ROOT}/
+ $ENV{PNETCDF_ROOT}/
$ENV{MKLROOT}
@@ -3382,22 +3569,10 @@
10
-
+
64M
cores
-
- /qfs/people/wuda020/3rdparty/adios2/2.8.3.patch/intelmpi-2019u4/intel-19.0.5
-
-
- /qfs/people/wuda020/3rdparty/adios2/2.8.3.patch/intelmpi-2019u3/pgi-19.10
-
-
- /qfs/people/wuda020/3rdparty/adios2/2.8.3.patch/mvapich2-2.3.1/intel-19.0.4
-
-
- /qfs/people/wuda020/3rdparty/adios2/2.8.3.patch/mvapich2-2.3.1/pgi-19.7
-
@@ -3477,7 +3652,7 @@
0.05
0
- $SHELL{dirname $(dirname $(which h5diff))}
+ $SHELL{dirname $(dirname $(which h5diff))}
$SHELL{dirname $(dirname $(which nc-config))}
$SHELL{dirname $(dirname $(which nf-config))}
$ENV{MKLROOT}
@@ -3493,7 +3668,7 @@
10
-
+
64M
cores
@@ -3595,6 +3770,9 @@
/software/user_tools/current/cades-ccsi/perl5/lib/perl5/
+ /software/dev_tools/swtree/cs400_centos7.2_pe2016-08/netcdf-hdf5parallel/4.3.3.1/centos7.2_gnu5.3.0
+ /software/dev_tools/swtree/cs400_centos7.2_pe2016-08/pnetcdf/1.9.0/centos7.2_gnu5.3.0
+ /software/tools/compilers/intel_2017/mkl/lib/intel64
@@ -3672,8 +3850,6 @@
$CIME_OUTPUT_ROOT/$CASE/run
$CIME_OUTPUT_ROOT/$CASE/bld
- $ENV{PNETCDF_PATH}
- $ENV{NETCDF_PATH}
$ENV{MKLROOT}
romio_ds_write=disable;romio_ds_read=disable;romio_cb_write=enable;romio_cb_read=enable
@@ -3752,8 +3928,6 @@
$CIME_OUTPUT_ROOT/$CASE/run
$CIME_OUTPUT_ROOT/$CASE/bld
- $ENV{PNETCDF_PATH}
- $ENV{NETCDF_PATH}
$ENV{MKLROOT}
romio_ds_write=disable;romio_ds_read=disable;romio_cb_write=enable;romio_cb_read=enable
@@ -3810,30 +3984,31 @@
PrgEnv-aocc
craype-accel-nvidia80
craype-accel-host
+ cce
- PrgEnv-gnu/8.3.3
- gcc/12.1.0
+ PrgEnv-gnu/8.4.0
+ gcc/12.2.0
- PrgEnv-nvidia/8.3.3
- nvidia/22.3
+ PrgEnv-nvidia/8.4.0
+ nvidia/22.7
- PrgEnv-intel/8.3.3
- intel-classic/2022.2.1
+ PrgEnv-intel/8.4.0
+ intel-classic/2023.2.0
- PrgEnv-aocc
+ PrgEnv-aocc/8.4.0
aocc/3.2.0
- PrgEnv-aocc
+ PrgEnv-aocc/8.4.0
aocc/3.2.0
@@ -3841,12 +4016,12 @@
craype-accel-host
cray-libsci
craype
- cray-mpich/8.1.21
- libfabric/1.15.0.0
- cray-hdf5-parallel/1.12.2.1
- cray-netcdf-hdf5parallel/4.9.0.1
- cray-parallel-netcdf/1.12.3.1
- cmake/3.20.3
+ cray-mpich/8.1.26
+ libfabric/1.15.2.0
+ cray-hdf5-parallel/1.12.2.3
+ cray-netcdf-hdf5parallel/4.9.0.3
+ cray-parallel-netcdf/1.12.3.3
+ cmake/3.22.3
@@ -3865,6 +4040,8 @@
romio_ds_write=disable;romio_ds_read=disable;romio_cb_write=enable;romio_cb_read=enable
software
MPI_Bcast
+ $ENV{CRAY_NETCDF_HDF5PARALLEL_PREFIX}
+ $ENV{CRAY_PARALLEL_NETCDF_PREFIX}
-1
@@ -3952,6 +4129,9 @@
+
+ /soft/netcdf/fortran-4.4-intel-sp1-update3-parallel/lib
+
@@ -4031,6 +4211,12 @@
$CIME_OUTPUT_ROOT/$CASE/run
$CIME_OUTPUT_ROOT/$CASE/bld
+
+
+ $ENV{NETCDF_DIR}
+ $ENV{PNETCDF_DIR}
+ /global/software/sl-6.x86_64/modules/intel/2016.1.150/lapack/3.6.0-intel
+
@@ -4059,12 +4245,10 @@
-
mpirun
-np {{ total_tasks }}
-
/etc/profile.d/modules.sh
@@ -4076,10 +4260,10 @@
/usr/Modules/bin/modulecmd perl
/usr/Modules/bin/modulecmd python
-
+
cmake/3.15.0
perl
- xml-libxml/2.0116
+ xml-libxml/2.0116
python/3.6
@@ -4110,6 +4294,12 @@
$CIME_OUTPUT_ROOT/$CASE/run
$CIME_OUTPUT_ROOT/$CASE/bld
+
+
+ $ENV{NETCDF_DIR}
+ $ENV{PNETCDF_DIR}
+ /global/software/sl-6.x86_64/modules/intel/2016.1.150/lapack/3.6.0-intel
+
@@ -4188,6 +4378,7 @@
/usr/local/share/cmake-3.21/
+ $ENV{NETCDF_HOME}
@@ -4290,16 +4481,19 @@
$ENV{OLCF_NETCDF_C_ROOT}
$ENV{OLCF_NETCDF_FORTRAN_ROOT}
+ $ENV{OLCF_NETLIB_LAPACK_ROOT}
+ Generic
$ENV{OLCF_ESSL_ROOT}
- $ENV{OLCF_HDF5_ROOT}
+ $ENV{OLCF_HDF5_ROOT}
+ True
$ENV{OLCF_PARALLEL_NETCDF_ROOT}
0
-
+
-
+
-E OMP_NUM_THREADS=$ENV{OMP_NUM_THREADS} -E OMP_PROC_BIND=spread -E OMP_PLACES=threads -E OMP_STACKSIZE=256M
@@ -4359,13 +4553,13 @@
mlx5_0:1,mlx5_3:1
- /gpfs/alpine/cli115/world-shared/3rdparty/adios2/2.8.3.patch/spectrum-mpi-10.4.0.3/xl-16.1.1-10
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /gpfs/alpine/cli115/world-shared/3rdparty/adios2/2.9.1/spectrum-mpi-10.4.0.3/xl-16.1.1-10; else echo "$ADIOS2_ROOT"; fi}
- /gpfs/alpine/cli115/world-shared/3rdparty/adios2/2.8.3.patch/spectrum-mpi-10.4.0.3/nvhpc-21.11
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /gpfs/alpine/cli115/world-shared/3rdparty/adios2/2.9.1/spectrum-mpi-10.4.0.3/nvhpc-21.11; else echo "$ADIOS2_ROOT"; fi}
- /gpfs/alpine/cli115/world-shared/3rdparty/adios2/2.8.3.patch/spectrum-mpi-10.4.0.3/gcc-9.1.0
+ $SHELL{if [ -z "$ADIOS2_ROOT" ]; then echo /gpfs/alpine/cli115/world-shared/3rdparty/adios2/2.9.1/spectrum-mpi-10.4.0.3/gcc-9.1.0; else echo "$ADIOS2_ROOT"; fi}
@@ -4464,15 +4658,18 @@
$SHELL{dirname $(dirname $(which nc-config))}
$SHELL{dirname $(dirname $(which nf-config))}
$SHELL{dirname $(dirname $(which pnetcdf_version))}
+ $ENV{OLCF_NETLIB_LAPACK_ROOT}
+ Generic
$ENV{OLCF_ESSL_ROOT}
- $ENV{OLCF_HDF5_ROOT}
+ $ENV{OLCF_HDF5_ROOT}
+ True
0
-
+
-
+
-E OMP_NUM_THREADS=$ENV{OMP_NUM_THREADS} -E OMP_PROC_BIND=spread -E OMP_PLACES=threads -E OMP_STACKSIZE=256M
@@ -4637,7 +4834,7 @@
/home/groups/coegroup/e3sm/soft/openmpi/2.1.6/gcc/8.2.0/lib:/home/groups/coegroup/e3sm/soft/netcdf/4.4.1c-4.2cxx-4.4.4f/gcc/8.2.0/lib:$ENV{LD_LIBRARY_PATH}
/home/groups/coegroup/e3sm/soft/pnetcdf/1.12.1/gcc/8.2.0/openmpi/2.1.6
-
+
128M
threads
@@ -4724,17 +4921,17 @@
0.2
0.20
- $SHELL{dirname $(dirname $(which h5diff))}
+ $SHELL{dirname $(dirname $(which h5diff))}
$SHELL{dirname $(dirname $(which nc-config))}
$SHELL{dirname $(dirname $(which nf-config))}
$SHELL{dirname $(dirname $(which pnetcdf-config))}
/opt/apps/spack/opt/spack/linux-centos7-zen2/gcc-12.2.0/openblas-0.3.21-z66r7lyxwkhsshgreexm4cedffp73scp
- /opt/apps/spack/opt/spack/linux-centos7-zen2/gcc-12.2.0/netlib-lapack-3.10.1-lkhddpuidlw2z74g5ui6eq5iattsfjxp
+ /opt/apps/spack/opt/spack/linux-centos7-zen2/gcc-12.2.0/netlib-lapack-3.10.1-lkhddpuidlw2z74g5ui6eq5iattsfjxp
$ENV{PERL5LIB}:/opt/apps/spack/opt/spack/linux-centos7-zen2/gcc-12.2.0/perl-5.36.0-sly2pft2edg2p3iyijfyy6dzntusokno/lib/site_perl/5.36.0
FALSE
-
+
128M
threads
@@ -4814,16 +5011,16 @@
0.2
0.20
- $SHELL{dirname $(dirname $(which h5diff))}
+ $SHELL{dirname $(dirname $(which h5diff))}
$SHELL{dirname $(dirname $(which nc-config))}
$SHELL{dirname $(dirname $(which nf-config))}
$SHELL{dirname $(dirname $(which pnetcdf-config))}
/apps/spack/opt/spack/linux-centos7-cascadelake/gcc-12.2.0/openblas-0.3.20-nxcsxdi56nj2gxyo65iyuaecp3cbd4xd
- /apps/spack/opt/spack/linux-centos7-cascadelake/gcc-12.2.0/netlib-lapack-3.10.1-xjw3q4abrpdihbyvx72em7l4wrzxm3zp
+ /apps/spack/opt/spack/linux-centos7-cascadelake/gcc-12.2.0/netlib-lapack-3.10.1-xjw3q4abrpdihbyvx72em7l4wrzxm3zp
FALSE
-
+
128M
threads
@@ -4897,10 +5094,10 @@
$SHELL{dirname $(dirname $(which nf-config))}
$SHELL{dirname $(dirname $(which pnetcdf_version))}
-
+
128M
-
+
cores
@@ -5005,6 +5202,7 @@
FALSE
+ $ENV{NETCDF_DIR}
yes
@@ -5240,6 +5438,8 @@
1
1
+ /opt/conda
+ /opt/conda
diff --git a/mache/machines/pm-gpu.cfg b/mache/machines/pm-gpu.cfg
new file mode 100644
index 00000000..1dbb2447
--- /dev/null
+++ b/mache/machines/pm-gpu.cfg
@@ -0,0 +1,84 @@
+# Options related to deploying an e3sm-unified conda environment on supported
+# machines
+[e3sm_unified]
+
+# the unix group for permissions for the e3sm-unified conda environment
+group = e3sm
+
+# the compiler set to use for system libraries and MPAS builds
+compiler = nvidiagpu
+
+# the system MPI library to use with the nvidiagpu compiler
+mpi = mpich
+
+# the path to the directory where activation scripts, the base environment, and
+# system libraries will be deployed
+base_path = /global/common/software/e3sm/anaconda_envs
+
+# whether to use system modules for hdf5, netcdf-c, netcdf-fortran and pnetcdf
+# (spack modules are used otherwise)
+use_system_hdf5_netcdf = True
+
+
+# config options related to data needed by diagnostics software such as
+# e3sm_diags and MPAS-Analysis
+[diagnostics]
+
+# The base path to the diagnostics directory
+base_path = /global/cfs/cdirs/e3sm/diagnostics
+
+# the unix group for permissions for diagnostics
+group = e3sm
+
+
+# config options associated with web portals
+[web_portal]
+
+# The path to the base of the web portals
+base_path = /global/cfs/cdirs/e3sm/www
+
+# The base URL that corresponds to the base path
+base_url = https://portal.nersc.gov/cfs/e3sm
+
+
+# The parallel section describes options related to running jobs in parallel
+[parallel]
+
+# parallel system of execution: slurm, cobalt or single_node
+system = slurm
+
+# whether to use mpirun or srun to run a task
+parallel_executable = srun
+
+# cores per node on the machine
+cores_per_node = 256
+
+# account for running diagnostics jobs
+account = e3sm
+
+# available constraint(s) (default is the first)
+constraints = gpu
+
+# quality of service (default is the first)
+qos = regular, debug, premium
+
+# Config options related to spack environments
+[spack]
+
+# whether to load modules from the spack yaml file before loading the spack
+# environment
+modules_before = False
+
+# whether to load modules from the spack yaml file after loading the spack
+# environment
+modules_after = False
+
+# whether the machine uses cray compilers
+cray_compilers = True
+
+
+# config options related to synchronizing files
+[sync]
+
+# the full hostname of the machine
+hostname = perlmutter-p1.nersc.gov
diff --git a/mache/spack/pm-cpu_gnu_mpich.csh b/mache/spack/pm-cpu_gnu_mpich.csh
index e59845d1..7ea047b4 100644
--- a/mache/spack/pm-cpu_gnu_mpich.csh
+++ b/mache/spack/pm-cpu_gnu_mpich.csh
@@ -21,10 +21,10 @@ module load craype-accel-host
{% if e3sm_lapack %}
module load cray-libsci/23.02.1.1
{% endif %}
-module load craype/2.7.19
+module load craype/2.7.20
module rm cray-mpich &> /dev/null
module load libfabric/1.15.2.0
-module load cray-mpich/8.1.24
+module load cray-mpich/8.1.25
{% if e3sm_hdf5_netcdf %}
module rm cray-hdf5-parallel &> /dev/null
module rm cray-netcdf-hdf5parallel &> /dev/null
diff --git a/mache/spack/pm-cpu_gnu_mpich.sh b/mache/spack/pm-cpu_gnu_mpich.sh
index c8d6766a..60a5c8fa 100644
--- a/mache/spack/pm-cpu_gnu_mpich.sh
+++ b/mache/spack/pm-cpu_gnu_mpich.sh
@@ -21,10 +21,10 @@ module load craype-accel-host
{% if e3sm_lapack %}
module load cray-libsci/23.02.1.1
{% endif %}
-module load craype/2.7.19
+module load craype/2.7.20
module rm cray-mpich &> /dev/null
module load libfabric/1.15.2.0
-module load cray-mpich/8.1.24
+module load cray-mpich/8.1.25
{% if e3sm_hdf5_netcdf %}
module rm cray-hdf5-parallel &> /dev/null
module rm cray-netcdf-hdf5parallel &> /dev/null
diff --git a/mache/spack/pm-cpu_gnu_mpich.yaml b/mache/spack/pm-cpu_gnu_mpich.yaml
index 3d44ea17..2da79594 100644
--- a/mache/spack/pm-cpu_gnu_mpich.yaml
+++ b/mache/spack/pm-cpu_gnu_mpich.yaml
@@ -18,7 +18,7 @@ spack:
all:
compiler: [gcc@11.2.0]
providers:
- mpi: [cray-mpich@8.1.24]
+ mpi: [cray-mpich@8.1.25]
{% if e3sm_lapack %}
lapack: [cray-libsci@23.02.1.1]
{% endif %}
@@ -32,21 +32,46 @@ spack:
- spec: curl@7.66.0
prefix: /usr
buildable: false
+ cmake:
+ externals:
+ - spec: cmake@3.24.3
+ prefix: /global/common/software/nersc/pm-2022q4/spack/linux-sles15-zen/cmake-3.24.3-k5msymx/
+ buildable: false
gettext:
externals:
- spec: gettext@0.20.2
prefix: /usr
buildable: false
+ gmake:
+ externals:
+ - spec: gmake@4.2.1
+ prefix: /usr
+ buildable: false
+ libuv:
+ externals:
+ - spec: libuv@1.44.2
+ prefix: /usr
+ buildable: false
libxml2:
externals:
- spec: libxml2@2.9.7
prefix: /usr
buildable: false
+ m4:
+ externals:
+ - spec: m4@1.4.18
+ prefix: /usr
+ buildable: false
ncurses:
externals:
- spec: ncurses@6.1.20180317
prefix: /usr
buildable: false
+ ninja:
+ externals:
+ - spec: ninja@1.10.0
+ prefix: /usr
+ buildable: false
openssl:
externals:
- spec: openssl@1.1.1d
@@ -81,16 +106,16 @@ spack:
- PrgEnv-gnu/8.3.3
- gcc/11.2.0
- craype-accel-host
- - craype/2.7.19
+ - craype/2.7.20
- libfabric/1.15.2.0
buildable: false
cray-mpich:
externals:
- - spec: cray-mpich@8.1.24
- prefix: /opt/cray/pe/mpich/8.1.24/ofi/gnu/9.1
+ - spec: cray-mpich@8.1.25
+ prefix: /opt/cray/pe/mpich/8.1.25/ofi/gnu/9.1
modules:
- libfabric/1.15.2.0
- - cray-mpich/8.1.24
+ - cray-mpich/8.1.25
buildable: false
libfabric:
externals:
@@ -108,35 +133,11 @@ spack:
- cray-libsci/23.02.1.1
buildable: false
{% endif %}
-{% if e3sm_hdf5_netcdf %}
- hdf5:
- externals:
- - spec: hdf5@1.12.2.3~cxx+fortran+hl~java+mpi+shared
- prefix: /opt/cray/pe/hdf5-parallel/1.12.2.3/GNU/9.1
- buildable: false
- parallel-netcdf:
- externals:
- - spec: parallel-netcdf@1.12.3.3+cxx+fortran+pic+shared
- prefix: /opt/cray/pe/parallel-netcdf/1.12.3.3/GNU/9.1/
- buildable: false
- netcdf-c:
- externals:
- - spec: netcdf-c@4.9.0.3+mpi~parallel-netcdf
- prefix: /opt/cray/pe/netcdf-hdf5parallel/4.9.0.3/GNU/9.1
- buildable: false
- netcdf-fortran:
- externals:
- - spec: netcdf-fortran@4.5.3 ^netcdf-c+mpi
- prefix: /opt/cray/pe/netcdf-hdf5parallel/4.9.0.3/GNU/9.1
- buildable: false
-{% endif %}
-{% if system_hdf5_netcdf %}
+{% if e3sm_hdf5_netcdf or system_hdf5_netcdf %}
hdf5:
externals:
- spec: hdf5@1.12.2.3~cxx+fortran+hl~java+mpi+shared
prefix: /opt/cray/pe/hdf5-parallel/1.12.2.3/GNU/9.1
- - spec: hdf5@1.12.2.3~cxx+fortran+hl~java~mpi+shared
- prefix: /opt/cray/pe/hdf5/1.12.2.3/GNU/9.1
buildable: false
parallel-netcdf:
externals:
@@ -147,15 +148,11 @@ spack:
externals:
- spec: netcdf-c@4.9.0.3+mpi~parallel-netcdf
prefix: /opt/cray/pe/netcdf-hdf5parallel/4.9.0.3/GNU/9.1
- - spec: netcdf-c@4.9.0.3~mpi~parallel-netcdf
- prefix: /opt/cray/pe/netcdf/4.9.0.3/GNU/9.1
buildable: false
netcdf-fortran:
externals:
- - spec: netcdf-fortran@4.5.3 ^netcdf-c+mpi
+ - spec: netcdf-fortran@4.5.3
prefix: /opt/cray/pe/netcdf-hdf5parallel/4.9.0.3/GNU/9.1
- - spec: netcdf-fortran@4.5.3 ^netcdf-c~mpi
- prefix: /opt/cray/pe/netcdf/4.9.0.3/GNU/9.1
buildable: false
{% endif %}
config:
@@ -175,8 +172,8 @@ spack:
- PrgEnv-gnu/8.3.3
- gcc/11.2.0
- craype-accel-host
- - craype/2.7.19
+ - craype/2.7.20
- libfabric/1.15.2.0
environment:
prepend_path:
- PKG_CONFIG_PATH: "/opt/cray/xpmem/2.5.2-2.4_3.30__gd0f7936.shasta/lib64/pkgconfig"
+ PKG_CONFIG_PATH: "/opt/cray/xpmem/2.6.2-2.5_2.33__gd067c3f.shasta/lib64/pkgconfig"
diff --git a/mache/spack/pm-cpu_intel_mpich.csh b/mache/spack/pm-cpu_intel_mpich.csh
index 57ebf184..58307f08 100644
--- a/mache/spack/pm-cpu_intel_mpich.csh
+++ b/mache/spack/pm-cpu_intel_mpich.csh
@@ -18,10 +18,9 @@ module rm darshan &> /dev/null
module load PrgEnv-intel/8.3.3
module load intel/2023.1.0
module load craype-accel-host
-module load craype/2.7.19
+module load craype/2.7.20
module rm cray-mpich &> /dev/null
-module load libfabric/1.15.2.0
-module load cray-mpich/8.1.24
+module load cray-mpich/8.1.25
{% if e3sm_hdf5_netcdf %}
module rm cray-hdf5-parallel &> /dev/null
module rm cray-netcdf-hdf5parallel &> /dev/null
diff --git a/mache/spack/pm-cpu_intel_mpich.sh b/mache/spack/pm-cpu_intel_mpich.sh
index c7505d41..0c59136a 100644
--- a/mache/spack/pm-cpu_intel_mpich.sh
+++ b/mache/spack/pm-cpu_intel_mpich.sh
@@ -18,10 +18,9 @@ module rm darshan &> /dev/null
module load PrgEnv-intel/8.3.3
module load intel/2023.1.0
module load craype-accel-host
-module load craype/2.7.19
+module load craype/2.7.20
module rm cray-mpich &> /dev/null
-module load libfabric/1.15.2.0
-module load cray-mpich/8.1.24
+module load cray-mpich/8.1.25
{% if e3sm_hdf5_netcdf %}
module rm cray-hdf5-parallel &> /dev/null
module rm cray-netcdf-hdf5parallel &> /dev/null
diff --git a/mache/spack/pm-cpu_intel_mpich.yaml b/mache/spack/pm-cpu_intel_mpich.yaml
index 3ea1ce60..98f1070b 100644
--- a/mache/spack/pm-cpu_intel_mpich.yaml
+++ b/mache/spack/pm-cpu_intel_mpich.yaml
@@ -26,21 +26,46 @@ spack:
- spec: curl@7.66.0
prefix: /usr
buildable: false
+ cmake:
+ externals:
+ - spec: cmake@3.24.3
+ prefix: /global/common/software/nersc/pm-2022q4/spack/linux-sles15-zen/cmake-3.24.3-k5msymx/
+ buildable: false
gettext:
externals:
- spec: gettext@0.20.2
prefix: /usr
buildable: false
+ gmake:
+ externals:
+ - spec: gmake@4.2.1
+ prefix: /usr
+ buildable: false
+ libuv:
+ externals:
+ - spec: libuv@1.44.2
+ prefix: /usr
+ buildable: false
libxml2:
externals:
- spec: libxml2@2.9.7
prefix: /usr
buildable: false
+ m4:
+ externals:
+ - spec: m4@1.4.18
+ prefix: /usr
+ buildable: false
ncurses:
externals:
- spec: ncurses@6.1.20180317
prefix: /usr
buildable: false
+ ninja:
+ externals:
+ - spec: ninja@1.10.0
+ prefix: /usr
+ buildable: false
openssl:
externals:
- spec: openssl@1.1.1d
@@ -75,16 +100,14 @@ spack:
- PrgEnv-intel/8.3.3
- intel/2023.1.0
- craype-accel-host
- - craype/2.7.19
- - libfabric/1.15.2.0
+ - craype/2.7.20
buildable: false
cray-mpich:
externals:
- - spec: cray-mpich@8.1.24
- prefix: /opt/cray/pe/mpich/8.1.24/ofi/intel/19.0
+ - spec: cray-mpich@8.1.25
+ prefix: /opt/cray/pe/mpich/8.1.25/ofi/intel/19.0
modules:
- - libfabric/1.15.2.0
- - cray-mpich/8.1.24
+ - cray-mpich/8.1.25
buildable: false
libfabric:
externals:
@@ -93,35 +116,11 @@ spack:
modules:
- libfabric/1.15.2.0
buildable: false
-{% if e3sm_hdf5_netcdf %}
- hdf5:
- externals:
- - spec: hdf5@1.12.2.3~cxx+fortran+hl~java+mpi+shared
- prefix: /opt/cray/pe/hdf5-parallel/1.12.2.3/intel/19.0
- buildable: false
- parallel-netcdf:
- externals:
- - spec: parallel-netcdf@1.12.3.3+cxx+fortran+pic+shared
- prefix: /opt/cray/pe/parallel-netcdf/1.12.3.3/intel/19.0/
- buildable: false
- netcdf-c:
- externals:
- - spec: netcdf-c@4.9.0.3+mpi~parallel-netcdf
- prefix: /opt/cray/pe/netcdf-hdf5parallel/4.9.0.3/intel/19.0
- buildable: false
- netcdf-fortran:
- externals:
- - spec: netcdf-fortran@4.5.3 ^netcdf-c+mpi
- prefix: /opt/cray/pe/netcdf-hdf5parallel/4.9.0.3/intel/19.0
- buildable: false
-{% endif %}
-{% if system_hdf5_netcdf %}
+{% if e3sm_hdf5_netcdf or system_hdf5_netcdf %}
hdf5:
externals:
- spec: hdf5@1.12.2.3~cxx+fortran+hl~java+mpi+shared
prefix: /opt/cray/pe/hdf5-parallel/1.12.2.3/intel/19.0
- - spec: hdf5@1.12.2.3~cxx+fortran+hl~java~mpi+shared
- prefix: /opt/cray/pe/hdf5/1.12.2.3/intel/19.0
buildable: false
parallel-netcdf:
externals:
@@ -132,15 +131,11 @@ spack:
externals:
- spec: netcdf-c@4.9.0.3+mpi~parallel-netcdf
prefix: /opt/cray/pe/netcdf-hdf5parallel/4.9.0.3/intel/19.0
- - spec: netcdf-c@4.9.0.3~mpi~parallel-netcdf
- prefix: /opt/cray/pe/netcdf/4.9.0.3/intel/19.0
buildable: false
netcdf-fortran:
externals:
- - spec: netcdf-fortran@4.5.3 ^netcdf-c+mpi
+ - spec: netcdf-fortran@4.5.3
prefix: /opt/cray/pe/netcdf-hdf5parallel/4.9.0.3/intel/19.0
- - spec: netcdf-fortran@4.5.3 ^netcdf-c~mpi
- prefix: /opt/cray/pe/netcdf/4.9.0.3/intel/19.0
buildable: false
{% endif %}
config:
@@ -160,8 +155,7 @@ spack:
- PrgEnv-intel/8.3.3
- intel/2023.1.0
- craype-accel-host
- - craype/2.7.19
- - libfabric/1.15.2.0
+ - craype/2.7.20
environment:
prepend_path:
- PKG_CONFIG_PATH: "/opt/cray/xpmem/2.5.2-2.4_3.30__gd0f7936.shasta/lib64/pkgconfig"
+ PKG_CONFIG_PATH: "/opt/cray/xpmem/2.6.2-2.5_2.33__gd067c3f.shasta/lib64/pkgconfig"
diff --git a/mache/spack/pm-cpu_nvidia_mpich.csh b/mache/spack/pm-cpu_nvidia_mpich.csh
new file mode 100644
index 00000000..1c6a8174
--- /dev/null
+++ b/mache/spack/pm-cpu_nvidia_mpich.csh
@@ -0,0 +1,52 @@
+module rm cray-hdf5-parallel &> /dev/null
+module rm cray-netcdf-hdf5parallel &> /dev/null
+module rm cray-parallel-netcdf &> /dev/null
+module rm PrgEnv-gnu &> /dev/null
+module rm PrgEnv-intel &> /dev/null
+module rm PrgEnv-nvidia &> /dev/null
+module rm PrgEnv-cray &> /dev/null
+module rm PrgEnv-aocc &> /dev/null
+module rm intel &> /dev/null
+module rm intel-oneapi &> /dev/null
+module rm cudatoolkit &> /dev/null
+module rm craype-accel-nvidia80 &> /dev/null
+module rm craype-accel-host &> /dev/null
+module rm perftools-base &> /dev/null
+module rm perftools &> /dev/null
+module rm darshan &> /dev/null
+
+module load PrgEnv-nvidia
+module load nvidia/22.7
+module load craype-x86-milan
+module load libfabric/1.15.2.0
+module load craype-accel-host
+module load craype/2.7.20
+module rm cray-mpich &> /dev/null
+module load cray-mpich/8.1.25
+{% if e3sm_lapack %}
+module load cray-libsci/23.02.1.1
+{% endif %}
+{% if e3sm_hdf5_netcdf %}
+module rm cray-hdf5-parallel &> /dev/null
+module rm cray-netcdf-hdf5parallel &> /dev/null
+module rm cray-parallel-netcdf &> /dev/null
+module load cray-hdf5-parallel/1.12.2.3
+module load cray-netcdf-hdf5parallel/4.9.0.3
+module load cray-parallel-netcdf/1.12.3.3
+{% endif %}
+
+{% if e3sm_hdf5_netcdf %}
+setenv NETCDF_C_PATH $CRAY_NETCDF_HDF5PARALLEL_PREFIX
+setenv NETCDF_FORTRAN_PATH $CRAY_NETCDF_HDF5PARALLEL_PREFIX
+setenv PNETCDF_PATH $CRAY_PARALLEL_NETCDF_PREFIX
+{% endif %}
+setenv MPICH_ENV_DISPLAY 1
+setenv MPICH_VERSION_DISPLAY 1
+## purposefully omitting OMP variables that cause trouble in ESMF
+# setenv OMP_STACKSIZE 128M
+# setenv OMP_PROC_BIND spread
+# setenv OMP_PLACES threads
+setenv HDF5_USE_FILE_LOCKING FALSE
+## Not needed
+# setenv PERL5LIB /global/cfs/cdirs/e3sm/perl/lib/perl5-only-switch
+setenv MPICH_GPU_SUPPORT_ENABLED 1
diff --git a/mache/spack/pm-cpu_nvidia_mpich.sh b/mache/spack/pm-cpu_nvidia_mpich.sh
new file mode 100644
index 00000000..1dfdd4a6
--- /dev/null
+++ b/mache/spack/pm-cpu_nvidia_mpich.sh
@@ -0,0 +1,57 @@
+module rm cray-hdf5-parallel &> /dev/null
+module rm cray-netcdf-hdf5parallel &> /dev/null
+module rm cray-parallel-netcdf &> /dev/null
+module rm PrgEnv-gnu &> /dev/null
+module rm PrgEnv-intel &> /dev/null
+module rm PrgEnv-nvidia &> /dev/null
+module rm PrgEnv-cray &> /dev/null
+module rm PrgEnv-aocc &> /dev/null
+module rm intel &> /dev/null
+module rm intel-oneapi &> /dev/null
+module rm cudatoolkit &> /dev/null
+module rm craype-accel-nvidia80 &> /dev/null
+module rm craype-accel-host &> /dev/null
+module rm perftools-base &> /dev/null
+module rm perftools &> /dev/null
+module rm darshan &> /dev/null
+
+module load PrgEnv-nvidia
+module load nvidia/22.7
+module load craype-x86-milan
+module load libfabric/1.15.2.0
+module load craype-accel-host
+module load craype/2.7.20
+module rm cray-mpich &> /dev/null
+module load cray-mpich/8.1.25
+{% if e3sm_lapack %}
+module load cray-libsci/23.02.1.1
+{% endif %}
+{% if e3sm_hdf5_netcdf %}
+module rm cray-hdf5-parallel &> /dev/null
+module rm cray-netcdf-hdf5parallel &> /dev/null
+module rm cray-parallel-netcdf &> /dev/null
+module load cray-hdf5-parallel/1.12.2.3
+module load cray-netcdf-hdf5parallel/4.9.0.3
+module load cray-parallel-netcdf/1.12.3.3
+{% endif %}
+
+{% if e3sm_hdf5_netcdf %}
+export NETCDF_C_PATH=$CRAY_NETCDF_HDF5PARALLEL_PREFIX
+export NETCDF_FORTRAN_PATH=$CRAY_NETCDF_HDF5PARALLEL_PREFIX
+export PNETCDF_PATH=$CRAY_PARALLEL_NETCDF_PREFIX
+{% endif %}
+export MPICH_ENV_DISPLAY=1
+export MPICH_VERSION_DISPLAY=1
+## purposefully omitting OMP variables that cause trouble in ESMF
+# export OMP_STACKSIZE=128M
+# export OMP_PROC_BIND=spread
+# export OMP_PLACES=threads
+export HDF5_USE_FILE_LOCKING=FALSE
+## Not needed
+# export PERL5LIB=/global/cfs/cdirs/e3sm/perl/lib/perl5-only-switch
+export MPICH_GPU_SUPPORT_ENABLED=1
+
+if [ -z "${NERSC_HOST:-}" ]; then
+ # happens when building spack environment
+ export NERSC_HOST="perlmutter"
+fi
diff --git a/mache/spack/pm-cpu_nvidia_mpich.yaml b/mache/spack/pm-cpu_nvidia_mpich.yaml
new file mode 100644
index 00000000..fe17a1cc
--- /dev/null
+++ b/mache/spack/pm-cpu_nvidia_mpich.yaml
@@ -0,0 +1,166 @@
+spack:
+ specs:
+ - cray-mpich
+{% if e3sm_lapack %}
+ - cray-libsci
+{% endif %}
+{% if e3sm_hdf5_netcdf %}
+ - hdf5
+ - netcdf-c
+ - netcdf-fortran
+ - parallel-netcdf
+{% endif %}
+{{ specs }}
+ concretizer:
+ unify: when_possible
+ packages:
+ all:
+ compiler: [nvhpc@22.7]
+ providers:
+ mpi: [cray-mpich@8.1.25]
+{% if e3sm_lapack %}
+ lapack: [cray-libsci@23.02.1.1]
+{% endif %}
+ bzip2:
+ externals:
+ - spec: bzip2@1.0.6
+ prefix: /usr
+ buildable: false
+ curl:
+ externals:
+ - spec: curl@7.66.0
+ prefix: /usr
+ buildable: false
+ cmake:
+ externals:
+ - spec: cmake@3.24.3
+ prefix: /global/common/software/nersc/pm-2022q4/spack/linux-sles15-zen/cmake-3.24.3-k5msymx/
+ buildable: false
+ gettext:
+ externals:
+ - spec: gettext@0.20.2
+ prefix: /usr
+ buildable: false
+ gmake:
+ externals:
+ - spec: gmake@4.2.1
+ prefix: /usr
+ buildable: false
+ libuv:
+ externals:
+ - spec: libuv@1.44.2
+ prefix: /usr
+ buildable: false
+ libxml2:
+ externals:
+ - spec: libxml2@2.9.7
+ prefix: /usr
+ buildable: false
+ m4:
+ externals:
+ - spec: m4@1.4.18
+ prefix: /usr
+ buildable: false
+ ncurses:
+ externals:
+ - spec: ncurses@6.1.20180317
+ prefix: /usr
+ buildable: false
+ ninja:
+ externals:
+ - spec: ninja@1.10.0
+ prefix: /usr
+ buildable: false
+ openssl:
+ externals:
+ - spec: openssl@1.1.1d
+ prefix: /usr
+ buildable: false
+ perl:
+ externals:
+ - spec: perl@5.26.1
+ prefix: /usr
+ buildable: false
+ python:
+ externals:
+ - spec: python@3.9.7
+ prefix: /global/common/software/nersc/pm-2022q3/sw/python/3.9-anaconda-2021.11
+ modules:
+ - python/3.9-anaconda-2021.11
+ buildable: false
+ tar:
+ externals:
+ - spec: tar@1.34
+ prefix: /usr
+ buildable: false
+ xz:
+ externals:
+ - spec: xz@5.2.3
+ prefix: /usr
+ buildable: false
+ cray-mpich:
+ externals:
+ - spec: cray-mpich@8.1.25
+ prefix: /opt/cray/pe/mpich/8.1.25/ofi/nvidia/20.7
+ modules:
+ - libfabric/1.15.2.0
+ - cray-mpich/8.1.25
+ buildable: false
+ libfabric:
+ externals:
+ - spec: libfabric@1.15.2.0
+ prefix: /opt/cray/libfabric/1.15.2.0
+ modules:
+ - libfabric/1.15.2.0
+ buildable: false
+{% if e3sm_lapack %}
+ cray-libsci:
+ externals:
+ - spec: cray-libsci@23.02.1.1
+ prefix: /opt/cray/pe/libsci/23.02.1.1/NVIDIA/20.7/x86_64
+ buildable: false
+{% endif %}
+{% if e3sm_hdf5_netcdf or system_hdf5_netcdf %}
+ hdf5:
+ externals:
+ - spec: hdf5@1.12.2.3~cxx+fortran+hl~java+mpi+shared
+ prefix: /opt/cray/pe/hdf5-parallel/1.12.2.3/nvidia/20.7
+ buildable: false
+ parallel-netcdf:
+ externals:
+ - spec: parallel-netcdf@1.12.3.3+cxx+fortran+pic+shared
+ prefix: /opt/cray/pe/parallel-netcdf/1.12.3.3/nvidia/20.7
+ buildable: false
+ netcdf-c:
+ externals:
+ - spec: netcdf-c@4.9.0.3+mpi~parallel-netcdf
+ prefix: /opt/cray/pe/netcdf-hdf5parallel/4.9.0.3/nvidia/20.7
+ buildable: false
+ netcdf-fortran:
+ externals:
+ - spec: netcdf-fortran@4.5.3
+ prefix: /opt/cray/pe/netcdf-hdf5parallel/4.9.0.3/nvidia/20.7
+ buildable: false
+{% endif %}
+ config:
+ install_missing_compilers: false
+ compilers:
+ - compiler:
+ spec: nvhpc@22.7
+ paths:
+ cc: /opt/nvidia/hpc_sdk/Linux_x86_64/22.7/compilers/bin/nvc
+ cxx: /opt/nvidia/hpc_sdk/Linux_x86_64/22.7/compilers/bin/nvc++
+ f77: /opt/nvidia/hpc_sdk/Linux_x86_64/22.7/compilers/bin/nvfortran
+ fc: /opt/nvidia/hpc_sdk/Linux_x86_64/22.7/compilers/bin/nvfortran
+ flags: {}
+ operating_system: sles15
+ target: any
+ modules:
+ - PrgEnv-nvidia
+ - nvidia/22.7
+ - craype-x86-milan
+ - libfabric
+ - craype-accel-host
+ environment:
+ prepend_path:
+ PKG_CONFIG_PATH: "/opt/cray/xpmem/2.6.2-2.5_2.33__gd067c3f.shasta/lib64/pkgconfig"
diff --git a/mache/spack/pm-gpu_gnu_mpich.csh b/mache/spack/pm-gpu_gnu_mpich.csh
new file mode 120000
index 00000000..fe10cab4
--- /dev/null
+++ b/mache/spack/pm-gpu_gnu_mpich.csh
@@ -0,0 +1 @@
+pm-cpu_gnu_mpich.csh
\ No newline at end of file
diff --git a/mache/spack/pm-gpu_gnu_mpich.sh b/mache/spack/pm-gpu_gnu_mpich.sh
new file mode 120000
index 00000000..ab1eb811
--- /dev/null
+++ b/mache/spack/pm-gpu_gnu_mpich.sh
@@ -0,0 +1 @@
+pm-cpu_gnu_mpich.sh
\ No newline at end of file
diff --git a/mache/spack/pm-gpu_gnu_mpich.yaml b/mache/spack/pm-gpu_gnu_mpich.yaml
new file mode 120000
index 00000000..d3e359eb
--- /dev/null
+++ b/mache/spack/pm-gpu_gnu_mpich.yaml
@@ -0,0 +1 @@
+pm-cpu_gnu_mpich.yaml
\ No newline at end of file
diff --git a/mache/spack/pm-gpu_gnugpu_mpich.csh b/mache/spack/pm-gpu_gnugpu_mpich.csh
new file mode 100644
index 00000000..3b7192b9
--- /dev/null
+++ b/mache/spack/pm-gpu_gnugpu_mpich.csh
@@ -0,0 +1,53 @@
+module rm cray-hdf5-parallel &> /dev/null
+module rm cray-netcdf-hdf5parallel &> /dev/null
+module rm cray-parallel-netcdf &> /dev/null
+module rm PrgEnv-gnu &> /dev/null
+module rm PrgEnv-intel &> /dev/null
+module rm PrgEnv-nvidia &> /dev/null
+module rm PrgEnv-cray &> /dev/null
+module rm PrgEnv-aocc &> /dev/null
+module rm intel &> /dev/null
+module rm intel-oneapi &> /dev/null
+module rm cudatoolkit &> /dev/null
+module rm craype-accel-nvidia80 &> /dev/null
+module rm craype-accel-host &> /dev/null
+module rm perftools-base &> /dev/null
+module rm perftools &> /dev/null
+module rm darshan &> /dev/null
+
+module load PrgEnv-gnu/8.3.3
+module load gcc/11.2.0
+module load craype-x86-milan
+module load libfabric/1.15.2.0
+module load cudatoolkit/11.7
+module load craype-accel-nvidia80
+module load craype/2.7.20
+module rm cray-mpich &> /dev/null
+module load cray-mpich/8.1.25
+{% if e3sm_lapack %}
+module load cray-libsci/23.02.1.1
+{% endif %}
+{% if e3sm_hdf5_netcdf %}
+module rm cray-hdf5-parallel &> /dev/null
+module rm cray-netcdf-hdf5parallel &> /dev/null
+module rm cray-parallel-netcdf &> /dev/null
+module load cray-hdf5-parallel/1.12.2.3
+module load cray-netcdf-hdf5parallel/4.9.0.3
+module load cray-parallel-netcdf/1.12.3.3
+{% endif %}
+
+{% if e3sm_hdf5_netcdf %}
+setenv NETCDF_C_PATH $CRAY_NETCDF_HDF5PARALLEL_PREFIX
+setenv NETCDF_FORTRAN_PATH $CRAY_NETCDF_HDF5PARALLEL_PREFIX
+setenv PNETCDF_PATH $CRAY_PARALLEL_NETCDF_PREFIX
+{% endif %}
+setenv MPICH_ENV_DISPLAY 1
+setenv MPICH_VERSION_DISPLAY 1
+## purposefully omitting OMP variables that cause trouble in ESMF
+# setenv OMP_STACKSIZE 128M
+# setenv OMP_PROC_BIND spread
+# setenv OMP_PLACES threads
+setenv HDF5_USE_FILE_LOCKING FALSE
+## Not needed
+# setenv PERL5LIB /global/cfs/cdirs/e3sm/perl/lib/perl5-only-switch
+setenv MPICH_GPU_SUPPORT_ENABLED 1
diff --git a/mache/spack/pm-gpu_gnugpu_mpich.sh b/mache/spack/pm-gpu_gnugpu_mpich.sh
new file mode 100644
index 00000000..beccd17e
--- /dev/null
+++ b/mache/spack/pm-gpu_gnugpu_mpich.sh
@@ -0,0 +1,58 @@
+module rm cray-hdf5-parallel &> /dev/null
+module rm cray-netcdf-hdf5parallel &> /dev/null
+module rm cray-parallel-netcdf &> /dev/null
+module rm PrgEnv-gnu &> /dev/null
+module rm PrgEnv-intel &> /dev/null
+module rm PrgEnv-nvidia &> /dev/null
+module rm PrgEnv-cray &> /dev/null
+module rm PrgEnv-aocc &> /dev/null
+module rm intel &> /dev/null
+module rm intel-oneapi &> /dev/null
+module rm cudatoolkit &> /dev/null
+module rm craype-accel-nvidia80 &> /dev/null
+module rm craype-accel-host &> /dev/null
+module rm perftools-base &> /dev/null
+module rm perftools &> /dev/null
+module rm darshan &> /dev/null
+
+module load PrgEnv-gnu/8.3.3
+module load gcc/11.2.0
+module load craype-x86-milan
+module load libfabric/1.15.2.0
+module load cudatoolkit/11.7
+module load craype-accel-nvidia80
+module load craype/2.7.20
+module rm cray-mpich &> /dev/null
+module load cray-mpich/8.1.25
+{% if e3sm_lapack %}
+module load cray-libsci/23.02.1.1
+{% endif %}
+{% if e3sm_hdf5_netcdf %}
+module rm cray-hdf5-parallel &> /dev/null
+module rm cray-netcdf-hdf5parallel &> /dev/null
+module rm cray-parallel-netcdf &> /dev/null
+module load cray-hdf5-parallel/1.12.2.3
+module load cray-netcdf-hdf5parallel/4.9.0.3
+module load cray-parallel-netcdf/1.12.3.3
+{% endif %}
+
+{% if e3sm_hdf5_netcdf %}
+export NETCDF_C_PATH=$CRAY_NETCDF_HDF5PARALLEL_PREFIX
+export NETCDF_FORTRAN_PATH=$CRAY_NETCDF_HDF5PARALLEL_PREFIX
+export PNETCDF_PATH=$CRAY_PARALLEL_NETCDF_PREFIX
+{% endif %}
+export MPICH_ENV_DISPLAY=1
+export MPICH_VERSION_DISPLAY=1
+## purposefully omitting OMP variables that cause trouble in ESMF
+# export OMP_STACKSIZE=128M
+# export OMP_PROC_BIND=spread
+# export OMP_PLACES=threads
+export HDF5_USE_FILE_LOCKING=FALSE
+## Not needed
+# export PERL5LIB=/global/cfs/cdirs/e3sm/perl/lib/perl5-only-switch
+export MPICH_GPU_SUPPORT_ENABLED=1
+
+if [ -z "${NERSC_HOST:-}" ]; then
+ # happens when building spack environment
+ export NERSC_HOST="perlmutter"
+fi
diff --git a/mache/spack/pm-gpu_gnugpu_mpich.yaml b/mache/spack/pm-gpu_gnugpu_mpich.yaml
new file mode 100644
index 00000000..6554b52f
--- /dev/null
+++ b/mache/spack/pm-gpu_gnugpu_mpich.yaml
@@ -0,0 +1,181 @@
+spack:
+ specs:
+ - gcc
+ - cray-mpich
+{% if e3sm_lapack %}
+ - cray-libsci
+{% endif %}
+{% if e3sm_hdf5_netcdf %}
+ - hdf5
+ - netcdf-c
+ - netcdf-fortran
+ - parallel-netcdf
+{% endif %}
+{{ specs }}
+ concretizer:
+ unify: when_possible
+ packages:
+ all:
+ compiler: [gcc@11.2.0]
+ providers:
+      mpi: [cray-mpich@8.1.25]
+{% if e3sm_lapack %}
+ lapack: [cray-libsci@23.02.1.1]
+{% endif %}
+ bzip2:
+ externals:
+ - spec: bzip2@1.0.6
+ prefix: /usr
+ buildable: false
+ curl:
+ externals:
+ - spec: curl@7.66.0
+ prefix: /usr
+ buildable: false
+ cmake:
+ externals:
+ - spec: cmake@3.24.3
+ prefix: /global/common/software/nersc/pm-2022q4/spack/linux-sles15-zen/cmake-3.24.3-k5msymx/
+ buildable: false
+ gettext:
+ externals:
+ - spec: gettext@0.20.2
+ prefix: /usr
+ buildable: false
+ gmake:
+ externals:
+ - spec: gmake@4.2.1
+ prefix: /usr
+ buildable: false
+ libuv:
+ externals:
+ - spec: libuv@1.44.2
+ prefix: /usr
+ buildable: false
+ libxml2:
+ externals:
+ - spec: libxml2@2.9.7
+ prefix: /usr
+ buildable: false
+ m4:
+ externals:
+ - spec: m4@1.4.18
+ prefix: /usr
+ buildable: false
+ ncurses:
+ externals:
+ - spec: ncurses@6.1.20180317
+ prefix: /usr
+ buildable: false
+ ninja:
+ externals:
+ - spec: ninja@1.10.0
+ prefix: /usr
+ buildable: false
+ openssl:
+ externals:
+ - spec: openssl@1.1.1d
+ prefix: /usr
+ buildable: false
+ perl:
+ externals:
+ - spec: perl@5.26.1
+ prefix: /usr
+ buildable: false
+ python:
+ externals:
+ - spec: python@3.9.7
+ prefix: /global/common/software/nersc/pm-2022q3/sw/python/3.9-anaconda-2021.11
+ modules:
+ - python/3.9-anaconda-2021.11
+ buildable: false
+ tar:
+ externals:
+ - spec: tar@1.34
+ prefix: /usr
+ buildable: false
+ xz:
+ externals:
+ - spec: xz@5.2.3
+ prefix: /usr
+ buildable: false
+ gcc:
+ externals:
+ - spec: gcc@11.2.0
+ modules:
+ - PrgEnv-gnu/8.3.3
+ - gcc/11.2.0
+ - cudatoolkit/11.7
+ - craype-accel-nvidia80
+      - craype/2.7.20
+ - libfabric/1.15.2.0
+ buildable: false
+ cray-mpich:
+ externals:
+ - spec: cray-mpich@8.1.25
+ prefix: /opt/cray/pe/mpich/8.1.25/ofi/gnu/9.1
+ modules:
+ - libfabric/1.15.2.0
+ - cray-mpich/8.1.25
+ buildable: false
+ libfabric:
+ externals:
+ - spec: libfabric@1.15.2.0
+ prefix: /opt/cray/libfabric/1.15.2.0
+ modules:
+ - libfabric/1.15.2.0
+ buildable: false
+{% if e3sm_lapack %}
+ cray-libsci:
+ externals:
+ - spec: cray-libsci@23.02.1.1
+ prefix: /opt/cray/pe/libsci/23.02.1.1/GNU/9.1/x86_64
+ modules:
+ - cray-libsci/23.02.1.1
+ buildable: false
+{% endif %}
+{% if e3sm_hdf5_netcdf or system_hdf5_netcdf %}
+ hdf5:
+ externals:
+ - spec: hdf5@1.12.2.3~cxx+fortran+hl~java+mpi+shared
+ prefix: /opt/cray/pe/hdf5-parallel/1.12.2.3/GNU/9.1
+ buildable: false
+ parallel-netcdf:
+ externals:
+ - spec: parallel-netcdf@1.12.3.3+cxx+fortran+pic+shared
+ prefix: /opt/cray/pe/parallel-netcdf/1.12.3.3/GNU/9.1/
+ buildable: false
+ netcdf-c:
+ externals:
+ - spec: netcdf-c@4.9.0.3+mpi~parallel-netcdf
+ prefix: /opt/cray/pe/netcdf-hdf5parallel/4.9.0.3/GNU/9.1
+ buildable: false
+ netcdf-fortran:
+ externals:
+ - spec: netcdf-fortran@4.5.3
+ prefix: /opt/cray/pe/netcdf-hdf5parallel/4.9.0.3/GNU/9.1
+ buildable: false
+{% endif %}
+ config:
+ install_missing_compilers: false
+ compilers:
+ - compiler:
+ spec: gcc@11.2.0
+ paths:
+ cc: cc
+ cxx: CC
+ f77: ftn
+ fc: ftn
+ flags: {}
+ operating_system: sles15
+ target: any
+ modules:
+ - PrgEnv-gnu/8.3.3
+ - gcc/11.2.0
+ - cudatoolkit/11.7
+ - craype-accel-nvidia80
+ - craype/2.7.20
+ - libfabric/1.15.2.0
+ environment:
+ prepend_path:
+ PKG_CONFIG_PATH: "/opt/cray/xpmem/2.6.2-2.5_2.33__gd067c3f.shasta/lib64/pkgconfig"
diff --git a/mache/spack/pm-gpu_nvidia_mpich.csh b/mache/spack/pm-gpu_nvidia_mpich.csh
new file mode 120000
index 00000000..289fc529
--- /dev/null
+++ b/mache/spack/pm-gpu_nvidia_mpich.csh
@@ -0,0 +1 @@
+pm-cpu_nvidia_mpich.csh
\ No newline at end of file
diff --git a/mache/spack/pm-gpu_nvidia_mpich.sh b/mache/spack/pm-gpu_nvidia_mpich.sh
new file mode 120000
index 00000000..e6edc27b
--- /dev/null
+++ b/mache/spack/pm-gpu_nvidia_mpich.sh
@@ -0,0 +1 @@
+pm-cpu_nvidia_mpich.sh
\ No newline at end of file
diff --git a/mache/spack/pm-gpu_nvidia_mpich.yaml b/mache/spack/pm-gpu_nvidia_mpich.yaml
new file mode 120000
index 00000000..23d1cda8
--- /dev/null
+++ b/mache/spack/pm-gpu_nvidia_mpich.yaml
@@ -0,0 +1 @@
+pm-cpu_nvidia_mpich.yaml
\ No newline at end of file
diff --git a/mache/spack/pm-gpu_nvidiagpu_mpich.csh b/mache/spack/pm-gpu_nvidiagpu_mpich.csh
new file mode 100644
index 00000000..230eec5c
--- /dev/null
+++ b/mache/spack/pm-gpu_nvidiagpu_mpich.csh
@@ -0,0 +1,54 @@
+module rm cray-hdf5-parallel &> /dev/null
+module rm cray-netcdf-hdf5parallel &> /dev/null
+module rm cray-parallel-netcdf &> /dev/null
+module rm PrgEnv-gnu &> /dev/null
+module rm PrgEnv-intel &> /dev/null
+module rm PrgEnv-nvidia &> /dev/null
+module rm PrgEnv-cray &> /dev/null
+module rm PrgEnv-aocc &> /dev/null
+module rm intel &> /dev/null
+module rm intel-oneapi &> /dev/null
+module rm cudatoolkit &> /dev/null
+module rm craype-accel-nvidia80 &> /dev/null
+module rm craype-accel-host &> /dev/null
+module rm perftools-base &> /dev/null
+module rm perftools &> /dev/null
+module rm darshan &> /dev/null
+
+module load PrgEnv-nvidia
+module load nvidia/22.7
+module load craype-x86-milan
+module load libfabric/1.15.2.0
+module load cudatoolkit/11.7
+module load craype-accel-nvidia80
+module load gcc-mixed/11.2.0
+module load craype/2.7.20
+module rm cray-mpich &> /dev/null
+module load cray-mpich/8.1.25
+{% if e3sm_lapack %}
+module load cray-libsci/23.02.1.1
+{% endif %}
+{% if e3sm_hdf5_netcdf %}
+module rm cray-hdf5-parallel &> /dev/null
+module rm cray-netcdf-hdf5parallel &> /dev/null
+module rm cray-parallel-netcdf &> /dev/null
+module load cray-hdf5-parallel/1.12.2.3
+module load cray-netcdf-hdf5parallel/4.9.0.3
+module load cray-parallel-netcdf/1.12.3.3
+{% endif %}
+
+{% if e3sm_hdf5_netcdf %}
+setenv NETCDF_C_PATH $CRAY_NETCDF_HDF5PARALLEL_PREFIX
+setenv NETCDF_FORTRAN_PATH $CRAY_NETCDF_HDF5PARALLEL_PREFIX
+setenv PNETCDF_PATH $CRAY_PARALLEL_NETCDF_PREFIX
+{% endif %}
+setenv MPICH_ENV_DISPLAY 1
+setenv MPICH_VERSION_DISPLAY 1
+## purposefully omitting OMP variables that cause trouble in ESMF
+# setenv OMP_STACKSIZE 128M
+# setenv OMP_PROC_BIND spread
+# setenv OMP_PLACES threads
+setenv HDF5_USE_FILE_LOCKING FALSE
+## Not needed
+# setenv PERL5LIB /global/cfs/cdirs/e3sm/perl/lib/perl5-only-switch
+setenv MPICH_GPU_SUPPORT_ENABLED 1
diff --git a/mache/spack/pm-gpu_nvidiagpu_mpich.sh b/mache/spack/pm-gpu_nvidiagpu_mpich.sh
new file mode 100644
index 00000000..bf507d70
--- /dev/null
+++ b/mache/spack/pm-gpu_nvidiagpu_mpich.sh
@@ -0,0 +1,59 @@
+module rm cray-hdf5-parallel &> /dev/null
+module rm cray-netcdf-hdf5parallel &> /dev/null
+module rm cray-parallel-netcdf &> /dev/null
+module rm PrgEnv-gnu &> /dev/null
+module rm PrgEnv-intel &> /dev/null
+module rm PrgEnv-nvidia &> /dev/null
+module rm PrgEnv-cray &> /dev/null
+module rm PrgEnv-aocc &> /dev/null
+module rm intel &> /dev/null
+module rm intel-oneapi &> /dev/null
+module rm cudatoolkit &> /dev/null
+module rm craype-accel-nvidia80 &> /dev/null
+module rm craype-accel-host &> /dev/null
+module rm perftools-base &> /dev/null
+module rm perftools &> /dev/null
+module rm darshan &> /dev/null
+
+module load PrgEnv-nvidia
+module load nvidia/22.7
+module load craype-x86-milan
+module load libfabric/1.15.2.0
+module load cudatoolkit/11.7
+module load craype-accel-nvidia80
+module load gcc-mixed/11.2.0
+module load craype/2.7.20
+module rm cray-mpich &> /dev/null
+module load cray-mpich/8.1.25
+{% if e3sm_lapack %}
+module load cray-libsci/23.02.1.1
+{% endif %}
+{% if e3sm_hdf5_netcdf %}
+module rm cray-hdf5-parallel &> /dev/null
+module rm cray-netcdf-hdf5parallel &> /dev/null
+module rm cray-parallel-netcdf &> /dev/null
+module load cray-hdf5-parallel/1.12.2.3
+module load cray-netcdf-hdf5parallel/4.9.0.3
+module load cray-parallel-netcdf/1.12.3.3
+{% endif %}
+
+{% if e3sm_hdf5_netcdf %}
+export NETCDF_C_PATH=$CRAY_NETCDF_HDF5PARALLEL_PREFIX
+export NETCDF_FORTRAN_PATH=$CRAY_NETCDF_HDF5PARALLEL_PREFIX
+export PNETCDF_PATH=$CRAY_PARALLEL_NETCDF_PREFIX
+{% endif %}
+export MPICH_ENV_DISPLAY=1
+export MPICH_VERSION_DISPLAY=1
+## purposefully omitting OMP variables that cause trouble in ESMF
+# export OMP_STACKSIZE=128M
+# export OMP_PROC_BIND=spread
+# export OMP_PLACES=threads
+export HDF5_USE_FILE_LOCKING=FALSE
+## Not needed
+# export PERL5LIB=/global/cfs/cdirs/e3sm/perl/lib/perl5-only-switch
+export MPICH_GPU_SUPPORT_ENABLED=1
+
+if [ -z "${NERSC_HOST:-}" ]; then
+ # happens when building spack environment
+ export NERSC_HOST="perlmutter"
+fi
diff --git a/mache/spack/pm-gpu_nvidiagpu_mpich.yaml b/mache/spack/pm-gpu_nvidiagpu_mpich.yaml
new file mode 100644
index 00000000..f40d7f76
--- /dev/null
+++ b/mache/spack/pm-gpu_nvidiagpu_mpich.yaml
@@ -0,0 +1,170 @@
+spack:
+ specs:
+ - cray-mpich
+{% if e3sm_lapack %}
+ - cray-libsci
+{% endif %}
+{% if e3sm_hdf5_netcdf %}
+ - hdf5
+ - netcdf-c
+ - netcdf-fortran
+ - parallel-netcdf
+{% endif %}
+{{ specs }}
+ concretizer:
+ unify: when_possible
+ packages:
+ all:
+ compiler: [nvhpc@22.7]
+ providers:
+ mpi: [cray-mpich@8.1.25]
+{% if e3sm_lapack %}
+ lapack: [cray-libsci@23.02.1.1]
+{% endif %}
+ bzip2:
+ externals:
+ - spec: bzip2@1.0.6
+ prefix: /usr
+ buildable: false
+ curl:
+ externals:
+ - spec: curl@7.66.0
+ prefix: /usr
+ buildable: false
+ cmake:
+ externals:
+ - spec: cmake@3.24.3
+ prefix: /global/common/software/nersc/pm-2022q4/spack/linux-sles15-zen/cmake-3.24.3-k5msymx/
+ buildable: false
+ gettext:
+ externals:
+ - spec: gettext@0.20.2
+ prefix: /usr
+ buildable: false
+ gmake:
+ externals:
+ - spec: gmake@4.2.1
+ prefix: /usr
+ buildable: false
+ libuv:
+ externals:
+ - spec: libuv@1.44.2
+ prefix: /usr
+ buildable: false
+ libxml2:
+ externals:
+ - spec: libxml2@2.9.7
+ prefix: /usr
+ buildable: false
+ m4:
+ externals:
+ - spec: m4@1.4.18
+ prefix: /usr
+ buildable: false
+ ncurses:
+ externals:
+ - spec: ncurses@6.1.20180317
+ prefix: /usr
+ buildable: false
+ ninja:
+ externals:
+ - spec: ninja@1.10.0
+ prefix: /usr
+ buildable: false
+ openssl:
+ externals:
+ - spec: openssl@1.1.1d
+ prefix: /usr
+ buildable: false
+ perl:
+ externals:
+ - spec: perl@5.26.1
+ prefix: /usr
+ buildable: false
+ python:
+ externals:
+ - spec: python@3.9.7
+ prefix: /global/common/software/nersc/pm-2022q3/sw/python/3.9-anaconda-2021.11
+ modules:
+ - python/3.9-anaconda-2021.11
+ buildable: false
+ tar:
+ externals:
+ - spec: tar@1.34
+ prefix: /usr
+ buildable: false
+ xz:
+ externals:
+ - spec: xz@5.2.3
+ prefix: /usr
+ buildable: false
+ cray-mpich:
+ externals:
+ - spec: cray-mpich@8.1.25
+ prefix: /opt/cray/pe/mpich/8.1.25/ofi/nvidia/20.7
+ modules:
+ - libfabric/1.15.2.0
+ - cray-mpich/8.1.25
+ buildable: false
+ libfabric:
+ externals:
+ - spec: libfabric@1.15.2.0
+ prefix: /opt/cray/libfabric/1.15.2.0
+ modules:
+ - libfabric/1.15.2.0
+ buildable: false
+{% if e3sm_lapack %}
+ cray-libsci:
+ externals:
+ - spec: cray-libsci@23.02.1.1
+ prefix: /opt/cray/pe/libsci/23.02.1.1/NVIDIA/20.7/x86_64
+ modules:
+ - cray-libsci/23.02.1.1
+ buildable: false
+{% endif %}
+{% if e3sm_hdf5_netcdf or system_hdf5_netcdf %}
+ hdf5:
+ externals:
+ - spec: hdf5@1.12.2.3~cxx+fortran+hl~java+mpi+shared
+ prefix: /opt/cray/pe/hdf5-parallel/1.12.2.3/nvidia/20.7
+ buildable: false
+ parallel-netcdf:
+ externals:
+ - spec: parallel-netcdf@1.12.3.3+cxx+fortran+pic+shared
+ prefix: /opt/cray/pe/parallel-netcdf/1.12.3.3/nvidia/20.7
+ buildable: false
+ netcdf-c:
+ externals:
+ - spec: netcdf-c@4.9.0.3+mpi~parallel-netcdf
+ prefix: /opt/cray/pe/netcdf-hdf5parallel/4.9.0.3/nvidia/20.7
+ buildable: false
+ netcdf-fortran:
+ externals:
+ - spec: netcdf-fortran@4.5.3
+ prefix: /opt/cray/pe/netcdf-hdf5parallel/4.9.0.3/nvidia/20.7
+ buildable: false
+{% endif %}
+ config:
+ install_missing_compilers: false
+ compilers:
+ - compiler:
+ spec: nvhpc@22.7
+ paths:
+ cc: /opt/nvidia/hpc_sdk/Linux_x86_64/22.7/compilers/bin/nvc
+ cxx: /opt/nvidia/hpc_sdk/Linux_x86_64/22.7/compilers/bin/nvc++
+ f77: /opt/nvidia/hpc_sdk/Linux_x86_64/22.7/compilers/bin/nvfortran
+ fc: /opt/nvidia/hpc_sdk/Linux_x86_64/22.7/compilers/bin/nvfortran
+ flags: {}
+ operating_system: sles15
+ target: any
+ modules:
+ - PrgEnv-nvidia
+ - nvidia/22.7
+ - cudatoolkit/11.7
+ - craype-accel-nvidia80
+ - gcc-mixed/11.2.0
+ - craype-x86-milan
+ - libfabric
+ environment:
+ prepend_path:
+ PKG_CONFIG_PATH: "/opt/cray/xpmem/2.6.2-2.5_2.33__gd067c3f.shasta/lib64/pkgconfig"