Skip to content

Commit

Permalink
Add support for frontier
Browse files Browse the repository at this point in the history
  • Loading branch information
xylar committed Jan 23, 2024
1 parent cc251bf commit a85514e
Show file tree
Hide file tree
Showing 3 changed files with 51 additions and 0 deletions.
4 changes: 4 additions & 0 deletions deploy/albany_supported.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,9 @@ anvil, gnu, openmpi
chicoma-cpu, gnu, mpich
chrysalis, gnu, openmpi
compy, gnu, openmpi
frontier, gnu, mpich
frontier, gnugpu, mpich
frontier, crayclang, mpich
frontier, crayclanggpu, mpich
pm-cpu, gnu, mpich
morpheus, gnu, openmpi
1 change: 1 addition & 0 deletions deploy/petsc_supported.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,5 @@ chicoma-cpu, gnu, mpich
chrysalis, intel, openmpi
chrysalis, gnu, openmpi
compy, intel, impi
frontier, gnu, mpich
pm-cpu, gnu, mpich
46 changes: 46 additions & 0 deletions polaris/machines/frontier.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
# frontier.cfg -- polaris machine configuration for OLCF Frontier.
# Read with Python configparser; use '#' full-line comments only.

# The paths section describes paths for data and environments
[paths]

# A shared root directory where polaris data can be found
database_root = /lustre/orion/cli115/world-shared/polaris

# the path to the base conda environment where polaris environments have
# been created
polaris_envs = /ccs/proj/cli115/software/polaris/frontier/conda/base


# Options related to deploying polaris conda and spack environments
[deploy]

# the default compiler set to use for system libraries and MPAS builds
compiler = gnu

# the compiler to use to build software (e.g. ESMF and MOAB) with spack
software_compiler = gnu

# the system MPI library to use for gnu compiler
mpi_gnu = mpich

# the system MPI library to use for gnugpu compiler
mpi_gnugpu = mpich

# the system MPI library to use for crayclang compiler
mpi_crayclang = mpich

# the system MPI library to use for crayclanggpu compiler
mpi_crayclanggpu = mpich

# the base path for spack environments used by polaris
spack = /ccs/proj/cli115/software/polaris/frontier/spack

# whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and
# pnetcdf as E3SM (spack modules are used otherwise)
use_e3sm_hdf5_netcdf = True

# The parallel section describes options related to running jobs in parallel.
# Most options in this section come from mache so here we just add or override
# some defaults
[parallel]

# cores per node on the machine
# NOTE(review): Frontier compute nodes have 64 physical cores (128 hardware
# threads with 2-way SMT) -- confirm this value intentionally counts hardware
# threads rather than physical cores
cores_per_node = 128

0 comments on commit a85514e

Please sign in to comment.