-
Notifications
You must be signed in to change notification settings - Fork 14
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
3 changed files
with
51 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,46 @@ | ||
# The paths section describes paths for data and environments
[paths]

# A shared root directory where polaris data can be found
database_root = /lustre/orion/cli115/world-shared/polaris

# the path to the base conda environment where polaris environments have
# been created
polaris_envs = /ccs/proj/cli115/software/polaris/frontier/conda/base


# Options related to deploying polaris conda and spack environments
[deploy]

# the compiler set to use for system libraries and MPAS builds
compiler = gnu

# the compiler to use to build software (e.g. ESMF and MOAB) with spack
software_compiler = gnu

# the system MPI library to use for gnu compiler
mpi_gnu = mpich

# the system MPI library to use for gnugpu compiler
mpi_gnugpu = mpich

# the system MPI library to use for crayclang compiler
mpi_crayclang = mpich

# the system MPI library to use for crayclanggpu compiler
mpi_crayclanggpu = mpich

# the base path for spack environments used by polaris
spack = /ccs/proj/cli115/software/polaris/frontier/spack

# whether to use the same modules for hdf5, netcdf-c, netcdf-fortran and
# pnetcdf as E3SM (spack modules are used otherwise)
use_e3sm_hdf5_netcdf = True


# The parallel section describes options related to running jobs in parallel.
# Most options in this section come from mache so here we just add or override
# some defaults
[parallel]

# cores per node on the machine
cores_per_node = 128