# Castro-gpu.ini
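#
# Configuration for the AMReX regression test suite driver (regtest.py),
# set up to run the Castro GPU (CUDA) tests.  [main] and the repository
# sections ([AMReX], [source], [extra-*]) configure the suite itself;
# every remaining [section] defines one test.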
[main]
testTopDir = /raid/testing/castro-gpu/
webTopDir = /raid/www/Castro/test-suite/gpu/
sourceTree = C_Src
numMakeJobs = 20
suiteName = Castro-gpu
sub_title = CUDA
goUpLink = 1
reportActiveTestsOnly = 1
COMP = gnu
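# add_to_c_make_command appends the following variables to every make
# invocation: USE_CUDA=TRUE with CUDA_ARCH=70 targets compute-capability
# 7.0 (Volta) GPUs; TINY_PROFILE=TRUE enables AMReX's lightweight
# profiler; XTRA_NVCC_FLAGS=-allow-unsupported-compiler lets nvcc accept
# a newer host compiler than it officially supports; and HYPRE_DIR points
# the radiation solves at a GPU-enabled Hypre build.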
add_to_c_make_command = XTRA_NVCC_FLAGS=-allow-unsupported-compiler USE_CUDA=TRUE CUDA_ARCH=70 TINY_PROFILE=TRUE HYPRE_DIR=/opt/hypre/hypre-git-gpu
use_ctools = 0
purge_output = 1
summary_job_info_field1 = EOS
summary_job_info_field2 = NETWORK
#globalAddToExecString = diffusion.use_mlmg_solver=1 gravity.use_mlmg_solver=1
# MPIcommand may use the placeholders:
#   @host@     the hostname to run on
#   @nprocs@   the number of processors
#   @command@  the command to run
#
# Only tests with useMPI = 1 run in parallel; nprocs is problem-dependent
# and is set in the individual problem sections.
MPIcommand = mpiexec -n @nprocs@ @command@
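# With numprocs = 4, for example, the suite would expand this to something
# like (the executable name here is only illustrative):
#   mpiexec -n 4 ./Castro2d.gnu.MPI.CUDA.ex inputs ...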
default_branch = development
# email
sendEmailWhenFail = 0
emailTo = castro-development@googlegroups.com
emailBody = check http://groot.astro.sunysb.edu/Castro/test-suite/gpu/
# slack
slack_post = 1
slack_webhookfile = /raid/testing/.slack.webhook
slack_channel = "#gpu"
slack_username = "i am groot"
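# The webhook file should contain a Slack incoming-webhook URL; keeping it
# in a separate file (rather than here) keeps the secret out of version
# control.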
[AMReX]
dir = /raid/testing/castro-gpu/amrex
branch = development
#branch = pr-2754
[source]
dir = /raid/testing/castro-gpu/Castro/
branch = development
# This is a safeguard in case any problem GNUmakefiles hard-code CASTRO_HOME.
comp_string = CASTRO_HOME=@source@
[extra-Microphysics]
dir = /raid/testing/castro-gpu/Microphysics
branch = development
comp_string = MICROPHYSICS_HOME=@self@
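# In comp_string, @self@ expands to this repository's checkout directory,
# just as @source@ above expands to the main Castro checkout, so the build
# always uses the suite-managed clones.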
[Sedov-2d]
buildDir = Exec/hydro_tests/Sedov/
inputFile = inputs.2d.sph_in_cylcoords.testsuite
dim = 2
restartTest = 0
useMPI = 0
useOMP = 0
compileTest = 0
doVis = 0
#analysisRoutine = Exec/hydro_tests/Sedov/testsuite_analysis/sedov_2d_sph_in_cyl.py
#analysisMainArgs = source_dir
#analysisOutputImage = Sedov-2d.png
keywords = not3d
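# keywords tag a test so subsets of the suite can be selected or skipped
# when the driver is invoked (e.g. excluding 3-d tests on small machines).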
[Sedov-3d]
buildDir = Exec/hydro_tests/Sedov/
inputFile = inputs.3d.sph.testsuite
dim = 3
restartTest = 0
useMPI = 0
useOMP = 0
compileTest = 0
doVis = 0
#analysisRoutine = Exec/hydro_tests/Sedov/testsuite_analysis/sedov_3d_sph.py
#analysisMainArgs = source_dir
#analysisOutputImage = Sedov-3d.png
[flame_wave]
buildDir = Exec/science/flame_wave
inputFile = inputs_2d.testsuite
link1File = helm_table.dat
dim = 2
useMPI = 0
useOMP = 0
compileTest = 0
keywords = helm, diffusion, not3d, rotation, react, gravity
[flame_wave-3d]
buildDir = Exec/science/flame_wave
inputFile = inputs_3d.testsuite.gpu
link1File = helm_table.dat
dim = 3
useMPI = 0
useOMP = 0
compileTest = 0
keywords = helm, diffusion, rotation, react, gravity
addToCompileString = NETWORK_DIR=iso7
runtime_params = amrex.the_arena_is_managed=1 amrex.abort_on_out_of_gpu_memory=0 castro.hydro_memory_footprint_ratio=3
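# These overrides help the large 3-d flame fit on the GPU:
# the_arena_is_managed=1 allocates AMReX's arena from CUDA managed memory,
# abort_on_out_of_gpu_memory=0 tolerates oversubscribing device memory
# instead of aborting, and hydro_memory_footprint_ratio=3 bounds the
# temporary memory the hydro advance may use.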
[wdmerger_3D]
buildDir = Exec/science/wdmerger/tests/wdmerger_3D
inputFile = inputs_test_wdmerger_3D
dim = 3
useMPI = 0
useOMP = 0
link1File = helm_table.dat
debug = 0
keywords = helm, gravity
[dustcollapse-restart]
buildDir = Exec/gravity_tests/DustCollapse
inputFile = inputs_3d_poisson_regtest_restart
dim = 3
useMPI = 0
useOMP = 0
debug = 0
keywords = gravity
addToCompileString = EOS_DIR=gamma_law_general
restartTest = 1
restartFileNum = 5
runtime_params = amr.blocking_factor=8
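# As a restart test, this problem is first run to completion, then rerun
# from checkpoint restartFileNum (here step 5); the suite compares the two
# final outputs, which should agree exactly.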
[rad-SuOlson]
buildDir = Exec/radiation_tests/RadSuOlson
inputFile = inputs
dim = 1
useMPI = 1
numprocs = 1
keywords = rad, not3d
addToCompileString = NGROUPS=1
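# NGROUPS=1 builds the radiation solver with a single photon energy group,
# as appropriate for the gray Su-Olson benchmark.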