# maestroex-sbu.ini
[main]
testTopDir = /raid/testing/maestroex-gfortran
webTopDir = /raid/www/MAESTROeX/test-suite/gfortran
sourceTree = C_Src
numMakeJobs = 20
suiteName = MAESTROeX-SBU
sub_title = gfortran version
goUpLink = 1
reportActiveTestsOnly = 1
COMP = gnu
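# extra variables appended to the make command line when building each test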
add_to_c_make_command = TEST=TRUE
plot_file_name = maestro.plot_base_name
purge_output = 1
summary_job_info_field1 = EOS
summary_job_info_field2 = NETWORK
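# runtime parameters appended to every test's run command:
# amrex.fpe_trap_invalid=1 aborts on invalid floating-point operations (NaNs);
# fab.format=NATIVE writes FAB data in the native floating-point format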
globalAddToExecString = amrex.fpe_trap_invalid=1 fab.format=NATIVE
#globalAddToExecString = diffusion.use_mlmg_solver=1 gravity.use_mlmg_solver=1
# MPIcommand should use the placeholders:
#   @host@     where to put the hostname to run on
#   @nprocs@   where to put the number of processors
#   @command@  where to put the command to run
#
# Only tests with useMPI = 1 will run in parallel.  nprocs is
# problem-dependent and is set in each test's section below.
MPIcommand = mpiexec -n @nprocs@ @command@
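# e.g. a test with numprocs = 4 would run as something like:
#   mpiexec -n 4 ./Maestro2d.gnu.MPI.ex inputs_2d_regression
# (the executable and inputs names above are illustrative only)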
default_branch = development
# email
sendEmailWhenFail = 0
emailTo = castro-development@googlegroups.com
emailBody = check http://groot.astro.sunysb.edu/MAESTROeX/test-suite/gfortran/
# slack
slack_post = 1
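# the webhook URL is read from this file so it stays out of the repository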
slack_webhookfile = /home/zingale/.slack.webhook
slack_channel = "#maestro"
slack_username = "i am groot"
[AMReX]
dir = /raid/testing/maestroex-gfortran/amrex
branch = development
[source]
dir = /raid/testing/maestroex-gfortran/MAESTROeX
branch = development
[extra-Microphysics]
dir = /raid/testing/maestroex-gfortran/Microphysics
#branch = remove_fort_param_support
branch = development
#branch = enforce_namespaces
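# @self@ is expanded by the suite to this repository's checkout path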
comp_string = MICROPHYSICS_HOME=@self@
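# ---------------------------------------------------------------------
# individual tests; keys common to the sections below:
#   buildDir / inputFile : problem directory (relative to the source
#                          repo) and the inputs file to run
#   dim                  : dimensionality of the problem
#   useMPI / numprocs    : run under MPI with this many tasks
#   useOMP / numthreads  : enable OpenMP with this many threads
#   link1File, link2File : auxiliary files (EOS tables, initial models)
#                          made available in the run directory
# ---------------------------------------------------------------------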
[RT]
buildDir = Exec/test_problems/rt
inputFile = inputs_2d_regression
dim = 2
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 1
numprocs = 2
useOMP = 1
numthreads = 4
keywords = hydro
[reacting_bubble-2d]
buildDir = Exec/test_problems/reacting_bubble
inputFile = inputs_2d_regression
dim = 2
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 1
numprocs = 4
useOMP = 1
numthreads = 4
keywords = hydro
link1File = helm_table.dat
link2File = model.hse.cool.coulomb
[reacting_bubble-2d-amr]
buildDir = Exec/test_problems/reacting_bubble
inputFile = inputs_2d_regression.2levels
dim = 2
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 1
numprocs = 4
keywords = hydro
link1File = helm_table.dat
link2File = model.hse.cool.coulomb
[reacting_bubble-3d]
buildDir = Exec/test_problems/reacting_bubble
inputFile = inputs_3d_regression
dim = 3
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 1
numprocs = 4
useOMP = 1
numthreads = 4
keywords = hydro
link1File = helm_table.dat
link2File = model.hse.cool.coulomb
[reacting_bubble-3d-amr]
buildDir = Exec/test_problems/reacting_bubble
inputFile = inputs_3d_amr_regression
dim = 3
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 1
numprocs = 4
useOMP = 1
numthreads = 4
keywords = hydro
link1File = helm_table.dat
link2File = model.hse.cool.coulomb
[wdconvect]
buildDir = Exec/science/wdconvect
inputFile = inputs_files/inputs_3d_regression
dim = 3
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 1
numprocs = 16
keywords = hydro
link1File = helm_table.dat
link2File = model_files/kepler_new_6.25e8.hybrid.hse.320
[wdconvect-amr]
buildDir = Exec/science/wdconvect
inputFile = inputs_files/inputs_3d_regression.2levels
dim = 3
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 1
numprocs = 16
keywords = hydro
link1File = helm_table.dat
link2File = model_files/kepler_new_6.25e8.hybrid.hse.320
[test_stability]
buildDir = Exec/test_problems/test_stability
inputFile = inputs_2d_C_regression
dim = 2
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 1
numprocs = 16
keywords = hydro, reflect
link1File = helm_table.dat
link2File = 5peaks_const_grav.dat
[flame_2d]
buildDir = Exec/science/flame
inputFile = inputs_2d_smallscale.testsuite
dim = 2
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 1
numprocs = 16
keywords = hydro, smallscale, inflow
link1File = helm_table.dat
[flame_3d]
buildDir = Exec/science/flame
inputFile = inputs_3d_smallscale.testsuite
dim = 3
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 1
numprocs = 16
keywords = hydro, smallscale, inflow
link1File = helm_table.dat