
Commit fae64cd

Merge pull request #230 from alexandermote/better_tally
Adding 3D tally recomposition and 3D Domain Decomposition test
2 parents: 5ebb026 + 3a9517c

4 files changed: +121 -12 lines

mcdc/main.py

Lines changed: 51 additions & 11 deletions
@@ -1224,20 +1224,60 @@ def dict_to_h5group(dict_, group):
             group[k] = v


+def dd_mergetally(mcdc, data):
+    """
+    Performs tally recombination on domain-decomposed mesh tallies.
+    Gathers and re-organizes tally data into a single array as it
+    would appear in a non-decomposed simulation.
+    """
+
+    tally = data[TALLY]
+    # create bin for recomposed tallies
+    d_Nx = input_deck.technique["dd_mesh"]["x"].size - 1
+    d_Ny = input_deck.technique["dd_mesh"]["y"].size - 1
+    d_Nz = input_deck.technique["dd_mesh"]["z"].size - 1
+
+    # capture tally lengths for reorganizing later
+    xlen = len(mcdc["mesh_tallies"][0]["filter"]["x"]) - 1
+    ylen = len(mcdc["mesh_tallies"][0]["filter"]["y"]) - 1
+    zlen = len(mcdc["mesh_tallies"][0]["filter"]["z"]) - 1
+
+    dd_tally = np.zeros((tally.shape[0], tally.shape[1] * d_Nx * d_Ny * d_Nz))
+    # gather tallies
+    for i, t in enumerate(tally):
+        MPI.COMM_WORLD.Gather(tally[i], dd_tally[i], root=0)
+    if mcdc["mpi_master"]:
+        buff = np.zeros_like(dd_tally)
+        # reorganize tally data
+        # TODO: find/develop a more efficient algorithm for this
+        tally_idx = 0
+        for di in range(0, d_Nx * d_Ny * d_Nz):
+            dz = di // (d_Nx * d_Ny)
+            dy = (di % (d_Nx * d_Ny)) // d_Nx
+            dx = di % d_Nx
+            for xi in range(0, xlen):
+                for yi in range(0, ylen):
+                    for zi in range(0, zlen):
+                        # calculate reorganized index
+                        ind_x = xi * (ylen * d_Ny * zlen * d_Nz) + dx * (
+                            xlen * ylen * d_Ny * zlen * d_Nz
+                        )
+                        ind_y = yi * (xlen * d_Nx) + dy * (ylen * xlen * d_Nx)
+                        ind_z = zi + dz * zlen
+                        buff_idx = ind_x + ind_y + ind_z
+                        # place tally value in correct position
+                        buff[:, buff_idx] = dd_tally[:, tally_idx]
+                        tally_idx += 1
+        # replace old tally with reorganized tally
+        dd_tally = buff
+
+    return dd_tally
+
+
 def generate_hdf5(data, mcdc):

-    # recombine tallies before output processing
     if mcdc["technique"]["domain_decomposition"]:
-        tally = data[TALLY]
-        # create bin for recomposed tallies
-        d_Nx = input_deck.technique["dd_mesh"]["x"].size - 1
-        d_Ny = input_deck.technique["dd_mesh"]["y"].size - 1
-        d_Nz = input_deck.technique["dd_mesh"]["z"].size - 1
-        dd_tally = np.zeros((tally.shape[0], tally.shape[1] * d_Nx * d_Ny * d_Nz))
-        # gather tallies
-        MPI.COMM_WORLD.Gather(tally[0], dd_tally[0], root=0)
-        MPI.COMM_WORLD.Gather(tally[1], dd_tally[1], root=0)
-        MPI.COMM_WORLD.Gather(tally[2], dd_tally[2], root=0)
+        dd_tally = dd_mergetally(mcdc, data)

     if mcdc["mpi_master"]:
         if mcdc["setting"]["progress_bar"]:

test/regression/dd_cooper/answer.h5

235 KB binary file (contents not shown).

test/regression/dd_cooper/input.py

Lines changed: 62 additions & 0 deletions
@@ -0,0 +1,62 @@
+import numpy as np
+import mcdc
+
+
+# =============================================================================
+# Set model
+# =============================================================================
+# A shielding problem based on Problem 2 of [Cooper NSE 2001]
+# https://ans.tandfonline.com/action/showCitFormats?doi=10.13182/NSE00-34
+
+# Set materials
+SigmaT = 5.0
+c = 0.8
+m_barrier = mcdc.material(capture=np.array([SigmaT]), scatter=np.array([[SigmaT * c]]))
+SigmaT = 1.0
+m_room = mcdc.material(capture=np.array([SigmaT]), scatter=np.array([[SigmaT * c]]))
+
+# Set surfaces
+sx1 = mcdc.surface("plane-x", x=0.0, bc="reflective")
+sx2 = mcdc.surface("plane-x", x=2.0)
+sx3 = mcdc.surface("plane-x", x=2.4)
+sx4 = mcdc.surface("plane-x", x=4.0, bc="vacuum")
+sy1 = mcdc.surface("plane-y", y=0.0, bc="reflective")
+sy2 = mcdc.surface("plane-y", y=2.0)
+sy3 = mcdc.surface("plane-y", y=4.0, bc="vacuum")
+sz1 = mcdc.surface("plane-z", z=0.0, bc="reflective")
+sz2 = mcdc.surface("plane-z", z=4.0, bc="vacuum")
+
+# Set cells
+mcdc.cell(+sx1 & -sx2 & +sy1 & -sy2 & +sz1 & -sz2, m_room)
+mcdc.cell(+sx1 & -sx4 & +sy2 & -sy3 & +sz1 & -sz2, m_room)
+mcdc.cell(+sx3 & -sx4 & +sy1 & -sy2 & +sz1 & -sz2, m_room)
+mcdc.cell(+sx2 & -sx3 & +sy1 & -sy2 & +sz1 & -sz2, m_barrier)
+
+# =============================================================================
+# Set source
+# =============================================================================
+# Uniform isotropic source throughout the domain
+
+mcdc.source(x=[0.0, 1.0], y=[0.0, 1.0], z=[0.0, 1.0], isotropic=True)
+
+# =============================================================================
+# Set tally, setting, and run mcdc
+# =============================================================================
+
+mcdc.tally.mesh_tally(
+    scores=["flux"],
+    x=np.linspace(0.0, 4.0, 11),
+    y=np.linspace(0.0, 4.0, 11),
+    z=np.linspace(0.0, 4.0, 11),
+)
+
+# Setting
+mcdc.setting(N_particle=50)
+mcdc.domain_decomposition(
+    x=np.linspace(0.0, 4.0, 3), y=np.linspace(0.0, 4.0, 3), z=np.linspace(0.0, 4.0, 3)
+)
+
+mcdc.implicit_capture()
+
+# Run
+mcdc.run()
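
As a side note (not part of the input deck), the decomposition above has three mesh edges per axis, i.e. two subdomains per axis, which is why run.py below skips this test unless exactly 8 MPI ranks are available. A quick illustrative check, mirroring the d_Nx/d_Ny/d_Nz computation in dd_mergetally:

import numpy as np

edges = np.linspace(0.0, 4.0, 3)  # same edges passed to mcdc.domain_decomposition
n_per_axis = edges.size - 1       # 2 subdomains per axis
print(n_per_axis**3)              # 8 subdomains, hence the 8-rank requirement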

test/regression/run.py

Lines changed: 8 additions & 1 deletion
@@ -44,13 +44,20 @@
 # Skip domain decomp tests unless there are 4 MPI processes
 temp = names.copy()
 for name in names:
-    if name[:3] == "dd_" and not (mpiexec == 4 or srun == 4):
+    if name == "dd_slab_reed" and not (mpiexec == 4 or srun == 4):
         temp.remove(name)
         print(
             Fore.YELLOW
             + "Note: Skipping %s (require 4 MPI ranks)" % name
             + Style.RESET_ALL
         )
+    elif name == "dd_cooper" and not (mpiexec == 8 or srun == 8):
+        temp.remove(name)
+        print(
+            Fore.YELLOW
+            + "Note: Skipping %s (require 8 MPI ranks)" % name
+            + Style.RESET_ALL
+        )
 names = temp

 # Skip iqmc if GPU run
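
The required rank count is hard-coded per test name in the branches above. Purely as a hedged sketch (not code from this commit), the same rule can be expressed as a small lookup table, with the names and rank counts taken from this diff:

# Hypothetical sketch; REQUIRED_RANKS and needs_skip are not MC/DC names.
REQUIRED_RANKS = {"dd_slab_reed": 4, "dd_cooper": 8}


def needs_skip(name, mpiexec, srun):
    """Return the required rank count if the test must be skipped, else None."""
    need = REQUIRED_RANKS.get(name)
    if need is not None and mpiexec != need and srun != need:
        return need
    return None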
