Merge pull request #230 from alexandermote/better_tally
Adding 3D tally recomposition and 3D Domain Decomposition test
ilhamv authored Aug 23, 2024
2 parents 5ebb026 + 3a9517c commit fae64cd
Showing 4 changed files with 121 additions and 12 deletions.
62 changes: 51 additions & 11 deletions mcdc/main.py
@@ -1224,20 +1224,60 @@ def dict_to_h5group(dict_, group):
            group[k] = v


def dd_mergetally(mcdc, data):
    """
    Performs tally recombination on domain-decomposed mesh tallies.
    Gathers and re-organizes tally data into a single array as it
    would appear in a non-decomposed simulation.
    """

    tally = data[TALLY]
    # create bin for recomposed tallies
    d_Nx = input_deck.technique["dd_mesh"]["x"].size - 1
    d_Ny = input_deck.technique["dd_mesh"]["y"].size - 1
    d_Nz = input_deck.technique["dd_mesh"]["z"].size - 1

    # capture tally lengths for reorganizing later
    xlen = len(mcdc["mesh_tallies"][0]["filter"]["x"]) - 1
    ylen = len(mcdc["mesh_tallies"][0]["filter"]["y"]) - 1
    zlen = len(mcdc["mesh_tallies"][0]["filter"]["z"]) - 1

    dd_tally = np.zeros((tally.shape[0], tally.shape[1] * d_Nx * d_Ny * d_Nz))
    # gather tallies
    for i, t in enumerate(tally):
        MPI.COMM_WORLD.Gather(tally[i], dd_tally[i], root=0)
    if mcdc["mpi_master"]:
        buff = np.zeros_like(dd_tally)
        # reorganize tally data
        # TODO: find/develop a more efficient algorithm for this
        tally_idx = 0
        for di in range(0, d_Nx * d_Ny * d_Nz):
            dz = di // (d_Nx * d_Ny)
            dy = (di % (d_Nx * d_Ny)) // d_Nx
            dx = di % d_Nx
            for xi in range(0, xlen):
                for yi in range(0, ylen):
                    for zi in range(0, zlen):
                        # calculate reorganized (global) index, assuming the
                        # flattened layout runs x slowest and z fastest
                        ind_x = (dx * xlen + xi) * (ylen * d_Ny * zlen * d_Nz)
                        ind_y = (dy * ylen + yi) * (zlen * d_Nz)
                        ind_z = dz * zlen + zi
                        buff_idx = ind_x + ind_y + ind_z
                        # place tally value in correct position
                        buff[:, buff_idx] = dd_tally[:, tally_idx]
                        tally_idx += 1
        # replace old tally with reorganized tally
        dd_tally = buff

    return dd_tally


def generate_hdf5(data, mcdc):

    # recombine tallies before output processing
    if mcdc["technique"]["domain_decomposition"]:
-        tally = data[TALLY]
-        # create bin for recomposed tallies
-        d_Nx = input_deck.technique["dd_mesh"]["x"].size - 1
-        d_Ny = input_deck.technique["dd_mesh"]["y"].size - 1
-        d_Nz = input_deck.technique["dd_mesh"]["z"].size - 1
-        dd_tally = np.zeros((tally.shape[0], tally.shape[1] * d_Nx * d_Ny * d_Nz))
-        # gather tallies
-        MPI.COMM_WORLD.Gather(tally[0], dd_tally[0], root=0)
-        MPI.COMM_WORLD.Gather(tally[1], dd_tally[1], root=0)
-        MPI.COMM_WORLD.Gather(tally[2], dd_tally[2], root=0)
+        dd_tally = dd_mergetally(mcdc, data)

    if mcdc["mpi_master"]:
        if mcdc["setting"]["progress_bar"]:
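For intuition, here is a minimal standalone sketch of the index arithmetic dd_mergetally performs, checking on a deliberately asymmetric toy grid that the recomposition map touches every global bin exactly once. It assumes the x-slowest, z-fastest flattened layout noted in the comments above; the sizes and names are illustrative, not MC/DC's.

import numpy as np

# Toy check of the recomposition index map (illustrative sizes, not MC/DC's).
# Assumes the global flattened layout runs x slowest, z fastest.
d_Nx, d_Ny, d_Nz = 2, 2, 2  # subdomains per axis
xlen, ylen, zlen = 3, 2, 4  # local tally bins per subdomain, per axis

N = d_Nx * d_Ny * d_Nz * xlen * ylen * zlen
seen = np.zeros(N, dtype=bool)
for di in range(d_Nx * d_Ny * d_Nz):
    dz = di // (d_Nx * d_Ny)
    dy = (di % (d_Nx * d_Ny)) // d_Nx
    dx = di % d_Nx
    for xi in range(xlen):
        for yi in range(ylen):
            for zi in range(zlen):
                ind_x = (dx * xlen + xi) * (ylen * d_Ny * zlen * d_Nz)
                ind_y = (dy * ylen + yi) * (zlen * d_Nz)
                ind_z = dz * zlen + zi
                seen[ind_x + ind_y + ind_z] = True

assert seen.all()  # bijection: every global bin is hit exactly once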
Binary file added test/regression/dd_cooper/answer.h5
62 changes: 62 additions & 0 deletions test/regression/dd_cooper/input.py
@@ -0,0 +1,62 @@
import numpy as np
import mcdc


# =============================================================================
# Set model
# =============================================================================
# A shielding problem based on Problem 2 of [Cooper NSE 2001]
# https://ans.tandfonline.com/action/showCitFormats?doi=10.13182/NSE00-34

# Set materials
SigmaT = 5.0
c = 0.8
m_barrier = mcdc.material(capture=np.array([SigmaT]), scatter=np.array([[SigmaT * c]]))
SigmaT = 1.0
m_room = mcdc.material(capture=np.array([SigmaT]), scatter=np.array([[SigmaT * c]]))

# Set surfaces
sx1 = mcdc.surface("plane-x", x=0.0, bc="reflective")
sx2 = mcdc.surface("plane-x", x=2.0)
sx3 = mcdc.surface("plane-x", x=2.4)
sx4 = mcdc.surface("plane-x", x=4.0, bc="vacuum")
sy1 = mcdc.surface("plane-y", y=0.0, bc="reflective")
sy2 = mcdc.surface("plane-y", y=2.0)
sy3 = mcdc.surface("plane-y", y=4.0, bc="vacuum")
sz1 = mcdc.surface("plane-z", z=0.0, bc="reflective")
sz2 = mcdc.surface("plane-z", z=4.0, bc="vacuum")

# Set cells
mcdc.cell(+sx1 & -sx2 & +sy1 & -sy2 & +sz1 & -sz2, m_room)
mcdc.cell(+sx1 & -sx4 & +sy2 & -sy3 & +sz1 & -sz2, m_room)
mcdc.cell(+sx3 & -sx4 & +sy1 & -sy2 & +sz1 & -sz2, m_room)
mcdc.cell(+sx2 & -sx3 & +sy1 & -sy2 & +sz1 & -sz2, m_barrier)

# =============================================================================
# Set source
# =============================================================================
# Uniform isotropic source throughout the domain

mcdc.source(x=[0.0, 1.0], y=[0.0, 1.0], z=[0.0, 1.0], isotropic=True)

# =============================================================================
# Set tally, setting, and run mcdc
# =============================================================================

mcdc.tally.mesh_tally(
    scores=["flux"],
    x=np.linspace(0.0, 4.0, 11),
    y=np.linspace(0.0, 4.0, 11),
    z=np.linspace(0.0, 4.0, 11),
)

# Setting
mcdc.setting(N_particle=50)
mcdc.domain_decomposition(
    x=np.linspace(0.0, 4.0, 3), y=np.linspace(0.0, 4.0, 3), z=np.linspace(0.0, 4.0, 3)
)

mcdc.implicit_capture()

# Run
mcdc.run()
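After running this deck under 8 MPI ranks (the count run.py enforces below), the recomposed flux can be inspected with h5py. A hypothetical sketch: the output filename and dataset path are assumptions about MC/DC's HDF5 layout, not something this diff pins down.

import h5py

# Hypothetical post-run check; "output.h5" and the dataset path are
# assumed names, not guaranteed by this diff.
with h5py.File("output.h5", "r") as f:
    flux = f["tallies/mesh_tally_0/flux/mean"][()]
print(flux.shape)  # a 10 x 10 x 10 recomposed mesh tally is expected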
9 changes: 8 additions & 1 deletion test/regression/run.py
@@ -44,13 +44,20 @@
# Skip domain decomp tests unless there are enough MPI processes
temp = names.copy()
for name in names:
-    if name[:3] == "dd_" and not (mpiexec == 4 or srun == 4):
+    if name == "dd_slab_reed" and not (mpiexec == 4 or srun == 4):
        temp.remove(name)
        print(
            Fore.YELLOW
            + "Note: Skipping %s (requires 4 MPI ranks)" % name
            + Style.RESET_ALL
        )
+    elif name == "dd_cooper" and not (mpiexec == 8 or srun == 8):
+        temp.remove(name)
+        print(
+            Fore.YELLOW
+            + "Note: Skipping %s (requires 8 MPI ranks)" % name
+            + Style.RESET_ALL
+        )
names = temp

# Skip iqmc if GPU run
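As the list of decomposed tests grows, the same gating could be written data-driven. A self-contained sketch under that assumption (illustrative only; run.py keeps the explicit branches above, and the colorama styling is omitted here):

# Illustrative rewrite of the skip logic above; not run.py's code.
# Maps each domain-decomposition test to the MPI rank count it requires.
DD_REQUIRED_RANKS = {"dd_slab_reed": 4, "dd_cooper": 8}

# Stand-ins for run.py's detected test names and launcher rank counts
names = ["dd_slab_reed", "dd_cooper", "slab_reed"]
mpiexec, srun = 4, 0

temp = names.copy()
for name in names:
    required = DD_REQUIRED_RANKS.get(name)
    if required is not None and not (mpiexec == required or srun == required):
        temp.remove(name)
        print("Note: Skipping %s (requires %d MPI ranks)" % (name, required))
names = temp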
