Merge pull request #118 from ilhamv/main

add batch loop for fixed source

ilhamv authored Sep 7, 2023
2 parents 7b04daf + f01ba39 commit a4e7b1f
Showing 29 changed files with 107 additions and 61 deletions.
2 changes: 1 addition & 1 deletion examples/fixed_source/slab_absorbium/input.py
@@ -42,7 +42,7 @@
)

# Setting
mcdc.setting(N_particle=1e4)
mcdc.setting(N_particle=1e7)

# Run
mcdc.run()
1 change: 1 addition & 0 deletions mcdc/card.py
@@ -66,6 +66,7 @@ def reset(self):
self.setting = {
"tag": "Setting",
"N_particle": 0,
"N_batch": 1,
"rng_seed": 1,
"time_boundary": INF,
"progress_bar": True,
1 change: 1 addition & 0 deletions mcdc/constant.py
@@ -49,6 +49,7 @@
RNG_MOD = nb.uint64(0x8000000000000000)

# RNG splitter seeds
SEED_SPLIT_CENSUS = nb.uint64(0x43454D654E54)
SEED_SPLIT_SOURCE = nb.uint64(0x43616D696C6C65)
SEED_SPLIT_SOURCE_PRECURSOR = nb.uint64(0x546F6464)
SEED_SPLIT_BANK = nb.uint64(0x5279616E)
26 changes: 16 additions & 10 deletions mcdc/input_.py
@@ -903,35 +903,37 @@ def setting(**kw):
"setting parameter",
key,
[
"progress_bar",
"N_particle",
"N_batch",
"rng_seed",
"time_boundary",
"particle_tracker",
"save_input_deck",
"progress_bar",
"output_name",
"save_input_deck",
"particle_tracker",
"k_eff",
"source_file",
"IC_file",
"active_bank_buff",
"census_bank_buff",
"k_eff",
],
False,
)

# Get keyword arguments
N_particle = kw.get("N_particle")
time_boundary = kw.get("time_boundary")
N_batch = kw.get("N_batch")
rng_seed = kw.get("rng_seed")
output = kw.get("output_name")
time_boundary = kw.get("time_boundary")
progress_bar = kw.get("progress_bar")
output = kw.get("output_name")
save_input_deck = kw.get("save_input_deck")
particle_tracker = kw.get("particle_tracker")
k_eff = kw.get("k_eff")
bank_active_buff = kw.get("active_bank_buff")
bank_census_buff = kw.get("census_bank_buff")
source_file = kw.get("source_file")
particle_tracker = kw.get("particle_tracker")
save_input_deck = kw.get("save_input_deck")
IC_file = kw.get("IC_file")
bank_active_buff = kw.get("active_bank_buff")
bank_census_buff = kw.get("census_bank_buff")

# Check if setting card has been initialized
card = mcdc.input_deck.setting
@@ -940,6 +942,10 @@ def setting(**kw):
if N_particle is not None:
card["N_particle"] = int(N_particle)

# Number of batches
if N_batch is not None:
card["N_batch"] = int(N_batch)

# Time boundary
if time_boundary is not None:
card["time_boundary"] = time_boundary
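Together with the card.py default above, these input_.py changes make N_batch a regular mcdc.setting keyword (defaulting to 1 and cast with int). A minimal fixed-source usage sketch, assuming the material, geometry, and source cards are defined as in the example input decks earlier in this diff:

    import mcdc

    # (material, geometry, and source cards defined as in the example decks)

    # Run 10 independent batches; each batch appears to rerun the full set of
    # N_particle source histories, and the tally mean/sdev are then taken over
    # the batch results (see the score_closeout change below).
    mcdc.setting(N_particle=1e5, N_batch=10)
    mcdc.run()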
6 changes: 5 additions & 1 deletion mcdc/kernel.py
@@ -1465,8 +1465,12 @@ def score_closeout_history(score):
def score_closeout(score, mcdc):
N_history = mcdc["setting"]["N_particle"]

if mcdc["setting"]["mode_eigenvalue"]:
if mcdc["setting"]["N_batch"] > 1:
N_history = mcdc["setting"]["N_batch"]

elif mcdc["setting"]["mode_eigenvalue"]:
N_history = mcdc["setting"]["N_active"]

else:
# MPI Reduce
buff = np.zeros_like(score["mean"])
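The switch above means that, in a multi-batch fixed-source run, each batch contributes one sample to the tally statistics rather than each particle history. The closeout expressions themselves are outside this hunk; a hedged sketch of the usual normalization they imply, with hypothetical accumulator names:

    import numpy as np

    def score_closeout_sketch(total, total_sq, N_history):
        # Hypothetical: 'total' and 'total_sq' are the accumulated sum and
        # sum of squares over N_history samples (particle histories, batches,
        # or active eigenvalue cycles, per the branch above).
        mean = total / N_history
        sdev = np.sqrt((total_sq / N_history - mean**2) / (N_history - 1))
        return mean, sdev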
71 changes: 44 additions & 27 deletions mcdc/loop.py
@@ -10,6 +10,7 @@

from mcdc.constant import *
from mcdc.print_ import (
print_header_batch,
print_progress,
print_progress_eigenvalue,
print_progress_iqmc,
@@ -25,29 +26,45 @@

@njit
def loop_fixed_source(mcdc):
# Loop over time censuses
for idx_census in range(mcdc["setting"]["N_census"]):
mcdc["idx_census"] = idx_census
seed_census = kernel.split_seed(idx_census, mcdc["setting"]["rng_seed"])
# Loop over batches
for idx_batch in range(mcdc["setting"]["N_batch"]):
mcdc["idx_batch"] = idx_batch
seed_batch = kernel.split_seed(idx_batch, mcdc["setting"]["rng_seed"])

# Loop over source particles
seed_source = kernel.split_seed(seed_census, SEED_SPLIT_SOURCE)
loop_source(seed_source, mcdc)

# Loop over source precursors
if mcdc["bank_precursor"]["size"] > 0:
seed_source_precursor = kernel.split_seed(
seed_census, SEED_SPLIT_SOURCE_PRECURSOR
)
loop_source_precursor(seed_source_precursor, mcdc)

# Time census closeout
if idx_census < mcdc["setting"]["N_census"] - 1:
# TODO: Output tally (optional)

# Manage particle banks: population control and work rebalance
seed_bank = kernel.split_seed(seed_census, SEED_SPLIT_BANK)
kernel.manage_particle_banks(seed_bank, mcdc)
# Print multi-batch header
if mcdc["setting"]["N_batch"] > 1:
with objmode():
print_header_batch(mcdc)

# Loop over time censuses
for idx_census in range(mcdc["setting"]["N_census"]):
mcdc["idx_census"] = idx_census
seed_census = kernel.split_seed(seed_batch, SEED_SPLIT_CENSUS)

# Loop over source particles
seed_source = kernel.split_seed(seed_census, SEED_SPLIT_SOURCE)
loop_source(seed_source, mcdc)

# Loop over source precursors
if mcdc["bank_precursor"]["size"] > 0:
seed_source_precursor = kernel.split_seed(
seed_census, SEED_SPLIT_SOURCE_PRECURSOR
)
loop_source_precursor(seed_source_precursor, mcdc)

# Time census closeout
if idx_census < mcdc["setting"]["N_census"] - 1:
# TODO: Output tally (optional)

# Manage particle banks: population control and work rebalance
seed_bank = kernel.split_seed(seed_census, SEED_SPLIT_BANK)
kernel.manage_particle_banks(seed_bank, mcdc)

# Multi-batch closeout
if mcdc["setting"]["N_batch"] > 1:
# Tally history closeout
kernel.tally_reduce_bin(mcdc)
kernel.tally_closeout_history(mcdc)

# Tally closeout
kernel.tally_closeout(mcdc)
@@ -60,8 +77,6 @@ def loop_fixed_source(mcdc):

@njit
def loop_eigenvalue(mcdc):
simulation_end = False

# Loop over power iteration cycles
for idx_cycle in range(mcdc["setting"]["N_cycle"]):
seed_cycle = kernel.split_seed(idx_cycle, mcdc["setting"]["rng_seed"])
@@ -105,7 +120,9 @@ def loop_source(seed, mcdc):
N_prog = 0

# Loop over particle sources
for idx_work in range(mcdc["mpi_work_size"]):
work_start = mcdc["mpi_work_start"]
work_end = work_start + mcdc["mpi_work_size"]
for idx_work in range(work_start, work_end):
seed_work = kernel.split_seed(idx_work, seed)

# Particle tracker
@@ -158,8 +175,8 @@
# Closeout
# =====================================================================

# Tally history closeout for fixed-source simulation
if not mcdc["setting"]["mode_eigenvalue"]:
# Tally history closeout for one-batch fixed-source simulation
if not mcdc["setting"]["mode_eigenvalue"] and mcdc["setting"]["N_batch"] == 1:
kernel.tally_closeout_history(mcdc)

# Progress printout
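One further detail in loop_source above: the loop now runs over global particle indices (mpi_work_start through mpi_work_start + mpi_work_size), so each particle's seed is derived from its global index rather than a rank-local one, presumably so results do not depend on the MPI decomposition. A self-contained sketch of that idea, using a stand-in hash rather than the real kernel.split_seed:

    import hashlib

    def split_seed_stub(key, seed):
        # Stand-in for kernel.split_seed: deterministically map (key, seed)
        # to a new 64-bit seed.
        digest = hashlib.sha256(f"{key}:{seed}".encode()).digest()
        return int.from_bytes(digest[:8], "little")

    seed_source = 12345
    # Eight particles owned by a single rank...
    one_rank = [split_seed_stub(i, seed_source) for i in range(0, 8)]
    # ...or split across two ranks owning indices [0, 4) and [4, 8):
    two_ranks = [split_seed_stub(i, seed_source) for i in range(0, 4)] + [
        split_seed_stub(i, seed_source) for i in range(4, 8)
    ]
    # Same per-particle seeds either way.
    assert one_rank == two_ranks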
6 changes: 6 additions & 0 deletions mcdc/print_.py
@@ -103,6 +103,12 @@ def print_header_eigenvalue(mcdc):
print(" ==== ======= ===================")


def print_header_batch(mcdc):
idx_batch = mcdc["idx_batch"]
if master:
print("\nBatch %i/%i" % (idx_batch + 1, mcdc["setting"]["N_batch"]))


def print_progress_eigenvalue(mcdc):
if master:
idx_cycle = mcdc["idx_cycle"]
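With N_batch greater than 1, the print_header_batch added above is called at the start of every batch (on the master rank only); for example, the third of ten batches would print:

    Batch 3/10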
2 changes: 2 additions & 0 deletions mcdc/type_.py
@@ -455,6 +455,7 @@ def make_type_setting(deck):
struct = [
# Basic MC simulation parameters
("N_particle", uint64),
("N_batch", uint64),
("rng_seed", uint64),
("time_boundary", float64),
# Misc.
@@ -731,6 +732,7 @@ def make_type_global(card):
("eigenvalue_tally_n", float64),
("eigenvalue_tally_C", float64),
("idx_census", int64),
("idx_batch", int64),
("mpi_size", int64),
("mpi_rank", int64),
("mpi_master", bool_),
Binary file modified test/regression/eigenvalue/2d_c5g7/answer.h5
Binary file modified test/regression/eigenvalue/inf_shem361/answer.h5
Binary file modified test/regression/eigenvalue/slab_kornreich/answer.h5
Binary file modified test/regression/eigenvalue/smrg7/answer.h5
Binary file modified test/regression/fixed_source/C5G7-TDX/answer.h5
Binary file modified test/regression/fixed_source/azurv1_pl_super/answer.h5
Binary file modified test/regression/fixed_source/cooper2/answer.h5
Binary file modified test/regression/fixed_source/dsm_azurv1/answer.h5
Binary file modified test/regression/fixed_source/dsm_lattice/answer.h5
Binary file modified test/regression/fixed_source/inf_casmo70/answer.h5
Binary file modified test/regression/fixed_source/inf_casmo70_td/answer.h5
Binary file modified test/regression/fixed_source/kobayashi3TD/answer.h5
Binary file modified test/regression/fixed_source/reed/answer.h5
Binary file modified test/regression/fixed_source/slab_absorbium/answer.h5
Binary file modified test/regression/fixed_source/slab_isobeam_td/answer.h5
Binary file modified test/regression/fixed_source/slab_moving/answer.h5
@@ -60,7 +60,7 @@
mcdc.setting(
N_particle=N_particle,
output_name="output_" + str(N_particle),
active_bank_buff=1000,
active_bank_buff=10000,
progress_bar=False,
)

6 changes: 3 additions & 3 deletions test/verification/analytic/fixed_source/inf_shem361/plot.py
@@ -6,9 +6,6 @@
from reference import reference


# Reference solution
phi_ref = reference() / dE * E_mid

# Load results
output = sys.argv[1]
with np.load("SHEM-361.npz") as data:
@@ -20,6 +17,9 @@
phi = f["tally/flux/mean"][:] / dE * E_mid
phi_sd = f["tally/flux/sdev"][:] / dE * E_mid

# Reference solution
phi_ref = reference() / dE * E_mid

# Flux
plt.step(E_mid, phi, "-b", label="MC", where="mid")
plt.fill_between(E_mid, phi - phi_sd, phi + phi_sd, alpha=0.2, color="b", step="mid")
@@ -1,4 +1,3 @@
from plotter import plot_convergence
import numpy as np
import h5py
import sys
@@ -14,8 +13,8 @@

# Reference solution
data = np.load("reference.npz")
phi_ref = data["phi"].T
n_ref = data["n"].T
phi_ref = data["phi"][1:].T
n_ref = data["n"][1:].T

# Error container
error = np.zeros(len(N_particle_list))
@@ -25,7 +24,7 @@
error_max_n = np.zeros(len(N_particle_list))

# Calculate error
for k, N_particle in enumerate(N_particle_list):
for i, N_particle in enumerate(N_particle_list):
# Get results
with h5py.File("output_%i.h5" % int(N_particle), "r") as f:
phi = f["tally/flux-t/mean"][:]
@@ -34,14 +33,15 @@
speed = data["v"]

# Neutron density
K = phi.shape[1]
n = np.zeros(K)
for k in range(K):
n[k] = np.sum(phi[:, k] / speed)

error[k] = tool.error(phi, phi_ref)
error_n[k] = tool.error(n, n_ref)
error_max[k] = tool.error_max(phi, phi_ref)
error_max_n[k] = tool.error_max(n, n_ref)
error[i] = tool.error(phi, phi_ref)
error_n[i] = tool.error(n, n_ref)
error_max[i] = tool.error_max(phi, phi_ref)
error_max_n[i] = tool.error_max(n, n_ref)

tool.plot_convergence("inf_shem361_td_flux", N_particle_list, error, error_max)
tool.plot_convergence("inf_shem361_td_n", N_particle_list, error_n, error_max_n)
27 changes: 18 additions & 9 deletions test/verification/analytic/fixed_source/reed/process.py
@@ -3,10 +3,11 @@
import h5py
import sys

sys.path.append("../../util")
from plotter import plot_convergence
sys.path.append("../../")
import tool


# Cases run
N_min = int(sys.argv[1])
N_max = int(sys.argv[2])
N_particle_list = np.logspace(N_min, N_max, (N_max - N_min) * 2 + 1)
@@ -17,17 +18,25 @@
dx = x[1:] - x[:-1]
phi_ref, phi_x_ref = reference(x)

error = []
error_x = []
# Error containers
error = np.zeros(len(N_particle_list))
error_x = np.zeros(len(N_particle_list))

for N_particle in N_particle_list:
error_max = np.zeros(len(N_particle_list))
error_max_x = np.zeros(len(N_particle_list))

# Calculate error
for k, N_particle in enumerate(N_particle_list):
# Get results
with h5py.File("output_%i.h5" % int(N_particle), "r") as f:
phi = f["tally/flux/mean"][:] / dx * 101.0
phi_x = f["tally/flux-x/mean"][:] * 101.0

error.append(np.linalg.norm((phi - phi_ref) / phi_ref))
error_x.append(np.linalg.norm((phi_x - phi_x_ref) / phi_x_ref))
# Get error
error[k] = tool.error(phi, phi_ref)
error_x[k] = tool.error(phi_x, phi_x_ref)
error_max[k] = tool.error_max(phi, phi_ref)
error_max_x[k] = tool.error_max(phi_x, phi_x_ref)

plot_convergence("reed_flux", N_particle_list, error)
plot_convergence("reed_flux_x", N_particle_list, error_x)
tool.plot_convergence("reed_flux", N_particle_list, error, error_max)
tool.plot_convergence("reed_flux_x", N_particle_list, error_x, error_max_x)
2 changes: 1 addition & 1 deletion test/verification/analytic/tool.py
@@ -32,6 +32,6 @@ def plot_convergence(name, N_particle, error, error_max):
plt.legend()
plt.grid()
plt.title(name)
plt.savefig("../../results/" + name + ".png")
# plt.savefig("../../results/" + name + ".png")
plt.show()
plt.clf()
