From 0ad9e3154a0882672826de629e71c67022ed78b9 Mon Sep 17 00:00:00 2001 From: deegan Date: Wed, 4 Sep 2024 13:42:24 +0200 Subject: [PATCH] ++ --- pyphare/pyphare/pharein/__init__.py | 340 +----------------- pyphare/pyphare/pharein/examples/job.py | 110 ------ pyphare/pyphare/pharein/init.py | 2 + .../pyphare/pharein/initialize/__init__.py | 9 + pyphare/pyphare/pharein/initialize/general.py | 311 ++++++++++++++++ .../pyphare/pharein/initialize/samrai_hdf5.py | 16 + .../pyphare/pharein/initialize/user_fns.py | 51 +++ pyphare/pyphare/pharein/load_balancer.py | 6 +- pyphare/pyphare/pharein/simulation.py | 19 + res/cmake/test.cmake | 2 +- .../particle_initializer_factory.hpp | 39 +- .../samrai_hdf5_particle_initializer.hpp | 103 ++++++ src/amr/physical_models/hybrid_model.hpp | 14 +- src/core/CMakeLists.txt | 1 - .../electromag/electromag_initializer.hpp | 7 +- src/core/data/ions/ions.hpp | 2 +- src/phare_core.hpp | 3 - src/simulator/phare_types.hpp | 5 +- .../particles/initializer}/CMakeLists.txt | 2 +- .../data/particles/initializer}/test_main.cpp | 8 +- .../core/numerics/ion_updater/CMakeLists.txt | 2 +- .../numerics/ion_updater/test_updater.cpp | 21 +- .../simulator/test_samrai_restarts_parser.py | 15 +- 23 files changed, 596 insertions(+), 492 deletions(-) delete mode 100644 pyphare/pyphare/pharein/examples/job.py create mode 100644 pyphare/pyphare/pharein/initialize/__init__.py create mode 100644 pyphare/pyphare/pharein/initialize/general.py create mode 100644 pyphare/pyphare/pharein/initialize/samrai_hdf5.py create mode 100644 pyphare/pyphare/pharein/initialize/user_fns.py rename src/{core/data/ions/particle_initializers => amr/data/particles/initializers}/particle_initializer_factory.hpp (69%) create mode 100644 src/amr/data/particles/initializers/samrai_hdf5_particle_initializer.hpp rename tests/{core/data/particle_initializer => amr/data/particles/initializer}/CMakeLists.txt (96%) rename tests/{core/data/particle_initializer => amr/data/particles/initializer}/test_main.cpp (87%) diff --git a/pyphare/pyphare/pharein/__init__.py b/pyphare/pyphare/pharein/__init__.py index 5342dfab1..3cbc770a9 100644 --- a/pyphare/pyphare/pharein/__init__.py +++ b/pyphare/pyphare/pharein/__init__.py @@ -1,9 +1,7 @@ import os import sys import subprocess -import numpy as np -from pyphare.core.phare_utilities import is_scalar from .uniform_model import UniformModel from .maxwellian_fluid_model import MaxwellianFluidModel from .electron_model import ElectronModel @@ -14,11 +12,7 @@ MetaDiagnostics, InfoDiagnostics, ) -from .simulation import ( - Simulation, - serialize as serialize_sim, - deserialize as deserialize_sim, -) +from .simulation import Simulation from .load_balancer import LoadBalancer __all__ = [ @@ -31,6 +25,7 @@ "MetaDiagnostics", "InfoDiagnostics", "Simulation", + "LoadBalancer", ] # This exists to allow a condition variable for when we are running PHARE from C++ via phare-exe @@ -64,58 +59,6 @@ def NO_GUI(): mpl.use("Agg") -def getSimulation(): - from .global_vars import sim - - return sim - - -def _patch_data_ids(restart_file_dir): - """ - for restarts we save samrai patch data ids to the restart files, which we access from here - to tell samrai which patch datas to load from the restart file on restart - """ - from pyphare.cpp import cpp_etc_lib - - return cpp_etc_lib().patch_data_ids(restart_file_dir) - - -def _serialized_simulation_string(restart_file_dir): - from pyphare.cpp import cpp_etc_lib - - return cpp_etc_lib().serialized_simulation_string(restart_file_dir) - - -# converts 
scalars to array of expected size -# converts lists to arrays -class py_fn_wrapper: - def __init__(self, fn): - self.fn = fn - - def __call__(self, *xyz): - args = [np.asarray(arg) for arg in xyz] - ret = self.fn(*args) - if isinstance(ret, list): - ret = np.asarray(ret) - if is_scalar(ret): - ret = np.full(len(args[-1]), ret) - return ret - - -# Wrap calls to user init functions to turn C++ vectors to ndarrays, -# and returned ndarrays to C++ span -class fn_wrapper(py_fn_wrapper): - def __init__(self, fn): - super().__init__(fn) - - def __call__(self, *xyz): - from pyphare.cpp import cpp_etc_lib - - # convert numpy array to C++ SubSpan - # couples vector init functions to C++ - return cpp_etc_lib().makePyArrayWrapper(super().__call__(*xyz)) - - def clearDict(): """ dict may contain dangling references from a previous simulation unless cleared @@ -126,279 +69,12 @@ def clearDict(): def populateDict(): - from .global_vars import sim as simulation - import pybindlibs.dictator as pp - - # pybind complains if receiving wrong type - def add_int(path, val): - pp.add_int(path, int(val)) - - def add_bool(path, val): - pp.add_bool(path, bool(val)) - - def add_double(path, val): - pp.add_double(path, float(val)) - - def add_size_t(path, val): - casted = int(val) - if casted < 0: - raise RuntimeError("pyphare.__init__::add_size_t received negative value") - pp.add_size_t(path, casted) - - def add_vector_int(path, val): - pp.add_vector_int(path, list(val)) - - add_string = pp.add_string - addInitFunction = getattr(pp, "addInitFunction{:d}".format(simulation.ndim) + "D") - - add_string("simulation/name", "simulation_test") - add_int("simulation/dimension", simulation.ndim) - - if simulation.smallest_patch_size is not None: - add_vector_int( - "simulation/AMR/smallest_patch_size", simulation.smallest_patch_size - ) - if simulation.largest_patch_size is not None: - add_vector_int( - "simulation/AMR/largest_patch_size", simulation.largest_patch_size - ) - - add_string("simulation/grid/layout_type", simulation.layout) - add_int("simulation/grid/nbr_cells/x", simulation.cells[0]) - add_double("simulation/grid/meshsize/x", simulation.dl[0]) - add_double("simulation/grid/origin/x", simulation.origin[0]) - add_string("simulation/grid/boundary_type/x", simulation.boundary_types[0]) - - if simulation.ndim > 1: - add_int("simulation/grid/nbr_cells/y", simulation.cells[1]) - add_double("simulation/grid/meshsize/y", simulation.dl[1]) - add_double("simulation/grid/origin/y", simulation.origin[1]) - add_string("simulation/grid/boundary_type/y", simulation.boundary_types[1]) - - if simulation.ndim > 2: - add_int("simulation/grid/nbr_cells/z", simulation.cells[2]) - add_double("simulation/grid/meshsize/z", simulation.dl[2]) - add_double("simulation/grid/origin/z", simulation.origin[2]) - add_string("simulation/grid/boundary_type/z", simulation.boundary_types[2]) - - add_int("simulation/interp_order", simulation.interp_order) - add_int("simulation/refined_particle_nbr", simulation.refined_particle_nbr) - add_double("simulation/time_step", simulation.time_step) - add_int("simulation/time_step_nbr", simulation.time_step_nbr) - - add_string("simulation/AMR/clustering", simulation.clustering) - add_int("simulation/AMR/max_nbr_levels", simulation.max_nbr_levels) - add_vector_int("simulation/AMR/nesting_buffer", simulation.nesting_buffer) - - add_int("simulation/AMR/tag_buffer", simulation.tag_buffer) - - refinement_boxes = simulation.refinement_boxes - - def as_paths(rb): - 
add_int("simulation/AMR/refinement/boxes/nbr_levels/", len(rb.keys())) - for level, boxes in rb.items(): - level_path = "simulation/AMR/refinement/boxes/" + level + "/" - add_int(level_path + "nbr_boxes/", int(len(boxes))) - for box_i, box in enumerate(boxes): - box_id = "B" + str(box_i) - lower = box.lower - upper = box.upper - box_lower_path_x = box_id + "/lower/x/" - box_upper_path_x = box_id + "/upper/x/" - add_int(level_path + box_lower_path_x, lower[0]) - add_int(level_path + box_upper_path_x, upper[0]) - if len(lower) >= 2: - box_lower_path_y = box_id + "/lower/y/" - box_upper_path_y = box_id + "/upper/y/" - add_int(level_path + box_lower_path_y, lower[1]) - add_int(level_path + box_upper_path_y, upper[1]) - if len(lower) == 3: - box_lower_path_z = box_id + "/lower/z/" - box_upper_path_z = box_id + "/upper/z/" - add_int(level_path + box_lower_path_z, lower[2]) - add_int(level_path + box_upper_path_z, upper[2]) - - if refinement_boxes is not None and simulation.refinement == "boxes": - as_paths(refinement_boxes) - elif simulation.refinement == "tagging": - add_string("simulation/AMR/refinement/tagging/method", "auto") - else: - add_string( - "simulation/AMR/refinement/tagging/method", "none" - ) # integrator.h might want some looking at - - add_string("simulation/algo/ion_updater/pusher/name", simulation.particle_pusher) - add_double("simulation/algo/ohm/resistivity", simulation.resistivity) - add_double("simulation/algo/ohm/hyper_resistivity", simulation.hyper_resistivity) - - # load balancer block start - lb = simulation.load_balancer or LoadBalancer(active=False, _register=False) - base = "simulation/AMR/loadbalancing" - add_bool(f"{base}/active", lb.active) - add_string(f"{base}/mode", lb.mode) - add_double(f"{base}/tolerance", lb.tol) - - # if mode==nppc, imbalance allowed - add_bool(f"{base}/auto", lb.auto) - add_size_t(f"{base}/next_rebalance", lb.next_rebalance) - add_size_t(f"{base}/max_next_rebalance", lb.max_next_rebalance) - add_size_t( - f"{base}/next_rebalance_backoff_multiplier", - lb.next_rebalance_backoff_multiplier, - ) - - # cadence based values - add_size_t(f"{base}/every", lb.every) - add_bool(f"{base}/on_init", lb.on_init) - # load balancer block end - - init_model = simulation.model - modelDict = init_model.model_dict - - if init_model.nbr_populations() < 0: - raise RuntimeError("Number of populations cannot be negative") - add_size_t("simulation/ions/nbrPopulations", init_model.nbr_populations()) - - partinit = "particle_initializer" - for pop_index, pop in enumerate(init_model.populations): - pop_path = "simulation/ions/pop" - partinit_path = pop_path + "{:d}/".format(pop_index) + partinit + "/" - d = modelDict[pop] - add_string(pop_path + "{:d}/name".format(pop_index), pop) - add_double(pop_path + "{:d}/mass".format(pop_index), d["mass"]) - add_string(partinit_path + "name", "maxwellian") - - addInitFunction(partinit_path + "density", fn_wrapper(d["density"])) - addInitFunction(partinit_path + "bulk_velocity_x", fn_wrapper(d["vx"])) - addInitFunction(partinit_path + "bulk_velocity_y", fn_wrapper(d["vy"])) - addInitFunction(partinit_path + "bulk_velocity_z", fn_wrapper(d["vz"])) - addInitFunction(partinit_path + "thermal_velocity_x", fn_wrapper(d["vthx"])) - addInitFunction(partinit_path + "thermal_velocity_y", fn_wrapper(d["vthy"])) - addInitFunction(partinit_path + "thermal_velocity_z", fn_wrapper(d["vthz"])) - add_double(partinit_path + "charge", d["charge"]) - add_string(partinit_path + "basis", "cartesian") - if "init" in d and "seed" in d["init"]: - 
pp.add_optional_size_t(partinit_path + "init/seed", d["init"]["seed"]) - - add_int(partinit_path + "nbr_part_per_cell", d["nbrParticlesPerCell"]) - add_double(partinit_path + "density_cut_off", d["density_cut_off"]) - - add_string("simulation/electromag/name", "EM") - add_string("simulation/electromag/electric/name", "E") - - add_string("simulation/electromag/magnetic/name", "B") - maginit_path = "simulation/electromag/magnetic/initializer/" - addInitFunction(maginit_path + "x_component", fn_wrapper(modelDict["bx"])) - addInitFunction(maginit_path + "y_component", fn_wrapper(modelDict["by"])) - addInitFunction(maginit_path + "z_component", fn_wrapper(modelDict["bz"])) - - serialized_sim = serialize_sim(simulation) - - #### adding diagnostics - - diag_path = "simulation/diagnostics/" - for diag in list(simulation.diagnostics.values()): - diag.attributes["serialized_simulation"] = serialized_sim - - type_path = diag_path + diag.type + "/" - name_path = type_path + diag.name - add_string(name_path + "/" + "type", diag.type) - add_string(name_path + "/" + "quantity", diag.quantity) - add_size_t(name_path + "/" + "flush_every", diag.flush_every) - pp.add_array_as_vector( - name_path + "/" + "write_timestamps", diag.write_timestamps - ) - pp.add_array_as_vector( - name_path + "/" + "compute_timestamps", diag.compute_timestamps - ) - - add_size_t(name_path + "/" + "n_attributes", len(diag.attributes)) - for attr_idx, attr_key in enumerate(diag.attributes): - add_string(name_path + "/" + f"attribute_{attr_idx}_key", attr_key) - add_string( - name_path + "/" + f"attribute_{attr_idx}_value", - diag.attributes[attr_key], - ) - - if len(simulation.diagnostics) > 0: - if simulation.diag_options is not None and "options" in simulation.diag_options: - add_string( - diag_path + "filePath", simulation.diag_options["options"]["dir"] - ) - if "mode" in simulation.diag_options["options"]: - add_string( - diag_path + "mode", simulation.diag_options["options"]["mode"] - ) - if "fine_dump_lvl_max" in simulation.diag_options["options"]: - add_int( - diag_path + "fine_dump_lvl_max", - simulation.diag_options["options"]["fine_dump_lvl_max"], - ) - else: - add_string(diag_path + "filePath", "phare_output") - #### diagnostics added - - #### adding restarts - if simulation.restart_options is not None: - restart_options = simulation.restart_options - restarts_path = "simulation/restarts/" - restart_file_path = "phare_outputs" - - if "dir" in restart_options: - restart_file_path = restart_options["dir"] - - if "restart_time" in restart_options: - from pyphare.cpp import cpp_etc_lib - - restart_time = restart_options["restart_time"] - restart_file_load_path = cpp_etc_lib().restart_path_for_time( - restart_file_path, restart_time - ) - - if not os.path.exists(restart_file_load_path): - raise ValueError( - f"PHARE restart file not found for time {restart_time}" - ) - - deserialized_simulation = deserialize_sim( - _serialized_simulation_string(restart_file_load_path) - ) - if not simulation.is_restartable_compared_to(deserialized_simulation): - raise ValueError( - "deserialized Restart simulation is incompatible with configured simulation parameters" - ) - - add_vector_int( - restarts_path + "restart_ids", _patch_data_ids(restart_file_load_path) - ) - add_string(restarts_path + "loadPath", restart_file_load_path) - add_double(restarts_path + "restart_time", restart_time) - - if "mode" in restart_options: - add_string(restarts_path + "mode", restart_options["mode"]) - - add_string(restarts_path + "filePath", 
restart_file_path) - - if "elapsed_timestamps" in restart_options: - pp.add_array_as_vector( - restarts_path + "elapsed_timestamps", - restart_options["elapsed_timestamps"], - ) - - if "timestamps" in restart_options: - pp.add_array_as_vector( - restarts_path + "write_timestamps", restart_options["timestamps"] - ) + from .global_vars import sim + from . import initialize - add_string(restarts_path + "serialized_simulation", serialized_sim) - #### restarts added + initialize.general.populateDict(sim) - #### adding electrons - if simulation.electrons is None: - raise RuntimeError("Error - no electrons registered to this Simulation") + if sim.init_options is None: + initialize.user_fns.populateDict(sim) else: - for item in simulation.electrons.dict_path(): - if isinstance(item[1], str): - add_string("simulation/" + item[0], item[1]) - else: - add_double("simulation/" + item[0], item[1]) + initialize.samrai_hdf5.populateDict(sim) diff --git a/pyphare/pyphare/pharein/examples/job.py b/pyphare/pyphare/pharein/examples/job.py deleted file mode 100644 index 24ed09f98..000000000 --- a/pyphare/pyphare/pharein/examples/job.py +++ /dev/null @@ -1,110 +0,0 @@ -#!/usr/bin/env python - -import numpy as np -import pyphare.pharein as ph - - -# ------------------------------------ -# configure the simulation -# ------------------------------------ - -ph.Simulation( - time_step_nbr=1000, # number of time steps (not specified if time_step and final_time provided) - final_time=1.0, # simulation final time (not specified if time_step and time_step_nbr given) - boundary_types="periodic", # boundary condition, string or tuple, length == len(cell) == len(dl) - cells=80, # integer or tuple length == dimension - dl=0.1, # mesh size of the root level, float or tuple - path="test5", # directory where INI file and diagnostics directories will be - # time_step = 0.005, # simulation time step (not specified if time_step_nbr and final_time given) - # domain_size = 8., # float or tuple, not specified if dl and cells are - # interp_order = 1, # interpolation order, [default = 1] can be 1, 2, 3 or 4 - # layout = "yee", # grid layout, [default="yee"] - # origin = 0., # position of the origin of the domain, float or tuple (length = dimension) - # particle_pusher = "modified_boris", # particle pusher method, [default = "modified_boris"] - # refined_particle_nbr = 2, # number of refined particle a particle is split into [default : ] - # diag_export_format = 'ascii', # export format of the diagnostics [default = 'ascii'] - # refinement = {"level":[0,1], # AMR parameters - # "extent_ratio":[0.4, 0.6], - # "refinement_iterations":[0, 3]}, -) # end Simulation - - -# in the following we use the MaxwellianFluidModel - - -Te = 0.12 - - -def n(x): - return 1.0 - - -def bx(x): - xmax = ph.getSimulation().simulation_domain()[0] - return np.cos(2 * np.pi / xmax * x) - - -ph.MaxwellianFluidModel(bx=bx, protons={"density": n}, background={}) - - -ph.ElectronModel(closure="isothermal", Te=Te) - - -ph.ElectromagDiagnostics( - diag_type="E", # available : ("E", "B") - write_every=10, - compute_every=5, - start_iteration=0, - last_iteration=990, - path="ElectromagDiagnostics1", # where output files will be written, [default: name] -) - - -ph.FluidDiagnostics( - diag_type="density", # choose in (rho_s, flux_s) - write_every=10, # write on disk every x iterations - compute_every=5, # compute diagnostics every x iterations ( x <= write_every) - start_iteration=0, # iteration at which diag is enabled - last_iteration=990, # iteration at which diag 
is turned off - population_name="protons", # name of the population for which the diagnostics is made - # ,path = 'FluidDiagnostics1' # where output files will be written, [default: name] -) - - -ph.FluidDiagnostics( - diag_type="bulkVelocity", - write_every=10, - compute_every=5, - start_iteration=0, - last_iteration=990, - population_name="background", -) - -ph.FluidDiagnostics( - diag_type="density", - write_every=10, - compute_every=5, - start_iteration=0, - last_iteration=990, - population_name="all", -) - -ph.FluidDiagnostics( - diag_type="flux", - write_every=10, - compute_every=5, - start_iteration=0, - last_iteration=990, - population_name="background", -) - -ph.ElectromagDiagnostics( - diag_type="B", - write_every=10, - compute_every=5, - start_iteration=0, - last_iteration=990, -) - -for item in ph.getSimulation().electrons.dict_path(): - print(item[0], item[1]) diff --git a/pyphare/pyphare/pharein/init.py b/pyphare/pyphare/pharein/init.py index 5c3a5696f..4728000fc 100644 --- a/pyphare/pyphare/pharein/init.py +++ b/pyphare/pyphare/pharein/init.py @@ -6,4 +6,6 @@ def get_user_inputs(jobname): _init_.PHARE_EXE = True print(jobname) jobmodule = importlib.import_module(jobname) # lgtm [py/unused-local-variable] + if jobmodule is None: + raise RuntimeError("failed to import job") populateDict() diff --git a/pyphare/pyphare/pharein/initialize/__init__.py b/pyphare/pyphare/pharein/initialize/__init__.py new file mode 100644 index 000000000..bf53e1c00 --- /dev/null +++ b/pyphare/pyphare/pharein/initialize/__init__.py @@ -0,0 +1,9 @@ +from . import general +from . import user_fns +from . import samrai_hdf5 + +__all__ = [ + "general", + "user_fns", + "samrai_hdf5", +] diff --git a/pyphare/pyphare/pharein/initialize/general.py b/pyphare/pyphare/pharein/initialize/general.py new file mode 100644 index 000000000..51da4cb85 --- /dev/null +++ b/pyphare/pyphare/pharein/initialize/general.py @@ -0,0 +1,311 @@ +# +# + +import os +import numpy as np +import pybindlibs.dictator as pp +from pyphare.core.phare_utilities import is_scalar + +from pyphare.pharein.load_balancer import LoadBalancer +from pyphare.pharein.simulation import ( + serialize as serialize_sim, + deserialize as deserialize_sim, +) + + +def _patch_data_ids(restart_file_dir): + """ + for restarts we save samrai patch data ids to the restart files, which we access from here + to tell samrai which patch datas to load from the restart file on restart + """ + from pyphare.cpp import cpp_etc_lib + + return cpp_etc_lib().patch_data_ids(restart_file_dir) + + +def _serialized_simulation_string(restart_file_dir): + from pyphare.cpp import cpp_etc_lib + + return cpp_etc_lib().serialized_simulation_string(restart_file_dir) + + +# converts scalars to array of expected size +# converts lists to arrays +class py_fn_wrapper: + def __init__(self, fn): + self.fn = fn + + def __call__(self, *xyz): + args = [np.asarray(arg) for arg in xyz] + ret = self.fn(*args) + if isinstance(ret, list): + ret = np.asarray(ret) + if is_scalar(ret): + ret = np.full(len(args[-1]), ret) + return ret + + +# Wrap calls to user init functions to turn C++ vectors to ndarrays, +# and returned ndarrays to C++ span +class fn_wrapper(py_fn_wrapper): + def __init__(self, fn): + super().__init__(fn) + + def __call__(self, *xyz): + from pyphare.cpp import cpp_etc_lib + + # convert numpy array to C++ SubSpan + # couples vector init functions to C++ + return cpp_etc_lib().makePyArrayWrapper(super().__call__(*xyz)) + + +# pybind complains if receiving wrong type +def add_int(path, 
val): + pp.add_int(path, int(val)) + + +def add_bool(path, val): + pp.add_bool(path, bool(val)) + + +def add_double(path, val): + pp.add_double(path, float(val)) + + +def add_size_t(path, val): + casted = int(val) + if casted < 0: + raise RuntimeError("pyphare.__init__::add_size_t received negative value") + pp.add_size_t(path, casted) + + +def add_vector_int(path, val): + pp.add_vector_int(path, list(val)) + + +add_string = pp.add_string + + +def populateDict(sim): + import pybindlibs.dictator as pp + + add_string("simulation/name", "simulation_test") + add_int("simulation/dimension", sim.ndim) + + if sim.smallest_patch_size is not None: + add_vector_int("simulation/AMR/smallest_patch_size", sim.smallest_patch_size) + if sim.largest_patch_size is not None: + add_vector_int("simulation/AMR/largest_patch_size", sim.largest_patch_size) + + add_string("simulation/grid/layout_type", sim.layout) + add_int("simulation/grid/nbr_cells/x", sim.cells[0]) + add_double("simulation/grid/meshsize/x", sim.dl[0]) + add_double("simulation/grid/origin/x", sim.origin[0]) + add_string("simulation/grid/boundary_type/x", sim.boundary_types[0]) + + if sim.ndim > 1: + add_int("simulation/grid/nbr_cells/y", sim.cells[1]) + add_double("simulation/grid/meshsize/y", sim.dl[1]) + add_double("simulation/grid/origin/y", sim.origin[1]) + add_string("simulation/grid/boundary_type/y", sim.boundary_types[1]) + + if sim.ndim > 2: + add_int("simulation/grid/nbr_cells/z", sim.cells[2]) + add_double("simulation/grid/meshsize/z", sim.dl[2]) + add_double("simulation/grid/origin/z", sim.origin[2]) + add_string("simulation/grid/boundary_type/z", sim.boundary_types[2]) + + add_int("simulation/interp_order", sim.interp_order) + add_int("simulation/refined_particle_nbr", sim.refined_particle_nbr) + add_double("simulation/time_step", sim.time_step) + add_int("simulation/time_step_nbr", sim.time_step_nbr) + + add_string("simulation/AMR/clustering", sim.clustering) + add_int("simulation/AMR/max_nbr_levels", sim.max_nbr_levels) + add_vector_int("simulation/AMR/nesting_buffer", sim.nesting_buffer) + + add_int("simulation/AMR/tag_buffer", sim.tag_buffer) + + refinement_boxes = sim.refinement_boxes + + def as_paths(rb): + add_int("simulation/AMR/refinement/boxes/nbr_levels/", len(rb.keys())) + for level, boxes in rb.items(): + level_path = "simulation/AMR/refinement/boxes/" + level + "/" + add_int(level_path + "nbr_boxes/", int(len(boxes))) + for box_i, box in enumerate(boxes): + box_id = "B" + str(box_i) + lower = box.lower + upper = box.upper + box_lower_path_x = box_id + "/lower/x/" + box_upper_path_x = box_id + "/upper/x/" + add_int(level_path + box_lower_path_x, lower[0]) + add_int(level_path + box_upper_path_x, upper[0]) + if len(lower) >= 2: + box_lower_path_y = box_id + "/lower/y/" + box_upper_path_y = box_id + "/upper/y/" + add_int(level_path + box_lower_path_y, lower[1]) + add_int(level_path + box_upper_path_y, upper[1]) + if len(lower) == 3: + box_lower_path_z = box_id + "/lower/z/" + box_upper_path_z = box_id + "/upper/z/" + add_int(level_path + box_lower_path_z, lower[2]) + add_int(level_path + box_upper_path_z, upper[2]) + + if refinement_boxes is not None and sim.refinement == "boxes": + as_paths(refinement_boxes) + elif sim.refinement == "tagging": + add_string("simulation/AMR/refinement/tagging/method", "auto") + else: + add_string( + "simulation/AMR/refinement/tagging/method", "none" + ) # integrator.h might want some looking at + + add_string("simulation/algo/ion_updater/pusher/name", sim.particle_pusher) + 
add_double("simulation/algo/ohm/resistivity", sim.resistivity) + add_double("simulation/algo/ohm/hyper_resistivity", sim.hyper_resistivity) + + # load balancer block start + lb = sim.load_balancer or LoadBalancer(active=False, _register=False) + base = "simulation/AMR/loadbalancing" + add_bool(f"{base}/active", lb.active) + add_string(f"{base}/mode", lb.mode) + add_double(f"{base}/tolerance", lb.tol) + + # if mode==nppc, imbalance allowed + add_bool(f"{base}/auto", lb.auto) + add_size_t(f"{base}/next_rebalance", lb.next_rebalance) + add_size_t(f"{base}/max_next_rebalance", lb.max_next_rebalance) + add_size_t( + f"{base}/next_rebalance_backoff_multiplier", + lb.next_rebalance_backoff_multiplier, + ) + + # cadence based values + add_size_t(f"{base}/every", lb.every) + add_bool(f"{base}/on_init", lb.on_init) + # load balancer block end + + init_model = sim.model + + if init_model.nbr_populations() < 0: + raise RuntimeError("Number of populations cannot be negative") + add_size_t("simulation/ions/nbrPopulations", init_model.nbr_populations()) + + modelDict = init_model.model_dict + for pop_index, pop in enumerate(init_model.populations): + pop_path = "simulation/ions/pop" + add_string(pop_path + "{:d}/name".format(pop_index), pop) + d = modelDict[pop] + add_double(pop_path + "{:d}/mass".format(pop_index), d["mass"]) + + add_string("simulation/electromag/name", "EM") + add_string("simulation/electromag/electric/name", "E") + add_string("simulation/electromag/magnetic/name", "B") + + serialized_sim = serialize_sim(sim) + + #### adding diagnostics + + diag_path = "simulation/diagnostics/" + for diag in list(sim.diagnostics.values()): + diag.attributes["serialized_simulation"] = serialized_sim + + type_path = diag_path + diag.type + "/" + name_path = type_path + diag.name + add_string(name_path + "/" + "type", diag.type) + add_string(name_path + "/" + "quantity", diag.quantity) + add_size_t(name_path + "/" + "flush_every", diag.flush_every) + pp.add_array_as_vector( + name_path + "/" + "write_timestamps", diag.write_timestamps + ) + pp.add_array_as_vector( + name_path + "/" + "compute_timestamps", diag.compute_timestamps + ) + + add_size_t(name_path + "/" + "n_attributes", len(diag.attributes)) + for attr_idx, attr_key in enumerate(diag.attributes): + add_string(name_path + "/" + f"attribute_{attr_idx}_key", attr_key) + add_string( + name_path + "/" + f"attribute_{attr_idx}_value", + diag.attributes[attr_key], + ) + + if len(sim.diagnostics) > 0: + if sim.diag_options is not None and "options" in sim.diag_options: + add_string(diag_path + "filePath", sim.diag_options["options"]["dir"]) + if "mode" in sim.diag_options["options"]: + add_string(diag_path + "mode", sim.diag_options["options"]["mode"]) + if "fine_dump_lvl_max" in sim.diag_options["options"]: + add_int( + diag_path + "fine_dump_lvl_max", + sim.diag_options["options"]["fine_dump_lvl_max"], + ) + else: + add_string(diag_path + "filePath", "phare_output") + #### diagnostics added + + #### adding restarts + if sim.restart_options is not None: + restart_options = sim.restart_options + restarts_path = "simulation/restarts/" + restart_file_path = "phare_outputs" + + if "dir" in restart_options: + restart_file_path = restart_options["dir"] + + if "restart_time" in restart_options: + from pyphare.cpp import cpp_etc_lib + + restart_time = restart_options["restart_time"] + restart_file_load_path = cpp_etc_lib().restart_path_for_time( + restart_file_path, restart_time + ) + + if not os.path.exists(restart_file_load_path): + raise ValueError( + 
f"PHARE restart file not found for time {restart_time}" + ) + + deserialized_simulation = deserialize_sim( + _serialized_simulation_string(restart_file_load_path) + ) + if not sim.is_restartable_compared_to(deserialized_simulation): + raise ValueError( + "deserialized Restart simulation is incompatible with configured simulation parameters" + ) + + add_vector_int( + restarts_path + "restart_ids", _patch_data_ids(restart_file_load_path) + ) + add_string(restarts_path + "loadPath", restart_file_load_path) + add_double(restarts_path + "restart_time", restart_time) + + if "mode" in restart_options: + add_string(restarts_path + "mode", restart_options["mode"]) + + add_string(restarts_path + "filePath", restart_file_path) + + if "elapsed_timestamps" in restart_options: + pp.add_array_as_vector( + restarts_path + "elapsed_timestamps", + restart_options["elapsed_timestamps"], + ) + + if "timestamps" in restart_options: + pp.add_array_as_vector( + restarts_path + "write_timestamps", restart_options["timestamps"] + ) + + add_string(restarts_path + "serialized_simulation", serialized_sim) + #### restarts added + + #### adding electrons + if sim.electrons is None: + raise RuntimeError("Error - no electrons registered to this Simulation") + else: + for item in sim.electrons.dict_path(): + if isinstance(item[1], str): + add_string("simulation/" + item[0], item[1]) + else: + add_double("simulation/" + item[0], item[1]) diff --git a/pyphare/pyphare/pharein/initialize/samrai_hdf5.py b/pyphare/pyphare/pharein/initialize/samrai_hdf5.py new file mode 100644 index 000000000..9c81afa5b --- /dev/null +++ b/pyphare/pyphare/pharein/initialize/samrai_hdf5.py @@ -0,0 +1,16 @@ +from .general import add_string, add_int + +# import pybindlibs.dictator as pp + + +def populateDict(sim): + init_model = sim.model + partinit = "particle_initializer" + for pop_index, pop in enumerate(init_model.populations): + pop_path = "simulation/ions/pop" + partinit_path = pop_path + "{:d}/".format(pop_index) + partinit + "/" + + add_string(partinit_path + "name", "samraih5") + add_string(partinit_path + "filepath", sim.init_options["dir"]) + add_int(partinit_path + "mpi_size", sim.init_options.get("mpi_size", 1)) + add_int(partinit_path + "index", sim.init_options.get("index", 0)) diff --git a/pyphare/pyphare/pharein/initialize/user_fns.py b/pyphare/pyphare/pharein/initialize/user_fns.py new file mode 100644 index 000000000..855f127e2 --- /dev/null +++ b/pyphare/pyphare/pharein/initialize/user_fns.py @@ -0,0 +1,51 @@ +from .general import add_double, add_string, add_int, add_size_t, fn_wrapper +import pybindlibs.dictator as pp + + +def populateDict(sim): + populate_electromag(sim) + populate_particles(sim) + + +def populate_electromag(sim): + addInitFunction = getattr(pp, "addInitFunction{:d}".format(sim.ndim) + "D") + modelDict = sim.model.model_dict + maginit_path = "simulation/electromag/magnetic/initializer/" + addInitFunction(maginit_path + "x_component", fn_wrapper(modelDict["bx"])) + addInitFunction(maginit_path + "y_component", fn_wrapper(modelDict["by"])) + addInitFunction(maginit_path + "z_component", fn_wrapper(modelDict["bz"])) + + +def populate_particles(sim): + addInitFunction = getattr(pp, "addInitFunction{:d}".format(sim.ndim) + "D") + + init_model = sim.model + modelDict = init_model.model_dict + + if init_model.nbr_populations() < 0: + raise RuntimeError("Number of populations cannot be negative") + add_size_t("simulation/ions/nbrPopulations", init_model.nbr_populations()) + + partinit = "particle_initializer" + for 
pop_index, pop in enumerate(init_model.populations): + pop_path = "simulation/ions/pop" + partinit_path = pop_path + "{:d}/".format(pop_index) + partinit + "/" + d = modelDict[pop] + + add_string(partinit_path + "name", "maxwellian") + + addInitFunction(partinit_path + "density", fn_wrapper(d["density"])) + addInitFunction(partinit_path + "bulk_velocity_x", fn_wrapper(d["vx"])) + addInitFunction(partinit_path + "bulk_velocity_y", fn_wrapper(d["vy"])) + addInitFunction(partinit_path + "bulk_velocity_z", fn_wrapper(d["vz"])) + addInitFunction(partinit_path + "thermal_velocity_x", fn_wrapper(d["vthx"])) + addInitFunction(partinit_path + "thermal_velocity_y", fn_wrapper(d["vthy"])) + addInitFunction(partinit_path + "thermal_velocity_z", fn_wrapper(d["vthz"])) + add_double(partinit_path + "charge", d["charge"]) + add_string(partinit_path + "basis", "cartesian") + + if "init" in d and "seed" in d["init"]: + pp.add_optional_size_t(partinit_path + "init/seed", d["init"]["seed"]) + + add_int(partinit_path + "nbr_part_per_cell", d["nbrParticlesPerCell"]) + add_double(partinit_path + "density_cut_off", d["density_cut_off"]) diff --git a/pyphare/pyphare/pharein/load_balancer.py b/pyphare/pyphare/pharein/load_balancer.py index b2b542c06..5600469ee 100644 --- a/pyphare/pyphare/pharein/load_balancer.py +++ b/pyphare/pyphare/pharein/load_balancer.py @@ -33,7 +33,7 @@ class LoadBalancer: def __post_init__(self): if self.auto and self.every: - raise RuntimeError(f"LoadBalancer cannot work with both 'every' and 'auto'") + raise RuntimeError("LoadBalancer cannot work with both 'every' and 'auto'") if self.every is None: self.auto = True @@ -50,8 +50,8 @@ def __post_init__(self): if self._register: if not gv.sim: raise RuntimeError( - f"LoadBalancer cannot be registered as no simulation exists" + "LoadBalancer cannot be registered as no simulation exists" ) if gv.sim.load_balancer: - raise RuntimeError(f"LoadBalancer is already registered to simulation") + raise RuntimeError("LoadBalancer is already registered to simulation") gv.sim.load_balancer = self diff --git a/pyphare/pyphare/pharein/simulation.py b/pyphare/pyphare/pharein/simulation.py index 8674e7c2b..5c620f7b7 100644 --- a/pyphare/pyphare/pharein/simulation.py +++ b/pyphare/pyphare/pharein/simulation.py @@ -498,6 +498,23 @@ def check_restart_options(**kwargs): return restart_options +def check_init_options(**kwargs): + """Advanced options to initialize from SAMRAI HDF5 files""" + + formats = ["samraih5"] + init_options = kwargs.get("init_options", None) + + if init_options is not None and "format" in init_options: + if init_options["format"] not in formats: + raise ValueError("Error - init_options format is invalid") + if "options" in init_options and "dir" in init_options["options"]: + init_options["options"]["dir"] = check_directory( + init_options["options"]["dir"], "init_options" + ) + + return init_options + + def validate_restart_options(sim): import pyphare.pharein.restarts as restarts @@ -616,6 +633,7 @@ def wrapper(simulation_object, **kwargs): "description", "dry_run", "write_reports", + "init_options", ] accepted_keywords += check_optional_keywords(**kwargs) @@ -650,6 +668,7 @@ def wrapper(simulation_object, **kwargs): ndim = compute_dimension(cells) kwargs["diag_options"] = check_diag_options(**kwargs) + kwargs["init_options"] = check_init_options(**kwargs) kwargs["restart_options"] = check_restart_options(**kwargs) kwargs["boundary_types"] = check_boundaries(ndim, **kwargs) diff --git a/res/cmake/test.cmake b/res/cmake/test.cmake index 
1f7331b1d..c3b81bd1d 100644 --- a/res/cmake/test.cmake +++ b/res/cmake/test.cmake @@ -16,7 +16,6 @@ if (test AND ${PHARE_EXEC_LEVEL_MIN} GREATER 0) # 0 = no tests add_subdirectory(tests/core/data/electrons) add_subdirectory(tests/core/data/ion_population) add_subdirectory(tests/core/data/maxwellian_particle_initializer) - add_subdirectory(tests/core/data/particle_initializer) add_subdirectory(tests/core/utilities/box) add_subdirectory(tests/core/utilities/range) add_subdirectory(tests/core/utilities/index) @@ -42,6 +41,7 @@ if (test AND ${PHARE_EXEC_LEVEL_MIN} GREATER 0) # 0 = no tests add_subdirectory(tests/amr/data/field/refine) add_subdirectory(tests/amr/data/field/variable) add_subdirectory(tests/amr/data/field/time_interpolate) + add_subdirectory(tests/amr/data/particles/initializer) add_subdirectory(tests/amr/resources_manager) add_subdirectory(tests/amr/messengers) add_subdirectory(tests/amr/models) diff --git a/src/core/data/ions/particle_initializers/particle_initializer_factory.hpp b/src/amr/data/particles/initializers/particle_initializer_factory.hpp similarity index 69% rename from src/core/data/ions/particle_initializers/particle_initializer_factory.hpp rename to src/amr/data/particles/initializers/particle_initializer_factory.hpp index 97cfa6b0c..568a76ff5 100644 --- a/src/core/data/ions/particle_initializers/particle_initializer_factory.hpp +++ b/src/amr/data/particles/initializers/particle_initializer_factory.hpp @@ -5,19 +5,22 @@ #include "core/def.hpp" #include "core/utilities/types.hpp" #include "initializer/data_provider.hpp" -#include "maxwellian_particle_initializer.hpp" -#include "particle_initializer.hpp" + +#include "core/data/ions/particle_initializers/particle_initializer.hpp" +#include "core/data/ions/particle_initializers/maxwellian_particle_initializer.hpp" + +#include "samrai_hdf5_particle_initializer.hpp" #include namespace PHARE { -namespace core +namespace amr { template class ParticleInitializerFactory { - using ParticleInitializerT = ParticleInitializer; + using ParticleInitializerT = core::ParticleInitializer; static constexpr auto dimension = GridLayout::dimension; @@ -66,8 +69,8 @@ namespace core if (basisName == "cartesian") { return std::make_unique< - MaxwellianParticleInitializer>( - density, v, vth, charge, nbrPartPerCell, seed, Basis::Cartesian, + core::MaxwellianParticleInitializer>( + density, v, vth, charge, nbrPartPerCell, seed, core::Basis::Cartesian, magneticField, densityCutOff); } else if (basisName == "magnetic") @@ -77,17 +80,31 @@ namespace core magneticField[2] = dict["magnetic_x"].template to(); return std::make_unique< - MaxwellianParticleInitializer>( - density, v, vth, charge, nbrPartPerCell, seed, Basis::Magnetic, + core::MaxwellianParticleInitializer>( + density, v, vth, charge, nbrPartPerCell, seed, core::Basis::Magnetic, magneticField, densityCutOff); } } - // TODO throw? 
- return nullptr; + + if (initializerName == "samraih5") + { + auto const dir = dict["filepath"].template to(); + int const index = dict["index"].template to(); + int const mpi_size = dict["mpi_size"].template to(); + + // scan restart files for later use + SamraiH5Interface::INSTANCE().populate_from(dir, index, + mpi_size); + + return std::make_unique>(); + } + + + throw std::runtime_error("No Particle Initializer chosen!"); } }; -} // namespace core +} // namespace amr } // namespace PHARE diff --git a/src/amr/data/particles/initializers/samrai_hdf5_particle_initializer.hpp b/src/amr/data/particles/initializers/samrai_hdf5_particle_initializer.hpp new file mode 100644 index 000000000..45f340138 --- /dev/null +++ b/src/amr/data/particles/initializers/samrai_hdf5_particle_initializer.hpp @@ -0,0 +1,103 @@ +#ifndef _PHARE_CORE_DATA_IONS_PARTICLE_INITIAZILIZERS_SAMRAI_HDF5_INITIALIZER_HPP_ +#define _PHARE_CORE_DATA_IONS_PARTICLE_INITIAZILIZERS_SAMRAI_HDF5_INITIALIZER_HPP_ + +#include +#include +#include +#include + +#include "core/data/grid/gridlayoutdefs.hpp" +#include "core/hybrid/hybrid_quantities.hpp" +#include "core/utilities/types.hpp" +#include "core/data/ions/particle_initializers/particle_initializer.hpp" +#include "core/data/particles/particle.hpp" +#include "initializer/data_provider.hpp" +#include "core/utilities/point/point.hpp" +#include "core/def.hpp" + +#include "hdf5/detail/h5/h5_file.hpp" + + +#include "SAMRAI/hier/PatchDataRestartManager.h" + + +namespace PHARE::amr +{ + + +template +class SamraiH5Interface +{ +public: + static SamraiH5Interface& INSTANCE() + { + static SamraiH5Interface i; + return i; + } + + void populate_from(std::string const& dir, int const& idx, int const& mpi_size); + + NO_DISCARD auto static getRestartFileFullPath(std::string path, int const& idx, + int const& mpi_size, int const& rank) + { + return path // + + "/restore." + SAMRAI::tbox::Utilities::intToString(idx, 6) // + + "/nodes." + SAMRAI::tbox::Utilities::nodeToString(mpi_size) // + + "/proc." 
+ SAMRAI::tbox::Utilities::processorToString(rank); + } + + +private: + std::unordered_map box2dataset; +}; + + +template +void SamraiH5Interface::populate_from(std::string const& dir, + int const& idx, + int const& mpi_size) +{ + for (int rank = 0; rank < mpi_size; ++rank) + { + auto const hdf5_filepath = getRestartFileFullPath(dir, idx, mpi_size, rank); + + hdf5::h5::HighFiveFile h5File{hdf5_filepath, HighFive::File::ReadOnly, /*para=*/false}; + + PHARE_LOG_LINE_STR("SamraiH5Interface::populate_from"); + } +} + + + +template +class SamraiHDF5ParticleInitializer : public core::ParticleInitializer +{ +public: + static constexpr auto dimension = GridLayout::dimension; + + + + SamraiHDF5ParticleInitializer() {} + + + + void loadParticles(ParticleArray& particles, GridLayout const& layout) const override; +}; + + + + +template +void SamraiHDF5ParticleInitializer::loadParticles( + ParticleArray& particles, GridLayout const& layout) const +{ + PHARE_LOG_LINE_STR("SamraiHDF5ParticleInitializer::loadParticles"); +} + + + + +} // namespace PHARE::amr + + +#endif diff --git a/src/amr/physical_models/hybrid_model.hpp b/src/amr/physical_models/hybrid_model.hpp index 449bafc22..8315c805a 100644 --- a/src/amr/physical_models/hybrid_model.hpp +++ b/src/amr/physical_models/hybrid_model.hpp @@ -3,14 +3,18 @@ #include -#include "initializer/data_provider.hpp" +#include "core/def.hpp" +#include "core/data/vecfield/vecfield.hpp" #include "core/models/hybrid_state.hpp" + +#include "initializer/data_provider.hpp" + #include "amr/physical_models/physical_model.hpp" -#include "core/data/ions/particle_initializers/particle_initializer_factory.hpp" +#include "amr/data/particles/initializers/particle_initializer_factory.hpp" + #include "amr/resources_manager/resources_manager.hpp" #include "amr/messengers/hybrid_messenger_info.hpp" -#include "core/data/vecfield/vecfield.hpp" -#include "core/def.hpp" + namespace PHARE::solver { @@ -41,7 +45,7 @@ class HybridModel : public IPhysicalModel using particle_array_type = typename Ions::particle_array_type; using resources_manager_type = amr::ResourcesManager; using ParticleInitializerFactory - = core::ParticleInitializerFactory; + = amr::ParticleInitializerFactory; static const inline std::string model_name = "HybridModel"; diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt index 293de3711..d9754f624 100644 --- a/src/core/CMakeLists.txt +++ b/src/core/CMakeLists.txt @@ -19,7 +19,6 @@ set( SOURCES_INC data/electrons/electrons.hpp data/ions/particle_initializers/particle_initializer.hpp data/ions/particle_initializers/maxwellian_particle_initializer.hpp - data/ions/particle_initializers/particle_initializer_factory.hpp data/tensorfield/tensorfield.hpp data/vecfield/vecfield.hpp data/vecfield/vecfield_component.hpp diff --git a/src/core/data/electromag/electromag_initializer.hpp b/src/core/data/electromag/electromag_initializer.hpp index 413b4d527..1aa780695 100644 --- a/src/core/data/electromag/electromag_initializer.hpp +++ b/src/core/data/electromag/electromag_initializer.hpp @@ -43,9 +43,10 @@ class ElectromagInitializerFactory NO_DISCARD static std::unique_ptr> create(initializer::PHAREDict const& dict) { - return std::make_unique>(dict); - // else - // return std::make_unique>(); + if (dict["magnetic"]["initializer"].contains("x_component")) + return std::make_unique>(dict); + else + return std::make_unique>(); } }; diff --git a/src/core/data/ions/ions.hpp b/src/core/data/ions/ions.hpp index 2701ebdac..553f5c59d 100644 --- a/src/core/data/ions/ions.hpp +++ 
b/src/core/data/ions/ions.hpp @@ -15,7 +15,7 @@ #include "core/hybrid/hybrid_quantities.hpp" #include "core/data/vecfield/vecfield_component.hpp" #include "initializer/data_provider.hpp" -#include "particle_initializers/particle_initializer_factory.hpp" + #include "core/utilities/algorithm.hpp" namespace PHARE diff --git a/src/phare_core.hpp b/src/phare_core.hpp index 0d6ef0d83..2e04f7686 100644 --- a/src/phare_core.hpp +++ b/src/phare_core.hpp @@ -55,9 +55,6 @@ struct PHARE_Types = PHARE::core::IonPopulation; using Ions_t = PHARE::core::Ions; using Electrons_t = PHARE::core::Electrons; - - using ParticleInitializerFactory - = PHARE::core::ParticleInitializerFactory; }; struct PHARE_Sim_Types diff --git a/src/simulator/phare_types.hpp b/src/simulator/phare_types.hpp index 08d2c7707..b336d1697 100644 --- a/src/simulator/phare_types.hpp +++ b/src/simulator/phare_types.hpp @@ -29,9 +29,9 @@ struct PHARE_Types using MaxwellianParticleInitializer_t = typename core_types::MaxwellianParticleInitializer_t; using IonPopulation_t = typename core_types::IonPopulation_t; using Electrons_t = typename core_types::Electrons_t; - using ParticleInitializerFactory = typename core_types::ParticleInitializerFactory; - + using ParticleInitializerFactory + = amr::ParticleInitializerFactory; using amr_types = PHARE::amr::PHARE_Types; using hierarchy_t = typename amr_types::hierarchy_t; @@ -40,7 +40,6 @@ struct PHARE_Types - using solver_types = PHARE::solver::PHARE_Types; using IPhysicalModel = typename solver_types::IPhysicalModel; using HybridModel_t = typename solver_types::HybridModel_t; diff --git a/tests/core/data/particle_initializer/CMakeLists.txt b/tests/amr/data/particles/initializer/CMakeLists.txt similarity index 96% rename from tests/core/data/particle_initializer/CMakeLists.txt rename to tests/amr/data/particles/initializer/CMakeLists.txt index a380b41b7..fc7506044 100644 --- a/tests/core/data/particle_initializer/CMakeLists.txt +++ b/tests/amr/data/particles/initializer/CMakeLists.txt @@ -11,7 +11,7 @@ target_include_directories(${PROJECT_NAME} PRIVATE ) target_link_libraries(${PROJECT_NAME} PRIVATE - phare_core + phare_amr phare_initializer ${GTEST_LIBS}) diff --git a/tests/core/data/particle_initializer/test_main.cpp b/tests/amr/data/particles/initializer/test_main.cpp similarity index 87% rename from tests/core/data/particle_initializer/test_main.cpp rename to tests/amr/data/particles/initializer/test_main.cpp index 405d42e5a..bfcf2ec0d 100644 --- a/tests/core/data/particle_initializer/test_main.cpp +++ b/tests/amr/data/particles/initializer/test_main.cpp @@ -4,10 +4,10 @@ #include "core/utilities/span.hpp" #include "core/data/grid/gridlayout.hpp" #include "core/data/grid/gridlayoutimplyee.hpp" -#include "core/data/ions/particle_initializers/particle_initializer_factory.hpp" #include "core/data/particles/particle_array.hpp" #include "initializer/data_provider.hpp" +#include "amr/data/particles/initializers/particle_initializer_factory.hpp" #include "gmock/gmock.h" #include "gtest/gtest.h" @@ -18,9 +18,6 @@ using namespace PHARE::core; using namespace PHARE::initializer; -using namespace PHARE::core; -using namespace PHARE::initializer; - #include "tests/initializer/init_functions.hpp" using namespace PHARE::initializer::test_fn::func_1d; // density/etc are here @@ -47,7 +44,8 @@ TEST(AParticleIinitializerFactory, takesAPHAREDictToCreateAParticleVectorInitial dict["nbrPartPerCell"] = int{100}; dict["basis"] = std::string{"Cartesian"}; - auto initializer = 
ParticleInitializerFactory::create(dict); + auto initializer + = PHARE::amr::ParticleInitializerFactory::create(dict); } int main(int argc, char** argv) diff --git a/tests/core/numerics/ion_updater/CMakeLists.txt b/tests/core/numerics/ion_updater/CMakeLists.txt index e206a3e58..bdbb17a97 100644 --- a/tests/core/numerics/ion_updater/CMakeLists.txt +++ b/tests/core/numerics/ion_updater/CMakeLists.txt @@ -11,7 +11,7 @@ target_include_directories(${PROJECT_NAME} PRIVATE ) target_link_libraries(${PROJECT_NAME} PRIVATE - phare_core + phare_amr phare_simulator ${GTEST_LIBS}) diff --git a/tests/core/numerics/ion_updater/test_updater.cpp b/tests/core/numerics/ion_updater/test_updater.cpp index 2d939cf7d..aa217f6b4 100644 --- a/tests/core/numerics/ion_updater/test_updater.cpp +++ b/tests/core/numerics/ion_updater/test_updater.cpp @@ -3,6 +3,7 @@ #include "phare_core.hpp" #include "core/numerics/ion_updater/ion_updater.hpp" +#include "amr/data/particles/initializers/particle_initializer_factory.hpp" #include "tests/core/data/vecfield/test_vecfield_fixtures.hpp" #include "tests/core/data/tensorfield/test_tensorfield_fixtures.hpp" @@ -203,13 +204,14 @@ struct ElectromagBuffers template struct IonsBuffers { - using PHARETypes = PHARE::core::PHARE_Types; - using UsableVecFieldND = UsableVecField; - using Grid = typename PHARETypes::Grid_t; - using GridLayout = typename PHARETypes::GridLayout_t; - using Ions = typename PHARETypes::Ions_t; - using ParticleArray = typename PHARETypes::ParticleArray_t; - using ParticleInitializerFactory = typename PHARETypes::ParticleInitializerFactory; + using PHARETypes = PHARE::core::PHARE_Types; + using UsableVecFieldND = UsableVecField; + using Grid = typename PHARETypes::Grid_t; + using GridLayout = typename PHARETypes::GridLayout_t; + using Ions = typename PHARETypes::Ions_t; + using ParticleArray = typename PHARETypes::ParticleArray_t; + using ParticleInitializerFactory + = PHARE::amr::ParticleInitializerFactory; Grid ionDensity; Grid ionMassDensity; @@ -352,7 +354,8 @@ struct IonUpdaterTest : public ::testing::Test using Electromag = typename PHARETypes::Electromag_t; using GridLayout = typename PHARE::core::GridLayout>; using ParticleArray = typename PHARETypes::ParticleArray_t; - using ParticleInitializerFactory = typename PHARETypes::ParticleInitializerFactory; + using ParticleInitializerFactory + = PHARE::amr::ParticleInitializerFactory; using IonUpdater = typename PHARE::core::IonUpdater; @@ -515,7 +518,7 @@ struct IonUpdaterTest : public ::testing::Test } } // end 1D - } // end pop loop + } // end pop loop PHARE::core::depositParticles(ions, layout, Interpolator{}, PHARE::core::DomainDeposit{}); diff --git a/tests/simulator/test_samrai_restarts_parser.py b/tests/simulator/test_samrai_restarts_parser.py index 36fbc91e0..96b9777f1 100644 --- a/tests/simulator/test_samrai_restarts_parser.py +++ b/tests/simulator/test_samrai_restarts_parser.py @@ -33,16 +33,21 @@ def setup_model(ppc=100): timestep = 0.001 -out = "phare_outputs/parse_restarts" +out = "phare_outputs/restarts/test/test_restarts_1/1/1/1/00000.00400" simArgs = dict( time_step_nbr=2, time_step=timestep, - cells=200, + cells=100, dl=0.3, - restart_options=dict(dir=out, mode="overwrite"), + init_options=dict(dir=out, mode="overwrite"), ) +def dup(dic={}): + dic.update(copy.deepcopy(simArgs)) + return dic + + def traverse_h5_for_groups_recursive(h5content: "H5Content", group, path=""): if "level_0000" in path: for key in group.attrs: @@ -90,6 +95,10 @@ def test_restart_parser(self): for k in h5.data: 
print(k) + sim = ph.Simulation(**dup()) + model = setup_model() + Simulator(sim).initialize() + if __name__ == "__main__": unittest.main()
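
Below is a minimal job-script sketch of the new `init_options` keyword introduced by this patch, mirroring the usage in `tests/simulator/test_samrai_restarts_parser.py` above. It is a sketch under assumptions, not part of the patch: the restart directory path and the `protons` settings are placeholders, and the `Simulator` import path is assumed. The keys `dir`, `mpi_size` and `index` are the ones read by `pyphare/pharein/initialize/samrai_hdf5.py`; the maxwellian model is still declared so `initialize/general.py` can register population names and masses, while particle loading itself is delegated to the `samraih5` C++ initializer.

#!/usr/bin/env python
# Sketch only: paths and population values are placeholders.
import numpy as np
import pyphare.pharein as ph
from pyphare.simulator.simulator import Simulator  # assumed import path

sim = ph.Simulation(
    time_step_nbr=2,
    time_step=0.001,
    cells=100,
    dl=0.3,
    # "dir" must point at an existing SAMRAI restore dump written by a
    # previous PHARE run (placeholder path below)
    init_options=dict(dir="phare_outputs/previous_run/00000.00400",
                      mpi_size=1, index=0),
)


def density(x):
    return 1.0


def bx(x):
    return np.cos(2 * np.pi / 30.0 * x)  # domain length = cells * dl = 30


# The model is still declared so population names/masses are registered,
# but with init_options set populateDict() delegates particle loading to
# initialize.samrai_hdf5 (the "samraih5" initializer) rather than
# initialize.user_fns.
ph.MaxwellianFluidModel(bx=bx, protons={"density": density})
ph.ElectronModel(closure="isothermal", Te=0.12)

Simulator(sim).initialize()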