Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

JacobiPC and SliceJacobiPC for all-at-once-system #162

Merged
merged 32 commits into from
Feb 27, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
021449c
WIP aaojacobi
JHopeCollins Jan 15, 2024
17b09f5
Merge branch 'aaocofunction' into jacobipc
JHopeCollins Jan 16, 2024
dacf169
WIP aaopc base and aaobjacobipc
JHopeCollins Jan 16, 2024
3670f7f
Merge branch 'master' into jacobipc
JHopeCollins Jan 16, 2024
505c1c9
use AllAtOncePCBase for DiagFFTPC
JHopeCollins Jan 16, 2024
6f797ba
use AllAtOnceBlockPCBase for DiagFFTPC
JHopeCollins Jan 16, 2024
682b400
Paradiag only records pc diagnostics if pc exists
JHopeCollins Jan 16, 2024
dbf370d
AllAtOnceJacobiPC
JHopeCollins Jan 16, 2024
0b0001e
update block times in JacobiPC
JHopeCollins Jan 16, 2024
2745f24
move block iteration diagnostics to aaopcbase
JHopeCollins Jan 16, 2024
b40d3c6
try asQ.JacobiPC with galewsky
JHopeCollins Jan 16, 2024
dd8b2d6
rename DiagFFTPC -> CirculantPC
JHopeCollins Jan 16, 2024
92a36f7
remove redundant methods from preconditioner child classes
JHopeCollins Jan 17, 2024
f00c8b0
Merge branch 'master' into jacobipc
JHopeCollins Jan 30, 2024
24d41ef
capability to split an existing ensemble into several smaller subense…
JHopeCollins Jan 30, 2024
8db05e4
SliceJacobiPC
JHopeCollins Jan 30, 2024
f5854d2
guard from accessing nonexistent members on all-at-once pcs
JHopeCollins Jan 30, 2024
c6cc293
correct options_prefix for AllAtOnceJacobian
JHopeCollins Jan 30, 2024
4c7c43e
Use correct state to construct the SliceJacobiPC slice_jacobian and r…
JHopeCollins Jan 30, 2024
b3dce7f
galewsky with SliceJacobiPC
JHopeCollins Jan 30, 2024
2bb4f55
remove duplicated method from aaopc derived classes
JHopeCollins Jan 31, 2024
58edfc1
preconditioner docstrings
JHopeCollins Jan 31, 2024
bd58e9f
pass the appctx through to the SliceJacobiPC slice solvers
JHopeCollins Jan 31, 2024
efca94e
add iteration printing back to galewsky
JHopeCollins Feb 1, 2024
b5f9e2a
initial sketch of LinearSolver
JHopeCollins Feb 2, 2024
0878bb4
LinearSolver init
JHopeCollins Feb 2, 2024
8a1bf66
more docstrings for slicejacobi
JHopeCollins Feb 5, 2024
0964c22
update the slice initial conditions of SliceJacobiPC
JHopeCollins Feb 19, 2024
9cb8896
AllAtOnceJacobian method to construct a PETSc.Mat
JHopeCollins Feb 22, 2024
d208844
Merge branch 'master' into jacobipc
JHopeCollins Feb 22, 2024
57e4ae7
Remove commented code
JHopeCollins Feb 27, 2024
11f1266
Merge branch 'master' into jacobipc
JHopeCollins Feb 27, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion asQ/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from asQ.ensemble import * # noqa: F401
from asQ.parallel_arrays import * # noqa: F401
from asQ.paradiag import * # noqa: F401
from asQ.diag_preconditioner import * # noqa: F401
from asQ.preconditioners import * # noqa: F401
from asQ.allatonce import * # noqa: F401
from asQ.post import * # noqa: F401
from asQ import complex_proxy # noqa: F401
44 changes: 27 additions & 17 deletions asQ/allatonce/jacobian.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
import firedrake as fd
from firedrake.petsc import PETSc

from functools import partial

from asQ.profiling import profiler
Expand Down Expand Up @@ -29,29 +31,26 @@ class AllAtOnceJacobian(TimePartitionMixin):
prefix = "aaos_jacobian_"

@profiler()
def __init__(self, aaoform, current_state,
def __init__(self, aaoform,
reference_state=None,
options_prefix="",
appctx={}):
"""
Python context for a PETSc Mat for the Jacobian of an AllAtOnceForm.

:arg aaoform: The AllAtOnceForm object to linearise.
:arg current_state: The AllAtOnceFunction being solved for.
:arg reference_state: A firedrake.Function for a single timestep.
Only needed if 'aaos_jacobian_state' is 'reference'.
:arg options_prefix: string prefix for the Jacobian PETSc options.
:arg appctx: the appcontext for the Jacobian and the preconditioner.
"""
self._time_partition_setup(aaoform.ensemble, aaoform.time_partition)
prefix = self.prefix + options_prefix
prefix = options_prefix + self.prefix

aaofunc = aaoform.aaofunc
self.aaoform = aaoform
self.aaofunc = aaofunc

self.current_state = current_state

self.appctx = appctx

# function the Jacobian acts on, and contribution from timestep at end of previous slice
Expand Down Expand Up @@ -110,34 +109,32 @@ def update(self, X=None):
Update the state to linearise around according to aaos_jacobian_state.

:arg X: an optional AllAtOnceFunction or global PETSc Vec.
If X is not None and aaos_jacobian_state = 'current' then the state
is updated from X instead of self.current_state.
If X is not None then the state is updated from X.
"""

aaofunc = self.aaofunc
jacobian_state = self.jacobian_state()

if jacobian_state == 'linear':
pass
if jacobian_state in ('linear', 'user'):
return

elif jacobian_state == 'current':
if X is None:
X = self.current_state
if X is not None:
self.aaofunc.assign(X)

if jacobian_state == 'current':
return

elif jacobian_state in ('window', 'slice'):
time_average(self.current_state, self.ureduce, self.uwrk, average=jacobian_state)
time_average(self.aaofunc, self.ureduce, self.uwrk,
average=jacobian_state)
aaofunc.assign(self.ureduce)

elif jacobian_state == 'initial':
aaofunc.assign(self.current_state.initial_condition)
aaofunc.assign(self.aaofunc.initial_condition)

elif jacobian_state == 'reference':
aaofunc.assign(self.reference_state)

elif jacobian_state == 'user':
pass

return

@profiler()
Expand Down Expand Up @@ -168,3 +165,16 @@ def mult(self, mat, X, Y):

with self.F.global_vec_ro() as v:
v.copy(Y)

@profiler()
def petsc_mat(self):
    """
    Build and return a petsc4py.PETSc.Mat whose python context is this
    AllAtOnceJacobian, suitable for handing to a KSP or SNES.
    """
    aaofunc = self.aaofunc
    # square operator: identical (local, global) dof counts for rows and columns
    shape = (aaofunc.nlocal_dofs, aaofunc.nglobal_dofs)

    jac_mat = PETSc.Mat().create(comm=self.ensemble.global_comm)
    jac_mat.setType("python")
    jac_mat.setSizes((shape, shape))
    jac_mat.setPythonContext(self)
    jac_mat.setUp()
    return jac_mat
93 changes: 81 additions & 12 deletions asQ/allatonce/solver.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
from firedrake.petsc import PETSc, OptionsManager, flatten_parameters

from asQ.profiling import profiler
from asQ.allatonce import AllAtOnceJacobian, AllAtOnceCofunction
from asQ.allatonce import (AllAtOnceCofunction, AllAtOnceFunction,
AllAtOnceJacobian)
from asQ.allatonce.mixin import TimePartitionMixin

__all__ = ['AllAtOnceSolver']
__all__ = ['AllAtOnceSolver', 'LinearSolver']


class AllAtOnceSolver(TimePartitionMixin):
Expand All @@ -20,7 +21,10 @@ def __init__(self, aaoform, aaofunc,
pre_jacobian_callback=lambda solver, X, J: None,
post_jacobian_callback=lambda solver, X, J: None):
"""
Solves an all-at-once form over an all-at-once function.
Solve an all-at-once form over an all-at-once function.

This is used to solve for a timeseries defined by the all-at-once form
and the initial condition in the all-at-once function.

:arg aaoform: the AllAtOnceForm to solve.
:arg aaofunc: the AllAtOnceFunction solution.
Expand Down Expand Up @@ -99,18 +103,11 @@ def assemble_function(snes, X, F):
# Jacobian
with self.options.inserted_options():
self.jacobian = AllAtOnceJacobian(self.jacobian_form,
current_state=aaofunc,
reference_state=jacobian_reference_state,
options_prefix=options_prefix,
appctx=appctx)

jacobian_mat = PETSc.Mat().create(comm=self.ensemble.global_comm)
jacobian_mat.setType("python")
sizes = (aaofunc.nlocal_dofs, aaofunc.nglobal_dofs)
jacobian_mat.setSizes((sizes, sizes))
jacobian_mat.setPythonContext(self.jacobian)
jacobian_mat.setUp()
self.jacobian_mat = jacobian_mat
self.jacobian_mat = self.jacobian.petsc_mat()

def form_jacobian(snes, X, J, P):
self.pre_jacobian_callback(self, X, J)
Expand All @@ -119,7 +116,9 @@ def form_jacobian(snes, X, J, P):
J.assemble()
P.assemble()

self.snes.setJacobian(form_jacobian, J=jacobian_mat, P=jacobian_mat)
self.snes.setJacobian(form_jacobian,
J=self.jacobian_mat,
P=self.jacobian_mat)

# complete the snes setup
self.options.set_from_options(self.snes)
Expand All @@ -140,3 +139,73 @@ def solve(self, rhs=None):
raise TypeError(msg)
with rhs.global_vec_ro() as rvec:
self.snes.solve(rvec, gvec)


class LinearSolver(TimePartitionMixin):
    @profiler()
    def __init__(self, aaoform,
                 solver_parameters=None,
                 appctx=None,
                 options_prefix=""):
        """
        Solve a linear system where the matrix is an all-at-once Jacobian.

        This does not solve for a timeseries (use an AllAtOnceSolver if this
        is what you need), but simply uses the AllAtOnceJacobian as the Mat
        for a KSP.

        :arg aaoform: the AllAtOnceForm to form the Jacobian from.
        :arg solver_parameters: solver parameters to pass to PETSc.
            This should be a dict mapping PETSc options to values.
        :arg appctx: A dictionary containing application context that is
            passed to the preconditioner if matrix-free.
        :arg options_prefix: an optional prefix used to distinguish PETSc options.
            Use this option if you want to pass options to the solver from the
            command line in addition to through the solver_parameters dict.
        """
        # Avoid mutable default arguments: the defaults are created fresh here
        # so that separate solvers never share (and possibly mutate) the same
        # module-level dict objects. Both dicts are stored on the instance and
        # passed on to the Jacobian/preconditioner below.
        if solver_parameters is None:
            solver_parameters = {}
        if appctx is None:
            appctx = {}

        self._time_partition_setup(aaoform.ensemble, aaoform.time_partition)

        self.aaoform = aaoform
        self.appctx = appctx

        # manage options from both dict and command line
        self.solver_parameters = solver_parameters
        self.flat_solver_parameters = flatten_parameters(solver_parameters)
        self.options = OptionsManager(self.flat_solver_parameters, options_prefix)
        # OptionsManager may generate a prefix if none was given; use its value
        # from here on so the KSP and Jacobian options stay consistent.
        options_prefix = self.options.options_prefix

        # the solver
        self.ksp = PETSc.KSP().create(comm=self.ensemble.global_comm)
        self.ksp.setOptionsPrefix(options_prefix)

        # create the all-at-once jacobian while the dict options are visible
        # to PETSc, so the Jacobian can read them during setup.
        with self.options.inserted_options():
            self.jacobian = AllAtOnceJacobian(aaoform, appctx=appctx,
                                              options_prefix=options_prefix)

        # create petsc matrix wrapping the Jacobian python context
        self.jacobian_mat = self.jacobian.petsc_mat()

        # finish setting up the ksp
        self.ksp.setOperators(self.jacobian_mat)
        self.options.set_from_options(self.ksp)

    @profiler()
    def solve(self, b, x):
        """
        Solve the all-at-once matrix system Ax=b.

        :arg b: AllAtOnceCofunction right hand side vector.
        :arg x: AllAtOnceFunction solution vector.

        :raises TypeError: if b or x are not the expected all-at-once types.
        """
        if not isinstance(x, AllAtOnceFunction):
            msg = f"Solution of all-at-once problem must be AllAtOnceFunction not {type(x)}."
            raise TypeError(msg)

        if not isinstance(b, AllAtOnceCofunction):
            msg = f"Right hand side of all-at-once problem must be AllAtOnceCofunction not {type(b)}."
            raise TypeError(msg)

        with x.global_vec() as xvec, b.global_vec_ro() as bvec:
            with self.options.inserted_options():
                self.ksp.solve(bvec, xvec)
82 changes: 82 additions & 0 deletions asQ/ensemble.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
from firedrake import COMM_WORLD, Ensemble
from pyop2.mpi import internal_comm, decref

__all__ = ['create_ensemble', 'split_ensemble', 'EnsembleConnector']


def create_ensemble(time_partition, comm=COMM_WORLD):
    '''
    Create an Ensemble for the given slice partition.
    Checks that the number of slices and the size of the communicator are compatible.

    :arg time_partition: a list of integers, the number of timesteps on each
        time-rank, or a single integer for a one-slice partition.
    :arg comm: the global communicator for the ensemble.

    :raises ValueError: if the communicator size is not an exact multiple of
        the number of slices.
    '''
    # A bare integer means a single slice; otherwise one slice per entry.
    # isinstance is the idiomatic (and subclass-friendly) type check.
    nslices = 1 if isinstance(time_partition, int) else len(time_partition)
    nranks = comm.size

    if nranks % nslices != 0:
        raise ValueError("Number of time slices must be exact factor of number of MPI ranks")

    # ranks are shared evenly between slices, so each slice gets
    # nranks//nslices ranks for its spatial communicator
    nspatial_domains = nranks//nslices

    return Ensemble(comm, nspatial_domains)


def split_ensemble(ensemble, split_size):
    """
    Split an Ensemble into multiple smaller Ensembles which share the same
    spatial communicators `ensemble.comm`.

    Each smaller Ensemble returned is defined over a contiguous subset of the
    members of the large Ensemble.

    :arg ensemble: the large Ensemble to split.
    :arg split_size: the number of members in each smaller Ensemble.

    :raises ValueError: if the Ensemble cannot be split evenly.
    """
    ensemble_size = ensemble.ensemble_comm.size
    if ensemble_size % split_size != 0:
        raise ValueError("Ensemble size must be integer multiple of split_size")

    # colour each rank by which contiguous sub-ensemble it belongs to,
    # keeping the original rank ordering within each split
    colour = ensemble.ensemble_comm.rank // split_size
    global_comm = ensemble.global_comm
    sub_comm = global_comm.Split(color=colour, key=global_comm.rank)

    return EnsembleConnector(sub_comm, ensemble.comm, split_size)


class EnsembleConnector(Ensemble):
    # NOTE(review): this deliberately does not call super().__init__() —
    # it builds the three communicator members (global_comm, comm,
    # ensemble_comm) directly from the comms it is given rather than
    # letting firedrake.Ensemble split a global comm itself.
    def __init__(self, global_comm, local_comm, nmembers):
        """
        An Ensemble created from provided spatial communicators (ensemble.comm).

        :arg global_comm: global communicator the Ensemble is defined over.
        :arg local_comm: communicator to use for the Ensemble.comm member.
        :arg nmembers: number of Ensemble members (ensemble.ensemble_comm.size).
        """
        # global comm must be exactly nmembers copies of the local comm
        if nmembers*local_comm.size != global_comm.size:
            msg = "The global ensemble must have the same number of ranks as the sum of the local comms"
            raise ValueError(msg)

        self.global_comm = global_comm
        # pyop2 internal duplicate, reference-counted separately from the user comm
        self._global_comm = internal_comm(self.global_comm)

        self.comm = local_comm
        self._comm = internal_comm(self.comm)

        # Split groups together the ranks with the same local (spatial) rank
        # across all members; ordering within the group follows global rank.
        self.ensemble_comm = self.global_comm.Split(color=self.comm.rank,
                                                    key=global_comm.rank)

        self._ensemble_comm = internal_comm(self.ensemble_comm)

    def __del__(self):
        # Free only the comm created here by Split; the comms passed into
        # __init__ are owned by the caller. hasattr guards against a partially
        # constructed instance (e.g. if __init__ raised early).
        if hasattr(self, "ensemble_comm"):
            self.ensemble_comm.Free()
            del self.ensemble_comm
        # drop the pyop2 reference counts on the internal duplicates
        for comm_name in ["_global_comm", "_comm", "_ensemble_comm"]:
            if hasattr(self, comm_name):
                comm = getattr(self, comm_name)
                decref(comm)
31 changes: 6 additions & 25 deletions asQ/paradiag.py
Original file line number Diff line number Diff line change
@@ -1,32 +1,11 @@
import firedrake as fd
from firedrake.petsc import PETSc
from asQ.profiling import profiler

from asQ.allatonce import AllAtOnceFunction, AllAtOnceForm, AllAtOnceSolver
from asQ.allatonce.mixin import TimePartitionMixin
from asQ.parallel_arrays import SharedArray

__all__ = ['create_ensemble', 'Paradiag']


@profiler()
def create_ensemble(time_partition, comm=fd.COMM_WORLD):
'''
Create an Ensemble for the given slice partition.
Checks that the number of slices and the size of the communicator are compatible.

:arg time_partition: a list of integers, the number of timesteps on each time-rank
:arg comm: the global communicator for the ensemble
'''
nslices = 1 if type(time_partition) is int else len(time_partition)
nranks = comm.size

if nranks % nslices != 0:
raise ValueError("Number of time slices must be exact factor of number of MPI ranks")

nspatial_domains = nranks//nslices

return fd.Ensemble(comm, nspatial_domains)
__all__ = ['Paradiag']


class Paradiag(TimePartitionMixin):
Expand Down Expand Up @@ -167,9 +146,11 @@ def sync_diagnostics(self):

Until this method is called, diagnostic information is not guaranteed to be valid.
"""
pc_block_iterations = self.solver.jacobian.pc.block_iterations
pc_block_iterations.synchronise()
self.block_iterations.data(deepcopy=False)[:] = pc_block_iterations.data(deepcopy=False)
jacobian = self.solver.jacobian
if hasattr(jacobian, "pc") and hasattr(jacobian.pc, "block_iterations"):
pc_block_iterations = self.solver.jacobian.pc.block_iterations
pc_block_iterations.synchronise()
self.block_iterations.data(deepcopy=False)[:] = pc_block_iterations.data(deepcopy=False)

@profiler()
def solve(self,
Expand Down
Loading
Loading