More comprehensive optimiser convergence checks #351

Merged: 38 commits, Aug 12, 2024
The diff below shows changes from 32 of the 38 commits.

Commits (38)
1b9ea91  optimiser criteria (shoubhikraj, Jul 22, 2024)
9259b5a  minor edits (shoubhikraj, Jul 22, 2024)
59f64ef  simplify convergence criteria (shoubhikraj, Jul 23, 2024)
a504593  unfinished update (shoubhikraj, Jul 23, 2024)
d40f771  convergence criteria set (shoubhikraj, Jul 23, 2024)
6136e9b  minor update (shoubhikraj, Jul 23, 2024)
3461cdf  unfinished update (shoubhikraj, Jul 23, 2024)
8f7d5f6  new update (shoubhikraj, Jul 24, 2024)
a0d38ef  unfinished updates (shoubhikraj, Jul 24, 2024)
ed2f338  replace tol in DHS (shoubhikraj, Jul 24, 2024)
cd25ce6  improve sign usage (shoubhikraj, Jul 27, 2024)
c9bca2f  unfinished bugfix (shoubhikraj, Jul 27, 2024)
46080c3  refactor class (shoubhikraj, Jul 31, 2024)
fc381b5  update dhs class (shoubhikraj, Jul 31, 2024)
cfa33ed  minor update (shoubhikraj, Jul 31, 2024)
ac60645  fix some tests (shoubhikraj, Aug 1, 2024)
1a00c57  minor update (shoubhikraj, Aug 1, 2024)
06e96ae  minor update (shoubhikraj, Aug 1, 2024)
2335e1a  allow None values in convergence criteria (shoubhikraj, Aug 2, 2024)
58e803c  refactor convergence params (shoubhikraj, Aug 2, 2024)
89d2f7e  fix some tests (shoubhikraj, Aug 2, 2024)
ced0f66  update some tests (shoubhikraj, Aug 4, 2024)
aebba47  fixed crfo tests (shoubhikraj, Aug 4, 2024)
481ef34  fixed prfo tests (shoubhikraj, Aug 4, 2024)
5819de1  more test fixes (shoubhikraj, Aug 5, 2024)
15a7a6b  black fix (shoubhikraj, Aug 5, 2024)
c377a05  fix nwchem issues (shoubhikraj, Aug 5, 2024)
009d86e  fix xtb issues (shoubhikraj, Aug 5, 2024)
aada5dc  updated parameters (shoubhikraj, Aug 5, 2024)
a8785e5  add tests, remove redundant code (shoubhikraj, Aug 5, 2024)
936a0c3  fix bug (shoubhikraj, Aug 5, 2024)
5847082  bug fix 2 (shoubhikraj, Aug 5, 2024)
f7e42bd  pr suggestion 1 (shoubhikraj, Aug 11, 2024)
f7c249a  pr suggestion 2 (shoubhikraj, Aug 11, 2024)
c15a643  pr suggestion 3 (shoubhikraj, Aug 11, 2024)
2281fab  pr suggestion 3 (shoubhikraj, Aug 11, 2024)
4a0d9c1  pr suggestion 4 (shoubhikraj, Aug 11, 2024)
a7d5a24  update changelog (shoubhikraj, Aug 11, 2024)

Files changed
47 changes: 29 additions & 18 deletions autode/bracket/dhs.py
@@ -15,7 +15,7 @@
from autode.opt.optimisers.utils import TruncatedTaylor
from autode.opt.optimisers.hessian_update import BFGSSR1Update
from autode.bracket.base import BaseBracketMethod
from autode.opt.optimisers import RFOptimiser
from autode.opt.optimisers import RFOptimiser, ConvergenceParams
from autode.exceptions import OptimiserStepError
from autode.log import logger

@@ -60,10 +60,10 @@ def __init__(
pivot_point: Coordinates of the pivot point
line_search: Whether to use linear search
angle_thresh: An angle threshold above which linear search
will be rejected (in Degrees)
will be rejected (in Degrees)
old_coords_read_hess: Old coordinate with hessian which will
be used to obtain initial hessian by
a Hessian update scheme
be used to obtain the initial hessian by a
Hessian update scheme
"""
kwargs.pop("init_alpha", None)
super().__init__(*args, init_alpha=init_trust, **kwargs)
@@ -103,15 +103,25 @@ def _initialise_run(self) -> None:
@property
def converged(self) -> bool:
"""Has the optimisation converged"""
# The tangential gradient should be close to zero
return (
self.rms_tangent_grad < self.gtol and self._abs_delta_e < self.etol
)
assert self._coords is not None

# Check only the tangential component of gradient
g_tau = self.tangent_grad
rms_g_tau = np.sqrt(np.mean(np.square(g_tau)))
max_g_tau = np.max(np.abs(g_tau))

curr_params = self._history.conv_params()
curr_params.rms_g = GradientRMS(rms_g_tau, "Ha/ang")
curr_params.max_g = GradientRMS(max_g_tau, "Ha/ang")
if self.conv_tol.meets_criteria(curr_params):
return True
else:
return False

@property
def rms_tangent_grad(self) -> GradientRMS:
def tangent_grad(self) -> np.ndarray:
"""
Obtain the RMS of the gradient tangent to the distance
Obtain the component of atomic gradients tangent to the distance
vector between current coords and pivot point
"""
assert self._coords is not None and self._coords.g is not None
@@ -120,8 +130,7 @@ def rms_tangent_grad(self) -> GradientRMS:
# unit vector in the direction of distance vector
d_hat = self.dist_vec / np.linalg.norm(self.dist_vec)
tangent_grad = grad - (grad.dot(d_hat)) * d_hat
rms_grad = np.sqrt(np.mean(np.square(tangent_grad)))
return GradientRMS(rms_grad)
return tangent_grad

@property
def dist_vec(self) -> np.ndarray:
@@ -491,6 +500,7 @@ def __init__(
large_step: Union[Distance, float] = Distance(0.2, "ang"),
small_step: Union[Distance, float] = Distance(0.05, "ang"),
switch_thresh: Union[Distance, float] = Distance(1.5, "ang"),
conv_tol: Union["ConvergenceParams", str] = "loose",
**kwargs,
):
"""
@@ -513,6 +523,9 @@
switch_thresh: When distance between the two images is less than
this cutoff, smaller DHS extrapolation steps are taken

conv_tol: Convergence tolerance for the distance-constrained
optimiser

Keyword Args:

maxiter: Maximum number of en/grad evaluations
@@ -521,9 +534,6 @@
stop, values less than 0.5 Angstrom are not
recommended.

gtol: Gradient tolerance for the optimiser micro-iterations
in DHS (Hartree/angstrom)

cineb_at_conv: Whether to run CI-NEB calculation from the end
points after the DHS is converged
"""
@@ -542,6 +552,7 @@
self._small_step = Distance(abs(small_step), "ang")
self._sw_thresh = Distance(abs(switch_thresh), "ang")
assert self._small_step < self._large_step
self._conv_tol = conv_tol

self._step_size: Optional[Distance] = None
if self._large_step > self.imgpair.dist:
@@ -597,8 +608,7 @@ def _step(self) -> None:

opt = DistanceConstrainedOptimiser(
maxiter=curr_maxiter,
gtol=self._gtol,
etol=1.0e-3, # seems like a reasonable etol
conv_tol=self._conv_tol,
init_trust=opt_trust,
pivot_point=pivot,
old_coords_read_hess=old_coords,
@@ -612,9 +622,10 @@
if not opt.converged:
return None

rms_g_tau = np.sqrt(np.mean(np.square(opt.tangent_grad)))
logger.info(
"Successful optimization after DHS step, final RMS of "
f"tangential gradient = {opt.rms_tangent_grad:.6f} "
f"tangential gradient = {rms_g_tau:.6f} "
f"Ha/angstrom"
)

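The converged property above now feeds the tangential gradient into the shared ConvergenceParams check instead of comparing a single RMS value against gtol/etol. As a rough illustration of that projection, here is a standalone NumPy sketch with made-up arrays rather than real autode coordinate objects:

import numpy as np

def tangential_grad_stats(grad, dist_vec):
    """RMS and max |g| of the gradient component perpendicular to dist_vec."""
    d_hat = dist_vec / np.linalg.norm(dist_vec)   # unit vector along the distance
    g_tau = grad - grad.dot(d_hat) * d_hat        # project out the parallel part
    return np.sqrt(np.mean(np.square(g_tau))), np.max(np.abs(g_tau))

# made-up flat gradient and distance vector (6 Cartesian components, two atoms)
grad = np.array([0.02, -0.01, 0.005, -0.02, 0.01, -0.005])
dist_vec = np.array([1.0, 0.2, 0.0, -1.0, -0.2, 0.0])
rms_g_tau, max_g_tau = tangential_grad_stats(grad, dist_vec)
print(f"RMS = {rms_g_tau:.6f} Ha/ang, max = {max_g_tau:.6f} Ha/ang")
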
8 changes: 2 additions & 6 deletions autode/calculations/executors.py
@@ -346,10 +346,7 @@ class CalculationExecutorO(_IndirectCalculationExecutor):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)

self.etol = PotentialEnergy(3e-5, units="Ha")
self.gtol = GradientRMS(
1e-3, units="Ha Å^-1"
) # TODO: A better number here
self.conv_tol = "normal"
self._fix_unique()

def run(self) -> None:
Expand All @@ -367,8 +364,7 @@ def run(self) -> None:
self.optimiser: "NDOptimiser" = type_(
init_alpha=self._step_size,
maxiter=self._max_opt_cycles,
etol=self.etol,
gtol=self.gtol,
conv_tol=self.conv_tol,
)
method = self.method.copy()
method.keywords.grad = kws.GradientKeywords(self.input.keywords)
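With this change the executor no longer carries explicit etol/gtol values; it hands the optimiser a single conv_tol, here the "normal" preset. A sketch of the equivalent direct construction follows; RFOptimiser and the preset string come from this PR, but treating this exact keyword combination as the public constructor signature is an assumption based on the call in run() above:

from autode.opt.optimisers import RFOptimiser

# assumed constructor call, mirroring the executor above; per the DHS class,
# conv_tol accepts a preset name ("loose"/"normal") or a ConvergenceParams instance
opt = RFOptimiser(maxiter=100, init_alpha=0.1, conv_tol="normal")
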
17 changes: 17 additions & 0 deletions autode/opt/coordinates/base.py
@@ -343,6 +343,16 @@ def to(self, *args, **kwargs) -> "OptCoordinates":
def iadd(self, value: np.ndarray) -> "OptCoordinates":
"""Inplace addition of some coordinates"""

@property
@abstractmethod
def n_constraints(self) -> int:
"""Number of constraints in these coordinates"""

@property
@abstractmethod
def n_satisfied_constraints(self) -> int:
"""Number of constraints that are satisfied in these coordinates"""

@property
@abstractmethod
def active_indexes(self) -> List[int]:
@@ -358,6 +368,13 @@ def active_mol_indexes(self) -> List[int]:
def inactive_indexes(self) -> List[int]:
"""A list of indexes which are non-active in this coordinate set"""

@property
@abstractmethod
def cart_proj_g(self) -> Optional[np.ndarray]:
"""
The Cartesian gradient with any constraints projected out
"""

def __eq__(self, other):
"""Coordinates can never be identical..."""
return False
13 changes: 12 additions & 1 deletion autode/opt/coordinates/cartesian.py
@@ -9,7 +9,6 @@
if TYPE_CHECKING:
from autode.values import Gradient
from autode.hessians import Hessian
from autode.opt.coordinates.primitives import ConstrainedPrimitive


class CartesianCoordinates(OptCoordinates):
@@ -61,6 +60,14 @@ def _update_h_from_cart_h(self, arr: Optional["Hessian"]) -> None:
assert self.h_or_h_inv_has_correct_shape(arr)
self._h = None if arr is None else np.array(arr)

@property
def n_constraints(self) -> int:
return 0

@property
def n_satisfied_constraints(self) -> int:
return 0

@property
def active_indexes(self) -> List[int]:
return list(range(len(self)))
@@ -106,6 +113,10 @@ def to(self, value: str) -> OptCoordinates:
f"Cannot convert Cartesian coordinates to {value}"
)

@property
def cart_proj_g(self) -> Optional[np.ndarray]:
return self.g

@property
def expected_number_of_dof(self) -> int:
"""Expected number of degrees of freedom for the system"""
18 changes: 18 additions & 0 deletions autode/opt/coordinates/dic.py
@@ -139,6 +139,10 @@
logger.info(f"Transformed in ...{time() - start_time:.4f} s")
return dic

@property
def cart_proj_g(self) -> Optional[np.ndarray]:
return self.to("cart").g

def _update_g_from_cart_g(self, arr: Optional["Gradient"]) -> None:
"""
Updates the gradient from a calculated Cartesian gradient
@@ -422,6 +426,20 @@

return super().iadd(delta_s)

@property
def cart_proj_g(self) -> Optional[np.ndarray]:
"""Obtain Cartesian gradient with constraints projected out"""
if self.g is None:
return None

# constrained gradient with inactive terms set to zero
g_s = self.g
g_s[self.inactive_indexes] = 0.0
g_s = g_s[: len(self)]
# back to Cartesian
g_x = np.matmul(self.B.T, g_s)
assert len(g_x) == len(self.to("cart"))
return g_x

@property
def g(self):
"""
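The constrained-DIC implementation of cart_proj_g above zeroes the gradient components of the inactive (constrained) internals and maps the result back to Cartesians with the transpose of the B matrix. A standalone NumPy sketch of that operation, using random stand-in matrices rather than real autode coordinate objects:

import numpy as np

rng = np.random.default_rng(0)
n_internals, n_cart = 5, 9                  # toy sizes: 5 internals, 3 atoms
B = rng.normal(size=(n_internals, n_cart))  # stand-in Wilson B matrix
g_s = rng.normal(size=n_internals)          # stand-in internal-coordinate gradient
inactive_indexes = [3, 4]                   # pretend these internals are constrained

g_s = g_s.copy()
g_s[inactive_indexes] = 0.0                 # remove constrained directions
g_x = B.T @ g_s                             # back-transform to a Cartesian gradient
assert g_x.shape == (n_cart,)
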
5 changes: 4 additions & 1 deletion autode/opt/optimisers/__init__.py
@@ -1,17 +1,20 @@
from autode.opt.optimisers.base import NDOptimiser
from autode.opt.optimisers.base import NDOptimiser, ConvergenceParams
from autode.opt.optimisers.rfo import RFOptimiser
from autode.opt.optimisers.prfo import PRFOptimiser
from autode.opt.optimisers.crfo import CRFOptimiser
from autode.opt.optimisers.qa import QAOptimiser
from autode.opt.optimisers.steepest_descent import (
CartesianSDOptimiser,
DIC_SD_Optimiser,
)

__all__ = [
"NDOptimiser",
"ConvergenceParams",
"RFOptimiser",
"PRFOptimiser",
"CRFOptimiser",
"QAOptimiser",
"CartesianSDOptimiser",
"DIC_SD_Optimiser",
]
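
ConvergenceParams is now re-exported at the package level, so downstream code can import it next to the optimiser classes. A minimal, assumed usage sketch: only the import path and the preset strings are taken from this PR, and the CRFOptimiser keyword set shown is an assumption; building a custom ConvergenceParams would need the class's actual field names, of which only rms_g and max_g appear in this diff.

from autode.opt.optimisers import CRFOptimiser, ConvergenceParams

# preset tolerances, as used for DHS ("loose") and the optimisation executor ("normal")
opt = CRFOptimiser(maxiter=50, conv_tol="loose")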