
Commit

add tests, remove redundant code
shoubhikraj committed Aug 5, 2024
1 parent aada5dc commit a8785e5
Showing 4 changed files with 64 additions and 52 deletions.
35 changes: 11 additions & 24 deletions autode/opt/optimisers/base.py
@@ -610,12 +610,12 @@ def are_satisfied(self, other: "ConvergenceParams") -> List[bool]:

# unset criteria are always satisfied
for attr in self._num_attrs:
c = getattr(self, attr)
v = getattr(other, attr)
c = float(getattr(self, attr))
v = float(getattr(other, attr))
if c is None:
are_satisfied.append(True)
else:
are_satisfied.append(v < c)
are_satisfied.append(v <= c)
return are_satisfied

def meets_criteria(self, other: "ConvergenceParams"):
@@ -634,6 +634,10 @@ def meets_criteria(self, other: "ConvergenceParams"):
if all(self.are_satisfied(other)):
return True

# strict = everything must be converged
elif self.strict:
return False

# gradient, energy overachieved, but step not converged
if all(self.multiply([0.5, 0.5, 0.8, 3, 3]).are_satisfied(other)):
logger.warning(
@@ -651,13 +655,15 @@ def meets_criteria(self, other: "ConvergenceParams"):
return True

# everything except energy overachieved
if all(self.multiply([3, 0.6, 0.6, 1, 1]).are_satisfied(other)):
if all(self.multiply([3, 0.7, 0.7, 1, 1]).are_satisfied(other)):
logger.warning(
"Everything except energy has been converged. Reasonable"
"convergence on energy"
" convergence on energy"
)
return True

return False


class NDOptimiser(Optimiser, ABC):
"""Abstract base class for an optimiser in N-dimensions"""
@@ -818,25 +824,6 @@ def from_file(cls, filename: str) -> "NDOptimiser":
optimiser._history = hist
return optimiser

@property
def _g_norm(self) -> GradientRMS:
"""
Calculate RMS(∇E) based on the current Cartesian gradient.
-----------------------------------------------------------------------
Returns:
(autode.values.GradientRMS): Gradient norm. Infinity if the
gradient is not defined
"""
if self._coords is None:
logger.warning("Had no coordinates - cannot determine ||∇E||")
return GradientRMS(np.inf)

if self._coords.g is None:
return GradientRMS(np.inf)

return GradientRMS(np.sqrt(np.mean(np.square(self._coords.g))))

def _log_convergence(self) -> None:
"""Log the convergence of the all convergence parameters"""
assert self._coords is not None, "Must have coordinates!"
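
For context, the convergence check changed above combines two pieces: are_satisfied(), where an unset (None) criterion always passes and a set one now requires the current value to be at or below the tolerance, and meets_criteria(), which lets strongly over-converged criteria compensate for others by scaling the tolerances with multiply(). The following is a minimal standalone sketch of that pattern, not autodE's actual classes: it reproduces only the two relaxation rules whose factors are visible in this diff (the strict flag and the "gradient one order of magnitude below" rule from the collapsed hunk are omitted), and the field names simply mirror the ConvergenceParams keywords used in the new test below.

from dataclasses import dataclass, fields
from typing import List, Optional


@dataclass
class Tols:
    # Toy stand-in for ConvergenceParams: |dE|, RMS/max gradient, RMS/max step
    abs_d_e: Optional[float] = None
    rms_g: Optional[float] = None
    max_g: Optional[float] = None
    rms_s: Optional[float] = None
    max_s: Optional[float] = None

    def multiply(self, factors: List[float]) -> "Tols":
        # Scale each set tolerance by its factor; unset tolerances stay None
        vals = [getattr(self, f.name) for f in fields(self)]
        return Tols(*[None if v is None else v * k for v, k in zip(vals, factors)])

    def are_satisfied(self, other: "Tols") -> List[bool]:
        # Unset criteria always pass; set criteria pass when value <= tolerance
        return [
            True
            if getattr(self, f.name) is None
            else getattr(other, f.name) <= getattr(self, f.name)
            for f in fields(self)
        ]

    def meets_criteria(self, other: "Tols") -> bool:
        if all(self.are_satisfied(other)):
            return True
        # gradient and energy at half the tolerance excuse a step up to 3x too large
        if all(self.multiply([0.5, 0.5, 0.8, 3, 3]).are_satisfied(other)):
            return True
        # everything except the energy change comfortably converged
        if all(self.multiply([3, 0.7, 0.7, 1, 1]).are_satisfied(other)):
            return True
        return False


tol = Tols(abs_d_e=0.01, rms_g=0.01, max_g=0.01, rms_s=0.01, max_s=0.01)
current = Tols(abs_d_e=0.004, rms_g=0.004, max_g=0.004, rms_s=0.02, max_s=0.02)
print(tol.meets_criteria(current))  # True: step too large, but gradient and energy overachieved
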
10 changes: 0 additions & 10 deletions autode/opt/optimisers/crfo.py
@@ -235,16 +235,6 @@ def _update_trust_radius(self):
f" Current trust radius = {self.alpha:.3f}"
)

@property
def _g_norm(self) -> GradientRMS:
"""Calculate the norm of the gradient in the active subspace"""

if self._coords is None or self._coords.g is None:
return super()._g_norm

gradient = self._coords.g[self._coords.active_indexes]
return GradientRMS(np.sqrt(np.mean(np.square(gradient))))

def _initialise_run(self) -> None:
"""Initialise the optimisation"""
logger.info("Initialising optimisation")
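
The _g_norm property deleted here (and the Cartesian version deleted from base.py above) computed the RMS gradient, in the CRFO case restricted to the active coordinates, and is apparently what the commit message marks as redundant now that convergence is tracked through ConvergenceParams. For reference, a short numpy sketch of the quantity those properties computed (illustrative only, not autodE code):

import numpy as np


def rms_gradient(g, active_indexes=None):
    # RMS(∇E); optionally restricted to an active subspace, as the CRFO version was
    if g is None:
        return np.inf  # mirrors the removed property: undefined gradient -> infinity
    g = np.asarray(g, dtype=float)
    if active_indexes is not None:
        g = g[active_indexes]
    return float(np.sqrt(np.mean(np.square(g))))


g = np.array([0.01, -0.02, 0.005, 0.0, 0.03, -0.01])
print(rms_gradient(g))                            # full gradient
print(rms_gradient(g, active_indexes=[0, 1, 4]))  # active subspace only
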
5 changes: 0 additions & 5 deletions tests/test_opt/test_crfo.py
@@ -132,11 +132,6 @@ def r(_x):
assert np.isclose(r(s.to("cartesian")), r_initial, atol=1e-10)


def test_init_g_norm_is_none():
optimiser = CRFOptimiser(conv_tol="loose", maxiter=1)
assert optimiser._g_norm > 0


def test_sanitised_zero_length_step():
"""Should be able to update with a null step"""

66 changes: 53 additions & 13 deletions tests/test_opt/test_opt.py
@@ -60,6 +60,15 @@ def test_optimiser_construct():
maxiter=1, conv_tol=ConvergenceParams(abs_d_e=0.1, rms_g=-0.1)
)

with pytest.raises(ValueError, match="Unknown preset convergence"):
_ = CartesianSDOptimiser(maxiter=1, conv_tol="unknown")

# should be able to set convergence through setter
opt = CartesianSDOptimiser(maxiter=1, conv_tol="loose")
opt.conv_tol = "normal"
with pytest.raises(ValueError, match="Unknown preset convergence"):
opt.conv_tol = "unknown"

# at least RMS g convergence criteria has to be defined
with pytest.raises(
ValueError, match="RMS gradient criteria has to be defined"
@@ -69,6 +78,50 @@ def test_optimiser_construct():
)


def test_optimiser_convergence(caplog):
opt = CartesianSDOptimiser(
maxiter=10,
conv_tol=ConvergenceParams(
abs_d_e=0.01, rms_g=0.01, max_g=0.01, rms_s=0.01, max_s=0.01
),
)
coords1 = CartesianCoordinates(np.arange(6, dtype=float))
opt._species = Molecule(smiles="N#N")
opt._coords = coords1
opt._coords.g = np.random.random(6)
opt._coords.e = PotentialEnergy(0.1, "Ha")

# grad + energy < 1/2 + step is < * 3
coords2 = coords1 + 0.02
coords2.g = np.array([0.004] * 6)
coords2.e = PotentialEnergy(0.1 - 0.004, "Ha")
opt._coords = coords2
with caplog.at_level("WARNING"):
assert opt.converged
assert "Overachieved gradient and energy" in caplog.text
assert "reasonable convergence on step size" in caplog.text
caplog.clear()
# grad ~ 1/10, dE < *1.5, step < * 2
coords2 = coords1 + 0.015
coords2.g = np.array([0.0009] * 6)
coords2.e = PotentialEnergy(0.1 - 0.015)
opt._history._memory[-1] = coords2
with caplog.at_level("WARNING"):
assert opt.converged
assert "Gradient is one order of magnitude below" in caplog.text
assert "other parameter(s) are almost converged"
caplog.clear()
# step achieved, grad ~ 0.7, dE < * 3
coords2 = coords1 + 0.009
coords2.g = np.array([0.006] * 6)
coords2.e = PotentialEnergy(0.1 - 0.025)
opt._history._memory[-1] = coords2
with caplog.at_level("WARNING"):
assert opt.converged
assert "Everything except energy has been converged" in caplog.text
assert "Reasonable convergence on energy" in caplog.text


def test_initialise_species_and_method():
optimiser = sample_cartesian_optimiser()

@@ -88,19 +141,6 @@ def test_coords_set():
optimiser._coords = "a"


def test_g_norm():
optimiser = sample_cartesian_optimiser()

# With no coordinates the norm of the gradient is infinity
assert optimiser._coords is None
assert not np.isfinite(optimiser._g_norm)

# Likewise if the gradient is unset
optimiser._coords = CartesianCoordinates([1.0, 0.0, 0.0])
assert optimiser._coords.g is None
assert not np.isfinite(optimiser._g_norm)


def test_history():
optimiser = sample_cartesian_optimiser()
assert optimiser.iteration < 1
