
Commit

Merge pull request #31 from SpeysideHEP/value_and_grad
Optimise gradient execution
jackaraz authored Jan 28, 2024
2 parents d81d799 + fce776c commit b7de44a
Showing 6 changed files with 108 additions and 12 deletions.
11 changes: 11 additions & 0 deletions docs/api.rst
@@ -86,6 +86,17 @@ Hypothesis testing
    asymptotic_calculator.compute_asymptotic_confidence_level
    toy_calculator.compute_toy_confidence_level

Gradient Tools
--------------

.. currentmodule:: spey.math

.. autosummary::
    :toctree: _generated/

    value_and_grad
    hessian

Default Backends
----------------

6 changes: 6 additions & 0 deletions docs/releases/changelog-v0.1.md
@@ -37,6 +37,12 @@
* Utilities to retrieve bibtex information for third-party plug-ins.
  ([#32](https://github.com/SpeysideHEP/spey/pull/32))

* Add math utilities for users to extract the gradient and Hessian of the negative log-likelihood.
  ([#31](https://github.com/SpeysideHEP/spey/pull/31))

* Improve gradient execution for `default_pdf`.
  ([#31](https://github.com/SpeysideHEP/spey/pull/31))

## Bug Fixes

* In accordance with the latest updates, ```UnCorrStatisticsCombiner``` has been updated with
1 change: 1 addition & 0 deletions src/spey/__init__.py
@@ -32,6 +32,7 @@
"BackendBase",
"ConverterBase",
"about",
"math",
"check_updates",
"get_backend_bibtex",
"cite",
8 changes: 2 additions & 6 deletions src/spey/backends/default_pdf/__init__.py
@@ -3,7 +3,7 @@
import logging
from typing import Any, Callable, Dict, List, Optional, Text, Tuple, Union

-from autograd import grad, hessian, jacobian
+from autograd import value_and_grad, hessian, jacobian
from autograd import numpy as np
from scipy.optimize import NonlinearConstraint

@@ -257,11 +257,7 @@ def negative_loglikelihood(pars: np.ndarray) -> np.ndarray:
            ) - self.constraint_model.log_prob(pars)

        if do_grad:
-            grad_negative_loglikelihood = grad(negative_loglikelihood, argnum=0)
-            return lambda pars: (
-                negative_loglikelihood(pars),
-                grad_negative_loglikelihood(pars),
-            )
+            return value_and_grad(negative_loglikelihood, argnum=0)

        return negative_loglikelihood

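For intuition, here is a minimal sketch (using a toy objective, not spey's actual likelihood) of what this change buys: autograd's `value_and_grad` reuses a single forward evaluation for both the value and the gradient, whereas the replaced lambda evaluated `negative_loglikelihood` once for the value and again inside `grad`, roughly doubling the work per optimiser call.

```python
import autograd.numpy as np
from autograd import grad, value_and_grad


def toy_negative_loglikelihood(pars):
    # Stand-in objective; the real one combines the main and constraint models.
    return np.sum(pars ** 2) - np.log(pars[0] + 1.0)


def old_fn(p):
    # Old pattern: value and gradient computed in two separate evaluations.
    return toy_negative_loglikelihood(p), grad(toy_negative_loglikelihood)(p)


# New pattern: a single call returns (value, gradient) from one shared pass.
new_fn = value_and_grad(toy_negative_loglikelihood)

pars = np.array([0.5, 1.2])
old_val, old_grad = old_fn(pars)
new_val, new_grad = new_fn(pars)
assert np.isclose(old_val, new_val) and np.allclose(old_grad, new_grad)
```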
8 changes: 2 additions & 6 deletions src/spey/backends/default_pdf/simple_pdf.py
@@ -2,7 +2,7 @@

from typing import Callable, List, Optional, Text, Tuple, Union

-from autograd import grad, hessian
+from autograd import value_and_grad, hessian
from autograd import numpy as np

from spey._version import __version__
@@ -162,11 +162,7 @@ def negative_loglikelihood(pars: np.ndarray) -> np.ndarray:
            return -self.main_model.log_prob(pars, data)

        if do_grad:
-            grad_negative_loglikelihood = grad(negative_loglikelihood, argnum=0)
-            return lambda pars: (
-                negative_loglikelihood(pars),
-                grad_negative_loglikelihood(pars),
-            )
+            return value_and_grad(negative_loglikelihood, argnum=0)

        return negative_loglikelihood

86 changes: 86 additions & 0 deletions src/spey/math.py
@@ -0,0 +1,86 @@
from typing import Callable, Optional, Tuple

from autograd import numpy

from .interface.statistical_model import StatisticalModel
from .utils import ExpectationType

# pylint: disable=E1101

__all__ = ["value_and_grad", "hessian"]


def __dir__():
    return __all__


def value_and_grad(
    statistical_model: StatisticalModel,
    expected: ExpectationType = ExpectationType.observed,
    data: Optional[numpy.ndarray] = None,
) -> Callable[[numpy.ndarray], Tuple[numpy.ndarray, numpy.ndarray]]:
    """
    Retrieve a function that computes the negative log-likelihood and its gradient.

    Args:
        statistical_model (~spey.StatisticalModel): statistical model to be used.
        expected (~spey.ExpectationType): Sets which values the fitting algorithm should focus on
          and which p-values to compute.

          * :obj:`~spey.ExpectationType.observed`: Computes the p-values via the post-fit
            prescription, i.e. the experimental data is assumed to be the truth (default).
          * :obj:`~spey.ExpectationType.aposteriori`: Computes the expected p-values via the
            post-fit prescription, i.e. the experimental data is assumed to be the truth.
          * :obj:`~spey.ExpectationType.apriori`: Computes the expected p-values via the pre-fit
            prescription, i.e. the SM is assumed to be the truth.
        data (``numpy.ndarray``, default ``None``): input data to fit. If ``None``, the observed
          data will be used.

    Returns:
        ``Callable[[numpy.ndarray], Tuple[numpy.ndarray, numpy.ndarray]]``:
        negative log-likelihood and its gradient with respect to the nuisance parameters.
    """
    val_and_grad = statistical_model.backend.get_objective_function(
        expected=expected, data=data, do_grad=True
    )
    return lambda pars: val_and_grad(numpy.array(pars))


def hessian(
    statistical_model: StatisticalModel,
    expected: ExpectationType = ExpectationType.observed,
    data: Optional[numpy.ndarray] = None,
) -> Callable[[numpy.ndarray], numpy.ndarray]:
    r"""
    Retrieve a function that computes the Hessian of the negative log-likelihood.

    .. math::

        {\rm Hessian} = -\frac{\partial^2\log\mathcal{L}(\theta)}{\partial\theta_i\partial\theta_j}

    Args:
        statistical_model (~spey.StatisticalModel): statistical model to be used.
        expected (~spey.ExpectationType): Sets which values the fitting algorithm should focus on
          and which p-values to compute.

          * :obj:`~spey.ExpectationType.observed`: Computes the p-values via the post-fit
            prescription, i.e. the experimental data is assumed to be the truth (default).
          * :obj:`~spey.ExpectationType.aposteriori`: Computes the expected p-values via the
            post-fit prescription, i.e. the experimental data is assumed to be the truth.
          * :obj:`~spey.ExpectationType.apriori`: Computes the expected p-values via the pre-fit
            prescription, i.e. the SM is assumed to be the truth.
        data (``numpy.ndarray``, default ``None``): input data to fit. If ``None``, the observed
          data will be used.

    Returns:
        ``Callable[[numpy.ndarray], numpy.ndarray]``:
        function to compute the Hessian of the negative log-likelihood.
    """
    hess = statistical_model.backend.get_hessian_logpdf_func(expected=expected, data=data)
    return lambda pars: -1.0 * hess(numpy.array(pars))

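As a usage note for the new module, here is a sketch of how `spey.math.value_and_grad` and `spey.math.hessian` could be called. The backend accessor string, the yields, uncertainties, and the parameter ordering below are illustrative assumptions, not part of this commit.

```python
import numpy as np

import spey
from spey.math import hessian, value_and_grad

# Illustrative single-bin model; the numbers and the backend accessor are assumptions.
stat_wrapper = spey.get_backend("default_pdf.uncorrelated_background")
statistical_model = stat_wrapper(
    signal_yields=[3.0],
    background_yields=[2.0],
    data=[2],
    absolute_uncertainties=[1.0],
)

nll_and_grad = value_and_grad(statistical_model)
nll_hessian = hessian(statistical_model)

# Assumed ordering: [POI, nuisance parameter] for a one-bin model.
pars = np.array([1.0, 0.0])
nll, grad_nll = nll_and_grad(pars)
print(nll, grad_nll)      # negative log-likelihood and its gradient
print(nll_hessian(pars))  # Hessian of the negative log-likelihood
```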