From 49f6ae3cd89ab07fdfb714cdb4bf0fe5a4acf779 Mon Sep 17 00:00:00 2001 From: Uri Granta Date: Tue, 11 Jul 2023 14:52:01 +0100 Subject: [PATCH 01/11] Investigate adding acquisition metadata --- trieste/acquisition/interface.py | 46 +++++++++++++++++++++++++++----- trieste/acquisition/rule.py | 40 ++++++++++++++++++++++++--- 2 files changed, 77 insertions(+), 9 deletions(-) diff --git a/trieste/acquisition/interface.py b/trieste/acquisition/interface.py index e7c92859d4..817aaa9c57 100644 --- a/trieste/acquisition/interface.py +++ b/trieste/acquisition/interface.py @@ -18,7 +18,7 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import Callable, Generic, Mapping, Optional +from typing import Any, Callable, Generic, Mapping, Optional from ..data import Dataset from ..models.interfaces import ProbabilisticModelType @@ -57,6 +57,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ Prepare an acquisition function. We assume that this requires at least models, but @@ -64,6 +65,7 @@ def prepare_acquisition_function( :param models: The models for each tag. :param datasets: The data from the observer (optional). + :param metadata: Any metadata to pass to the acquisition function (optional). :return: An acquisition function. """ @@ -72,6 +74,7 @@ def update_acquisition_function( function: AcquisitionFunction, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ Update an acquisition function. By default this generates a new acquisition function each @@ -82,6 +85,7 @@ def update_acquisition_function( :param function: The acquisition function to update. :param models: The models for each tag. :param datasets: The data from the observer (optional). + :param metadata: Any metadata to pass to the acquisition function (optional). :return: The updated acquisition function. """ return self.prepare_acquisition_function(models, datasets=datasets) @@ -110,9 +114,12 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return self.single_builder.prepare_acquisition_function( - models[tag], dataset=None if datasets is None else datasets[tag] + models[tag], + dataset=None if datasets is None else datasets[tag], + metadata=metadata, ) def update_acquisition_function( @@ -120,9 +127,13 @@ def update_acquisition_function( function: AcquisitionFunction, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return self.single_builder.update_acquisition_function( - function, models[tag], dataset=None if datasets is None else datasets[tag] + function, + models[tag], + dataset=None if datasets is None else datasets[tag], + metadata=metadata, ) def __repr__(self) -> str: @@ -135,10 +146,12 @@ def prepare_acquisition_function( self, model: ProbabilisticModelType, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data to use to build the acquisition function (optional). 
+ :param metadata: Any metadata to pass to the acquisition function (optional). :return: An acquisition function. """ @@ -147,14 +160,16 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModelType, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: The data from the observer (optional). + :param metadata: Any metadata to pass to the acquisition function (optional). :return: The updated acquisition function. """ - return self.prepare_acquisition_function(model, dataset=dataset) + return self.prepare_acquisition_function(model, dataset=dataset, metadata=metadata) class GreedyAcquisitionFunctionBuilder(Generic[ProbabilisticModelType], ABC): @@ -174,6 +189,7 @@ def prepare_acquisition_function( models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ Generate a new acquisition function. The first time this is called, ``pending_points`` @@ -184,6 +200,7 @@ def prepare_acquisition_function( :param datasets: The data from the observer (optional). :param pending_points: Points already chosen to be in the current batch (of shape [M,D]), where M is the number of pending points and D is the search space dimension. + :param metadata: Any metadata to pass to the acquisition function (optional). :return: An acquisition function. """ @@ -194,6 +211,7 @@ def update_acquisition_function( datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ Update an acquisition function. By default this generates a new acquisition function each @@ -209,6 +227,7 @@ def update_acquisition_function( :param new_optimization_step: Indicates whether this call to update_acquisition_function is to start of a new optimization step, of to continue collecting batch of points for the current step. Defaults to ``True``. + :param metadata: Any metadata to pass to the acquisition function (optional). :return: The updated acquisition function. 
""" return self.prepare_acquisition_function( @@ -240,11 +259,13 @@ def prepare_acquisition_function( models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return self.single_builder.prepare_acquisition_function( models[tag], dataset=None if datasets is None else datasets[tag], pending_points=pending_points, + metadata=metadata, ) def update_acquisition_function( @@ -254,6 +275,7 @@ def update_acquisition_function( datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return self.single_builder.update_acquisition_function( function, @@ -261,6 +283,7 @@ def update_acquisition_function( dataset=None if datasets is None else datasets[tag], pending_points=pending_points, new_optimization_step=new_optimization_step, + metadata=metadata, ) def __repr__(self) -> str: @@ -274,12 +297,14 @@ def prepare_acquisition_function( model: ProbabilisticModelType, dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer (optional). :param pending_points: Points already chosen to be in the current batch (of shape [M,D]), where M is the number of pending points and D is the search space dimension. + :param metadata: Any metadata to pass to the acquisition function (optional). :return: An acquisition function. """ @@ -290,6 +315,7 @@ def update_acquisition_function( dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. @@ -300,6 +326,7 @@ def update_acquisition_function( :param new_optimization_step: Indicates whether this call to update_acquisition_function is to start of a new optimization step, of to continue collecting batch of points for the current step. Defaults to ``True``. + :param metadata: Any metadata to pass to the acquisition function (optional). :return: The updated acquisition function. 
""" return self.prepare_acquisition_function( @@ -344,9 +371,12 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return self.single_builder.prepare_acquisition_function( - models[tag], dataset=None if datasets is None else datasets[tag] + models[tag], + dataset=None if datasets is None else datasets[tag], + metadata=metadata, ) def update_acquisition_function( @@ -354,9 +384,13 @@ def update_acquisition_function( function: AcquisitionFunction, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return self.single_builder.update_acquisition_function( - function, models[tag], dataset=None if datasets is None else datasets[tag] + function, + models[tag], + dataset=None if datasets is None else datasets[tag], + metadata=metadata, ) def __repr__(self) -> str: diff --git a/trieste/acquisition/rule.py b/trieste/acquisition/rule.py index 5056438e20..9b98818862 100644 --- a/trieste/acquisition/rule.py +++ b/trieste/acquisition/rule.py @@ -103,6 +103,7 @@ def acquire( search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> ResultType: """ Return a value of type `T_co`. Typically this will be a set of query points, either on its @@ -117,6 +118,7 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model for each tag. :param datasets: The known observer query points and observations for each tag (optional). + :param metadata: Any metadata to use for acquisition (optional). :return: A value of type `T_co`. """ @@ -125,6 +127,7 @@ def acquire_single( search_space: SearchSpaceType, model: ProbabilisticModelType, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> ResultType: """ A convenience wrapper for :meth:`acquire` that uses only one model, dataset pair. @@ -133,6 +136,7 @@ def acquire_single( is defined. :param model: The model to use. :param dataset: The known observer query points and observations (optional). + :param metadata: Any metadata to use for acquisition (optional). :return: A value of type `T_co`. """ if isinstance(dataset, dict) or isinstance(model, dict): @@ -144,6 +148,7 @@ def acquire_single( search_space, {OBJECTIVE: model}, datasets=None if dataset is None else {OBJECTIVE: dataset}, + metadata=metadata, ) @@ -268,6 +273,7 @@ def acquire( search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ Return the query point(s) that optimizes the acquisition function produced by ``builder`` @@ -277,18 +283,21 @@ def acquire( :param models: The model for each tag. :param datasets: The known observer query points and observations. Whether this is required depends on the acquisition function used. + :param metadata: Any metadata to pass to the acquisition function (optional). :return: The single (or batch of) points to query. 
""" if self._acquisition_function is None: self._acquisition_function = self._builder.prepare_acquisition_function( models, datasets=datasets, + metadata=metadata, ) else: self._acquisition_function = self._builder.update_acquisition_function( self._acquisition_function, models, datasets=datasets, + metadata=metadata, ) summary_writer = logging.get_tensorboard_writer() @@ -321,6 +330,7 @@ def acquire( datasets=datasets, pending_points=points, new_optimization_step=False, + metadata=metadata, ) with tf.name_scope(f"EGO.optimizer[{i+1}]"): chosen_point = self._optimizer(search_space, self._acquisition_function) @@ -537,6 +547,7 @@ def acquire( search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[AsynchronousRuleState | None, TensorType]: """ Constructs a function that, given ``AsynchronousRuleState``, @@ -554,6 +565,7 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model of the known data. Uses the single key `OBJECTIVE`. :param datasets: The known observer query points and observations. + :param metadata: Any metadata to pass to the acquisition function (optional). :return: A function that constructs the next acquisition state and the recommended query points from the previous acquisition state. """ @@ -570,12 +582,14 @@ def acquire( self._acquisition_function = self._builder.prepare_acquisition_function( models, datasets=datasets, + metadata=metadata, ) else: self._acquisition_function = self._builder.update_acquisition_function( self._acquisition_function, models, datasets=datasets, + metadata=metadata, ) def state_func( @@ -693,6 +707,7 @@ def acquire( search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[AsynchronousRuleState | None, TensorType]: """ Constructs a function that, given ``AsynchronousRuleState``, @@ -708,6 +723,7 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model of the known data. Uses the single key `OBJECTIVE`. :param datasets: The known observer query points and observations. + :param metadata: Any metadata to pass to the acquisition function (optional). :return: A function that constructs the next acquisition state and the recommended query points from the previous acquisition state. 
""" @@ -734,6 +750,7 @@ def state_func( models, datasets=datasets, pending_points=state.pending_points, + metadata=metadata, ) else: self._acquisition_function = self._builder.update_acquisition_function( @@ -741,6 +758,7 @@ def state_func( models, datasets=datasets, pending_points=state.pending_points, + metadata=metadata, ) with tf.name_scope("AsynchronousOptimization.optimizer[0]"): @@ -758,6 +776,7 @@ def state_func( datasets=datasets, pending_points=state.pending_points, new_optimization_step=False, + metadata=metadata, ) with tf.name_scope(f"AsynchronousOptimization.optimizer[{i+1}]"): new_point = self._optimizer(search_space, self._acquisition_function) @@ -804,6 +823,7 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ Sample ``num_query_points`` (see :meth:`__init__`) points from the @@ -812,6 +832,7 @@ def acquire( :param search_space: The acquisition search space. :param models: Unused. :param datasets: Unused. + :param metadata: Unused. :return: The ``num_query_points`` points to query. """ samples = search_space.sample(self._num_query_points) @@ -905,6 +926,7 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ Sample `num_search_space_samples` (see :meth:`__init__`) points from the @@ -914,6 +936,7 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model of the known data. Uses the single key `OBJECTIVE`. :param datasets: The known observer query points and observations. + :param metadata: Unused. :return: The ``num_query_points`` points to query. :raise ValueError: If ``models`` do not contain the key `OBJECTIVE`, or it contains any other key. @@ -1019,6 +1042,7 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[State | None, TensorType]: """ Construct a local search space from ``search_space`` according the trust region algorithm, @@ -1050,6 +1074,7 @@ def acquire( :param models: The model for each tag. :param datasets: The known observer query points and observations. Uses the data for key `OBJECTIVE` to calculate the new trust region. + :param metadata: Any metadata to pass to the subrule (optional). :return: A function that constructs the next acquisition state and the recommended query points from the previous acquisition state. :raise KeyError: If ``datasets`` does not contain the key `OBJECTIVE`. @@ -1095,7 +1120,9 @@ def state_func( tf.reduce_min([global_upper, xmin + eps], axis=0), ) - points = self._rule.acquire(acquisition_space, models, datasets=datasets) + points = self._rule.acquire( + acquisition_space, models, datasets=datasets, metadata=metadata + ) state_ = TrustRegion.State(acquisition_space, eps, y_min, is_global) return state_, points @@ -1231,6 +1258,7 @@ def acquire( search_space: Box, models: Mapping[Tag, TrainableSupportsGetKernel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[State | None, TensorType]: """ Construct a local search space from ``search_space`` according the TURBO algorithm, @@ -1256,6 +1284,7 @@ def acquire( :param models: The model for each tag. 
:param datasets: The known observer query points and observations. Uses the data for key `OBJECTIVE` to calculate the new trust region. + :param metadata: Any metadata to pass to the subrule (optional). :return: A function that constructs the next acquisition state and the recommended query points from the previous acquisition state. :raise KeyError: If ``datasets`` does not contain the key `OBJECTIVE`. @@ -1324,7 +1353,9 @@ def state_func( local_model.optimize(local_dataset) # use local model and local dataset to choose next query point(s) - points = self._rule.acquire_single(acquisition_space, local_model, local_dataset) + points = self._rule.acquire_single( + acquisition_space, local_model, local_dataset, metadata=metadata + ) state_ = TURBO.State(acquisition_space, L, failure_counter, success_counter, y_min) return state_, points @@ -1433,6 +1464,7 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """Acquire a batch of points to observe based on the batch hypervolume Sharpe ratio indicator method. @@ -1443,6 +1475,7 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model for each tag. :param datasets: The known observer query points and observations. + :param metadata: Any metadata to pass to the acquisition function (optional). :return: The batch of points to query. """ if models.keys() != {OBJECTIVE}: @@ -1457,13 +1490,14 @@ def acquire( if self._acquisition_function is None: self._acquisition_function = self._builder.prepare_acquisition_function( - models, datasets=datasets + models, datasets=datasets, metadata=metadata ) else: self._acquisition_function = self._builder.update_acquisition_function( self._acquisition_function, models, datasets=datasets, + metadata=metadata, ) # Find non-dominated points From 2daf28166cb306674b8e9d16f997aac77caa4b79 Mon Sep 17 00:00:00 2001 From: Uri Granta Date: Tue, 11 Jul 2023 16:31:52 +0100 Subject: [PATCH 02/11] Fix subclasses --- docs/notebooks/code_overview.pct.py | 13 ++++- .../acquisition/function/test_function.py | 8 ++- .../multi_objective/test_function.py | 6 +- tests/unit/acquisition/test_combination.py | 5 +- tests/unit/acquisition/test_interface.py | 11 +++- tests/unit/acquisition/test_rule.py | 11 +++- tests/unit/test_ask_tell_optimization.py | 3 +- tests/unit/test_bayesian_optimizer.py | 7 ++- tests/unit/test_logging.py | 3 +- tests/util/misc.py | 2 + trieste/acquisition/combination.py | 11 +++- .../acquisition/function/active_learning.py | 20 +++++-- .../function/continuous_thompson_sampling.py | 10 +++- trieste/acquisition/function/entropy.py | 20 ++++++- trieste/acquisition/function/function.py | 58 ++++++++++++++++++- trieste/acquisition/function/greedy_batch.py | 10 +++- .../acquisition/function/multi_objective.py | 12 +++- 17 files changed, 184 insertions(+), 26 deletions(-) diff --git a/docs/notebooks/code_overview.pct.py b/docs/notebooks/code_overview.pct.py index 85216ce818..febbd93f85 100644 --- a/docs/notebooks/code_overview.pct.py +++ b/docs/notebooks/code_overview.pct.py @@ -80,7 +80,7 @@ # %% from __future__ import annotations -from typing import Optional +from typing import Any, Mapping, Optional import tensorflow as tf from trieste.types import TensorType @@ -203,7 +203,10 @@ class HasGizmoReparamSamplerAndObservationNoise( class ProbabilityOfValidity(SingleModelAcquisitionBuilder[ProbabilisticModel]): def 
prepare_acquisition_function( - self, model: ProbabilisticModel, dataset: Optional[Dataset] = None + self, + model: ProbabilisticModel, + dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: def acquisition(at: TensorType) -> TensorType: mean, _ = model.predict_y(tf.squeeze(at, -2)) @@ -219,7 +222,10 @@ def acquisition(at: TensorType) -> TensorType: # %% class ProbabilityOfValidity2(SingleModelAcquisitionBuilder[ProbabilisticModel]): def prepare_acquisition_function( - self, model: ProbabilisticModel, dataset: Optional[Dataset] = None + self, + model: ProbabilisticModel, + dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: @tf.function def acquisition(at: TensorType) -> TensorType: @@ -233,6 +239,7 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return function # no need to update anything diff --git a/tests/unit/acquisition/function/test_function.py b/tests/unit/acquisition/function/test_function.py index 986092681e..64358b53e1 100644 --- a/tests/unit/acquisition/function/test_function.py +++ b/tests/unit/acquisition/function/test_function.py @@ -15,7 +15,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Callable, Optional, Sequence +from typing import Any, Callable, Optional, Sequence from unittest.mock import MagicMock import numpy.testing as npt @@ -956,6 +956,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda x: tf.ones_like(tf.squeeze(x, -2)) @@ -999,6 +1000,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda x: tf.cast(tf.squeeze(x, -2) >= 0, x.dtype) @@ -1024,6 +1026,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda x: tf.cast(tf.squeeze(x, -2) >= 0, x.dtype) @@ -1048,6 +1051,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return raise_exc @@ -1080,6 +1084,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: def acquisition(x: TensorType) -> TensorType: x_ = tf.squeeze(x, -2) @@ -1111,6 +1116,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return pof diff --git a/tests/unit/acquisition/multi_objective/test_function.py b/tests/unit/acquisition/multi_objective/test_function.py index 7bcf0e99ef..69d5ceff8f 100644 --- a/tests/unit/acquisition/multi_objective/test_function.py +++ b/tests/unit/acquisition/multi_objective/test_function.py @@ -15,7 +15,7 @@ import itertools import math -from 
typing import Callable, Mapping, Optional, Sequence, cast +from typing import Any, Callable, Mapping, Optional, Sequence, cast import numpy.testing as npt import pytest @@ -85,6 +85,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda x: tf.ones((tf.shape(x)[0], 1), dtype=tf.float64) @@ -757,6 +758,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda x: tf.ones_like(tf.squeeze(x, -2)) @@ -799,6 +801,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: def acquisition(x: TensorType) -> TensorType: x_ = tf.squeeze(x, -2) @@ -836,6 +839,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return raise_exc diff --git a/tests/unit/acquisition/test_combination.py b/tests/unit/acquisition/test_combination.py index 95415f6994..834f527766 100644 --- a/tests/unit/acquisition/test_combination.py +++ b/tests/unit/acquisition/test_combination.py @@ -14,7 +14,7 @@ from __future__ import annotations from collections.abc import Mapping, Sequence -from typing import Optional +from typing import Any, Optional import numpy.testing as npt import pytest @@ -57,6 +57,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return raise_exc @@ -72,6 +73,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return self._f @@ -80,6 +82,7 @@ def update_acquisition_function( function: AcquisitionFunction, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda x: function(x) + 1 diff --git a/tests/unit/acquisition/test_interface.py b/tests/unit/acquisition/test_interface.py index 0d8b68421c..b0c46cd68f 100644 --- a/tests/unit/acquisition/test_interface.py +++ b/tests/unit/acquisition/test_interface.py @@ -13,7 +13,7 @@ # limitations under the License. 
from __future__ import annotations -from typing import Iterator, List, Mapping, Optional, Tuple, cast +from typing import Any, Iterator, List, Mapping, Optional, Tuple, cast import pytest @@ -50,6 +50,7 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return raise_exc @@ -60,6 +61,7 @@ def prepare_acquisition_function( model: ProbabilisticModel, dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return raise_exc @@ -84,16 +86,19 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: assert dataset is data["foo"] assert model is models["foo"] + assert metadata == {"iteration_id": 1} return raise_exc FOO: Tag = "foo" BAR: Tag = "bar" data = {FOO: empty_dataset([1], [1]), BAR: empty_dataset([1], [1])} + metadata = {"iteration_id": 1} models = {FOO: QuadraticMeanAndRBFKernel(), BAR: QuadraticMeanAndRBFKernel()} - Builder().using(FOO).prepare_acquisition_function(models, datasets=data) + Builder().using(FOO).prepare_acquisition_function(models, datasets=data, metadata=metadata) def test_single_model_greedy_acquisition_builder_raises_immediately_for_wrong_key() -> None: @@ -183,6 +188,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: assert datasets is not None assert len(datasets) == 1 @@ -191,6 +197,7 @@ def prepare_acquisition_function( assert "FOO" not in datasets assert isinstance(datasets, CustomDatasets) assert datasets.iteration_id == 2 + assert metadata is None return raise_exc data = CustomDatasets({OBJECTIVE: empty_dataset([1], [1])}, 2) diff --git a/tests/unit/acquisition/test_rule.py b/tests/unit/acquisition/test_rule.py index 13b6acabd2..6b0325b0c1 100644 --- a/tests/unit/acquisition/test_rule.py +++ b/tests/unit/acquisition/test_rule.py @@ -15,7 +15,7 @@ import copy from collections.abc import Mapping -from typing import Callable, Optional +from typing import Any, Callable, Optional import gpflow import numpy.testing as npt @@ -234,6 +234,7 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda x: -quadratic(tf.squeeze(x, -2) - 1) @@ -242,6 +243,7 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: self._updated = True return function @@ -264,6 +266,7 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: noise = tf.random.uniform([], -0.05, 0.05, dtype=tf.float64) return lambda x: -quadratic(tf.squeeze(x, -2) - 1) + noise @@ -273,6 +276,7 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return function @@ -310,6 +314,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: 
Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda at: -tf.reduce_max(models[OBJECTIVE].predict(at)[0], axis=-2) @@ -362,6 +367,7 @@ def prepare_acquisition_function( model: ProbabilisticModel, dataset: Optional[Dataset] = None, pending_points: TensorType = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: if pending_points is None: return lambda at: -tf.reduce_max(model.predict(at)[0], axis=-2) @@ -378,6 +384,7 @@ def update_acquisition_function( dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: self._update_count += 1 return self.prepare_acquisition_function( @@ -440,6 +447,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda at: tf.squeeze(-models[OBJECTIVE].predict(at)[0], -1) @@ -555,6 +563,7 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: return (search_space.upper[None] + search_space.lower[None]) / 2 diff --git a/tests/unit/test_ask_tell_optimization.py b/tests/unit/test_ask_tell_optimization.py index 7c638f2baf..d99bfcc9de 100644 --- a/tests/unit/test_ask_tell_optimization.py +++ b/tests/unit/test_ask_tell_optimization.py @@ -13,7 +13,7 @@ # limitations under the License. from __future__ import annotations -from typing import Mapping, Optional +from typing import Any, Mapping, Optional import pytest import tensorflow as tf @@ -326,6 +326,7 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | None, TensorType]: def go(state: int | None) -> tuple[int | None, TensorType]: self.states_received.append(state) diff --git a/tests/unit/test_bayesian_optimizer.py b/tests/unit/test_bayesian_optimizer.py index dead19ac21..f0afc6a720 100644 --- a/tests/unit/test_bayesian_optimizer.py +++ b/tests/unit/test_bayesian_optimizer.py @@ -16,7 +16,7 @@ import tempfile from collections.abc import Mapping from pathlib import Path -from typing import NoReturn, Optional +from typing import Any, NoReturn, Optional import numpy.testing as npt import pytest @@ -365,6 +365,7 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | None, TensorType]: def go(state: int | None) -> tuple[int | None, TensorType]: self.states_received.append(state) @@ -437,6 +438,7 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> NoReturn: raise _Whoops @@ -496,6 +498,7 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> NoReturn: assert False @@ -532,6 +535,7 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | 
None, TensorType]: def go(previous_state: int | None) -> tuple[int | None, TensorType]: if previous_state is None: @@ -615,6 +619,7 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | None, TensorType]: def go(state: int | None) -> tuple[int | None, TensorType]: new_state = 0 if state is None else state + 1 diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index 628a6e0df4..68929d686f 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -18,7 +18,7 @@ from collections.abc import Mapping from itertools import zip_longest from time import sleep -from typing import Optional +from typing import Any, Optional import numpy.testing as npt import pytest @@ -218,6 +218,7 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: sleep(acq_time) return self._qp diff --git a/tests/util/misc.py b/tests/util/misc.py index be71e7e5f6..5ab1f843f1 100644 --- a/tests/util/misc.py +++ b/tests/util/misc.py @@ -181,11 +181,13 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ :param search_space: Unused. :param models: Unused. :param datasets: Unused. + :param metadata: Unused. :return: The fixed value specified on initialisation. """ return self._qp diff --git a/trieste/acquisition/combination.py b/trieste/acquisition/combination.py index c302e0fc68..9bf2675c3f 100644 --- a/trieste/acquisition/combination.py +++ b/trieste/acquisition/combination.py @@ -15,7 +15,7 @@ from abc import abstractmethod from collections.abc import Mapping, Sequence -from typing import Callable, Optional +from typing import Any, Callable, Optional import tensorflow as tf @@ -52,6 +52,7 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: r""" Return an acquisition function. This acquisition function is defined by first building @@ -62,10 +63,12 @@ def prepare_acquisition_function( :param datasets: The data from the observer. :param models: The models over each dataset in ``datasets``. + :param metadata: The acquisition metadata to pass to the subfunctions (Optional). :return: The reduced acquisition function. """ self.functions = tuple( - acq.prepare_acquisition_function(models, datasets=datasets) for acq in self.acquisitions + acq.prepare_acquisition_function(models, datasets=datasets, metadata=metadata) + for acq in self.acquisitions ) def evaluate_acquisition_function_fn(at: TensorType) -> TensorType: @@ -78,14 +81,16 @@ def update_acquisition_function( function: AcquisitionFunction, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param models: The model. :param datasets: Unused. + :param metadata: The acquisition metadata to pass to the subfunctions (Optional). 
""" self.functions = tuple( - acq.update_acquisition_function(function, models, datasets=datasets) + acq.update_acquisition_function(function, models, datasets=datasets, metadata=metadata) for function, acq in zip(self.functions, self.acquisitions) ) diff --git a/trieste/acquisition/function/active_learning.py b/trieste/acquisition/function/active_learning.py index adcd73e853..90a5fba80e 100644 --- a/trieste/acquisition/function/active_learning.py +++ b/trieste/acquisition/function/active_learning.py @@ -20,7 +20,7 @@ from __future__ import annotations import math -from typing import Optional, Sequence, Union +from typing import Any, Mapping, Optional, Sequence, Union import tensorflow as tf import tensorflow_probability as tfp @@ -54,11 +54,12 @@ def prepare_acquisition_function( self, model: SupportsPredictJoint, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: Unused. - + :param metadata: Unused. :return: The determinant of the predictive function. """ if not isinstance(model, SupportsPredictJoint): @@ -74,11 +75,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: SupportsPredictJoint, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: Unused. + :param metadata: Unused. """ return function # no need to update anything @@ -150,10 +153,12 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: Unused. + :param metadata: Unused. :return: The expected feasibility function. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -165,6 +170,7 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return function # no need to update anything @@ -273,11 +279,12 @@ def prepare_acquisition_function( self, model: FastUpdateModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: Unused. - + :param metadata: Unused. :return: The integral of the predictive variance. """ if not isinstance(model, FastUpdateModel): @@ -293,11 +300,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: FastUpdateModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: Unused. + :param metadata: Unused. """ return function # no need to update anything @@ -437,11 +446,12 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: Unused. - + :param metadata: Unused. :return: The determinant of the predictive function. 
""" @@ -452,11 +462,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: Unused. + :param metadata: Unused. """ return function # no need to update anything diff --git a/trieste/acquisition/function/continuous_thompson_sampling.py b/trieste/acquisition/function/continuous_thompson_sampling.py index 38b824a10c..d0ea0c0a28 100644 --- a/trieste/acquisition/function/continuous_thompson_sampling.py +++ b/trieste/acquisition/function/continuous_thompson_sampling.py @@ -16,7 +16,7 @@ """ from __future__ import annotations -from typing import Any, Callable, Optional, Type +from typing import Any, Callable, Mapping, Optional, Type import tensorflow as tf @@ -60,11 +60,13 @@ def prepare_acquisition_function( model: HasTrajectorySampler, dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TrajectoryFunction: """ :param model: The model. :param dataset: The data from the observer (not used). :param pending_points: The points already in the current batch (not used). + :param metadata: Unused. :return: A negated trajectory sampled from the model. """ if not isinstance(model, HasTrajectorySampler): @@ -84,6 +86,7 @@ def update_acquisition_function( dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, + metadata: Optional[Mapping[str, Any]] = None, ) -> TrajectoryFunction: """ :param function: The trajectory function to update. @@ -93,6 +96,7 @@ def update_acquisition_function( :param new_optimization_step: Indicates whether this call to update_acquisition_function is to start of a new optimization step, of to continue collecting batch of points for the current step. Defaults to ``True``. + :param metadata: Unused. :return: A new trajectory sampled from the model. """ @@ -138,10 +142,12 @@ def prepare_acquisition_function( self, model: HasTrajectorySampler, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TrajectoryFunction: """ :param model: The model. :param dataset: The data from the observer (not used). + :param metadata: Unused. :return: A negated trajectory sampled from the model. """ if not isinstance(model, HasTrajectorySampler): @@ -160,11 +166,13 @@ def update_acquisition_function( function: TrajectoryFunction, model: HasTrajectorySampler, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TrajectoryFunction: """ :param function: The trajectory function to update. :param model: The model. :param dataset: The data from the observer (not used). + :param metadata: Unused. :return: A new trajectory sampled from the model. 
""" if function is not self._negated_trajectory: diff --git a/trieste/acquisition/function/entropy.py b/trieste/acquisition/function/entropy.py index 7f04af0a76..46c06f75c8 100644 --- a/trieste/acquisition/function/entropy.py +++ b/trieste/acquisition/function/entropy.py @@ -16,7 +16,7 @@ """ from __future__ import annotations -from typing import List, Optional, TypeVar, cast, overload +from typing import Any, List, Mapping, Optional, TypeVar, cast, overload import tensorflow as tf import tensorflow_probability as tfp @@ -120,10 +120,12 @@ def prepare_acquisition_function( self, model: ProbabilisticModelType, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. + :param metadata: Unused. :return: The max-value entropy search acquisition function modified for objective minimisation. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -145,11 +147,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModelType, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: The data from the observer. + :param metadata: Unused. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) dataset = cast(Dataset, dataset) @@ -325,11 +329,13 @@ def prepare_acquisition_function( model: GIBBONModelType, dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. Must be populated. :param pending_points: The points we penalize with respect to. + :param metadata: Unused. :return: The GIBBON acquisition function modified for objective minimisation. :raise tf.errors.InvalidArgumentError: If ``dataset`` is empty. """ @@ -356,6 +362,7 @@ def update_acquisition_function( dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. @@ -366,6 +373,7 @@ def update_acquisition_function( :param new_optimization_step: Indicates whether this call to update_acquisition_function is to start of a new optimization step, or to continue collecting batch of points for the current step. Defaults to ``True``. + :param metadata: Unused. :return: The updated acquisition function. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) @@ -676,6 +684,7 @@ def prepare_acquisition_function( self, model: MUMBOModelType, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The multifidelity model. @@ -696,6 +705,7 @@ def update_acquisition_function( function: AcquisitionFunction, model: MUMBOModelType, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. 
@@ -805,11 +815,15 @@ def __init__(self, fidelity_costs: List[float]): self._num_fidelities = len(self._fidelity_costs) def prepare_acquisition_function( - self, model: ProbabilisticModel, dataset: Optional[Dataset] = None + self, + model: ProbabilisticModel, + dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. Not actually used here. + :param metadata: Unused. :return: The reciprocal of the costs corresponding to the fidelity level of each input. """ @@ -837,6 +851,7 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ Nothing to do here, so just return previous cost function. @@ -844,5 +859,6 @@ def update_acquisition_function( :param function: The acquisition function to update. :param model: The model. :param dataset: The data from the observer. + :param metadata: Unused. """ return function diff --git a/trieste/acquisition/function/function.py b/trieste/acquisition/function/function.py index f98af45fa3..e91957ba6c 100644 --- a/trieste/acquisition/function/function.py +++ b/trieste/acquisition/function/function.py @@ -17,7 +17,7 @@ """ from __future__ import annotations -from typing import Callable, Mapping, Optional, cast +from typing import Any, Callable, Mapping, Optional, cast import tensorflow as tf import tensorflow_probability as tfp @@ -54,11 +54,15 @@ def __repr__(self) -> str: return "ProbabilityOfImprovement()" def prepare_acquisition_function( - self, model: ProbabilisticModel, dataset: Optional[Dataset] = None + self, + model: ProbabilisticModel, + dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. Must be populated. + :param metadata: Unused. :return: The probability of improvement function. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -76,10 +80,12 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. + :param metadata: Unused. :param dataset: The data from the observer. Must be populated. """ tf.debugging.Assert(dataset is not None, []) @@ -117,10 +123,12 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. Must be populated. + :param metadata: Unused. :return: The expected improvement function. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -154,11 +162,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: The data from the observer. Must be populated. + :param metadata: Unused. 
""" tf.debugging.Assert(dataset is not None, [tf.constant([])]) dataset = cast(Dataset, dataset) @@ -236,10 +246,12 @@ def prepare_acquisition_function( self, model: SupportsGetObservationNoise, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. Must be populated. + :param metadata: Unused. :return: The expected improvement function. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -262,11 +274,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: SupportsGetObservationNoise, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: The data from the observer. Must be populated. + :param metadata: Unused. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) dataset = cast(Dataset, dataset) @@ -348,10 +362,12 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: Unused. + :param metadata: Unused. :return: The negative lower confidence bound function. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -365,11 +381,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: Unused. + :param metadata: Unused. """ return function # no need to update anything @@ -456,10 +474,12 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: Unused. + :param metadata: Unused. :return: The probability of feasibility function. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -471,11 +491,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: Unused. + :param metadata: Unused. """ return function # no need to update anything @@ -549,10 +571,12 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: Unused. :param dataset: Unused. + :param metadata: Unused. :return: The function for feasibility of constraints. """ return fast_constraints_feasibility(self._search_space, self._smoothing_function) @@ -562,11 +586,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: Unused. 
:param dataset: Unused. + :param metadata: Unused. :return: The function for feasibility of constraints. """ return function # No need to update anything. @@ -661,10 +687,12 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param models: The models over each tag. :param datasets: The data from the observer. + :param metadata: Unused. :return: The expected constrained improvement acquisition function. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -715,11 +743,13 @@ def update_acquisition_function( function: AcquisitionFunction, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param models: The models for each tag. :param datasets: The data from the observer. + :param metadata: Unused. """ tf.debugging.Assert(datasets is not None, [tf.constant([])]) datasets = cast(Mapping[Tag, Dataset], datasets) @@ -816,10 +846,12 @@ def prepare_acquisition_function( self, model: HasReparamSampler, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model over the specified ``dataset``. Must have output dimension [1]. :param dataset: The data from the observer. Cannot be empty. + :param metadata: Unused. :return: The estimated *expected improvement* acquisition function. :raise ValueError (or InvalidArgumentError): If ``dataset`` is not populated, ``model`` does not have an output dimension of [1] or does not have a ``reparam_sample`` method. @@ -854,11 +886,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: HasReparamSampler, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. Must have output dimension [1]. Unused here. :param dataset: The data from the observer. Cannot be empty + :param metadata: Unused. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) dataset = cast(Dataset, dataset) @@ -958,10 +992,12 @@ def prepare_acquisition_function( self, model: SupportsReparamSamplerObservationNoise, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model over the specified ``dataset``. Must have output dimension [1]. :param dataset: The data from the observer. Cannot be empty. + :param metadata: Unused. :return: The estimated *expected improvement* acquisition function. :raise ValueError (or InvalidArgumentError): If ``dataset`` is not populated, ``model`` does not have an output dimension of [1], does not have a ``reparam_sample`` method, or @@ -998,11 +1034,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: SupportsReparamSamplerObservationNoise, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. Must have output dimension [1]. Unused here :param dataset: The data from the observer. Cannot be empty. + :param metadata: Unused. 
""" tf.debugging.Assert(dataset is not None, [tf.constant([])]) dataset = cast(Dataset, dataset) @@ -1105,10 +1143,12 @@ def prepare_acquisition_function( self, model: HasReparamSampler, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. Must have event shape [1]. :param dataset: The data from the observer. Must be populated. + :param metadata: Unused. :return: The batch *expected improvement* acquisition function. :raise ValueError (or InvalidArgumentError): If ``dataset`` is not populated, or ``model`` does not have an event shape of [1]. @@ -1131,11 +1171,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: HasReparamSampler, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. Must have event shape [1]. :param dataset: The data from the observer. Must be populated. + :param metadata: Unused. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) dataset = cast(Dataset, dataset) @@ -1224,10 +1266,12 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. Must have event shape [1]. :param dataset: The data from the observer. Must be populated. + :param metadata: Unused. :return: The batch *expected improvement* acquisition function. :raise ValueError (or InvalidArgumentError): If ``dataset`` is not populated, or ``model`` does not have an event shape of [1]. @@ -1260,11 +1304,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. Must have event shape [1]. :param dataset: The data from the observer. Must be populated. + :param metadata: Unused. """ tf.debugging.Assert(dataset is not None, []) dataset = cast(Dataset, dataset) @@ -1831,10 +1877,12 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: Unused. + :param metadata: Unused. :return: The multiple optimism negative lower confidence bound function. """ return multiple_optimism_lower_confidence_bound(model, self._search_space.dimension) @@ -1844,11 +1892,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: Unused. + :param metadata: Unused. """ tf.debugging.Assert( isinstance(function, multiple_optimism_lower_confidence_bound), [tf.constant([])] @@ -1940,10 +1990,12 @@ def prepare_acquisition_function( self, model: ProbabilisticModelType, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data to use to build the acquisition function (optional). + :param metadata: Unused. :return: An acquisition function. 
""" self._base_function = self._base_builder.prepare_acquisition_function(model, dataset) @@ -1959,11 +2011,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModelType, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: The data from the observer (optional). + :param metadata: Unused. :return: The updated acquisition function. """ up_fn = self._base_builder.update_acquisition_function(self._base_function, model, dataset) diff --git a/trieste/acquisition/function/greedy_batch.py b/trieste/acquisition/function/greedy_batch.py index afa57b64b7..0a0fba229d 100644 --- a/trieste/acquisition/function/greedy_batch.py +++ b/trieste/acquisition/function/greedy_batch.py @@ -16,7 +16,7 @@ """ from __future__ import annotations -from typing import Callable, Dict, Mapping, Optional, Union, cast +from typing import Any, Callable, Dict, Mapping, Optional, Union, cast import gpflow import tensorflow as tf @@ -121,11 +121,13 @@ def prepare_acquisition_function( model: ProbabilisticModel, dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. Must be populated. :param pending_points: The points we penalize with respect to. + :param metadata: Unused. :return: The (log) expected improvement penalized with respect to the pending points. :raise tf.errors.InvalidArgumentError: If the ``dataset`` is empty. """ @@ -146,6 +148,7 @@ def update_acquisition_function( dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. @@ -156,6 +159,7 @@ def update_acquisition_function( :param new_optimization_step: Indicates whether this call to update_acquisition_function is to start of a new optimization step, of to continue collecting batch of points for the current step. Defaults to ``True``. + :param metadata: Unused. :return: The updated acquisition function. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) @@ -526,6 +530,7 @@ def prepare_acquisition_function( models: Mapping[Tag, FantasizerModelOrStack], datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ @@ -533,6 +538,7 @@ def prepare_acquisition_function( :param datasets: The data from the observer (optional). :param pending_points: Points already chosen to be in the current batch (of shape [M,D]), where M is the number of pending points and D is the search space dimension. + :param metadata: Unused. :return: An acquisition function. """ for model in models.values(): @@ -558,6 +564,7 @@ def update_acquisition_function( datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. 
@@ -568,6 +575,7 @@ def update_acquisition_function( :param new_optimization_step: Indicates whether this call to update_acquisition_function is to start of a new optimization step, of to continue collecting batch of points for the current step. Defaults to ``True``. + :param metadata: Unused. :return: The updated acquisition function. """ if pending_points is None: diff --git a/trieste/acquisition/function/multi_objective.py b/trieste/acquisition/function/multi_objective.py index 86b09e2588..0cfb3bf896 100644 --- a/trieste/acquisition/function/multi_objective.py +++ b/trieste/acquisition/function/multi_objective.py @@ -18,7 +18,7 @@ import math from itertools import combinations, product -from typing import Callable, Mapping, Optional, Sequence, cast +from typing import Any, Callable, Mapping, Optional, Sequence, cast import tensorflow as tf import tensorflow_probability as tfp @@ -85,10 +85,12 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. Must be populated. + :param metadata: Unused. :return: The expected hypervolume improvement acquisition function. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) @@ -115,11 +117,13 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: The data from the observer. Must be populated. + :param metadata: Unused. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) dataset = cast(Dataset, dataset) @@ -313,10 +317,12 @@ def prepare_acquisition_function( self, model: HasReparamSampler, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. Must have event shape [1]. :param dataset: The data from the observer. Must be populated. + :param metadata: Unused. :return: The batch expected hypervolume improvement acquisition function. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) @@ -552,6 +558,7 @@ def prepare_acquisition_function( models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ Creates a new instance of the acquisition function. @@ -559,6 +566,7 @@ def prepare_acquisition_function( :param models: The models. :param datasets: The data from the observer. Must be populated. :param pending_points: The points we penalize with respect to. + :param metadata: Unused. :return: The HIPPO acquisition function. :raise tf.errors.InvalidArgumentError: If the ``dataset`` is empty. """ @@ -583,6 +591,7 @@ def update_acquisition_function( datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ Updates the acquisition function. @@ -595,6 +604,7 @@ def update_acquisition_function( :param new_optimization_step: Indicates whether this call to update_acquisition_function is to start of a new optimization step, of to continue collecting batch of points for the current step. Defaults to ``True``. 
+ :param metadata: Unused. :return: The updated acquisition function. """ tf.debugging.Assert(datasets is not None, [tf.constant([])]) From da636d0e4188d0da6166d7a5e07ac7d1214bdc7c Mon Sep 17 00:00:00 2001 From: Uri Granta Date: Tue, 11 Jul 2023 16:33:23 +0100 Subject: [PATCH 03/11] Expose in AskTell --- trieste/ask_tell_optimization.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/trieste/ask_tell_optimization.py b/trieste/ask_tell_optimization.py index 3b5c973963..fe60693bd2 100644 --- a/trieste/ask_tell_optimization.py +++ b/trieste/ask_tell_optimization.py @@ -21,7 +21,7 @@ from __future__ import annotations from copy import deepcopy -from typing import Dict, Generic, Mapping, TypeVar, cast, overload +from typing import Any, Dict, Generic, Mapping, Optional, TypeVar, cast, overload try: import pandas as pd @@ -375,10 +375,11 @@ def to_result(self, copy: bool = True) -> OptimizationResult[StateType]: record: Record[StateType] = self.to_record(copy=copy) return OptimizationResult(Ok(record), []) - def ask(self) -> TensorType: + def ask(self, metadata: Optional[Mapping[str, Any]] = None) -> TensorType: """Suggests a point (or points in batch mode) to observe by optimizing the acquisition function. If the acquisition is stateful, its state is saved. + :param metadata: Acquisition metadata to pass to the rule (Optional). :return: A :class:`TensorType` instance representing suggested point(s). """ # This trick deserves a comment to explain what's going on @@ -390,7 +391,7 @@ def ask(self) -> TensorType: with Timer() as query_point_generation_timer: points_or_stateful = self._acquisition_rule.acquire( - self._search_space, self._models, datasets=self._datasets + self._search_space, self._models, datasets=self._datasets, metadata=metadata ) if callable(points_or_stateful): From f14c9023b09931b657178ff9d905283caaddc400 Mon Sep 17 00:00:00 2001 From: Uri Granta Date: Tue, 11 Jul 2023 16:39:59 +0100 Subject: [PATCH 04/11] Undo changes --- docs/notebooks/code_overview.pct.py | 13 +---- .../acquisition/function/test_function.py | 8 +-- .../multi_objective/test_function.py | 6 +- tests/unit/acquisition/test_combination.py | 5 +- tests/unit/acquisition/test_interface.py | 11 +--- tests/unit/acquisition/test_rule.py | 11 +--- tests/unit/test_ask_tell_optimization.py | 3 +- tests/unit/test_bayesian_optimizer.py | 7 +-- tests/unit/test_logging.py | 3 +- tests/util/misc.py | 2 - trieste/acquisition/combination.py | 11 +--- .../acquisition/function/active_learning.py | 20 ++----- .../function/continuous_thompson_sampling.py | 10 +--- trieste/acquisition/function/entropy.py | 20 +------ trieste/acquisition/function/function.py | 58 +------------------ trieste/acquisition/function/greedy_batch.py | 10 +--- .../acquisition/function/multi_objective.py | 12 +--- trieste/acquisition/interface.py | 46 ++------------- trieste/acquisition/rule.py | 40 +------------ trieste/ask_tell_optimization.py | 7 +-- 20 files changed, 38 insertions(+), 265 deletions(-) diff --git a/docs/notebooks/code_overview.pct.py b/docs/notebooks/code_overview.pct.py index febbd93f85..85216ce818 100644 --- a/docs/notebooks/code_overview.pct.py +++ b/docs/notebooks/code_overview.pct.py @@ -80,7 +80,7 @@ # %% from __future__ import annotations -from typing import Any, Mapping, Optional +from typing import Optional import tensorflow as tf from trieste.types import TensorType @@ -203,10 +203,7 @@ class HasGizmoReparamSamplerAndObservationNoise( class 
ProbabilityOfValidity(SingleModelAcquisitionBuilder[ProbabilisticModel]): def prepare_acquisition_function( - self, - model: ProbabilisticModel, - dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, + self, model: ProbabilisticModel, dataset: Optional[Dataset] = None ) -> AcquisitionFunction: def acquisition(at: TensorType) -> TensorType: mean, _ = model.predict_y(tf.squeeze(at, -2)) @@ -222,10 +219,7 @@ def acquisition(at: TensorType) -> TensorType: # %% class ProbabilityOfValidity2(SingleModelAcquisitionBuilder[ProbabilisticModel]): def prepare_acquisition_function( - self, - model: ProbabilisticModel, - dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, + self, model: ProbabilisticModel, dataset: Optional[Dataset] = None ) -> AcquisitionFunction: @tf.function def acquisition(at: TensorType) -> TensorType: @@ -239,7 +233,6 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return function # no need to update anything diff --git a/tests/unit/acquisition/function/test_function.py b/tests/unit/acquisition/function/test_function.py index 64358b53e1..986092681e 100644 --- a/tests/unit/acquisition/function/test_function.py +++ b/tests/unit/acquisition/function/test_function.py @@ -15,7 +15,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any, Callable, Optional, Sequence +from typing import Callable, Optional, Sequence from unittest.mock import MagicMock import numpy.testing as npt @@ -956,7 +956,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda x: tf.ones_like(tf.squeeze(x, -2)) @@ -1000,7 +999,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda x: tf.cast(tf.squeeze(x, -2) >= 0, x.dtype) @@ -1026,7 +1024,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda x: tf.cast(tf.squeeze(x, -2) >= 0, x.dtype) @@ -1051,7 +1048,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return raise_exc @@ -1084,7 +1080,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: def acquisition(x: TensorType) -> TensorType: x_ = tf.squeeze(x, -2) @@ -1116,7 +1111,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return pof diff --git a/tests/unit/acquisition/multi_objective/test_function.py b/tests/unit/acquisition/multi_objective/test_function.py index 69d5ceff8f..7bcf0e99ef 100644 --- a/tests/unit/acquisition/multi_objective/test_function.py +++ 
b/tests/unit/acquisition/multi_objective/test_function.py @@ -15,7 +15,7 @@ import itertools import math -from typing import Any, Callable, Mapping, Optional, Sequence, cast +from typing import Callable, Mapping, Optional, Sequence, cast import numpy.testing as npt import pytest @@ -85,7 +85,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda x: tf.ones((tf.shape(x)[0], 1), dtype=tf.float64) @@ -758,7 +757,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda x: tf.ones_like(tf.squeeze(x, -2)) @@ -801,7 +799,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: def acquisition(x: TensorType) -> TensorType: x_ = tf.squeeze(x, -2) @@ -839,7 +836,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return raise_exc diff --git a/tests/unit/acquisition/test_combination.py b/tests/unit/acquisition/test_combination.py index 834f527766..95415f6994 100644 --- a/tests/unit/acquisition/test_combination.py +++ b/tests/unit/acquisition/test_combination.py @@ -14,7 +14,7 @@ from __future__ import annotations from collections.abc import Mapping, Sequence -from typing import Any, Optional +from typing import Optional import numpy.testing as npt import pytest @@ -57,7 +57,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return raise_exc @@ -73,7 +72,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return self._f @@ -82,7 +80,6 @@ def update_acquisition_function( function: AcquisitionFunction, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda x: function(x) + 1 diff --git a/tests/unit/acquisition/test_interface.py b/tests/unit/acquisition/test_interface.py index b0c46cd68f..0d8b68421c 100644 --- a/tests/unit/acquisition/test_interface.py +++ b/tests/unit/acquisition/test_interface.py @@ -13,7 +13,7 @@ # limitations under the License. 
from __future__ import annotations -from typing import Any, Iterator, List, Mapping, Optional, Tuple, cast +from typing import Iterator, List, Mapping, Optional, Tuple, cast import pytest @@ -50,7 +50,6 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return raise_exc @@ -61,7 +60,6 @@ def prepare_acquisition_function( model: ProbabilisticModel, dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return raise_exc @@ -86,19 +84,16 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: assert dataset is data["foo"] assert model is models["foo"] - assert metadata == {"iteration_id": 1} return raise_exc FOO: Tag = "foo" BAR: Tag = "bar" data = {FOO: empty_dataset([1], [1]), BAR: empty_dataset([1], [1])} - metadata = {"iteration_id": 1} models = {FOO: QuadraticMeanAndRBFKernel(), BAR: QuadraticMeanAndRBFKernel()} - Builder().using(FOO).prepare_acquisition_function(models, datasets=data, metadata=metadata) + Builder().using(FOO).prepare_acquisition_function(models, datasets=data) def test_single_model_greedy_acquisition_builder_raises_immediately_for_wrong_key() -> None: @@ -188,7 +183,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: assert datasets is not None assert len(datasets) == 1 @@ -197,7 +191,6 @@ def prepare_acquisition_function( assert "FOO" not in datasets assert isinstance(datasets, CustomDatasets) assert datasets.iteration_id == 2 - assert metadata is None return raise_exc data = CustomDatasets({OBJECTIVE: empty_dataset([1], [1])}, 2) diff --git a/tests/unit/acquisition/test_rule.py b/tests/unit/acquisition/test_rule.py index 6b0325b0c1..13b6acabd2 100644 --- a/tests/unit/acquisition/test_rule.py +++ b/tests/unit/acquisition/test_rule.py @@ -15,7 +15,7 @@ import copy from collections.abc import Mapping -from typing import Any, Callable, Optional +from typing import Callable, Optional import gpflow import numpy.testing as npt @@ -234,7 +234,6 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda x: -quadratic(tf.squeeze(x, -2) - 1) @@ -243,7 +242,6 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: self._updated = True return function @@ -266,7 +264,6 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: noise = tf.random.uniform([], -0.05, 0.05, dtype=tf.float64) return lambda x: -quadratic(tf.squeeze(x, -2) - 1) + noise @@ -276,7 +273,6 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return function @@ -314,7 +310,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: 
Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda at: -tf.reduce_max(models[OBJECTIVE].predict(at)[0], axis=-2) @@ -367,7 +362,6 @@ def prepare_acquisition_function( model: ProbabilisticModel, dataset: Optional[Dataset] = None, pending_points: TensorType = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: if pending_points is None: return lambda at: -tf.reduce_max(model.predict(at)[0], axis=-2) @@ -384,7 +378,6 @@ def update_acquisition_function( dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: self._update_count += 1 return self.prepare_acquisition_function( @@ -447,7 +440,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return lambda at: tf.squeeze(-models[OBJECTIVE].predict(at)[0], -1) @@ -563,7 +555,6 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: return (search_space.upper[None] + search_space.lower[None]) / 2 diff --git a/tests/unit/test_ask_tell_optimization.py b/tests/unit/test_ask_tell_optimization.py index d99bfcc9de..7c638f2baf 100644 --- a/tests/unit/test_ask_tell_optimization.py +++ b/tests/unit/test_ask_tell_optimization.py @@ -13,7 +13,7 @@ # limitations under the License. from __future__ import annotations -from typing import Any, Mapping, Optional +from typing import Mapping, Optional import pytest import tensorflow as tf @@ -326,7 +326,6 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | None, TensorType]: def go(state: int | None) -> tuple[int | None, TensorType]: self.states_received.append(state) diff --git a/tests/unit/test_bayesian_optimizer.py b/tests/unit/test_bayesian_optimizer.py index f0afc6a720..dead19ac21 100644 --- a/tests/unit/test_bayesian_optimizer.py +++ b/tests/unit/test_bayesian_optimizer.py @@ -16,7 +16,7 @@ import tempfile from collections.abc import Mapping from pathlib import Path -from typing import Any, NoReturn, Optional +from typing import NoReturn, Optional import numpy.testing as npt import pytest @@ -365,7 +365,6 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | None, TensorType]: def go(state: int | None) -> tuple[int | None, TensorType]: self.states_received.append(state) @@ -438,7 +437,6 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> NoReturn: raise _Whoops @@ -498,7 +496,6 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> NoReturn: assert False @@ -535,7 +532,6 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | 
None, TensorType]: def go(previous_state: int | None) -> tuple[int | None, TensorType]: if previous_state is None: @@ -619,7 +615,6 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | None, TensorType]: def go(state: int | None) -> tuple[int | None, TensorType]: new_state = 0 if state is None else state + 1 diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index 68929d686f..628a6e0df4 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -18,7 +18,7 @@ from collections.abc import Mapping from itertools import zip_longest from time import sleep -from typing import Any, Optional +from typing import Optional import numpy.testing as npt import pytest @@ -218,7 +218,6 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: sleep(acq_time) return self._qp diff --git a/tests/util/misc.py b/tests/util/misc.py index 5ab1f843f1..be71e7e5f6 100644 --- a/tests/util/misc.py +++ b/tests/util/misc.py @@ -181,13 +181,11 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ :param search_space: Unused. :param models: Unused. :param datasets: Unused. - :param metadata: Unused. :return: The fixed value specified on initialisation. """ return self._qp diff --git a/trieste/acquisition/combination.py b/trieste/acquisition/combination.py index 9bf2675c3f..c302e0fc68 100644 --- a/trieste/acquisition/combination.py +++ b/trieste/acquisition/combination.py @@ -15,7 +15,7 @@ from abc import abstractmethod from collections.abc import Mapping, Sequence -from typing import Any, Callable, Optional +from typing import Callable, Optional import tensorflow as tf @@ -52,7 +52,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: r""" Return an acquisition function. This acquisition function is defined by first building @@ -63,12 +62,10 @@ def prepare_acquisition_function( :param datasets: The data from the observer. :param models: The models over each dataset in ``datasets``. - :param metadata: The acquisition metadata to pass to the subfunctions (Optional). :return: The reduced acquisition function. """ self.functions = tuple( - acq.prepare_acquisition_function(models, datasets=datasets, metadata=metadata) - for acq in self.acquisitions + acq.prepare_acquisition_function(models, datasets=datasets) for acq in self.acquisitions ) def evaluate_acquisition_function_fn(at: TensorType) -> TensorType: @@ -81,16 +78,14 @@ def update_acquisition_function( function: AcquisitionFunction, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param models: The model. :param datasets: Unused. - :param metadata: The acquisition metadata to pass to the subfunctions (Optional). 
""" self.functions = tuple( - acq.update_acquisition_function(function, models, datasets=datasets, metadata=metadata) + acq.update_acquisition_function(function, models, datasets=datasets) for function, acq in zip(self.functions, self.acquisitions) ) diff --git a/trieste/acquisition/function/active_learning.py b/trieste/acquisition/function/active_learning.py index 90a5fba80e..adcd73e853 100644 --- a/trieste/acquisition/function/active_learning.py +++ b/trieste/acquisition/function/active_learning.py @@ -20,7 +20,7 @@ from __future__ import annotations import math -from typing import Any, Mapping, Optional, Sequence, Union +from typing import Optional, Sequence, Union import tensorflow as tf import tensorflow_probability as tfp @@ -54,12 +54,11 @@ def prepare_acquisition_function( self, model: SupportsPredictJoint, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: Unused. - :param metadata: Unused. + :return: The determinant of the predictive function. """ if not isinstance(model, SupportsPredictJoint): @@ -75,13 +74,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: SupportsPredictJoint, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: Unused. - :param metadata: Unused. """ return function # no need to update anything @@ -153,12 +150,10 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: Unused. - :param metadata: Unused. :return: The expected feasibility function. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -170,7 +165,6 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return function # no need to update anything @@ -279,12 +273,11 @@ def prepare_acquisition_function( self, model: FastUpdateModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: Unused. - :param metadata: Unused. + :return: The integral of the predictive variance. """ if not isinstance(model, FastUpdateModel): @@ -300,13 +293,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: FastUpdateModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: Unused. - :param metadata: Unused. """ return function # no need to update anything @@ -446,12 +437,11 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: Unused. - :param metadata: Unused. + :return: The determinant of the predictive function. 
""" @@ -462,13 +452,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: Unused. - :param metadata: Unused. """ return function # no need to update anything diff --git a/trieste/acquisition/function/continuous_thompson_sampling.py b/trieste/acquisition/function/continuous_thompson_sampling.py index d0ea0c0a28..38b824a10c 100644 --- a/trieste/acquisition/function/continuous_thompson_sampling.py +++ b/trieste/acquisition/function/continuous_thompson_sampling.py @@ -16,7 +16,7 @@ """ from __future__ import annotations -from typing import Any, Callable, Mapping, Optional, Type +from typing import Any, Callable, Optional, Type import tensorflow as tf @@ -60,13 +60,11 @@ def prepare_acquisition_function( model: HasTrajectorySampler, dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TrajectoryFunction: """ :param model: The model. :param dataset: The data from the observer (not used). :param pending_points: The points already in the current batch (not used). - :param metadata: Unused. :return: A negated trajectory sampled from the model. """ if not isinstance(model, HasTrajectorySampler): @@ -86,7 +84,6 @@ def update_acquisition_function( dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, - metadata: Optional[Mapping[str, Any]] = None, ) -> TrajectoryFunction: """ :param function: The trajectory function to update. @@ -96,7 +93,6 @@ def update_acquisition_function( :param new_optimization_step: Indicates whether this call to update_acquisition_function is to start of a new optimization step, of to continue collecting batch of points for the current step. Defaults to ``True``. - :param metadata: Unused. :return: A new trajectory sampled from the model. """ @@ -142,12 +138,10 @@ def prepare_acquisition_function( self, model: HasTrajectorySampler, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TrajectoryFunction: """ :param model: The model. :param dataset: The data from the observer (not used). - :param metadata: Unused. :return: A negated trajectory sampled from the model. """ if not isinstance(model, HasTrajectorySampler): @@ -166,13 +160,11 @@ def update_acquisition_function( function: TrajectoryFunction, model: HasTrajectorySampler, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TrajectoryFunction: """ :param function: The trajectory function to update. :param model: The model. :param dataset: The data from the observer (not used). - :param metadata: Unused. :return: A new trajectory sampled from the model. 
""" if function is not self._negated_trajectory: diff --git a/trieste/acquisition/function/entropy.py b/trieste/acquisition/function/entropy.py index 46c06f75c8..7f04af0a76 100644 --- a/trieste/acquisition/function/entropy.py +++ b/trieste/acquisition/function/entropy.py @@ -16,7 +16,7 @@ """ from __future__ import annotations -from typing import Any, List, Mapping, Optional, TypeVar, cast, overload +from typing import List, Optional, TypeVar, cast, overload import tensorflow as tf import tensorflow_probability as tfp @@ -120,12 +120,10 @@ def prepare_acquisition_function( self, model: ProbabilisticModelType, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. - :param metadata: Unused. :return: The max-value entropy search acquisition function modified for objective minimisation. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -147,13 +145,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModelType, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: The data from the observer. - :param metadata: Unused. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) dataset = cast(Dataset, dataset) @@ -329,13 +325,11 @@ def prepare_acquisition_function( model: GIBBONModelType, dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. Must be populated. :param pending_points: The points we penalize with respect to. - :param metadata: Unused. :return: The GIBBON acquisition function modified for objective minimisation. :raise tf.errors.InvalidArgumentError: If ``dataset`` is empty. """ @@ -362,7 +356,6 @@ def update_acquisition_function( dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. @@ -373,7 +366,6 @@ def update_acquisition_function( :param new_optimization_step: Indicates whether this call to update_acquisition_function is to start of a new optimization step, or to continue collecting batch of points for the current step. Defaults to ``True``. - :param metadata: Unused. :return: The updated acquisition function. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) @@ -684,7 +676,6 @@ def prepare_acquisition_function( self, model: MUMBOModelType, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The multifidelity model. @@ -705,7 +696,6 @@ def update_acquisition_function( function: AcquisitionFunction, model: MUMBOModelType, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. 
@@ -815,15 +805,11 @@ def __init__(self, fidelity_costs: List[float]): self._num_fidelities = len(self._fidelity_costs) def prepare_acquisition_function( - self, - model: ProbabilisticModel, - dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, + self, model: ProbabilisticModel, dataset: Optional[Dataset] = None ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. Not actually used here. - :param metadata: Unused. :return: The reciprocal of the costs corresponding to the fidelity level of each input. """ @@ -851,7 +837,6 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ Nothing to do here, so just return previous cost function. @@ -859,6 +844,5 @@ def update_acquisition_function( :param function: The acquisition function to update. :param model: The model. :param dataset: The data from the observer. - :param metadata: Unused. """ return function diff --git a/trieste/acquisition/function/function.py b/trieste/acquisition/function/function.py index e91957ba6c..f98af45fa3 100644 --- a/trieste/acquisition/function/function.py +++ b/trieste/acquisition/function/function.py @@ -17,7 +17,7 @@ """ from __future__ import annotations -from typing import Any, Callable, Mapping, Optional, cast +from typing import Callable, Mapping, Optional, cast import tensorflow as tf import tensorflow_probability as tfp @@ -54,15 +54,11 @@ def __repr__(self) -> str: return "ProbabilityOfImprovement()" def prepare_acquisition_function( - self, - model: ProbabilisticModel, - dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, + self, model: ProbabilisticModel, dataset: Optional[Dataset] = None ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. Must be populated. - :param metadata: Unused. :return: The probability of improvement function. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -80,12 +76,10 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. - :param metadata: Unused. :param dataset: The data from the observer. Must be populated. """ tf.debugging.Assert(dataset is not None, []) @@ -123,12 +117,10 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. Must be populated. - :param metadata: Unused. :return: The expected improvement function. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -162,13 +154,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: The data from the observer. Must be populated. - :param metadata: Unused. 
""" tf.debugging.Assert(dataset is not None, [tf.constant([])]) dataset = cast(Dataset, dataset) @@ -246,12 +236,10 @@ def prepare_acquisition_function( self, model: SupportsGetObservationNoise, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. Must be populated. - :param metadata: Unused. :return: The expected improvement function. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -274,13 +262,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: SupportsGetObservationNoise, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: The data from the observer. Must be populated. - :param metadata: Unused. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) dataset = cast(Dataset, dataset) @@ -362,12 +348,10 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: Unused. - :param metadata: Unused. :return: The negative lower confidence bound function. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -381,13 +365,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: Unused. - :param metadata: Unused. """ return function # no need to update anything @@ -474,12 +456,10 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: Unused. - :param metadata: Unused. :return: The probability of feasibility function. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -491,13 +471,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: Unused. - :param metadata: Unused. """ return function # no need to update anything @@ -571,12 +549,10 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: Unused. :param dataset: Unused. - :param metadata: Unused. :return: The function for feasibility of constraints. """ return fast_constraints_feasibility(self._search_space, self._smoothing_function) @@ -586,13 +562,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: Unused. 
:param dataset: Unused. - :param metadata: Unused. :return: The function for feasibility of constraints. """ return function # No need to update anything. @@ -687,12 +661,10 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param models: The models over each tag. :param datasets: The data from the observer. - :param metadata: Unused. :return: The expected constrained improvement acquisition function. This function will raise :exc:`ValueError` or :exc:`~tf.errors.InvalidArgumentError` if used with a batch size greater than one. @@ -743,13 +715,11 @@ def update_acquisition_function( function: AcquisitionFunction, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param models: The models for each tag. :param datasets: The data from the observer. - :param metadata: Unused. """ tf.debugging.Assert(datasets is not None, [tf.constant([])]) datasets = cast(Mapping[Tag, Dataset], datasets) @@ -846,12 +816,10 @@ def prepare_acquisition_function( self, model: HasReparamSampler, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model over the specified ``dataset``. Must have output dimension [1]. :param dataset: The data from the observer. Cannot be empty. - :param metadata: Unused. :return: The estimated *expected improvement* acquisition function. :raise ValueError (or InvalidArgumentError): If ``dataset`` is not populated, ``model`` does not have an output dimension of [1] or does not have a ``reparam_sample`` method. @@ -886,13 +854,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: HasReparamSampler, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. Must have output dimension [1]. Unused here. :param dataset: The data from the observer. Cannot be empty - :param metadata: Unused. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) dataset = cast(Dataset, dataset) @@ -992,12 +958,10 @@ def prepare_acquisition_function( self, model: SupportsReparamSamplerObservationNoise, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model over the specified ``dataset``. Must have output dimension [1]. :param dataset: The data from the observer. Cannot be empty. - :param metadata: Unused. :return: The estimated *expected improvement* acquisition function. :raise ValueError (or InvalidArgumentError): If ``dataset`` is not populated, ``model`` does not have an output dimension of [1], does not have a ``reparam_sample`` method, or @@ -1034,13 +998,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: SupportsReparamSamplerObservationNoise, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. Must have output dimension [1]. Unused here :param dataset: The data from the observer. Cannot be empty. - :param metadata: Unused. 
""" tf.debugging.Assert(dataset is not None, [tf.constant([])]) dataset = cast(Dataset, dataset) @@ -1143,12 +1105,10 @@ def prepare_acquisition_function( self, model: HasReparamSampler, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. Must have event shape [1]. :param dataset: The data from the observer. Must be populated. - :param metadata: Unused. :return: The batch *expected improvement* acquisition function. :raise ValueError (or InvalidArgumentError): If ``dataset`` is not populated, or ``model`` does not have an event shape of [1]. @@ -1171,13 +1131,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: HasReparamSampler, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. Must have event shape [1]. :param dataset: The data from the observer. Must be populated. - :param metadata: Unused. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) dataset = cast(Dataset, dataset) @@ -1266,12 +1224,10 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. Must have event shape [1]. :param dataset: The data from the observer. Must be populated. - :param metadata: Unused. :return: The batch *expected improvement* acquisition function. :raise ValueError (or InvalidArgumentError): If ``dataset`` is not populated, or ``model`` does not have an event shape of [1]. @@ -1304,13 +1260,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. Must have event shape [1]. :param dataset: The data from the observer. Must be populated. - :param metadata: Unused. """ tf.debugging.Assert(dataset is not None, []) dataset = cast(Dataset, dataset) @@ -1877,12 +1831,10 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: Unused. - :param metadata: Unused. :return: The multiple optimism negative lower confidence bound function. """ return multiple_optimism_lower_confidence_bound(model, self._search_space.dimension) @@ -1892,13 +1844,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: Unused. - :param metadata: Unused. """ tf.debugging.Assert( isinstance(function, multiple_optimism_lower_confidence_bound), [tf.constant([])] @@ -1990,12 +1940,10 @@ def prepare_acquisition_function( self, model: ProbabilisticModelType, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data to use to build the acquisition function (optional). - :param metadata: Unused. :return: An acquisition function. 
""" self._base_function = self._base_builder.prepare_acquisition_function(model, dataset) @@ -2011,13 +1959,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModelType, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: The data from the observer (optional). - :param metadata: Unused. :return: The updated acquisition function. """ up_fn = self._base_builder.update_acquisition_function(self._base_function, model, dataset) diff --git a/trieste/acquisition/function/greedy_batch.py b/trieste/acquisition/function/greedy_batch.py index 0a0fba229d..afa57b64b7 100644 --- a/trieste/acquisition/function/greedy_batch.py +++ b/trieste/acquisition/function/greedy_batch.py @@ -16,7 +16,7 @@ """ from __future__ import annotations -from typing import Any, Callable, Dict, Mapping, Optional, Union, cast +from typing import Callable, Dict, Mapping, Optional, Union, cast import gpflow import tensorflow as tf @@ -121,13 +121,11 @@ def prepare_acquisition_function( model: ProbabilisticModel, dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. Must be populated. :param pending_points: The points we penalize with respect to. - :param metadata: Unused. :return: The (log) expected improvement penalized with respect to the pending points. :raise tf.errors.InvalidArgumentError: If the ``dataset`` is empty. """ @@ -148,7 +146,6 @@ def update_acquisition_function( dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. @@ -159,7 +156,6 @@ def update_acquisition_function( :param new_optimization_step: Indicates whether this call to update_acquisition_function is to start of a new optimization step, of to continue collecting batch of points for the current step. Defaults to ``True``. - :param metadata: Unused. :return: The updated acquisition function. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) @@ -530,7 +526,6 @@ def prepare_acquisition_function( models: Mapping[Tag, FantasizerModelOrStack], datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ @@ -538,7 +533,6 @@ def prepare_acquisition_function( :param datasets: The data from the observer (optional). :param pending_points: Points already chosen to be in the current batch (of shape [M,D]), where M is the number of pending points and D is the search space dimension. - :param metadata: Unused. :return: An acquisition function. """ for model in models.values(): @@ -564,7 +558,6 @@ def update_acquisition_function( datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. 
@@ -575,7 +568,6 @@ def update_acquisition_function( :param new_optimization_step: Indicates whether this call to update_acquisition_function is to start of a new optimization step, of to continue collecting batch of points for the current step. Defaults to ``True``. - :param metadata: Unused. :return: The updated acquisition function. """ if pending_points is None: diff --git a/trieste/acquisition/function/multi_objective.py b/trieste/acquisition/function/multi_objective.py index 0cfb3bf896..86b09e2588 100644 --- a/trieste/acquisition/function/multi_objective.py +++ b/trieste/acquisition/function/multi_objective.py @@ -18,7 +18,7 @@ import math from itertools import combinations, product -from typing import Any, Callable, Mapping, Optional, Sequence, cast +from typing import Callable, Mapping, Optional, Sequence, cast import tensorflow as tf import tensorflow_probability as tfp @@ -85,12 +85,10 @@ def prepare_acquisition_function( self, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer. Must be populated. - :param metadata: Unused. :return: The expected hypervolume improvement acquisition function. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) @@ -117,13 +115,11 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModel, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: The data from the observer. Must be populated. - :param metadata: Unused. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) dataset = cast(Dataset, dataset) @@ -317,12 +313,10 @@ def prepare_acquisition_function( self, model: HasReparamSampler, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. Must have event shape [1]. :param dataset: The data from the observer. Must be populated. - :param metadata: Unused. :return: The batch expected hypervolume improvement acquisition function. """ tf.debugging.Assert(dataset is not None, [tf.constant([])]) @@ -558,7 +552,6 @@ def prepare_acquisition_function( models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ Creates a new instance of the acquisition function. @@ -566,7 +559,6 @@ def prepare_acquisition_function( :param models: The models. :param datasets: The data from the observer. Must be populated. :param pending_points: The points we penalize with respect to. - :param metadata: Unused. :return: The HIPPO acquisition function. :raise tf.errors.InvalidArgumentError: If the ``dataset`` is empty. """ @@ -591,7 +583,6 @@ def update_acquisition_function( datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ Updates the acquisition function. @@ -604,7 +595,6 @@ def update_acquisition_function( :param new_optimization_step: Indicates whether this call to update_acquisition_function is to start of a new optimization step, of to continue collecting batch of points for the current step. Defaults to ``True``. 
- :param metadata: Unused. :return: The updated acquisition function. """ tf.debugging.Assert(datasets is not None, [tf.constant([])]) diff --git a/trieste/acquisition/interface.py b/trieste/acquisition/interface.py index 817aaa9c57..e7c92859d4 100644 --- a/trieste/acquisition/interface.py +++ b/trieste/acquisition/interface.py @@ -18,7 +18,7 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import Any, Callable, Generic, Mapping, Optional +from typing import Callable, Generic, Mapping, Optional from ..data import Dataset from ..models.interfaces import ProbabilisticModelType @@ -57,7 +57,6 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ Prepare an acquisition function. We assume that this requires at least models, but @@ -65,7 +64,6 @@ def prepare_acquisition_function( :param models: The models for each tag. :param datasets: The data from the observer (optional). - :param metadata: Any metadata to pass to the acquisition function (optional). :return: An acquisition function. """ @@ -74,7 +72,6 @@ def update_acquisition_function( function: AcquisitionFunction, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ Update an acquisition function. By default this generates a new acquisition function each @@ -85,7 +82,6 @@ def update_acquisition_function( :param function: The acquisition function to update. :param models: The models for each tag. :param datasets: The data from the observer (optional). - :param metadata: Any metadata to pass to the acquisition function (optional). :return: The updated acquisition function. """ return self.prepare_acquisition_function(models, datasets=datasets) @@ -114,12 +110,9 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return self.single_builder.prepare_acquisition_function( - models[tag], - dataset=None if datasets is None else datasets[tag], - metadata=metadata, + models[tag], dataset=None if datasets is None else datasets[tag] ) def update_acquisition_function( @@ -127,13 +120,9 @@ def update_acquisition_function( function: AcquisitionFunction, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return self.single_builder.update_acquisition_function( - function, - models[tag], - dataset=None if datasets is None else datasets[tag], - metadata=metadata, + function, models[tag], dataset=None if datasets is None else datasets[tag] ) def __repr__(self) -> str: @@ -146,12 +135,10 @@ def prepare_acquisition_function( self, model: ProbabilisticModelType, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data to use to build the acquisition function (optional). - :param metadata: Any metadata to pass to the acquisition function (optional). :return: An acquisition function. 
""" @@ -160,16 +147,14 @@ def update_acquisition_function( function: AcquisitionFunction, model: ProbabilisticModelType, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. :param model: The model. :param dataset: The data from the observer (optional). - :param metadata: Any metadata to pass to the acquisition function (optional). :return: The updated acquisition function. """ - return self.prepare_acquisition_function(model, dataset=dataset, metadata=metadata) + return self.prepare_acquisition_function(model, dataset=dataset) class GreedyAcquisitionFunctionBuilder(Generic[ProbabilisticModelType], ABC): @@ -189,7 +174,6 @@ def prepare_acquisition_function( models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ Generate a new acquisition function. The first time this is called, ``pending_points`` @@ -200,7 +184,6 @@ def prepare_acquisition_function( :param datasets: The data from the observer (optional). :param pending_points: Points already chosen to be in the current batch (of shape [M,D]), where M is the number of pending points and D is the search space dimension. - :param metadata: Any metadata to pass to the acquisition function (optional). :return: An acquisition function. """ @@ -211,7 +194,6 @@ def update_acquisition_function( datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ Update an acquisition function. By default this generates a new acquisition function each @@ -227,7 +209,6 @@ def update_acquisition_function( :param new_optimization_step: Indicates whether this call to update_acquisition_function is to start of a new optimization step, of to continue collecting batch of points for the current step. Defaults to ``True``. - :param metadata: Any metadata to pass to the acquisition function (optional). :return: The updated acquisition function. 
""" return self.prepare_acquisition_function( @@ -259,13 +240,11 @@ def prepare_acquisition_function( models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return self.single_builder.prepare_acquisition_function( models[tag], dataset=None if datasets is None else datasets[tag], pending_points=pending_points, - metadata=metadata, ) def update_acquisition_function( @@ -275,7 +254,6 @@ def update_acquisition_function( datasets: Optional[Mapping[Tag, Dataset]] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return self.single_builder.update_acquisition_function( function, @@ -283,7 +261,6 @@ def update_acquisition_function( dataset=None if datasets is None else datasets[tag], pending_points=pending_points, new_optimization_step=new_optimization_step, - metadata=metadata, ) def __repr__(self) -> str: @@ -297,14 +274,12 @@ def prepare_acquisition_function( model: ProbabilisticModelType, dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param model: The model. :param dataset: The data from the observer (optional). :param pending_points: Points already chosen to be in the current batch (of shape [M,D]), where M is the number of pending points and D is the search space dimension. - :param metadata: Any metadata to pass to the acquisition function (optional). :return: An acquisition function. """ @@ -315,7 +290,6 @@ def update_acquisition_function( dataset: Optional[Dataset] = None, pending_points: Optional[TensorType] = None, new_optimization_step: bool = True, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: """ :param function: The acquisition function to update. @@ -326,7 +300,6 @@ def update_acquisition_function( :param new_optimization_step: Indicates whether this call to update_acquisition_function is to start of a new optimization step, of to continue collecting batch of points for the current step. Defaults to ``True``. - :param metadata: Any metadata to pass to the acquisition function (optional). :return: The updated acquisition function. 
""" return self.prepare_acquisition_function( @@ -371,12 +344,9 @@ def prepare_acquisition_function( self, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return self.single_builder.prepare_acquisition_function( - models[tag], - dataset=None if datasets is None else datasets[tag], - metadata=metadata, + models[tag], dataset=None if datasets is None else datasets[tag] ) def update_acquisition_function( @@ -384,13 +354,9 @@ def update_acquisition_function( function: AcquisitionFunction, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: return self.single_builder.update_acquisition_function( - function, - models[tag], - dataset=None if datasets is None else datasets[tag], - metadata=metadata, + function, models[tag], dataset=None if datasets is None else datasets[tag] ) def __repr__(self) -> str: diff --git a/trieste/acquisition/rule.py b/trieste/acquisition/rule.py index 9b98818862..5056438e20 100644 --- a/trieste/acquisition/rule.py +++ b/trieste/acquisition/rule.py @@ -103,7 +103,6 @@ def acquire( search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> ResultType: """ Return a value of type `T_co`. Typically this will be a set of query points, either on its @@ -118,7 +117,6 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model for each tag. :param datasets: The known observer query points and observations for each tag (optional). - :param metadata: Any metadata to use for acquisition (optional). :return: A value of type `T_co`. """ @@ -127,7 +125,6 @@ def acquire_single( search_space: SearchSpaceType, model: ProbabilisticModelType, dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> ResultType: """ A convenience wrapper for :meth:`acquire` that uses only one model, dataset pair. @@ -136,7 +133,6 @@ def acquire_single( is defined. :param model: The model to use. :param dataset: The known observer query points and observations (optional). - :param metadata: Any metadata to use for acquisition (optional). :return: A value of type `T_co`. """ if isinstance(dataset, dict) or isinstance(model, dict): @@ -148,7 +144,6 @@ def acquire_single( search_space, {OBJECTIVE: model}, datasets=None if dataset is None else {OBJECTIVE: dataset}, - metadata=metadata, ) @@ -273,7 +268,6 @@ def acquire( search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ Return the query point(s) that optimizes the acquisition function produced by ``builder`` @@ -283,21 +277,18 @@ def acquire( :param models: The model for each tag. :param datasets: The known observer query points and observations. Whether this is required depends on the acquisition function used. - :param metadata: Any metadata to pass to the acquisition function (optional). :return: The single (or batch of) points to query. 
""" if self._acquisition_function is None: self._acquisition_function = self._builder.prepare_acquisition_function( models, datasets=datasets, - metadata=metadata, ) else: self._acquisition_function = self._builder.update_acquisition_function( self._acquisition_function, models, datasets=datasets, - metadata=metadata, ) summary_writer = logging.get_tensorboard_writer() @@ -330,7 +321,6 @@ def acquire( datasets=datasets, pending_points=points, new_optimization_step=False, - metadata=metadata, ) with tf.name_scope(f"EGO.optimizer[{i+1}]"): chosen_point = self._optimizer(search_space, self._acquisition_function) @@ -547,7 +537,6 @@ def acquire( search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[AsynchronousRuleState | None, TensorType]: """ Constructs a function that, given ``AsynchronousRuleState``, @@ -565,7 +554,6 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model of the known data. Uses the single key `OBJECTIVE`. :param datasets: The known observer query points and observations. - :param metadata: Any metadata to pass to the acquisition function (optional). :return: A function that constructs the next acquisition state and the recommended query points from the previous acquisition state. """ @@ -582,14 +570,12 @@ def acquire( self._acquisition_function = self._builder.prepare_acquisition_function( models, datasets=datasets, - metadata=metadata, ) else: self._acquisition_function = self._builder.update_acquisition_function( self._acquisition_function, models, datasets=datasets, - metadata=metadata, ) def state_func( @@ -707,7 +693,6 @@ def acquire( search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[AsynchronousRuleState | None, TensorType]: """ Constructs a function that, given ``AsynchronousRuleState``, @@ -723,7 +708,6 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model of the known data. Uses the single key `OBJECTIVE`. :param datasets: The known observer query points and observations. - :param metadata: Any metadata to pass to the acquisition function (optional). :return: A function that constructs the next acquisition state and the recommended query points from the previous acquisition state. 
""" @@ -750,7 +734,6 @@ def state_func( models, datasets=datasets, pending_points=state.pending_points, - metadata=metadata, ) else: self._acquisition_function = self._builder.update_acquisition_function( @@ -758,7 +741,6 @@ def state_func( models, datasets=datasets, pending_points=state.pending_points, - metadata=metadata, ) with tf.name_scope("AsynchronousOptimization.optimizer[0]"): @@ -776,7 +758,6 @@ def state_func( datasets=datasets, pending_points=state.pending_points, new_optimization_step=False, - metadata=metadata, ) with tf.name_scope(f"AsynchronousOptimization.optimizer[{i+1}]"): new_point = self._optimizer(search_space, self._acquisition_function) @@ -823,7 +804,6 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ Sample ``num_query_points`` (see :meth:`__init__`) points from the @@ -832,7 +812,6 @@ def acquire( :param search_space: The acquisition search space. :param models: Unused. :param datasets: Unused. - :param metadata: Unused. :return: The ``num_query_points`` points to query. """ samples = search_space.sample(self._num_query_points) @@ -926,7 +905,6 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ Sample `num_search_space_samples` (see :meth:`__init__`) points from the @@ -936,7 +914,6 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model of the known data. Uses the single key `OBJECTIVE`. :param datasets: The known observer query points and observations. - :param metadata: Unused. :return: The ``num_query_points`` points to query. :raise ValueError: If ``models`` do not contain the key `OBJECTIVE`, or it contains any other key. @@ -1042,7 +1019,6 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[State | None, TensorType]: """ Construct a local search space from ``search_space`` according the trust region algorithm, @@ -1074,7 +1050,6 @@ def acquire( :param models: The model for each tag. :param datasets: The known observer query points and observations. Uses the data for key `OBJECTIVE` to calculate the new trust region. - :param metadata: Any metadata to pass to the subrule (optional). :return: A function that constructs the next acquisition state and the recommended query points from the previous acquisition state. :raise KeyError: If ``datasets`` does not contain the key `OBJECTIVE`. @@ -1120,9 +1095,7 @@ def state_func( tf.reduce_min([global_upper, xmin + eps], axis=0), ) - points = self._rule.acquire( - acquisition_space, models, datasets=datasets, metadata=metadata - ) + points = self._rule.acquire(acquisition_space, models, datasets=datasets) state_ = TrustRegion.State(acquisition_space, eps, y_min, is_global) return state_, points @@ -1258,7 +1231,6 @@ def acquire( search_space: Box, models: Mapping[Tag, TrainableSupportsGetKernel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[State | None, TensorType]: """ Construct a local search space from ``search_space`` according the TURBO algorithm, @@ -1284,7 +1256,6 @@ def acquire( :param models: The model for each tag. 
:param datasets: The known observer query points and observations. Uses the data for key `OBJECTIVE` to calculate the new trust region. - :param metadata: Any metadata to pass to the subrule (optional). :return: A function that constructs the next acquisition state and the recommended query points from the previous acquisition state. :raise KeyError: If ``datasets`` does not contain the key `OBJECTIVE`. @@ -1353,9 +1324,7 @@ def state_func( local_model.optimize(local_dataset) # use local model and local dataset to choose next query point(s) - points = self._rule.acquire_single( - acquisition_space, local_model, local_dataset, metadata=metadata - ) + points = self._rule.acquire_single(acquisition_space, local_model, local_dataset) state_ = TURBO.State(acquisition_space, L, failure_counter, success_counter, y_min) return state_, points @@ -1464,7 +1433,6 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """Acquire a batch of points to observe based on the batch hypervolume Sharpe ratio indicator method. @@ -1475,7 +1443,6 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model for each tag. :param datasets: The known observer query points and observations. - :param metadata: Any metadata to pass to the acquisition function (optional). :return: The batch of points to query. """ if models.keys() != {OBJECTIVE}: @@ -1490,14 +1457,13 @@ def acquire( if self._acquisition_function is None: self._acquisition_function = self._builder.prepare_acquisition_function( - models, datasets=datasets, metadata=metadata + models, datasets=datasets ) else: self._acquisition_function = self._builder.update_acquisition_function( self._acquisition_function, models, datasets=datasets, - metadata=metadata, ) # Find non-dominated points diff --git a/trieste/ask_tell_optimization.py b/trieste/ask_tell_optimization.py index fe60693bd2..3b5c973963 100644 --- a/trieste/ask_tell_optimization.py +++ b/trieste/ask_tell_optimization.py @@ -21,7 +21,7 @@ from __future__ import annotations from copy import deepcopy -from typing import Any, Dict, Generic, Mapping, Optional, TypeVar, cast, overload +from typing import Dict, Generic, Mapping, TypeVar, cast, overload try: import pandas as pd @@ -375,11 +375,10 @@ def to_result(self, copy: bool = True) -> OptimizationResult[StateType]: record: Record[StateType] = self.to_record(copy=copy) return OptimizationResult(Ok(record), []) - def ask(self, metadata: Optional[Mapping[str, Any]] = None) -> TensorType: + def ask(self) -> TensorType: """Suggests a point (or points in batch mode) to observe by optimizing the acquisition function. If the acquisition is stateful, its state is saved. - :param metadata: Acquisition metadata to pass to the rule (Optional). :return: A :class:`TensorType` instance representing suggested point(s). 
""" # This trick deserves a comment to explain what's going on @@ -391,7 +390,7 @@ def ask(self, metadata: Optional[Mapping[str, Any]] = None) -> TensorType: with Timer() as query_point_generation_timer: points_or_stateful = self._acquisition_rule.acquire( - self._search_space, self._models, datasets=self._datasets, metadata=metadata + self._search_space, self._models, datasets=self._datasets ) if callable(points_or_stateful): From 60382cd87328cfa30e609f592d327b5c818bbb84 Mon Sep 17 00:00:00 2001 From: Uri Granta Date: Tue, 11 Jul 2023 20:57:52 +0100 Subject: [PATCH 05/11] Try again with MetadataAcquisitionFunctionBuilder class --- .../visualizing_with_tensorboard.pct.py | 2 +- tests/unit/acquisition/test_rule.py | 3 +- tests/unit/test_ask_tell_optimization.py | 3 +- tests/unit/test_bayesian_optimizer.py | 7 +- tests/unit/test_logging.py | 3 +- tests/util/misc.py | 2 + trieste/acquisition/interface.py | 126 +++++++++++++++++- trieste/acquisition/rule.py | 77 +++++++++-- trieste/ask_tell_optimization.py | 7 +- 9 files changed, 208 insertions(+), 22 deletions(-) diff --git a/docs/notebooks/visualizing_with_tensorboard.pct.py b/docs/notebooks/visualizing_with_tensorboard.pct.py index e5396c31a9..1e36ad7ee8 100644 --- a/docs/notebooks/visualizing_with_tensorboard.pct.py +++ b/docs/notebooks/visualizing_with_tensorboard.pct.py @@ -128,7 +128,7 @@ def log(self, dataset): # %% class EGOExtraLogging(trieste.acquisition.rule.EfficientGlobalOptimization): - def acquire(self, search_space, models, datasets=None): + def acquire(self, search_space, models, datasets=None, metadata=None): points = super().acquire(search_space, models, datasets) summary_writer = trieste.logging.get_tensorboard_writer() if summary_writer: diff --git a/tests/unit/acquisition/test_rule.py b/tests/unit/acquisition/test_rule.py index 13b6acabd2..f21ec03201 100644 --- a/tests/unit/acquisition/test_rule.py +++ b/tests/unit/acquisition/test_rule.py @@ -15,7 +15,7 @@ import copy from collections.abc import Mapping -from typing import Callable, Optional +from typing import Any, Callable, Optional import gpflow import numpy.testing as npt @@ -555,6 +555,7 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: return (search_space.upper[None] + search_space.lower[None]) / 2 diff --git a/tests/unit/test_ask_tell_optimization.py b/tests/unit/test_ask_tell_optimization.py index 7c638f2baf..d99bfcc9de 100644 --- a/tests/unit/test_ask_tell_optimization.py +++ b/tests/unit/test_ask_tell_optimization.py @@ -13,7 +13,7 @@ # limitations under the License. 
from __future__ import annotations -from typing import Mapping, Optional +from typing import Any, Mapping, Optional import pytest import tensorflow as tf @@ -326,6 +326,7 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | None, TensorType]: def go(state: int | None) -> tuple[int | None, TensorType]: self.states_received.append(state) diff --git a/tests/unit/test_bayesian_optimizer.py b/tests/unit/test_bayesian_optimizer.py index dead19ac21..f0afc6a720 100644 --- a/tests/unit/test_bayesian_optimizer.py +++ b/tests/unit/test_bayesian_optimizer.py @@ -16,7 +16,7 @@ import tempfile from collections.abc import Mapping from pathlib import Path -from typing import NoReturn, Optional +from typing import Any, NoReturn, Optional import numpy.testing as npt import pytest @@ -365,6 +365,7 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | None, TensorType]: def go(state: int | None) -> tuple[int | None, TensorType]: self.states_received.append(state) @@ -437,6 +438,7 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> NoReturn: raise _Whoops @@ -496,6 +498,7 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> NoReturn: assert False @@ -532,6 +535,7 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | None, TensorType]: def go(previous_state: int | None) -> tuple[int | None, TensorType]: if previous_state is None: @@ -615,6 +619,7 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | None, TensorType]: def go(state: int | None) -> tuple[int | None, TensorType]: new_state = 0 if state is None else state + 1 diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index 628a6e0df4..68929d686f 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -18,7 +18,7 @@ from collections.abc import Mapping from itertools import zip_longest from time import sleep -from typing import Optional +from typing import Any, Optional import numpy.testing as npt import pytest @@ -218,6 +218,7 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: sleep(acq_time) return self._qp diff --git a/tests/util/misc.py b/tests/util/misc.py index be71e7e5f6..5ab1f843f1 100644 --- a/tests/util/misc.py +++ b/tests/util/misc.py @@ -181,11 +181,13 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ :param search_space: Unused. :param models: Unused. :param datasets: Unused. + :param metadata: Unused. :return: The fixed value specified on initialisation. 
""" return self._qp diff --git a/trieste/acquisition/interface.py b/trieste/acquisition/interface.py index e7c92859d4..f5c1cc0537 100644 --- a/trieste/acquisition/interface.py +++ b/trieste/acquisition/interface.py @@ -18,7 +18,7 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import Callable, Generic, Mapping, Optional +from typing import Any, Callable, Generic, Mapping, Optional from ..data import Dataset from ..models.interfaces import ProbabilisticModelType @@ -365,6 +365,130 @@ def __repr__(self) -> str: return _Anon(self) +class MetadataAcquisitionFunctionBuilder(Generic[ProbabilisticModelType], ABC): + """An :class:`MetadataAcquisitionFunctionBuilder` builds and updates an acquisition function + using additional passed in metadata.""" + + @abstractmethod + def prepare_acquisition_function( + self, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + """ + Prepare an acquisition function. We assume that this requires at least models, but + it may sometimes also need data. + + :param models: The models for each tag. + :param datasets: The data from the observer (optional). + :param metadata: Metadata from the observer (optional). + :return: An acquisition function. + """ + + def update_acquisition_function( + self, + function: AcquisitionFunction, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + """ + Update an acquisition function. By default this generates a new acquisition function each + time. However, if the function is decorated with `@tf.function`, then you can override + this method to update its variables instead and avoid retracing the acquisition function on + every optimization loop. + + :param function: The acquisition function to update. + :param models: The models for each tag. + :param datasets: The data from the observer (optional). + :param metadata: Metadata from the observer (optional). + :return: The updated acquisition function. + """ + return self.prepare_acquisition_function(models, datasets=datasets, metadata=metadata) + + +class SingleModelMetadataAcquisitionBuilder(Generic[ProbabilisticModelType], ABC): + """ + Convenience acquisition function builder for an acquisition function (or component of a + composite acquisition function) that requires only one model, dataset pair. + """ + + def using(self, tag: Tag) -> MetadataAcquisitionFunctionBuilder[ProbabilisticModelType]: + """ + :param tag: The tag for the model, dataset pair to use to build this acquisition function. + :return: An acquisition function builder that selects the model and dataset specified by + ``tag``, as defined in :meth:`prepare_acquisition_function`. 
+ """ + + class _Anon(MetadataAcquisitionFunctionBuilder[ProbabilisticModelType]): + def __init__( + self, single_builder: SingleModelMetadataAcquisitionBuilder[ProbabilisticModelType] + ): + self.single_builder = single_builder + + def prepare_acquisition_function( + self, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + return self.single_builder.prepare_acquisition_function( + models[tag], + dataset=None if datasets is None else datasets[tag], + metadata=metadata, + ) + + def update_acquisition_function( + self, + function: AcquisitionFunction, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + return self.single_builder.update_acquisition_function( + function, + models[tag], + dataset=None if datasets is None else datasets[tag], + metadata=metadata, + ) + + def __repr__(self) -> str: + return f"{self.single_builder!r} using tag {tag!r}" + + return _Anon(self) + + @abstractmethod + def prepare_acquisition_function( + self, + model: ProbabilisticModelType, + dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + """ + :param model: The model. + :param dataset: The data to use to build the acquisition function (optional). + :param metadata: The metadata to use to build the acquisition function (optional). + :return: An acquisition function. + """ + + def update_acquisition_function( + self, + function: AcquisitionFunction, + model: ProbabilisticModelType, + dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + """ + :param function: The acquisition function to update. + :param model: The model. + :param dataset: The data from the observer (optional). + :param metadata: The metadata from the observer (optional). + :return: The updated acquisition function. + """ + return self.prepare_acquisition_function(model, dataset=dataset, metadata=metadata) + + PenalizationFunction = Callable[[TensorType], TensorType] """ An :const:`PenalizationFunction` maps a query point (of dimension `D`) to a single diff --git a/trieste/acquisition/rule.py b/trieste/acquisition/rule.py index 5056438e20..5387ec7d1c 100644 --- a/trieste/acquisition/rule.py +++ b/trieste/acquisition/rule.py @@ -58,8 +58,10 @@ AcquisitionFunction, AcquisitionFunctionBuilder, GreedyAcquisitionFunctionBuilder, + MetadataAcquisitionFunctionBuilder, SingleModelAcquisitionBuilder, SingleModelGreedyAcquisitionBuilder, + SingleModelMetadataAcquisitionBuilder, SingleModelVectorizedAcquisitionBuilder, VectorizedAcquisitionFunctionBuilder, ) @@ -103,9 +105,10 @@ def acquire( search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> ResultType: """ - Return a value of type `T_co`. Typically this will be a set of query points, either on its + Return a value of type `T_co`. Typically, this will be a set of query points, either on its own as a `TensorType` (see e.g. :class:`EfficientGlobalOptimization`), or within some context (see e.g. :class:`TrustRegion`). We assume that this requires at least models, but it may sometimes also need data. @@ -117,6 +120,7 @@ def acquire( :param search_space: The local acquisition search space for *this step*. 
:param models: The model for each tag. :param datasets: The known observer query points and observations for each tag (optional). + :param metadata: Any additional acquisition metadata (optional). :return: A value of type `T_co`. """ @@ -125,6 +129,7 @@ def acquire_single( search_space: SearchSpaceType, model: ProbabilisticModelType, dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> ResultType: """ A convenience wrapper for :meth:`acquire` that uses only one model, dataset pair. @@ -133,6 +138,7 @@ def acquire_single( is defined. :param model: The model to use. :param dataset: The known observer query points and observations (optional). + :param metadata: Any additional acquisition metadata (optional). :return: A value of type `T_co`. """ if isinstance(dataset, dict) or isinstance(model, dict): @@ -144,6 +150,7 @@ def acquire_single( search_space, {OBJECTIVE: model}, datasets=None if dataset is None else {OBJECTIVE: dataset}, + metadata=metadata, ) @@ -168,8 +175,10 @@ def __init__( builder: ( AcquisitionFunctionBuilder[ProbabilisticModelType] | GreedyAcquisitionFunctionBuilder[ProbabilisticModelType] + | MetadataAcquisitionFunctionBuilder[ProbabilisticModelType] | SingleModelAcquisitionBuilder[ProbabilisticModelType] | SingleModelGreedyAcquisitionBuilder[ProbabilisticModelType] + | SingleModelMetadataAcquisitionBuilder[ProbabilisticModelType] ), optimizer: AcquisitionOptimizer[SearchSpaceType] | None = None, num_query_points: int = 1, @@ -183,9 +192,11 @@ def __init__( AcquisitionFunctionBuilder[ProbabilisticModelType] | GreedyAcquisitionFunctionBuilder[ProbabilisticModelType] | VectorizedAcquisitionFunctionBuilder[ProbabilisticModelType] + | MetadataAcquisitionFunctionBuilder[ProbabilisticModelType] | SingleModelAcquisitionBuilder[ProbabilisticModelType] | SingleModelGreedyAcquisitionBuilder[ProbabilisticModelType] | SingleModelVectorizedAcquisitionBuilder[ProbabilisticModelType] + | SingleModelMetadataAcquisitionBuilder[ProbabilisticModelType] ] = None, optimizer: AcquisitionOptimizer[SearchSpaceType] | None = None, num_query_points: int = 1, @@ -227,6 +238,7 @@ def __init__( SingleModelAcquisitionBuilder, SingleModelGreedyAcquisitionBuilder, SingleModelVectorizedAcquisitionBuilder, + SingleModelMetadataAcquisitionBuilder, ), ): builder = builder.using(OBJECTIVE) @@ -235,7 +247,9 @@ def __init__( if isinstance(builder, VectorizedAcquisitionFunctionBuilder): # optimize batch elements independently optimizer = batchify_vectorize(optimizer, num_query_points) - elif isinstance(builder, AcquisitionFunctionBuilder): + elif isinstance( + builder, (AcquisitionFunctionBuilder, MetadataAcquisitionFunctionBuilder) + ): # optimize batch elements jointly optimizer = batchify_joint(optimizer, num_query_points) elif isinstance(builder, GreedyAcquisitionFunctionBuilder): @@ -246,6 +260,7 @@ def __init__( AcquisitionFunctionBuilder[ProbabilisticModelType], GreedyAcquisitionFunctionBuilder[ProbabilisticModelType], VectorizedAcquisitionFunctionBuilder[ProbabilisticModelType], + MetadataAcquisitionFunctionBuilder[ProbabilisticModelType], ] = builder self._optimizer = optimizer self._num_query_points = num_query_points @@ -268,6 +283,7 @@ def acquire( search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ Return the query point(s) that optimizes the acquisition function produced by ``builder`` @@ -277,19 +293,37 @@ def acquire( 
:param models: The model for each tag. :param datasets: The known observer query points and observations. Whether this is required depends on the acquisition function used. + :param metadata: Any additional acquisition metadata. This is passed to any + :class:`~trieste.acquisition.MetadataAcquisitionFunctionBuilder` builder, and + ignored otherwise. :return: The single (or batch of) points to query. """ if self._acquisition_function is None: - self._acquisition_function = self._builder.prepare_acquisition_function( - models, - datasets=datasets, - ) + if isinstance(self._builder, MetadataAcquisitionFunctionBuilder): + self._acquisition_function = self._builder.prepare_acquisition_function( + models, + datasets=datasets, + metadata=metadata, + ) + else: + self._acquisition_function = self._builder.prepare_acquisition_function( + models, + datasets=datasets, + ) else: - self._acquisition_function = self._builder.update_acquisition_function( - self._acquisition_function, - models, - datasets=datasets, - ) + if isinstance(self._builder, MetadataAcquisitionFunctionBuilder): + self._acquisition_function = self._builder.update_acquisition_function( + self._acquisition_function, + models, + datasets=datasets, + metadata=metadata, + ) + else: + self._acquisition_function = self._builder.update_acquisition_function( + self._acquisition_function, + models, + datasets=datasets, + ) summary_writer = logging.get_tensorboard_writer() step_number = logging.get_step_number() @@ -537,6 +571,7 @@ def acquire( search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[AsynchronousRuleState | None, TensorType]: """ Constructs a function that, given ``AsynchronousRuleState``, @@ -693,6 +728,7 @@ def acquire( search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[AsynchronousRuleState | None, TensorType]: """ Constructs a function that, given ``AsynchronousRuleState``, @@ -708,6 +744,7 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model of the known data. Uses the single key `OBJECTIVE`. :param datasets: The known observer query points and observations. + :param metadata: Unused. :return: A function that constructs the next acquisition state and the recommended query points from the previous acquisition state. """ @@ -804,6 +841,7 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ Sample ``num_query_points`` (see :meth:`__init__`) points from the @@ -812,6 +850,7 @@ def acquire( :param search_space: The acquisition search space. :param models: Unused. :param datasets: Unused. + :param metadata: Unused. :return: The ``num_query_points`` points to query. """ samples = search_space.sample(self._num_query_points) @@ -905,6 +944,7 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ Sample `num_search_space_samples` (see :meth:`__init__`) points from the @@ -914,6 +954,7 @@ def acquire( :param search_space: The local acquisition search space for *this step*. 
:param models: The model of the known data. Uses the single key `OBJECTIVE`. :param datasets: The known observer query points and observations. + :param metadata: Unused. :return: The ``num_query_points`` points to query. :raise ValueError: If ``models`` do not contain the key `OBJECTIVE`, or it contains any other key. @@ -1019,6 +1060,7 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[State | None, TensorType]: """ Construct a local search space from ``search_space`` according the trust region algorithm, @@ -1050,6 +1092,7 @@ def acquire( :param models: The model for each tag. :param datasets: The known observer query points and observations. Uses the data for key `OBJECTIVE` to calculate the new trust region. + :param metadata: Any additional acquisition metadata (optional). :return: A function that constructs the next acquisition state and the recommended query points from the previous acquisition state. :raise KeyError: If ``datasets`` does not contain the key `OBJECTIVE`. @@ -1095,7 +1138,9 @@ def state_func( tf.reduce_min([global_upper, xmin + eps], axis=0), ) - points = self._rule.acquire(acquisition_space, models, datasets=datasets) + points = self._rule.acquire( + acquisition_space, models, datasets=datasets, metadata=metadata + ) state_ = TrustRegion.State(acquisition_space, eps, y_min, is_global) return state_, points @@ -1231,6 +1276,7 @@ def acquire( search_space: Box, models: Mapping[Tag, TrainableSupportsGetKernel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[State | None, TensorType]: """ Construct a local search space from ``search_space`` according the TURBO algorithm, @@ -1256,6 +1302,7 @@ def acquire( :param models: The model for each tag. :param datasets: The known observer query points and observations. Uses the data for key `OBJECTIVE` to calculate the new trust region. + :param metadata: Any additional acquisition metadata (optional). :return: A function that constructs the next acquisition state and the recommended query points from the previous acquisition state. :raise KeyError: If ``datasets`` does not contain the key `OBJECTIVE`. @@ -1324,7 +1371,9 @@ def state_func( local_model.optimize(local_dataset) # use local model and local dataset to choose next query point(s) - points = self._rule.acquire_single(acquisition_space, local_model, local_dataset) + points = self._rule.acquire_single( + acquisition_space, local_model, local_dataset, metadata=metadata + ) state_ = TURBO.State(acquisition_space, L, failure_counter, success_counter, y_min) return state_, points @@ -1433,6 +1482,7 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """Acquire a batch of points to observe based on the batch hypervolume Sharpe ratio indicator method. @@ -1443,6 +1493,7 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model for each tag. :param datasets: The known observer query points and observations. + :param metadata: Unused. :return: The batch of points to query. 
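End to end, the rule plumbing above (together with the ask-tell change that follows below) would be exercised from user code roughly as in this sketch. The builder is assumed to be a metadata-aware single-model builder such as the one sketched earlier; search_space, initial_data and model are assumed to be set up already, and the AskTellOptimizer constructor call is shown schematically rather than exactly. Only the metadata keyword is the point of the example.

from trieste.acquisition.rule import EfficientGlobalOptimization
from trieste.ask_tell_optimization import AskTellOptimizer

# EfficientGlobalOptimization wraps a single-model metadata builder with .using(OBJECTIVE)
# and, because the wrapped builder is a MetadataAcquisitionFunctionBuilder, forwards the
# metadata mapping to prepare_acquisition_function / update_acquisition_function.
rule = EfficientGlobalOptimization(builder)

ask_tell = AskTellOptimizer(search_space, initial_data, model, acquisition_rule=rule)

# The mapping passed here is threaded through the rule's acquire call down to the builder.
query_points = ask_tell.ask(metadata={"weight": 2.0})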
""" if models.keys() != {OBJECTIVE}: diff --git a/trieste/ask_tell_optimization.py b/trieste/ask_tell_optimization.py index 3b5c973963..5746b9a56a 100644 --- a/trieste/ask_tell_optimization.py +++ b/trieste/ask_tell_optimization.py @@ -21,7 +21,7 @@ from __future__ import annotations from copy import deepcopy -from typing import Dict, Generic, Mapping, TypeVar, cast, overload +from typing import Any, Dict, Generic, Mapping, Optional, TypeVar, cast, overload try: import pandas as pd @@ -375,10 +375,11 @@ def to_result(self, copy: bool = True) -> OptimizationResult[StateType]: record: Record[StateType] = self.to_record(copy=copy) return OptimizationResult(Ok(record), []) - def ask(self) -> TensorType: + def ask(self, metadata: Optional[Mapping[str, Any]] = None) -> TensorType: """Suggests a point (or points in batch mode) to observe by optimizing the acquisition function. If the acquisition is stateful, its state is saved. + :param metadata: Any acquisition metadata (optional). :return: A :class:`TensorType` instance representing suggested point(s). """ # This trick deserves a comment to explain what's going on @@ -390,7 +391,7 @@ def ask(self) -> TensorType: with Timer() as query_point_generation_timer: points_or_stateful = self._acquisition_rule.acquire( - self._search_space, self._models, datasets=self._datasets + self._search_space, self._models, datasets=self._datasets, metadata=metadata ) if callable(points_or_stateful): From 03eed23852b3ac8e3ff640253f8f8c56fd57f56c Mon Sep 17 00:00:00 2001 From: Uri Granta Date: Wed, 12 Jul 2023 06:19:16 +0100 Subject: [PATCH 06/11] Make backwards compatible --- .../visualizing_with_tensorboard.pct.py | 2 +- tests/unit/acquisition/test_rule.py | 3 +- tests/unit/test_ask_tell_optimization.py | 3 +- tests/unit/test_bayesian_optimizer.py | 7 +-- tests/unit/test_logging.py | 3 +- tests/util/misc.py | 2 - trieste/acquisition/rule.py | 53 ++++++++++++++----- trieste/ask_tell_optimization.py | 2 +- 8 files changed, 47 insertions(+), 28 deletions(-) diff --git a/docs/notebooks/visualizing_with_tensorboard.pct.py b/docs/notebooks/visualizing_with_tensorboard.pct.py index 1e36ad7ee8..e5396c31a9 100644 --- a/docs/notebooks/visualizing_with_tensorboard.pct.py +++ b/docs/notebooks/visualizing_with_tensorboard.pct.py @@ -128,7 +128,7 @@ def log(self, dataset): # %% class EGOExtraLogging(trieste.acquisition.rule.EfficientGlobalOptimization): - def acquire(self, search_space, models, datasets=None, metadata=None): + def acquire(self, search_space, models, datasets=None): points = super().acquire(search_space, models, datasets) summary_writer = trieste.logging.get_tensorboard_writer() if summary_writer: diff --git a/tests/unit/acquisition/test_rule.py b/tests/unit/acquisition/test_rule.py index f21ec03201..13b6acabd2 100644 --- a/tests/unit/acquisition/test_rule.py +++ b/tests/unit/acquisition/test_rule.py @@ -15,7 +15,7 @@ import copy from collections.abc import Mapping -from typing import Any, Callable, Optional +from typing import Callable, Optional import gpflow import numpy.testing as npt @@ -555,7 +555,6 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: return (search_space.upper[None] + search_space.lower[None]) / 2 diff --git a/tests/unit/test_ask_tell_optimization.py b/tests/unit/test_ask_tell_optimization.py index d99bfcc9de..7c638f2baf 100644 --- a/tests/unit/test_ask_tell_optimization.py +++ 
b/tests/unit/test_ask_tell_optimization.py @@ -13,7 +13,7 @@ # limitations under the License. from __future__ import annotations -from typing import Any, Mapping, Optional +from typing import Mapping, Optional import pytest import tensorflow as tf @@ -326,7 +326,6 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | None, TensorType]: def go(state: int | None) -> tuple[int | None, TensorType]: self.states_received.append(state) diff --git a/tests/unit/test_bayesian_optimizer.py b/tests/unit/test_bayesian_optimizer.py index f0afc6a720..dead19ac21 100644 --- a/tests/unit/test_bayesian_optimizer.py +++ b/tests/unit/test_bayesian_optimizer.py @@ -16,7 +16,7 @@ import tempfile from collections.abc import Mapping from pathlib import Path -from typing import Any, NoReturn, Optional +from typing import NoReturn, Optional import numpy.testing as npt import pytest @@ -365,7 +365,6 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | None, TensorType]: def go(state: int | None) -> tuple[int | None, TensorType]: self.states_received.append(state) @@ -438,7 +437,6 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> NoReturn: raise _Whoops @@ -498,7 +496,6 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> NoReturn: assert False @@ -535,7 +532,6 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | None, TensorType]: def go(previous_state: int | None) -> tuple[int | None, TensorType]: if previous_state is None: @@ -619,7 +615,6 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> State[int | None, TensorType]: def go(state: int | None) -> tuple[int | None, TensorType]: new_state = 0 if state is None else state + 1 diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index 68929d686f..628a6e0df4 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -18,7 +18,7 @@ from collections.abc import Mapping from itertools import zip_longest from time import sleep -from typing import Any, Optional +from typing import Optional import numpy.testing as npt import pytest @@ -218,7 +218,6 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: sleep(acq_time) return self._qp diff --git a/tests/util/misc.py b/tests/util/misc.py index 5ab1f843f1..be71e7e5f6 100644 --- a/tests/util/misc.py +++ b/tests/util/misc.py @@ -181,13 +181,11 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ :param search_space: Unused. :param models: Unused. :param datasets: Unused. - :param metadata: Unused. 
:return: The fixed value specified on initialisation. """ return self._qp diff --git a/trieste/acquisition/rule.py b/trieste/acquisition/rule.py index 5387ec7d1c..71d09a6413 100644 --- a/trieste/acquisition/rule.py +++ b/trieste/acquisition/rule.py @@ -105,7 +105,6 @@ def acquire( search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> ResultType: """ Return a value of type `T_co`. Typically, this will be a set of query points, either on its @@ -124,6 +123,19 @@ def acquire( :return: A value of type `T_co`. """ + def acquire_with_metadata( + self, + search_space: SearchSpaceType, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> ResultType: + """ + Same as acquire, but accepts an additional metadata argument. By default this is just + dropped, but you can override this method to use the metadata during acquisition. + """ + return self.acquire(search_space, models, datasets=datasets) + def acquire_single( self, search_space: SearchSpaceType, @@ -146,7 +158,7 @@ def acquire_single( "AcquisitionRule.acquire_single method does not support multiple datasets " "or models: use acquire instead" ) - return self.acquire( + return self.acquire_with_metadata( search_space, {OBJECTIVE: model}, datasets=None if dataset is None else {OBJECTIVE: dataset}, @@ -283,6 +295,14 @@ def acquire( search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + ) -> TensorType: + return self.acquire_with_metadata(search_space, models, datasets=datasets) + + def acquire_with_metadata( + self, + search_space: SearchSpaceType, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ @@ -566,12 +586,13 @@ def __repr__(self) -> str: {self._builder!r}, {self._optimizer!r})""" + # TODO: support metadata + def acquire( self, search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[AsynchronousRuleState | None, TensorType]: """ Constructs a function that, given ``AsynchronousRuleState``, @@ -728,7 +749,6 @@ def acquire( search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[AsynchronousRuleState | None, TensorType]: """ Constructs a function that, given ``AsynchronousRuleState``, @@ -744,7 +764,6 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model of the known data. Uses the single key `OBJECTIVE`. :param datasets: The known observer query points and observations. - :param metadata: Unused. :return: A function that constructs the next acquisition state and the recommended query points from the previous acquisition state. """ @@ -841,7 +860,6 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ Sample ``num_query_points`` (see :meth:`__init__`) points from the @@ -850,7 +868,6 @@ def acquire( :param search_space: The acquisition search space. 
:param models: Unused. :param datasets: Unused. - :param metadata: Unused. :return: The ``num_query_points`` points to query. """ samples = search_space.sample(self._num_query_points) @@ -944,7 +961,6 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """ Sample `num_search_space_samples` (see :meth:`__init__`) points from the @@ -954,7 +970,6 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model of the known data. Uses the single key `OBJECTIVE`. :param datasets: The known observer query points and observations. - :param metadata: Unused. :return: The ``num_query_points`` points to query. :raise ValueError: If ``models`` do not contain the key `OBJECTIVE`, or it contains any other key. @@ -1060,6 +1075,14 @@ def acquire( search_space: Box, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + ) -> types.State[State | None, TensorType]: + return self.acquire_with_metadata(search_space, models, datasets=datasets) + + def acquire_with_metadata( + self, + search_space: Box, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[State | None, TensorType]: """ @@ -1138,7 +1161,7 @@ def state_func( tf.reduce_min([global_upper, xmin + eps], axis=0), ) - points = self._rule.acquire( + points = self._rule.acquire_with_metadata( acquisition_space, models, datasets=datasets, metadata=metadata ) state_ = TrustRegion.State(acquisition_space, eps, y_min, is_global) @@ -1276,6 +1299,14 @@ def acquire( search_space: Box, models: Mapping[Tag, TrainableSupportsGetKernel], datasets: Optional[Mapping[Tag, Dataset]] = None, + ) -> types.State[State | None, TensorType]: + return self.acquire_with_metadata(search_space, models, datasets=datasets) + + def acquire_with_metadata( + self, + search_space: Box, + models: Mapping[Tag, TrainableSupportsGetKernel], + datasets: Optional[Mapping[Tag, Dataset]] = None, metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[State | None, TensorType]: """ @@ -1482,7 +1513,6 @@ def acquire( search_space: SearchSpace, models: Mapping[Tag, ProbabilisticModel], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> TensorType: """Acquire a batch of points to observe based on the batch hypervolume Sharpe ratio indicator method. @@ -1493,7 +1523,6 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model for each tag. :param datasets: The known observer query points and observations. - :param metadata: Unused. :return: The batch of points to query. 
""" if models.keys() != {OBJECTIVE}: diff --git a/trieste/ask_tell_optimization.py b/trieste/ask_tell_optimization.py index 5746b9a56a..87b8db53d8 100644 --- a/trieste/ask_tell_optimization.py +++ b/trieste/ask_tell_optimization.py @@ -390,7 +390,7 @@ def ask(self, metadata: Optional[Mapping[str, Any]] = None) -> TensorType: # so code below is needed to cater for both cases with Timer() as query_point_generation_timer: - points_or_stateful = self._acquisition_rule.acquire( + points_or_stateful = self._acquisition_rule.acquire_with_metadata( self._search_space, self._models, datasets=self._datasets, metadata=metadata ) From 879ff47218c84c1a15bee54e9ed5d5c038e25f61 Mon Sep 17 00:00:00 2001 From: Uri Granta Date: Wed, 12 Jul 2023 06:26:43 +0100 Subject: [PATCH 07/11] Support metadata in AssychronousOptimization too --- trieste/acquisition/rule.py | 56 +++++++++++++++++++++++++++---------- 1 file changed, 42 insertions(+), 14 deletions(-) diff --git a/trieste/acquisition/rule.py b/trieste/acquisition/rule.py index 71d09a6413..bb69dd1645 100644 --- a/trieste/acquisition/rule.py +++ b/trieste/acquisition/rule.py @@ -119,7 +119,6 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model for each tag. :param datasets: The known observer query points and observations for each tag (optional). - :param metadata: Any additional acquisition metadata (optional). :return: A value of type `T_co`. """ @@ -529,7 +528,9 @@ def __init__( self: "AsynchronousOptimization[SearchSpaceType, ProbabilisticModelType]", builder: ( AcquisitionFunctionBuilder[ProbabilisticModelType] + | MetadataAcquisitionFunctionBuilder[ProbabilisticModelType] | SingleModelAcquisitionBuilder[ProbabilisticModelType] + | SingleModelMetadataAcquisitionBuilder[ProbabilisticModelType] ), optimizer: AcquisitionOptimizer[SearchSpaceType] | None = None, num_query_points: int = 1, @@ -540,7 +541,9 @@ def __init__( self, builder: Optional[ AcquisitionFunctionBuilder[ProbabilisticModelType] + | MetadataAcquisitionFunctionBuilder[ProbabilisticModelType] | SingleModelAcquisitionBuilder[ProbabilisticModelType] + | SingleModelMetadataAcquisitionBuilder[ProbabilisticModelType] ] = None, optimizer: AcquisitionOptimizer[SearchSpaceType] | None = None, num_query_points: int = 1, @@ -568,7 +571,9 @@ def __init__( if optimizer is None: optimizer = automatic_optimizer_selector - if isinstance(builder, SingleModelAcquisitionBuilder): + if isinstance( + builder, (SingleModelAcquisitionBuilder, SingleModelMetadataAcquisitionBuilder) + ): builder = builder.using(OBJECTIVE) # even though we are only using batch acquisition functions @@ -576,7 +581,10 @@ def __init__( if num_query_points > 1: optimizer = batchify_joint(optimizer, num_query_points) - self._builder: AcquisitionFunctionBuilder[ProbabilisticModelType] = builder + self._builder: Union[ + AcquisitionFunctionBuilder[ProbabilisticModelType], + MetadataAcquisitionFunctionBuilder[ProbabilisticModelType], + ] = builder self._optimizer = optimizer self._acquisition_function: Optional[AcquisitionFunction] = None @@ -586,13 +594,20 @@ def __repr__(self) -> str: {self._builder!r}, {self._optimizer!r})""" - # TODO: support metadata - def acquire( self, search_space: SearchSpaceType, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + ) -> types.State[AsynchronousRuleState | None, TensorType]: + return self.acquire_with_metadata(search_space, models, datasets=datasets) + + def acquire_with_metadata( + self, + 
search_space: SearchSpaceType, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> types.State[AsynchronousRuleState | None, TensorType]: """ Constructs a function that, given ``AsynchronousRuleState``, @@ -610,6 +625,9 @@ def acquire( :param search_space: The local acquisition search space for *this step*. :param models: The model of the known data. Uses the single key `OBJECTIVE`. :param datasets: The known observer query points and observations. + :param metadata: Any additional acquisition metadata. This is passed to any + :class:`~trieste.acquisition.MetadataAcquisitionFunctionBuilder` builder, and + ignored otherwise. :return: A function that constructs the next acquisition state and the recommended query points from the previous acquisition state. """ @@ -623,16 +641,26 @@ def acquire( ) if self._acquisition_function is None: - self._acquisition_function = self._builder.prepare_acquisition_function( - models, - datasets=datasets, - ) + if isinstance(self._builder, MetadataAcquisitionFunctionBuilder): + self._acquisition_function = self._builder.prepare_acquisition_function( + models, datasets=datasets, metadata=metadata + ) + else: + self._acquisition_function = self._builder.prepare_acquisition_function( + models, + datasets=datasets, + ) else: - self._acquisition_function = self._builder.update_acquisition_function( - self._acquisition_function, - models, - datasets=datasets, - ) + if isinstance(self._builder, MetadataAcquisitionFunctionBuilder): + self._acquisition_function = self._builder.update_acquisition_function( + self._acquisition_function, models, datasets=datasets, metadata=metadata + ) + else: + self._acquisition_function = self._builder.update_acquisition_function( + self._acquisition_function, + models, + datasets=datasets, + ) def state_func( state: AsynchronousRuleState | None, From 7773c6fc87f8ce52bdc339447e5bc16189df88fa Mon Sep 17 00:00:00 2001 From: Uri Granta Date: Wed, 12 Jul 2023 06:31:17 +0100 Subject: [PATCH 08/11] Superfluous lines --- trieste/acquisition/rule.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/trieste/acquisition/rule.py b/trieste/acquisition/rule.py index bb69dd1645..7105aa722b 100644 --- a/trieste/acquisition/rule.py +++ b/trieste/acquisition/rule.py @@ -202,11 +202,9 @@ def __init__( builder: Optional[ AcquisitionFunctionBuilder[ProbabilisticModelType] | GreedyAcquisitionFunctionBuilder[ProbabilisticModelType] - | VectorizedAcquisitionFunctionBuilder[ProbabilisticModelType] | MetadataAcquisitionFunctionBuilder[ProbabilisticModelType] | SingleModelAcquisitionBuilder[ProbabilisticModelType] | SingleModelGreedyAcquisitionBuilder[ProbabilisticModelType] - | SingleModelVectorizedAcquisitionBuilder[ProbabilisticModelType] | SingleModelMetadataAcquisitionBuilder[ProbabilisticModelType] ] = None, optimizer: AcquisitionOptimizer[SearchSpaceType] | None = None, @@ -248,7 +246,6 @@ def __init__( ( SingleModelAcquisitionBuilder, SingleModelGreedyAcquisitionBuilder, - SingleModelVectorizedAcquisitionBuilder, SingleModelMetadataAcquisitionBuilder, ), ): @@ -270,7 +267,6 @@ def __init__( self._builder: Union[ AcquisitionFunctionBuilder[ProbabilisticModelType], GreedyAcquisitionFunctionBuilder[ProbabilisticModelType], - VectorizedAcquisitionFunctionBuilder[ProbabilisticModelType], MetadataAcquisitionFunctionBuilder[ProbabilisticModelType], ] = builder self._optimizer = optimizer From 68bd3feffe5c8cdc8a24d908146d270447e77eef Mon Sep 17 00:00:00 2001 
From: Uri Granta Date: Wed, 12 Jul 2023 06:32:07 +0100 Subject: [PATCH 09/11] Format --- trieste/acquisition/rule.py | 1 - 1 file changed, 1 deletion(-) diff --git a/trieste/acquisition/rule.py b/trieste/acquisition/rule.py index 7105aa722b..60f077634c 100644 --- a/trieste/acquisition/rule.py +++ b/trieste/acquisition/rule.py @@ -62,7 +62,6 @@ SingleModelAcquisitionBuilder, SingleModelGreedyAcquisitionBuilder, SingleModelMetadataAcquisitionBuilder, - SingleModelVectorizedAcquisitionBuilder, VectorizedAcquisitionFunctionBuilder, ) from .multi_objective import Pareto From 02805e607561aa79bc4ab7bd0c2a943afd187366 Mon Sep 17 00:00:00 2001 From: Uri Granta Date: Wed, 12 Jul 2023 07:25:46 +0100 Subject: [PATCH 10/11] Simplify further --- trieste/acquisition/interface.py | 6 ++++-- trieste/acquisition/rule.py | 26 ++++---------------------- 2 files changed, 8 insertions(+), 24 deletions(-) diff --git a/trieste/acquisition/interface.py b/trieste/acquisition/interface.py index f5c1cc0537..58688315a8 100644 --- a/trieste/acquisition/interface.py +++ b/trieste/acquisition/interface.py @@ -365,7 +365,7 @@ def __repr__(self) -> str: return _Anon(self) -class MetadataAcquisitionFunctionBuilder(Generic[ProbabilisticModelType], ABC): +class MetadataAcquisitionFunctionBuilder(AcquisitionFunctionBuilder[ProbabilisticModelType], ABC): """An :class:`MetadataAcquisitionFunctionBuilder` builds and updates an acquisition function using additional passed in metadata.""" @@ -408,7 +408,9 @@ def update_acquisition_function( return self.prepare_acquisition_function(models, datasets=datasets, metadata=metadata) -class SingleModelMetadataAcquisitionBuilder(Generic[ProbabilisticModelType], ABC): +class SingleModelMetadataAcquisitionBuilder( + SingleModelAcquisitionBuilder[ProbabilisticModelType], ABC +): """ Convenience acquisition function builder for an acquisition function (or component of a composite acquisition function) that requires only one model, dataset pair. diff --git a/trieste/acquisition/rule.py b/trieste/acquisition/rule.py index 60f077634c..f9ce5e2241 100644 --- a/trieste/acquisition/rule.py +++ b/trieste/acquisition/rule.py @@ -61,7 +61,6 @@ MetadataAcquisitionFunctionBuilder, SingleModelAcquisitionBuilder, SingleModelGreedyAcquisitionBuilder, - SingleModelMetadataAcquisitionBuilder, VectorizedAcquisitionFunctionBuilder, ) from .multi_objective import Pareto @@ -129,7 +128,7 @@ def acquire_with_metadata( metadata: Optional[Mapping[str, Any]] = None, ) -> ResultType: """ - Same as acquire, but accepts an additional metadata argument. By default this is just + Same as acquire, but accepts an additional metadata argument. By default, this is just dropped, but you can override this method to use the metadata during acquisition. 
""" return self.acquire(search_space, models, datasets=datasets) @@ -185,10 +184,8 @@ def __init__( builder: ( AcquisitionFunctionBuilder[ProbabilisticModelType] | GreedyAcquisitionFunctionBuilder[ProbabilisticModelType] - | MetadataAcquisitionFunctionBuilder[ProbabilisticModelType] | SingleModelAcquisitionBuilder[ProbabilisticModelType] | SingleModelGreedyAcquisitionBuilder[ProbabilisticModelType] - | SingleModelMetadataAcquisitionBuilder[ProbabilisticModelType] ), optimizer: AcquisitionOptimizer[SearchSpaceType] | None = None, num_query_points: int = 1, @@ -201,10 +198,8 @@ def __init__( builder: Optional[ AcquisitionFunctionBuilder[ProbabilisticModelType] | GreedyAcquisitionFunctionBuilder[ProbabilisticModelType] - | MetadataAcquisitionFunctionBuilder[ProbabilisticModelType] | SingleModelAcquisitionBuilder[ProbabilisticModelType] | SingleModelGreedyAcquisitionBuilder[ProbabilisticModelType] - | SingleModelMetadataAcquisitionBuilder[ProbabilisticModelType] ] = None, optimizer: AcquisitionOptimizer[SearchSpaceType] | None = None, num_query_points: int = 1, @@ -245,7 +240,6 @@ def __init__( ( SingleModelAcquisitionBuilder, SingleModelGreedyAcquisitionBuilder, - SingleModelMetadataAcquisitionBuilder, ), ): builder = builder.using(OBJECTIVE) @@ -254,9 +248,7 @@ def __init__( if isinstance(builder, VectorizedAcquisitionFunctionBuilder): # optimize batch elements independently optimizer = batchify_vectorize(optimizer, num_query_points) - elif isinstance( - builder, (AcquisitionFunctionBuilder, MetadataAcquisitionFunctionBuilder) - ): + elif isinstance(builder, AcquisitionFunctionBuilder): # optimize batch elements jointly optimizer = batchify_joint(optimizer, num_query_points) elif isinstance(builder, GreedyAcquisitionFunctionBuilder): @@ -266,7 +258,6 @@ def __init__( self._builder: Union[ AcquisitionFunctionBuilder[ProbabilisticModelType], GreedyAcquisitionFunctionBuilder[ProbabilisticModelType], - MetadataAcquisitionFunctionBuilder[ProbabilisticModelType], ] = builder self._optimizer = optimizer self._num_query_points = num_query_points @@ -523,9 +514,7 @@ def __init__( self: "AsynchronousOptimization[SearchSpaceType, ProbabilisticModelType]", builder: ( AcquisitionFunctionBuilder[ProbabilisticModelType] - | MetadataAcquisitionFunctionBuilder[ProbabilisticModelType] | SingleModelAcquisitionBuilder[ProbabilisticModelType] - | SingleModelMetadataAcquisitionBuilder[ProbabilisticModelType] ), optimizer: AcquisitionOptimizer[SearchSpaceType] | None = None, num_query_points: int = 1, @@ -536,9 +525,7 @@ def __init__( self, builder: Optional[ AcquisitionFunctionBuilder[ProbabilisticModelType] - | MetadataAcquisitionFunctionBuilder[ProbabilisticModelType] | SingleModelAcquisitionBuilder[ProbabilisticModelType] - | SingleModelMetadataAcquisitionBuilder[ProbabilisticModelType] ] = None, optimizer: AcquisitionOptimizer[SearchSpaceType] | None = None, num_query_points: int = 1, @@ -566,9 +553,7 @@ def __init__( if optimizer is None: optimizer = automatic_optimizer_selector - if isinstance( - builder, (SingleModelAcquisitionBuilder, SingleModelMetadataAcquisitionBuilder) - ): + if isinstance(builder, SingleModelAcquisitionBuilder): builder = builder.using(OBJECTIVE) # even though we are only using batch acquisition functions @@ -576,10 +561,7 @@ def __init__( if num_query_points > 1: optimizer = batchify_joint(optimizer, num_query_points) - self._builder: Union[ - AcquisitionFunctionBuilder[ProbabilisticModelType], - MetadataAcquisitionFunctionBuilder[ProbabilisticModelType], - ] = builder + 
self._builder: AcquisitionFunctionBuilder[ProbabilisticModelType] = builder self._optimizer = optimizer self._acquisition_function: Optional[AcquisitionFunction] = None From 63d8abd192ff13dcfb507ae34cbd7b56079bd8b6 Mon Sep 17 00:00:00 2001 From: Uri Granta Date: Thu, 13 Jul 2023 08:14:27 +0100 Subject: [PATCH 11/11] Different approach --- trieste/acquisition/interface.py | 317 ++++++++++++++++++++----------- trieste/acquisition/rule.py | 79 +++----- 2 files changed, 227 insertions(+), 169 deletions(-) diff --git a/trieste/acquisition/interface.py b/trieste/acquisition/interface.py index 58688315a8..3828eb53fd 100644 --- a/trieste/acquisition/interface.py +++ b/trieste/acquisition/interface.py @@ -67,6 +67,23 @@ def prepare_acquisition_function( :return: An acquisition function. """ + def prepare_acquisition_function_with_metadata( + self, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + """ + Prepare an acquisition function using additional metadata. By default, this is just + dropped, but you can override this method to use the metadata during acquisition. + + :param models: The models for each tag. + :param datasets: The data from the observer (optional). + :param metadata: Metadata from the observer (optional). + :return: An acquisition function. + """ + return self.prepare_acquisition_function(models, datasets=datasets) + def update_acquisition_function( self, function: AcquisitionFunction, @@ -74,7 +91,7 @@ def update_acquisition_function( datasets: Optional[Mapping[Tag, Dataset]] = None, ) -> AcquisitionFunction: """ - Update an acquisition function. By default this generates a new acquisition function each + Update an acquisition function. By default, this generates a new acquisition function each time. However, if the function is decorated with `@tf.function`, then you can override this method to update its variables instead and avoid retracing the acquisition function on every optimization loop. @@ -86,6 +103,25 @@ def update_acquisition_function( """ return self.prepare_acquisition_function(models, datasets=datasets) + def update_acquisition_function_with_metadata( + self, + function: AcquisitionFunction, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + """ + Update an acquisition function. By default, this is just + dropped, but you can override this method to use the metadata during acquisition. + + :param function: The acquisition function to update. + :param models: The models for each tag. + :param datasets: The data from the observer (optional). + :param metadata: Metadata from the observer (optional). + :return: The updated acquisition function. 
+ """ + return self.update_acquisition_function(function, models, datasets=datasets) + class SingleModelAcquisitionBuilder(Generic[ProbabilisticModelType], ABC): """ @@ -115,6 +151,18 @@ def prepare_acquisition_function( models[tag], dataset=None if datasets is None else datasets[tag] ) + def prepare_acquisition_function_with_metadata( + self, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + return self.single_builder.prepare_acquisition_function_with_metadata( + models[tag], + dataset=None if datasets is None else datasets[tag], + metadata=metadata, + ) + def update_acquisition_function( self, function: AcquisitionFunction, @@ -125,6 +173,20 @@ def update_acquisition_function( function, models[tag], dataset=None if datasets is None else datasets[tag] ) + def update_acquisition_function_with_metadata( + self, + function: AcquisitionFunction, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + return self.single_builder.update_acquisition_function_with_metadata( + function, + models[tag], + dataset=None if datasets is None else datasets[tag], + metadata=metadata, + ) + def __repr__(self) -> str: return f"{self.single_builder!r} using tag {tag!r}" @@ -142,6 +204,20 @@ def prepare_acquisition_function( :return: An acquisition function. """ + def prepare_acquisition_function_with_metadata( + self, + model: ProbabilisticModelType, + dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + """ + :param model: The model. + :param dataset: The data to use to build the acquisition function (optional). + :param metadata: Metadata from the observer (optional). + :return: An acquisition function. + """ + return self.prepare_acquisition_function(model, dataset=dataset) + def update_acquisition_function( self, function: AcquisitionFunction, @@ -156,6 +232,21 @@ def update_acquisition_function( """ return self.prepare_acquisition_function(model, dataset=dataset) + def update_acquisition_function_with_metadata( + self, + function: AcquisitionFunction, + model: ProbabilisticModelType, + dataset: Optional[Dataset] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + """ + :param function: The acquisition function to update. + :param model: The model. + :param dataset: The data from the observer (optional). + :return: The updated acquisition function. + """ + return self.update_acquisition_function(function, model, dataset=dataset) + class GreedyAcquisitionFunctionBuilder(Generic[ProbabilisticModelType], ABC): """ @@ -187,6 +278,20 @@ def prepare_acquisition_function( :return: An acquisition function. """ + def prepare_acquisition_function_with_metadata( + self, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, + pending_points: Optional[TensorType] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + """ + Same as prepare_acquisition_function but accepts additional metadata argument. 
+ """ + return self.prepare_acquisition_function( + models, datasets=datasets, pending_points=pending_points + ) + def update_acquisition_function( self, function: AcquisitionFunction, @@ -215,6 +320,26 @@ def update_acquisition_function( models, datasets=datasets, pending_points=pending_points ) + def update_acquisition_function_with_metadata( + self, + function: AcquisitionFunction, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, + pending_points: Optional[TensorType] = None, + new_optimization_step: bool = True, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + """ + Same as update_acquisition_function but accepts additional metadata argument. + """ + return self.update_acquisition_function( + function, + models, + datasets=datasets, + pending_points=pending_points, + new_optimization_step=new_optimization_step, + ) + class SingleModelGreedyAcquisitionBuilder(Generic[ProbabilisticModelType], ABC): """ @@ -247,6 +372,20 @@ def prepare_acquisition_function( pending_points=pending_points, ) + def prepare_acquisition_function_with_metadata( + self, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, + pending_points: Optional[TensorType] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + return self.single_builder.prepare_acquisition_function_with_metadata( + models[tag], + dataset=None if datasets is None else datasets[tag], + pending_points=pending_points, + metadata=metadata, + ) + def update_acquisition_function( self, function: AcquisitionFunction, @@ -263,6 +402,24 @@ def update_acquisition_function( new_optimization_step=new_optimization_step, ) + def update_acquisition_function_with_metadata( + self, + function: AcquisitionFunction, + models: Mapping[Tag, ProbabilisticModelType], + datasets: Optional[Mapping[Tag, Dataset]] = None, + pending_points: Optional[TensorType] = None, + new_optimization_step: bool = True, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + return self.single_builder.update_acquisition_function_with_metadata( + function, + models[tag], + dataset=None if datasets is None else datasets[tag], + pending_points=pending_points, + new_optimization_step=new_optimization_step, + metadata=metadata, + ) + def __repr__(self) -> str: return f"{self.single_builder!r} using tag {tag!r}" @@ -283,6 +440,20 @@ def prepare_acquisition_function( :return: An acquisition function. """ + def prepare_acquisition_function_with_metadata( + self, + model: ProbabilisticModelType, + dataset: Optional[Dataset] = None, + pending_points: Optional[TensorType] = None, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + """ + Same as prepare_acquisition_function but accepts additional metadata argument. + """ + return self.prepare_acquisition_function( + model, dataset=dataset, pending_points=pending_points + ) + def update_acquisition_function( self, function: AcquisitionFunction, @@ -308,6 +479,26 @@ def update_acquisition_function( pending_points=pending_points, ) + def update_acquisition_function_with_metadata( + self, + function: AcquisitionFunction, + model: ProbabilisticModelType, + dataset: Optional[Dataset] = None, + pending_points: Optional[TensorType] = None, + new_optimization_step: bool = True, + metadata: Optional[Mapping[str, Any]] = None, + ) -> AcquisitionFunction: + """ + Same as prepare_acquisition_function but accepts additional metadata argument. 
+ """ + return self.update_acquisition_function( + function, + model, + dataset=dataset, + pending_points=pending_points, + new_optimization_step=new_optimization_step, + ) + class VectorizedAcquisitionFunctionBuilder(AcquisitionFunctionBuilder[ProbabilisticModelType]): """ @@ -349,106 +540,36 @@ def prepare_acquisition_function( models[tag], dataset=None if datasets is None else datasets[tag] ) - def update_acquisition_function( + def prepare_acquisition_function_with_metadata( self, - function: AcquisitionFunction, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, + metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: - return self.single_builder.update_acquisition_function( - function, models[tag], dataset=None if datasets is None else datasets[tag] + return self.single_builder.prepare_acquisition_function_with_metadata( + models[tag], + dataset=None if datasets is None else datasets[tag], + metadata=metadata, ) - def __repr__(self) -> str: - return f"{self.single_builder!r} using tag {tag!r}" - - return _Anon(self) - - -class MetadataAcquisitionFunctionBuilder(AcquisitionFunctionBuilder[ProbabilisticModelType], ABC): - """An :class:`MetadataAcquisitionFunctionBuilder` builds and updates an acquisition function - using additional passed in metadata.""" - - @abstractmethod - def prepare_acquisition_function( - self, - models: Mapping[Tag, ProbabilisticModelType], - datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, - ) -> AcquisitionFunction: - """ - Prepare an acquisition function. We assume that this requires at least models, but - it may sometimes also need data. - - :param models: The models for each tag. - :param datasets: The data from the observer (optional). - :param metadata: Metadata from the observer (optional). - :return: An acquisition function. - """ - - def update_acquisition_function( - self, - function: AcquisitionFunction, - models: Mapping[Tag, ProbabilisticModelType], - datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, - ) -> AcquisitionFunction: - """ - Update an acquisition function. By default this generates a new acquisition function each - time. However, if the function is decorated with `@tf.function`, then you can override - this method to update its variables instead and avoid retracing the acquisition function on - every optimization loop. - - :param function: The acquisition function to update. - :param models: The models for each tag. - :param datasets: The data from the observer (optional). - :param metadata: Metadata from the observer (optional). - :return: The updated acquisition function. - """ - return self.prepare_acquisition_function(models, datasets=datasets, metadata=metadata) - - -class SingleModelMetadataAcquisitionBuilder( - SingleModelAcquisitionBuilder[ProbabilisticModelType], ABC -): - """ - Convenience acquisition function builder for an acquisition function (or component of a - composite acquisition function) that requires only one model, dataset pair. - """ - - def using(self, tag: Tag) -> MetadataAcquisitionFunctionBuilder[ProbabilisticModelType]: - """ - :param tag: The tag for the model, dataset pair to use to build this acquisition function. - :return: An acquisition function builder that selects the model and dataset specified by - ``tag``, as defined in :meth:`prepare_acquisition_function`. 
- """ - - class _Anon(MetadataAcquisitionFunctionBuilder[ProbabilisticModelType]): - def __init__( - self, single_builder: SingleModelMetadataAcquisitionBuilder[ProbabilisticModelType] - ): - self.single_builder = single_builder - - def prepare_acquisition_function( + def update_acquisition_function( self, + function: AcquisitionFunction, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, - metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: - return self.single_builder.prepare_acquisition_function( - models[tag], - dataset=None if datasets is None else datasets[tag], - metadata=metadata, + return self.single_builder.update_acquisition_function( + function, models[tag], dataset=None if datasets is None else datasets[tag] ) - def update_acquisition_function( + def update_acquisition_function_with_metadata( self, function: AcquisitionFunction, models: Mapping[Tag, ProbabilisticModelType], datasets: Optional[Mapping[Tag, Dataset]] = None, metadata: Optional[Mapping[str, Any]] = None, ) -> AcquisitionFunction: - return self.single_builder.update_acquisition_function( + return self.single_builder.update_acquisition_function_with_metadata( function, models[tag], dataset=None if datasets is None else datasets[tag], @@ -460,36 +581,6 @@ def __repr__(self) -> str: return _Anon(self) - @abstractmethod - def prepare_acquisition_function( - self, - model: ProbabilisticModelType, - dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, - ) -> AcquisitionFunction: - """ - :param model: The model. - :param dataset: The data to use to build the acquisition function (optional). - :param metadata: The metadata to use to build the acquisition function (optional). - :return: An acquisition function. - """ - - def update_acquisition_function( - self, - function: AcquisitionFunction, - model: ProbabilisticModelType, - dataset: Optional[Dataset] = None, - metadata: Optional[Mapping[str, Any]] = None, - ) -> AcquisitionFunction: - """ - :param function: The acquisition function to update. - :param model: The model. - :param dataset: The data from the observer (optional). - :param metadata: The metadata from the observer (optional). - :return: The updated acquisition function. - """ - return self.prepare_acquisition_function(model, dataset=dataset, metadata=metadata) - PenalizationFunction = Callable[[TensorType], TensorType] """ diff --git a/trieste/acquisition/rule.py b/trieste/acquisition/rule.py index f9ce5e2241..16651c1fed 100644 --- a/trieste/acquisition/rule.py +++ b/trieste/acquisition/rule.py @@ -58,7 +58,6 @@ AcquisitionFunction, AcquisitionFunctionBuilder, GreedyAcquisitionFunctionBuilder, - MetadataAcquisitionFunctionBuilder, SingleModelAcquisitionBuilder, SingleModelGreedyAcquisitionBuilder, VectorizedAcquisitionFunctionBuilder, @@ -298,37 +297,17 @@ def acquire_with_metadata( :param models: The model for each tag. :param datasets: The known observer query points and observations. Whether this is required depends on the acquisition function used. - :param metadata: Any additional acquisition metadata. This is passed to any - :class:`~trieste.acquisition.MetadataAcquisitionFunctionBuilder` builder, and - ignored otherwise. + :param metadata: Any additional acquisition metadata. (optional) :return: The single (or batch of) points to query. 
""" if self._acquisition_function is None: - if isinstance(self._builder, MetadataAcquisitionFunctionBuilder): - self._acquisition_function = self._builder.prepare_acquisition_function( - models, - datasets=datasets, - metadata=metadata, - ) - else: - self._acquisition_function = self._builder.prepare_acquisition_function( - models, - datasets=datasets, - ) + self._acquisition_function = self._builder.prepare_acquisition_function_with_metadata( + models, datasets=datasets, metadata=metadata + ) else: - if isinstance(self._builder, MetadataAcquisitionFunctionBuilder): - self._acquisition_function = self._builder.update_acquisition_function( - self._acquisition_function, - models, - datasets=datasets, - metadata=metadata, - ) - else: - self._acquisition_function = self._builder.update_acquisition_function( - self._acquisition_function, - models, - datasets=datasets, - ) + self._acquisition_function = self._builder.update_acquisition_function_with_metadata( + self._acquisition_function, models, datasets=datasets, metadata=metadata + ) summary_writer = logging.get_tensorboard_writer() step_number = logging.get_step_number() @@ -354,12 +333,15 @@ def acquire_with_metadata( for i in range( self._num_query_points - 1 ): # greedily allocate remaining batch elements - self._acquisition_function = self._builder.update_acquisition_function( - self._acquisition_function, - models, - datasets=datasets, - pending_points=points, - new_optimization_step=False, + self._acquisition_function = ( + self._builder.update_acquisition_function_with_metadata( + self._acquisition_function, + models, + datasets=datasets, + pending_points=points, + new_optimization_step=False, + metadata=metadata, + ) ) with tf.name_scope(f"EGO.optimizer[{i+1}]"): chosen_point = self._optimizer(search_space, self._acquisition_function) @@ -602,9 +584,7 @@ def acquire_with_metadata( :param search_space: The local acquisition search space for *this step*. :param models: The model of the known data. Uses the single key `OBJECTIVE`. :param datasets: The known observer query points and observations. - :param metadata: Any additional acquisition metadata. This is passed to any - :class:`~trieste.acquisition.MetadataAcquisitionFunctionBuilder` builder, and - ignored otherwise. + :param metadata: Any additional acquisition metadata. (optional) :return: A function that constructs the next acquisition state and the recommended query points from the previous acquisition state. 
""" @@ -618,26 +598,13 @@ def acquire_with_metadata( ) if self._acquisition_function is None: - if isinstance(self._builder, MetadataAcquisitionFunctionBuilder): - self._acquisition_function = self._builder.prepare_acquisition_function( - models, datasets=datasets, metadata=metadata - ) - else: - self._acquisition_function = self._builder.prepare_acquisition_function( - models, - datasets=datasets, - ) + self._acquisition_function = self._builder.prepare_acquisition_function_with_metadata( + models, datasets=datasets, metadata=metadata + ) else: - if isinstance(self._builder, MetadataAcquisitionFunctionBuilder): - self._acquisition_function = self._builder.update_acquisition_function( - self._acquisition_function, models, datasets=datasets, metadata=metadata - ) - else: - self._acquisition_function = self._builder.update_acquisition_function( - self._acquisition_function, - models, - datasets=datasets, - ) + self._acquisition_function = self._builder.update_acquisition_function_with_metadata( + self._acquisition_function, models, datasets=datasets, metadata=metadata + ) def state_func( state: AsynchronousRuleState | None,