Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

LogEI change #259

Merged
merged 13 commits into from
Jan 8, 2025
34 changes: 1 addition & 33 deletions docs/examples/bayes_exp/bayesian_exploration.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -33,10 +33,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2023-09-19T18:54:51.865697700Z",
"start_time": "2023-09-19T18:54:48.783739700Z"
},
"execution": {
"iopub.execute_input": "2024-09-13T15:49:35.830514Z",
"iopub.status.busy": "2024-09-13T15:49:35.830266Z",
Expand Down Expand Up @@ -99,10 +95,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2023-09-19T18:54:51.916698600Z",
"start_time": "2023-09-19T18:54:51.867698800Z"
},
"execution": {
"iopub.execute_input": "2024-09-13T15:49:37.522962Z",
"iopub.status.busy": "2024-09-13T15:49:37.522764Z",
Expand All @@ -122,10 +114,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2023-09-19T18:54:55.212579300Z",
"start_time": "2023-09-19T18:54:51.897698400Z"
},
"execution": {
"iopub.execute_input": "2024-09-13T15:49:37.531732Z",
"iopub.status.busy": "2024-09-13T15:49:37.531635Z",
Expand All @@ -147,10 +135,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2023-09-19T18:54:55.227580Z",
"start_time": "2023-09-19T18:54:55.212579300Z"
},
"execution": {
"iopub.execute_input": "2024-09-13T15:49:39.056947Z",
"iopub.status.busy": "2024-09-13T15:49:39.056826Z",
Expand All @@ -171,10 +155,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2023-09-19T18:54:55.947579900Z",
"start_time": "2023-09-19T18:54:55.230580300Z"
},
"execution": {
"iopub.execute_input": "2024-09-13T15:49:39.062334Z",
"iopub.status.busy": "2024-09-13T15:49:39.062246Z",
Expand Down Expand Up @@ -238,10 +218,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2023-09-19T18:54:58.766007500Z",
"start_time": "2023-09-19T18:54:58.724008300Z"
},
"execution": {
"iopub.execute_input": "2024-09-13T15:49:40.599399Z",
"iopub.status.busy": "2024-09-13T15:49:40.599290Z",
Expand All @@ -258,7 +234,7 @@
"for name, val in X.generator.model.named_parameters():\n",
" print(f\"{name}:{val}\")\n",
"\n",
"X.generator.model.models[2].covar_module.base_kernel.lengthscale"
"X.generator.model.models[2].covar_module.lengthscale"
]
},
{
Expand All @@ -279,10 +255,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2023-09-19T18:54:58.776010Z",
"start_time": "2023-09-19T18:54:58.746008900Z"
},
"execution": {
"iopub.execute_input": "2024-09-13T15:49:40.605375Z",
"iopub.status.busy": "2024-09-13T15:49:40.605292Z",
Expand All @@ -302,10 +274,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"ExecuteTime": {
"end_time": "2023-09-19T18:55:02.760921Z",
"start_time": "2023-09-19T18:54:58.766007500Z"
},
"execution": {
"iopub.execute_input": "2024-09-13T15:49:40.611135Z",
"iopub.status.busy": "2024-09-13T15:49:40.611035Z",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -264,7 +264,7 @@
"for name, val in X.generator.model.named_parameters():\n",
" print(f\"{name}:{val}\")\n",
"\n",
"X.generator.model.models[2].covar_module.base_kernel.lengthscale"
"X.generator.model.models[2].covar_module.lengthscale"
]
},
{
Expand Down
17 changes: 0 additions & 17 deletions docs/examples/multi_objective_bayes_opt/mobo.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -242,23 +242,6 @@
"print(candidate[[\"x1\", \"x2\"]].to_numpy())\n",
"ax.plot(*candidate[[\"x1\", \"x2\"]].to_numpy()[0], \"o\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"execution": {
"iopub.execute_input": "2024-09-13T15:57:37.628734Z",
"iopub.status.busy": "2024-09-13T15:57:37.628594Z",
"iopub.status.idle": "2024-09-13T15:57:37.935451Z",
"shell.execute_reply": "2024-09-13T15:57:37.935103Z"
}
},
"outputs": [],
"source": [
"%%time\n",
"candidate = X.generator.generate(1)"
]
}
],
"metadata": {
Expand Down
15 changes: 3 additions & 12 deletions xopt/generators/bayesian/bayesian_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,10 +97,6 @@ class BayesianGenerator(Generator, ABC):
computation_time : Optional[pd.DataFrame]
A data frame tracking computation time in seconds.

log_transform_acquisition_function: Optional[bool]
Flag to determine if final acquisition function value should be
log-transformed before optimization.

n_interpolate_samples: Optional[PositiveInt]
Number of interpolation points to generate between last observation and next
observation, requires n_candidates to be 1.
Expand Down Expand Up @@ -159,16 +155,11 @@ class BayesianGenerator(Generator, ABC):
None,
description="data frame tracking computation time in seconds",
)
log_transform_acquisition_function: Optional[bool] = Field(
False,
description="flag to log transform the acquisition function before optimization",
)
custom_objective: Optional[CustomXoptObjective] = Field(
None,
description="custom objective for optimization, replaces objective specified by VOCS",
)
n_interpolate_points: Optional[PositiveInt] = None
memory_length: Optional[PositiveInt] = None

n_candidates: int = 1

Expand Down Expand Up @@ -507,6 +498,9 @@ def get_acquisition(self, model):
model, acq, self._get_constraint_callables(), sampler=sampler
)

# log transform the result to handle the constraints
acq = LogAcquisitionFunction(acq)

# apply fixed features if specified in the generator
if self.fixed_features is not None:
# get input dim
Expand All @@ -521,9 +515,6 @@ def get_acquisition(self, model):
acq_function=acq, d=dim, columns=columns, values=values
)

if self.log_transform_acquisition_function:
acq = LogAcquisitionFunction(acq)

return acq

def get_optimum(self):
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
from botorch.acquisition import AcquisitionFunction
from botorch.utils import t_batch_mode_transform
from botorch.utils.safe_math import log_softplus
from torch import Tensor
from torch.nn import Module
Expand All @@ -13,7 +12,6 @@ def __init__(
Module.__init__(self)
self.acq_func = acq_function

@t_batch_mode_transform(expected_q=1, assert_output_shape=False)
def forward(self, X: Tensor) -> Tensor:
# apply a softplus transform to avoid numerical gradient issues
return log_softplus(self.acq_func(X), 1e-6)
8 changes: 4 additions & 4 deletions xopt/generators/bayesian/expected_improvement.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import torch
from botorch.acquisition import (
ExpectedImprovement,
qExpectedImprovement,
ScalarizedPosteriorTransform,
LogExpectedImprovement,
qLogExpectedImprovement,
)

from xopt.generators.bayesian.bayesian_generator import (
Expand Down Expand Up @@ -30,7 +30,7 @@ def _get_acquisition(self, model):
if self.n_candidates > 1 or isinstance(objective, CustomXoptObjective):
# MC sampling for generating multiple candidate points
sampler = self._get_sampler(model)
acq = qExpectedImprovement(
acq = qLogExpectedImprovement(
model,
best_f=best_f,
sampler=sampler,
Expand All @@ -42,7 +42,7 @@ def _get_acquisition(self, model):
# note that the analytic version cannot handle custom objectives
weights = set_botorch_weights(self.vocs).to(**self.tkwargs)
posterior_transform = ScalarizedPosteriorTransform(weights)
acq = ExpectedImprovement(
acq = LogExpectedImprovement(
model, best_f=best_f, posterior_transform=posterior_transform
)

Expand Down
6 changes: 4 additions & 2 deletions xopt/generators/bayesian/mggpo.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,9 @@

import pandas as pd
import torch
from botorch.acquisition.multi_objective import qNoisyExpectedHypervolumeImprovement
from botorch.acquisition.multi_objective.logei import (
qLogNoisyExpectedHypervolumeImprovement,
)
from pydantic import Field

from xopt.generators.bayesian.objectives import create_mobo_objective
Expand Down Expand Up @@ -68,7 +70,7 @@ def _get_acquisition(self, model):
inputs = self.get_input_data(self.data)
sampler = self._get_sampler(model)

acq = qNoisyExpectedHypervolumeImprovement(
acq = qLogNoisyExpectedHypervolumeImprovement(
model,
X_baseline=inputs,
prune_baseline=True,
Expand Down
10 changes: 2 additions & 8 deletions xopt/generators/bayesian/mobo.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

import torch
from botorch.acquisition import FixedFeatureAcquisitionFunction
from botorch.acquisition.multi_objective import qNoisyExpectedHypervolumeImprovement
from botorch.acquisition.multi_objective.logei import (
qLogNoisyExpectedHypervolumeImprovement,
)
Expand All @@ -24,7 +23,7 @@ class MOBOGenerator(MultiObjectiveBayesianGenerator):
description="flag to specify if pareto front points are to be used during "
"optimization of the acquisition function",
)
__doc__ = """Implements Multi-Objective Bayesian Optimization using the Expected
__doc__ = """Implements Multi-Objective Bayesian Optimization using the Log Expected
Hypervolume Improvement acquisition function"""

def _get_objective(self):
Expand Down Expand Up @@ -60,12 +59,7 @@ def _get_acquisition(self, model):
inputs = self.get_input_data(self.data)
sampler = self._get_sampler(model)

if self.log_transform_acquisition_function:
acqclass = qLogNoisyExpectedHypervolumeImprovement
else:
acqclass = qNoisyExpectedHypervolumeImprovement

acq = acqclass(
acq = qLogNoisyExpectedHypervolumeImprovement(
model,
X_baseline=inputs,
constraints=self._get_constraint_callables(),
Expand Down
13 changes: 3 additions & 10 deletions xopt/generators/bayesian/upper_confidence_bound.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,19 +34,12 @@ class UpperConfidenceBoundGenerator(BayesianGenerator):
def validate_vocs_without_constraints(cls, v):
if v.constraints:
warnings.warn(
f"Using {cls.__name__} with constraints may lead to numerical issues if the base acquisition "
f"function has negative values."
f"Using {cls.__name__} with constraints will lead to invalid values "
f"if the base acquisition function has negative values. Use with "
f"caution."
)
return v

@field_validator("log_transform_acquisition_function")
def validate_log_transform_acquisition_function(cls, v):
if v:
raise ValueError(
"Log transform cannot be applied to potentially negative UCB "
"acquisition function."
)

def _get_acquisition(self, model):
objective = self._get_objective()
if self.n_candidates > 1 or isinstance(objective, CustomXoptObjective):
Expand Down
1 change: 0 additions & 1 deletion xopt/tests/generators/bayesian/test_mobo.py
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,6 @@ def test_log_mobo(self):
gen = MOBOGenerator(
vocs=tnk_vocs,
reference_point=reference_point,
log_transform_acquisition_function=True,
)
gen = deepcopy(gen)
gen.n_monte_carlo_samples = 20
Expand Down
15 changes: 0 additions & 15 deletions xopt/tests/generators/bayesian/test_upper_confidence_bound.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,21 +88,6 @@ def test_in_xopt(self):
for _ in range(1):
X.step()

def test_positivity(self):
# for UCB to work properly with constraints, it must always be positive.
# to acheive this we set infeasible cost
ucb_gen = UpperConfidenceBoundGenerator(
vocs=TEST_VOCS_BASE,
)
ucb_gen.add_data(
pd.DataFrame({"x1": -1.0, "x2": -1.0, "y1": 100.0, "c1": -100}, index=[0])
)
ucb_gen.train_model()
# evaluate acqf
acqf = ucb_gen.get_acquisition(ucb_gen.model)
with torch.no_grad():
assert acqf(torch.tensor((-1.0, -1.0)).reshape(1, 1, 2)) >= 0.0

def test_fixed_feature(self):
# test with fixed feature not in vocs
gen = UpperConfidenceBoundGenerator(vocs=TEST_VOCS_BASE)
Expand Down
Loading