Bayesian optimization with fixed features¶
In some contexts a variable/feature needs to be fixed during optimization. However, we
can leverage previous measurements near the fixed variable value to potentially
jump-start optimization using observed model covariances established by the GP
kernel. In this example, we start with a number of random observations in 2D input space
and then proceed with BO at a fixed value for one of the variables. This notebook
uses the 2D Rosenbrock test function as an example. Note that because we are optimizing a problem with no noise we set use_low_noise_prior=True in the GP model constructor.
# set values if testing
# Imports for the fixed-feature Bayesian-optimization example: the Xopt driver,
# the UCB generator, model-visualization helper, and the Rosenbrock test problem.
import os
from xopt.generators.bayesian.visualize import plot_model_prediction
from xopt import Xopt, Evaluator
from xopt.generators.bayesian import UpperConfidenceBoundGenerator
from xopt.resources.test_functions.rosenbrock import (
evaluate_rosenbrock,
make_rosenbrock_vocs,
)
# Ignore all warnings
import warnings
warnings.filterwarnings("ignore")
# Reduce Monte-Carlo sampling and optimizer restarts when the SMOKE_TEST
# environment variable is set (fast CI runs); full settings otherwise.
SMOKE_TEST = os.environ.get("SMOKE_TEST")
NUM_MC_SAMPLES = 1 if SMOKE_TEST else 128
NUM_RESTARTS = 1 if SMOKE_TEST else 20
# make rosenbrock function vocs in 2D
vocs = make_rosenbrock_vocs(2)
# define a fixed value for the BO generator
fixed_features = {"x0": -1.0}
# The generator pins x0 = -1.0 during acquisition optimization; only x1 varies.
generator = UpperConfidenceBoundGenerator(vocs=vocs, fixed_features=fixed_features)
generator.numerical_optimizer.n_restarts = NUM_RESTARTS
generator.n_monte_carlo_samples = NUM_MC_SAMPLES
# Low-noise prior because the Rosenbrock objective is evaluated without noise
# (see the introduction above).
generator.gp_constructor.use_low_noise_prior = True
evaluator = Evaluator(function=evaluate_rosenbrock)
X = Xopt(generator=generator, evaluator=evaluator, vocs=vocs)
X
Xopt
________________________________
Version: 2.6.7.dev55+g7aa2f3618.d20250930
Data size: 0
Config as YAML:
dump_file: null
evaluator:
function: xopt.resources.test_functions.rosenbrock.evaluate_rosenbrock
function_kwargs:
dummy: 1
label: y
max_workers: 1
vectorized: false
generator:
beta: 2.0
computation_time: null
custom_objective: null
fixed_features:
x0: -1.0
gp_constructor:
covar_modules: {}
custom_noise_prior: null
mean_modules: {}
name: standard
trainable_mean_keys: []
transform_inputs: true
use_cached_hyperparameters: false
use_low_noise_prior: true
max_travel_distances: null
model: null
n_candidates: 1
n_interpolate_points: null
n_monte_carlo_samples: 128
name: upper_confidence_bound
numerical_optimizer:
max_iter: 2000
max_time: 5.0
n_restarts: 20
name: LBFGS
supports_batch_generation: true
supports_constraints: true
supports_single_objective: true
turbo_controller: null
use_cuda: false
max_evaluations: null
serialize_inline: false
serialize_torch: false
strict: true
vocs:
constants: {}
constraints: {}
objectives:
y: MINIMIZE
observables: []
variables:
x0:
- -2.0
- 2.0
x1:
- -2.0
- 2.0
Generate some initial random samples in 2D space¶
# Evaluate 10 uniformly random points over the full 2D variable ranges.
X.random_evaluate(10)
| x0 | x1 | y | xopt_runtime | xopt_error | |
|---|---|---|---|---|---|
| 0 | 1.822762 | -0.715318 | 1631.041468 | 0.000007 | False |
| 1 | 1.594683 | 1.152906 | 193.593565 | 0.000004 | False |
| 2 | 0.432707 | -0.510571 | 49.015205 | 0.000003 | False |
| 3 | -0.675611 | 0.990674 | 31.347154 | 0.000002 | False |
| 4 | -1.461406 | 0.165948 | 394.053685 | 0.000002 | False |
| 5 | 1.925545 | -1.567639 | 2783.799728 | 0.000002 | False |
| 6 | 1.081800 | 0.379565 | 62.531409 | 0.000002 | False |
| 7 | 0.222229 | -0.119671 | 3.462950 | 0.000002 | False |
| 8 | -0.982458 | -0.205924 | 141.088909 | 0.000002 | False |
| 9 | 1.339336 | 1.661679 | 1.861292 | 0.000002 | False |
Run BO steps with fixed features¶
# Run five sequential BO iterations; every proposed candidate has x0 fixed at -1.0.
for _ in range(5):
    X.step()

# Show accumulated evaluations (random samples first, then BO picks).
X.data
| x0 | x1 | y | xopt_runtime | xopt_error | |
|---|---|---|---|---|---|
| 0 | 1.822762 | -0.715318 | 1631.041468 | 0.000007 | False |
| 1 | 1.594683 | 1.152906 | 193.593565 | 0.000004 | False |
| 2 | 0.432707 | -0.510571 | 49.015205 | 0.000003 | False |
| 3 | -0.675611 | 0.990674 | 31.347154 | 0.000002 | False |
| 4 | -1.461406 | 0.165948 | 394.053685 | 0.000002 | False |
| 5 | 1.925545 | -1.567639 | 2783.799728 | 0.000002 | False |
| 6 | 1.081800 | 0.379565 | 62.531409 | 0.000002 | False |
| 7 | 0.222229 | -0.119671 | 3.462950 | 0.000002 | False |
| 8 | -0.982458 | -0.205924 | 141.088909 | 0.000002 | False |
| 9 | 1.339336 | 1.661679 | 1.861292 | 0.000002 | False |
| 10 | -1.000000 | -2.000000 | 904.000000 | 0.000008 | False |
| 11 | -1.000000 | 2.000000 | 104.000000 | 0.000006 | False |
| 12 | -1.000000 | 2.000000 | 104.000000 | 0.000006 | False |
| 13 | -1.000000 | 0.989894 | 4.010212 | 0.000006 | False |
| 14 | -1.000000 | 0.719945 | 11.843089 | 0.000006 | False |
Visualize model and evaluations¶
Note that for the BO samples, they all are on the line $x_0=-1$
# Visualize the GP model prediction together with the evaluated points.
# NOTE(review): the original cell failed with
#   TypeError: plot_model_prediction() missing 1 required positional argument: 'tkwargs'
# so we pass tensor keyword arguments explicitly. The dtype/device dict below is
# an assumption — confirm against the Xopt visualization API documentation.
import torch

tkwargs = {"dtype": torch.double, "device": "cpu"}
ax = plot_model_prediction(
    model=X.generator.model,
    vocs=X.vocs,
    data=X.data,
    show_samples=False,
    n_grid=100,
    tkwargs=tkwargs,
)
# First 10 rows of X.data are the random samples; the remainder are BO samples.
ax.plot(
    *X.data[["x0", "x1"]].to_numpy()[:10].T, "+C1", label="random samples", zorder=10
)
ax.plot(*X.data[["x0", "x1"]].to_numpy()[10:].T, "+C3", label="GP samples", zorder=10)
# Mark the fixed-feature line x0 = -1; all BO samples lie on it.
ax.axvline(-1.0, ls="--")
ax.legend();
--------------------------------------------------------------------------- TypeError Traceback (most recent call last) Cell In[5], line 1 ----> 1 ax = plot_model_prediction( 2 model=X.generator.model, 3 vocs=X.vocs, 4 data=X.data, 5 show_samples=False, 6 n_grid=100, 7 ) 8 ax.plot( 9 *X.data[["x0", "x1"]].to_numpy()[:10].T, "+C1", label="random samples", zorder=10 10 ) 11 ax.plot(*X.data[["x0", "x1"]].to_numpy()[10:].T, "+C3", label="GP samples", zorder=10) TypeError: plot_model_prediction() missing 1 required positional argument: 'tkwargs'
Run with fixed feature that is not in vocs¶
We can also run fixed features where the fixed variable is not listed in vocs, as long as the generator data contains a column corresponding to the fixed feature name. To satisfy this requirement, we add the data from the last optimization run.
# make rosenbrock function vocs in 2-D but remove the `x0` name (set to a fixed
# feature in the next cell)
vocs = make_rosenbrock_vocs(2)
vocs.variables = {"x1": [-2.0, 2.0]}

# define a fixed value for the BO generator; "x0" is not in vocs here, so the
# generator must find an x0 column in the data we add below.
fixed_features = {"x0": -1.0}
generator = UpperConfidenceBoundGenerator(vocs=vocs, fixed_features=fixed_features)
generator.numerical_optimizer.n_restarts = NUM_RESTARTS
generator.n_monte_carlo_samples = NUM_MC_SAMPLES
# Consistency fix: the first generator in this example sets the low-noise prior
# because the objective is noiseless; apply the same setting here.
generator.gp_constructor.use_low_noise_prior = True
evaluator = Evaluator(function=evaluate_rosenbrock)
X2 = Xopt(generator=generator, evaluator=evaluator, vocs=vocs)
# Seed the new Xopt object with the previous run's data so the x0 column exists.
X2.add_data(X.data)

# run an optimization step
X2.step()
X2.data
| x0 | x1 | y | xopt_runtime | xopt_error | |
|---|---|---|---|---|---|
| 0 | 1.822762 | -0.715318 | 1631.041468 | 0.000007 | False |
| 1 | 1.594683 | 1.152906 | 193.593565 | 0.000004 | False |
| 2 | 0.432707 | -0.510571 | 49.015205 | 0.000003 | False |
| 3 | -0.675611 | 0.990674 | 31.347154 | 0.000002 | False |
| 4 | -1.461406 | 0.165948 | 394.053685 | 0.000002 | False |
| 5 | 1.925545 | -1.567639 | 2783.799728 | 0.000002 | False |
| 6 | 1.081800 | 0.379565 | 62.531409 | 0.000002 | False |
| 7 | 0.222229 | -0.119671 | 3.462950 | 0.000002 | False |
| 8 | -0.982458 | -0.205924 | 141.088909 | 0.000002 | False |
| 9 | 1.339336 | 1.661679 | 1.861292 | 0.000002 | False |
| 10 | -1.000000 | -2.000000 | 904.000000 | 0.000008 | False |
| 11 | -1.000000 | 2.000000 | 104.000000 | 0.000006 | False |
| 12 | -1.000000 | 2.000000 | 104.000000 | 0.000006 | False |
| 13 | -1.000000 | 0.989894 | 4.010212 | 0.000006 | False |
| 14 | -1.000000 | 0.719945 | 11.843089 | 0.000006 | False |
| 15 | -1.000000 | 1.119363 | 5.424750 | 0.000007 | False |