Bayesian Optimization with Custom Objectives¶
In this tutorial we demonstrate the use of Xopt to perform Bayesian Optimization on custom objectives. In this case, we develop models of the individual components of the objective function and combine samples from these models to calculate predicted objective values.
In this example we try to maximize the objective function
$$f(x) = \min\left(g_1(x),\, g_2(x)\right), \qquad g_1(x) = x^2, \quad g_2(x) = (x - 2)^2,$$
where each component is modeled by its own GP and the two are combined inside a custom objective.
Define the test problem¶
from xopt.vocs import VOCS
import torch
from xopt.evaluator import Evaluator
from xopt.generators.bayesian import ExpectedImprovementGenerator
from xopt import Xopt
from xopt.generators.bayesian.objectives import CustomXoptObjective
from torch import Tensor
from typing import Optional
# define variables and function objectives
vocs = VOCS(variables={"x": [0.0, 2.0]}, observables=["g1", "g2"])
# define a test function to optimize
def sin_function(input_dict):
    return {"g1": (input_dict["x"]) ** 2, "g2": (input_dict["x"] - 2.0) ** 2}
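Since $g_1$ is increasing and $g_2$ is decreasing on $[0, 2]$, the combined objective $\min(g_1, g_2)$ attains its maximum where the two curves cross, at $x = 1$ with a value of 1. A quick, purely illustrative check of the test function at that point (not part of the original notebook):
# illustrative check: both components equal 1.0 at x = 1.0
print(sin_function({"x": 1.0}))  # {'g1': 1.0, 'g2': 1.0}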
Create Xopt objects¶
Create the evaluator to evaluate our test function and create a generator that uses the Expected Improvement acquisition function to perform Bayesian Optimization. The custom objective is passed to the generator via the custom_objective keyword argument. Note that because we are optimizing a problem with no noise we set use_low_noise_prior=True in the GP model constructor.
class MyObjective(CustomXoptObjective):
    def forward(self, samples: Tensor, X: Optional[Tensor] = None) -> Tensor:
        return torch.min(
            samples[..., self.vocs.output_names.index("g1")],
            samples[..., self.vocs.output_names.index("g2")],
        )
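Inside forward, the trailing dimension of samples is ordered according to vocs.output_names, so indexing by output name picks out the Monte Carlo samples of each component model. A minimal, purely illustrative sanity check on a hand-built tensor (not part of the original notebook; it simply exercises the class defined above):
# illustrative only: apply the custom objective to dummy samples
# columns follow vocs.output_names, i.e. ["g1", "g2"]
objective = MyObjective(vocs)
dummy_samples = torch.tensor([[1.0, 4.0], [3.0, 2.0]])
print(objective(dummy_samples))  # tensor([1., 2.]), the element-wise minimum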
evaluator = Evaluator(function=sin_function)
generator = ExpectedImprovementGenerator(
    vocs=vocs,
    custom_objective=MyObjective(vocs),
)
generator.gp_constructor.use_low_noise_prior = True
X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)
print(X)
X.random_evaluate(2)
n_steps = 5
# test points for plotting
test_x = torch.linspace(*X.vocs.bounds.flatten(), 50).double()
for i in range(n_steps):
    # get the Gaussian process model from the generator
    model = X.generator.train_model()
    X.generator.visualize_model()

    # do the optimization step
    X.step()
Xopt
________________________________
Version: 2.6.4.dev3+g752027b3.d20250606
Data size: 0
Config as YAML:
dump_file: null
evaluator:
  function: __main__.sin_function
  function_kwargs: {}
  max_workers: 1
  vectorized: false
generator:
  computation_time: null
  fixed_features: null
  gp_constructor:
    covar_modules: {}
    custom_noise_prior: null
    mean_modules: {}
    name: standard
    trainable_mean_keys: []
    transform_inputs: true
    use_cached_hyperparameters: false
    use_low_noise_prior: true
  max_travel_distances: null
  model: null
  n_candidates: 1
  n_interpolate_points: null
  n_monte_carlo_samples: 128
  name: expected_improvement
  numerical_optimizer:
    max_iter: 2000
    max_time: 5.0
    n_restarts: 20
    name: LBFGS
  supports_batch_generation: true
  supports_constraints: true
  supports_single_objective: true
  turbo_controller: null
  use_cuda: false
max_evaluations: null
serialize_inline: false
serialize_torch: false
strict: true
vocs:
  constants: {}
  constraints: {}
  objectives: {}
  observables:
  - g1
  - g2
  variables:
    x:
    - 0.0
    - 2.0
# access the collected data
X.data
|   | x | g1 | g2 | xopt_runtime | xopt_error |
|---|---|---|---|---|---|
| 0 | 1.092348 | 1.193225 | 0.823832 | 0.000003 | False |
| 1 | 1.891971 | 3.579556 | 0.011670 | 0.000002 | False |
| 2 | 0.722311 | 0.521733 | 1.632489 | 0.000003 | False |
| 3 | 0.000000 | 0.000000 | 4.000000 | 0.000003 | False |
| 4 | 0.999923 | 0.999846 | 1.000154 | 0.000004 | False |
| 5 | 1.000390 | 1.000779 | 0.999221 | 0.000004 | False |
| 6 | 1.002576 | 1.005159 | 0.994854 | 0.000003 | False |
Getting the optimization result¶
To get the best point (without evaluating it) we ask the generator to predict the optimum based on the posterior mean.
X.generator.get_optimum()
|   | x |
|---|---|
| 0 | 1.000154 |
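If desired, the predicted optimum can then be evaluated with the true function. A minimal sketch (not part of the original notebook, assuming Xopt's evaluate_data method accepts the DataFrame returned by get_optimum):
# illustrative sketch: evaluate the predicted optimum and append it to X.data
optimum = X.generator.get_optimum()
X.evaluate_data(optimum)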
Customizing optimization¶
Each generator has a set of options that can be modified to affect optimization behavior.
X.generator.model_dump()
{'supports_single_objective': True,
 'supports_constraints': True,
 'model': ModelListGP(
     (models): ModuleList(
       (0-1): 2 x SingleTaskGP(
         (likelihood): GaussianLikelihood(
           (noise_covar): HomoskedasticNoise(
             (noise_prior): GammaPrior()
             (raw_noise_constraint): GreaterThan(1.000E-04)
           )
         )
         (mean_module): ConstantMean()
         (covar_module): RBFKernel(
           (lengthscale_prior): LogNormalPrior()
           (raw_lengthscale_constraint): GreaterThan(2.500E-02)
         )
         (outcome_transform): Standardize()
         (input_transform): Normalize()
       )
     )
     (likelihood): LikelihoodList(
       (likelihoods): ModuleList(
         (0-1): 2 x GaussianLikelihood(
           (noise_covar): HomoskedasticNoise(
             (noise_prior): GammaPrior()
             (raw_noise_constraint): GreaterThan(1.000E-04)
           )
         )
       )
     )
   ),
 'n_monte_carlo_samples': 128,
 'turbo_controller': None,
 'use_cuda': False,
 'gp_constructor': {'name': 'standard',
                    'use_low_noise_prior': True,
                    'covar_modules': {},
                    'mean_modules': {},
                    'trainable_mean_keys': [],
                    'transform_inputs': True,
                    'custom_noise_prior': None,
                    'use_cached_hyperparameters': False},
 'numerical_optimizer': {'name': 'LBFGS',
                         'n_restarts': 20,
                         'max_iter': 2000,
                         'max_time': 5.0},
 'max_travel_distances': None,
 'fixed_features': None,
 'computation_time':    training  acquisition_optimization
                      0  0.157378                  0.541453
                      1  0.113592                  0.319399
                      2  0.138428                  0.666759
                      3  0.144869                  3.594479
                      4  0.298776                  0.435665,
 'custom_objective': MyObjective(),
 'n_interpolate_points': None,
 'n_candidates': 1}
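As an illustrative sketch (not part of the original notebook), these options can also be adjusted directly on the generator before generating further points; the attribute names below come from the dump above:
# illustrative only: tweak generator options in place, then take another step
X.generator.n_monte_carlo_samples = 256  # more Monte Carlo samples for the acquisition function
X.generator.numerical_optimizer.n_restarts = 40  # more L-BFGS restarts when optimizing the acquisition
X.step()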