Multi-objective Bayesian Optimization¶
TNK test problem with $n=2$ variables: $x_i \in [0, \pi],\ i=1,2$

Objectives (both minimized):
- $f_i(x) = x_i$

Constraints:
- $g_1(x) = -x_1^2 - x_2^2 + 1 + 0.1 \cos\left(16 \arctan \frac{x_1}{x_2}\right) \le 0$
- $g_2(x) = (x_1 - 1/2)^2 + (x_2 - 1/2)^2 \le 0.5$
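For reference, the objectives and constraints above can be written out directly. The snippet below is a minimal sketch (the helper name `tnk_sketch` is made up for illustration); the cells that follow use the packaged `evaluate_TNK` function from `xopt.resources.test_functions.tnk` instead.

import numpy as np


def tnk_sketch(inputs: dict) -> dict:
    # Objectives: minimize y1 = x1 and y2 = x2
    x1, x2 = inputs["x1"], inputs["x2"]
    # Constraint values matching the VOCS used below:
    # c1 >= 0 is equivalent to g1(x) <= 0 (arctan2 gives arctan(x1/x2) on this domain)
    c1 = x1**2 + x2**2 - 1.0 - 0.1 * np.cos(16 * np.arctan2(x1, x2))
    # c2 <= 0.5 is equivalent to g2(x)
    c2 = (x1 - 0.5) ** 2 + (x2 - 0.5) ** 2
    return {"y1": x1, "y2": x2, "c1": c1, "c2": c2}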
In [1]:
import os
import warnings
from copy import deepcopy

import numpy as np
import pandas as pd
from matplotlib import pyplot as plt

from xopt import Evaluator, Xopt
from xopt.generators.bayesian.mggpo import MGGPOGenerator
from xopt.resources.test_functions.tnk import evaluate_TNK, tnk_vocs

# Ignore all warnings
warnings.filterwarnings("ignore")

# Reduce sampling/optimization effort when running as a smoke test
SMOKE_TEST = os.environ.get("SMOKE_TEST")
N_MC_SAMPLES = 1 if SMOKE_TEST else 128
NUM_RESTARTS = 1 if SMOKE_TEST else 20

# Evaluator wraps the TNK test function and evaluates candidates in parallel
evaluator = Evaluator(function=evaluate_TNK)
evaluator.max_workers = 10

# Configure the MGGPO generator; the reference point bounds the hypervolume calculation
vocs = deepcopy(tnk_vocs)
gen = MGGPOGenerator(vocs=vocs, reference_point={"y1": 1.5, "y2": 1.5})
gen.n_monte_carlo_samples = N_MC_SAMPLES
gen.numerical_optimizer.n_restarts = NUM_RESTARTS
gen.gp_constructor.use_low_noise_prior = True

# Assemble the Xopt object and evaluate two initial points
X = Xopt(evaluator=evaluator, generator=gen, vocs=vocs)
X.evaluate_data(pd.DataFrame({"x1": [1.0, 0.75], "x2": [0.75, 1.0]}))
X
Out[1]:
Xopt
________________________________
Version: 2.6.7.dev55+g7aa2f3618.d20250930
Data size: 2
Config as YAML:
dump_file: null
evaluator:
  function: xopt.resources.test_functions.tnk.evaluate_TNK
  function_kwargs:
    raise_probability: 0
    random_sleep: 0
    sleep: 0
  max_workers: 10
  vectorized: false
generator:
  computation_time: null
  custom_objective: null
  fixed_features: null
  ga_generator:
    crossover_probability: 0.9
    mutation_probability: 1.0
    output_path: null
    population: null
    population_file: null
    population_size: 64
    supports_constraints: true
    supports_multi_objective: true
    supports_single_objective: true
  gp_constructor:
    covar_modules: {}
    custom_noise_prior: null
    mean_modules: {}
    name: standard
    trainable_mean_keys: []
    transform_inputs: true
    use_cached_hyperparameters: false
    use_low_noise_prior: true
  max_travel_distances: null
  model: null
  n_candidates: 1
  n_interpolate_points: null
  n_monte_carlo_samples: 128
  name: mggpo
  numerical_optimizer:
    max_iter: 2000
    max_time: 5.0
    n_restarts: 20
    name: LBFGS
  population_size: 64
  reference_point:
    y1: 1.5
    y2: 1.5
  supports_batch_generation: true
  supports_constraints: true
  supports_multi_objective: true
  turbo_controller: null
  use_cuda: false
max_evaluations: null
serialize_inline: false
serialize_torch: false
strict: true
vocs:
  constants:
    a: dummy_constant
  constraints:
    c1:
      - GREATER_THAN
      - 0.0
    c2:
      - LESS_THAN
      - 0.5
  objectives:
    y1: MINIMIZE
    y2: MINIMIZE
  observables: []
  variables:
    x1:
      - 0.0
      - 3.14159
    x2:
      - 0.0
      - 3.14159
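The YAML dump above doubles as an input configuration: the same optimizer can be rebuilt from it instead of being assembled in Python. A minimal round-trip sketch, assuming the `yaml()` and `from_yaml()` helpers available on recent Xopt versions:

# Serialize the current state to YAML, then rebuild an equivalent Xopt object from it
yaml_config = X.yaml()
X2 = Xopt.from_yaml(yaml_config)
print(X2.generator.name)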
In [2]:
# Run 10 optimization steps; each step generates and evaluates a batch of candidates
for i in range(10):
    print(i)
    X.step()
0
1
2
3
4
5
6
7
8
9
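Each call to `X.step()` asks the generator for candidates and passes them to the evaluator. The same thing can be done by hand, which is useful when candidates need to be inspected or modified before evaluation. A minimal sketch, assuming the standard Xopt generator/evaluator interface:

# Ask the generator for a small batch of candidate points (a list of dicts),
# inspect them, then evaluate and add the results to the optimizer's data
candidates = X.generator.generate(3)
print(candidates)
X.evaluate_data(pd.DataFrame(candidates))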
In [3]:
X.generator.data
Out[3]:
|   | x1 | x2 | a | y1 | y2 | c1 | c2 | xopt_runtime | xopt_error |
|---|---|---|---|---|---|---|---|---|---|
| 0 | 1.000000 | 0.750000 | dummy_constant | 1.000000 | 0.750000 | 0.626888 | 0.312500 | 0.000160 | False |
| 1 | 0.750000 | 1.000000 | dummy_constant | 0.750000 | 1.000000 | 0.626888 | 0.312500 | 0.000136 | False |
| 2 | 0.095593 | 1.521237 | dummy_constant | 0.095593 | 1.521237 | 1.269615 | 1.206469 | 0.000147 | False |
| 3 | 1.479003 | 0.054022 | dummy_constant | 1.479003 | 0.054022 | 1.106950 | 1.157344 | 0.000130 | False |
| 4 | 1.674079 | 0.124703 | dummy_constant | 1.674079 | 0.124703 | 1.780894 | 1.519310 | 0.000126 | False |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 97 | 0.701029 | 0.747417 | dummy_constant | 0.701029 | 0.747417 | -0.037091 | 0.101628 | 0.000121 | False |
| 98 | 0.563335 | 0.822224 | dummy_constant | 0.563335 | 0.822224 | 0.091672 | 0.107840 | 0.000128 | False |
| 99 | 0.822158 | 0.591116 | dummy_constant | 0.822158 | 0.591116 | 0.110674 | 0.112088 | 0.000122 | False |
| 100 | 0.999358 | 0.243040 | dummy_constant | 0.999358 | 0.243040 | 0.135829 | 0.315387 | 0.000121 | False |
| 101 | 0.618541 | 0.826438 | dummy_constant | 0.618541 | 0.826438 | 0.131194 | 0.120614 | 0.000121 | False |
102 rows × 9 columns
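The accumulated history can also be post-processed directly with pandas. As a rough sketch (assuming the column names shown above), the feasible points and a simple non-dominated subset can be extracted like this:

# Keep only points satisfying both constraints (c1 >= 0 and c2 <= 0.5)
data = X.generator.data
feasible = data[(data["c1"] >= 0.0) & (data["c2"] <= 0.5)]

# Simple O(n^2) non-dominated filter on the two minimized objectives
objs = feasible[["y1", "y2"]].to_numpy()
non_dominated = [
    i
    for i, p in enumerate(objs)
    if not np.any(np.all(objs <= p, axis=1) & np.any(objs < p, axis=1))
]
print(feasible.iloc[non_dominated])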
Plot results¶
In [4]:
fig, ax = plt.subplots()

# Boundary of the first constraint g1 traced in polar form:
# r(theta) = sqrt(1 + 0.1 cos(16 theta)), x1 = r sin(theta), x2 = r cos(theta)
theta = np.linspace(0, np.pi / 2)
r = np.sqrt(1 + 0.1 * np.cos(16 * theta))
x_1 = r * np.sin(theta)
x_2_lower = r * np.cos(theta)
z = np.zeros_like(x_1)

# Mask the region inside the g1 boundary (infeasible for c1)
ax.fill_between(x_1, z, x_2_lower, fc="white")

# Circular region allowed by the second constraint g2
circle = plt.Circle(
    (0.5, 0.5), 0.5**0.5, color="r", alpha=0.25, zorder=0, label="Valid Region"
)
ax.add_patch(circle)

# Plot feasible and infeasible evaluated points in input space
history = pd.concat(
    [X.data, tnk_vocs.feasibility_data(X.data)], axis=1, ignore_index=False
)
ax.plot(*history[["x1", "x2"]][history["feasible"]].to_numpy().T, ".C1")
ax.plot(*history[["x1", "x2"]][~history["feasible"]].to_numpy().T, ".C2")

ax.set_xlim(0, 3.14)
ax.set_ylim(0, 3.14)
ax.set_xlabel("x1")
ax.set_ylabel("x2")
ax.set_aspect("equal")
In [5]:
X.generator.visualize_model(show_feasibility=True)
Out[5]:
(<Figure size 800x1980 with 22 Axes>,
array([[<Axes: title={'center': 'Posterior Mean [y1]'}, xlabel='x1', ylabel='x2'>,
<Axes: title={'center': 'Posterior SD [y1]'}, xlabel='x1', ylabel='x2'>],
[<Axes: title={'center': 'Posterior Mean [y2]'}, xlabel='x1', ylabel='x2'>,
<Axes: title={'center': 'Posterior SD [y2]'}, xlabel='x1', ylabel='x2'>],
[<Axes: title={'center': 'Posterior Mean [c1]'}, xlabel='x1', ylabel='x2'>,
<Axes: title={'center': 'Posterior SD [c1]'}, xlabel='x1', ylabel='x2'>],
[<Axes: title={'center': 'Posterior Mean [c2]'}, xlabel='x1', ylabel='x2'>,
<Axes: title={'center': 'Posterior SD [c2]'}, xlabel='x1', ylabel='x2'>],
[<Axes: title={'center': 'Acq. Function'}, xlabel='x1', ylabel='x2'>,
<Axes: >],
[<Axes: title={'center': 'Feasibility'}, xlabel='x1', ylabel='x2'>,
<Axes: >]], dtype=object))
In [6]:
X.generator.update_pareto_front_history()
X.generator.pareto_front_history.plot(y="hypervolume", label="Hypervolume")
X.generator.pareto_front_history.plot(y="n_non_dominated", label="n_non_dominated")
Out[6]:
<Axes: >
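After updating the Pareto front history, scalar convergence metrics can be read directly from the stored DataFrame. A small sketch using the column names plotted above:

# Report the final hypervolume and the size of the current non-dominated set
hist = X.generator.pareto_front_history
print(f"final hypervolume: {hist['hypervolume'].iloc[-1]:.4f}")
print(f"non-dominated points: {int(hist['n_non_dominated'].iloc[-1])}")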