Multi-objective Bayesian Optimization¶
The TNK test problem has $n=2$ variables: $x_i \in [0, \pi],\ i=1,2$
Objectives:
- $f_i(x) = x_i$
Constraints:
- $g_1(x) = -x_1^2 -x_2^2 + 1 + 0.1 \cos\left(16 \arctan \frac{x_1}{x_2}\right) \le 0$
- $g_2(x) = (x_1 - 1/2)^2 + (x_2-1/2)^2 \le 0.5$
In [1]:
Copied!
# Imports: standard library, then third-party, then Xopt components.
import os
import warnings

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

from xopt import Evaluator, Xopt
from xopt.generators.bayesian import MOBOGenerator
from xopt.resources.test_functions.tnk import evaluate_TNK, tnk_vocs
from xopt.vocs import get_feasibility_data

# Silence library warnings to keep the notebook output readable.
warnings.filterwarnings("ignore")

# Reduced settings when running as a CI smoke test; full settings otherwise.
SMOKE_TEST = os.environ.get("SMOKE_TEST")
N_MC_SAMPLES = 1 if SMOKE_TEST else 128
NUM_RESTARTS = 1 if SMOKE_TEST else 20
N_STEPS = 1 if SMOKE_TEST else 30
MAX_ITER = 1 if SMOKE_TEST else 200

evaluator = Evaluator(function=evaluate_TNK)
print(tnk_vocs.dict())
# Imports: standard library, then third-party, then Xopt components.
import os
import warnings

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

from xopt import Evaluator, Xopt
from xopt.generators.bayesian import MOBOGenerator
from xopt.resources.test_functions.tnk import evaluate_TNK, tnk_vocs
from xopt.vocs import get_feasibility_data

# Silence library warnings to keep the notebook output readable.
warnings.filterwarnings("ignore")

# Reduced settings when running as a CI smoke test; full settings otherwise.
SMOKE_TEST = os.environ.get("SMOKE_TEST")
N_MC_SAMPLES = 1 if SMOKE_TEST else 128
NUM_RESTARTS = 1 if SMOKE_TEST else 20
N_STEPS = 1 if SMOKE_TEST else 30
MAX_ITER = 1 if SMOKE_TEST else 200

evaluator = Evaluator(function=evaluate_TNK)
print(tnk_vocs.dict())
/home/runner/work/Xopt/Xopt/.venv/lib/python3.12/site-packages/pyro/ops/stats.py:527: SyntaxWarning: invalid escape sequence '\g'
we have :math:`ES^{*}(P,Q) \ge ES^{*}(Q,Q)` with equality holding if and only if :math:`P=Q`, i.e.
{'variables': {'x1': {'dtype': None, 'default_value': None, 'domain': [0.0, 3.14159], 'type': 'ContinuousVariable'}, 'x2': {'dtype': None, 'default_value': None, 'domain': [0.0, 3.14159], 'type': 'ContinuousVariable'}}, 'objectives': {'y1': {'dtype': None, 'type': 'MinimizeObjective'}, 'y2': {'dtype': None, 'type': 'MinimizeObjective'}}, 'constraints': {'c1': {'dtype': None, 'value': 0.0, 'type': 'GreaterThanConstraint'}, 'c2': {'dtype': None, 'value': 0.5, 'type': 'LessThanConstraint'}}, 'constants': {'a': {'dtype': None, 'value': 'dummy_constant', 'type': 'Constant'}}, 'observables': {}}
In [2]:
Copied!
# Configure the multi-objective Bayesian optimization generator; the
# reference point bounds the region used for hypervolume computation.
generator = MOBOGenerator(vocs=tnk_vocs, reference_point={"y1": 1.5, "y2": 1.5})
generator.n_monte_carlo_samples = N_MC_SAMPLES
generator.numerical_optimizer.n_restarts = NUM_RESTARTS
generator.numerical_optimizer.max_iter = MAX_ITER
generator.gp_constructor.use_low_noise_prior = True

X = Xopt(generator=generator, evaluator=evaluator)

# Seed the optimizer with two hand-picked evaluations before stepping.
seed_points = pd.DataFrame({"x1": [1.0, 0.75], "x2": [0.75, 1.0]})
X.evaluate_data(seed_points)

# Run the optimization loop, printing the step index for progress.
for step_index in range(N_STEPS):
    print(step_index)
    X.step()
# Configure the multi-objective Bayesian optimization generator; the
# reference point bounds the region used for hypervolume computation.
generator = MOBOGenerator(vocs=tnk_vocs, reference_point={"y1": 1.5, "y2": 1.5})
generator.n_monte_carlo_samples = N_MC_SAMPLES
generator.numerical_optimizer.n_restarts = NUM_RESTARTS
generator.numerical_optimizer.max_iter = MAX_ITER
generator.gp_constructor.use_low_noise_prior = True

X = Xopt(generator=generator, evaluator=evaluator)

# Seed the optimizer with two hand-picked evaluations before stepping.
seed_points = pd.DataFrame({"x1": [1.0, 0.75], "x2": [0.75, 1.0]})
X.evaluate_data(seed_points)

# Run the optimization loop, printing the step index for progress.
for step_index in range(N_STEPS):
    print(step_index)
    X.step()
0
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
In [3]:
Copied!
# Display the DataFrame of evaluations accumulated by the generator
# (inputs x1/x2, objectives y1/y2, constraints c1/c2, runtime bookkeeping).
X.generator.data
X.generator.data
Out[3]:
| x1 | x2 | a | y1 | y2 | c1 | c2 | xopt_runtime | xopt_error | |
|---|---|---|---|---|---|---|---|---|---|
| 0 | 1.000000 | 0.750000 | dummy_constant | 1.000000 | 0.750000 | 0.626888 | 0.312500 | 0.004124 | False |
| 1 | 0.750000 | 1.000000 | dummy_constant | 0.750000 | 1.000000 | 0.626888 | 0.312500 | 0.000274 | False |
| 2 | 0.533506 | 1.467016 | dummy_constant | 0.533506 | 1.467016 | 1.360435 | 0.936242 | 0.002221 | False |
| 3 | 0.424530 | 0.575105 | dummy_constant | 0.424530 | 0.575105 | -0.415826 | 0.011336 | 0.005146 | False |
| 4 | 0.006439 | 0.027089 | dummy_constant | 0.006439 | 0.027089 | -0.916261 | 0.467247 | 0.002707 | False |
| 5 | 3.141590 | 0.000000 | dummy_constant | 3.141590 | 0.000000 | 8.769588 | 7.227998 | 0.005047 | False |
| 6 | 1.075811 | 0.054487 | dummy_constant | 1.075811 | 0.054487 | 0.091365 | 0.530040 | 0.002462 | False |
| 7 | 1.050112 | 0.077596 | dummy_constant | 1.050112 | 0.077596 | 0.070678 | 0.481048 | 0.000295 | False |
| 8 | 0.000000 | 0.709647 | dummy_constant | 0.000000 | 0.709647 | -0.596401 | 0.293952 | 0.000503 | False |
| 9 | 0.675077 | 0.719971 | dummy_constant | 0.675077 | 0.719971 | -0.112956 | 0.079039 | 0.000289 | False |
| 10 | 0.061557 | 1.029337 | dummy_constant | 0.061557 | 1.029337 | 0.005620 | 0.472430 | 0.000258 | False |
| 11 | 0.918135 | 0.547919 | dummy_constant | 0.918135 | 0.547919 | 0.211696 | 0.177133 | 0.000257 | False |
| 12 | 0.203425 | 1.067807 | dummy_constant | 0.203425 | 1.067807 | 0.280756 | 0.410362 | 0.000274 | False |
| 13 | 1.003244 | 0.000000 | dummy_constant | 1.003244 | 0.000000 | -0.093502 | 0.503254 | 0.000274 | False |
| 14 | 0.929191 | 0.433088 | dummy_constant | 0.929191 | 0.433088 | -0.025825 | 0.188682 | 0.000211 | False |
| 15 | 0.429184 | 0.918798 | dummy_constant | 0.429184 | 0.918798 | -0.047531 | 0.180406 | 0.000277 | False |
| 16 | 0.749176 | 0.711948 | dummy_constant | 0.749176 | 0.711948 | -0.023674 | 0.107011 | 0.000282 | False |
| 17 | 0.000000 | 0.040147 | dummy_constant | 0.000000 | 0.040147 | -1.098388 | 0.461464 | 0.000282 | False |
| 18 | 0.841903 | 0.601629 | dummy_constant | 0.841903 | 0.601629 | 0.158391 | 0.127226 | 0.000267 | False |
| 19 | 0.000000 | 0.998973 | dummy_constant | 0.000000 | 0.998973 | -0.102053 | 0.498974 | 0.000267 | False |
| 20 | 1.016075 | 0.277714 | dummy_constant | 1.016075 | 0.277714 | 0.152447 | 0.315745 | 0.000263 | False |
| 21 | 0.566981 | 0.846027 | dummy_constant | 0.566981 | 0.846027 | 0.137205 | 0.124221 | 0.000268 | False |
| 22 | 0.093192 | 1.059117 | dummy_constant | 0.093192 | 1.059117 | 0.113834 | 0.478105 | 0.000201 | False |
| 23 | 1.028702 | 0.043706 | dummy_constant | 1.028702 | 0.043706 | -0.017659 | 0.487730 | 0.000264 | False |
| 24 | 0.030233 | 1.022616 | dummy_constant | 0.030233 | 1.022616 | -0.042368 | 0.493809 | 0.000265 | False |
| 25 | 1.020429 | 0.028379 | dummy_constant | 1.020429 | 0.028379 | -0.048186 | 0.493273 | 0.000270 | False |
| 26 | 0.308193 | 0.960113 | dummy_constant | 0.308193 | 0.960113 | -0.008650 | 0.248494 | 0.000208 | False |
| 27 | 1.039267 | 0.052099 | dummy_constant | 1.039267 | 0.052099 | 0.013221 | 0.491424 | 0.000270 | False |
| 28 | 0.602232 | 0.812353 | dummy_constant | 0.602232 | 0.812353 | 0.093537 | 0.108016 | 0.000262 | False |
| 29 | 0.037359 | 1.024318 | dummy_constant | 0.037359 | 1.024318 | -0.032841 | 0.488946 | 0.000261 | False |
| 30 | 1.046122 | 0.059531 | dummy_constant | 1.046122 | 0.059531 | 0.036503 | 0.492262 | 0.000270 | False |
| 31 | 0.777920 | 0.655526 | dummy_constant | 0.777920 | 0.655526 | 0.014229 | 0.101428 | 0.000262 | False |
Plot results¶
In [4]:
Copied!
# Visualize evaluated points in input space against the TNK feasible region.
fig, ax = plt.subplots()

# Boundary of the g1 constraint in polar form: r(theta) traces the inner
# "wavy" ring over the first quadrant.
theta = np.linspace(0, np.pi / 2)
r = np.sqrt(1 + 0.1 * np.cos(16 * theta))
x_1 = r * np.sin(theta)
x_2_lower = r * np.cos(theta)
z = np.zeros_like(x_1)

# Blank out the region below the g1 boundary.
ax.fill_between(x_1, z, x_2_lower, fc="white")

# g2 constraint: disk of radius sqrt(0.5) centered at (0.5, 0.5).
circle = plt.Circle(
    (0.5, 0.5), 0.5**0.5, color="r", alpha=0.25, zorder=0, label="Valid Region"
)
ax.add_patch(circle)

# Append a boolean feasibility column to the data and plot feasible vs.
# infeasible samples in different colors.
history = pd.concat(
    [X.data, get_feasibility_data(tnk_vocs, X.data)], axis=1, ignore_index=False
)
ax.plot(*history[["x1", "x2"]][history["feasible"]].to_numpy().T, ".C1")
ax.plot(*history[["x1", "x2"]][~history["feasible"]].to_numpy().T, ".C2")

ax.set_xlim(0, 3.14)
ax.set_ylim(0, 3.14)
ax.set_xlabel("x1")
ax.set_ylabel("x2")
ax.set_aspect("equal")
# Visualize evaluated points in input space against the TNK feasible region.
fig, ax = plt.subplots()

# Boundary of the g1 constraint in polar form: r(theta) traces the inner
# "wavy" ring over the first quadrant.
theta = np.linspace(0, np.pi / 2)
r = np.sqrt(1 + 0.1 * np.cos(16 * theta))
x_1 = r * np.sin(theta)
x_2_lower = r * np.cos(theta)
z = np.zeros_like(x_1)

# Blank out the region below the g1 boundary.
ax.fill_between(x_1, z, x_2_lower, fc="white")

# g2 constraint: disk of radius sqrt(0.5) centered at (0.5, 0.5).
circle = plt.Circle(
    (0.5, 0.5), 0.5**0.5, color="r", alpha=0.25, zorder=0, label="Valid Region"
)
ax.add_patch(circle)

# Append a boolean feasibility column to the data and plot feasible vs.
# infeasible samples in different colors.
history = pd.concat(
    [X.data, get_feasibility_data(tnk_vocs, X.data)], axis=1, ignore_index=False
)
ax.plot(*history[["x1", "x2"]][history["feasible"]].to_numpy().T, ".C1")
ax.plot(*history[["x1", "x2"]][~history["feasible"]].to_numpy().T, ".C2")

ax.set_xlim(0, 3.14)
ax.set_ylim(0, 3.14)
ax.set_xlabel("x1")
ax.set_ylabel("x2")
ax.set_aspect("equal")
Plot path through input space¶
In [5]:
Copied!
# Trace the optimizer's path through input space in evaluation order,
# using the feasibility-augmented `history` frame from the previous cell.
ax = history.plot("x1", "x2")
ax.set_ylim(0, 3.14)
ax.set_xlim(0, 3.14)
ax.set_aspect("equal")
ax = history.plot("x1", "x2")
ax.set_ylim(0, 3.14)
ax.set_xlim(0, 3.14)
ax.set_aspect("equal")
In [6]:
Copied!
# Visualize the GP models over the 2D input space: posterior mean and SD
# panels for each objective/constraint plus the acquisition function;
# show_feasibility=True adds a combined feasibility map.
X.generator.visualize_model(show_feasibility=True)
# Rendered duplicate of the call above (notebook export artifact).
X.generator.visualize_model(show_feasibility=True)
Out[6]:
(<Figure size 800x1980 with 22 Axes>,
array([[<Axes: title={'center': 'Posterior Mean [y1]'}, xlabel='x1', ylabel='x2'>,
<Axes: title={'center': 'Posterior SD [y1]'}, xlabel='x1', ylabel='x2'>],
[<Axes: title={'center': 'Posterior Mean [y2]'}, xlabel='x1', ylabel='x2'>,
<Axes: title={'center': 'Posterior SD [y2]'}, xlabel='x1', ylabel='x2'>],
[<Axes: title={'center': 'Posterior Mean [c1]'}, xlabel='x1', ylabel='x2'>,
<Axes: title={'center': 'Posterior SD [c1]'}, xlabel='x1', ylabel='x2'>],
[<Axes: title={'center': 'Posterior Mean [c2]'}, xlabel='x1', ylabel='x2'>,
<Axes: title={'center': 'Posterior SD [c2]'}, xlabel='x1', ylabel='x2'>],
[<Axes: title={'center': 'Acq. Function'}, xlabel='x1', ylabel='x2'>,
<Axes: >],
[<Axes: title={'center': 'Feasibility'}, xlabel='x1', ylabel='x2'>,
<Axes: >]], dtype=object))
In [7]:
Copied!
# Recompute the per-iteration Pareto front summary, then plot how the
# dominated hypervolume grows over the optimization run.
X.generator.update_pareto_front_history()
X.generator.pareto_front_history.plot(y="hypervolume", label="Hypervolume")
X.generator.update_pareto_front_history()
X.generator.pareto_front_history.plot(y="hypervolume", label="Hypervolume")
Out[7]:
<Axes: >
In [8]:
Copied!
# Display the Pareto front history table: hypervolume and number of
# non-dominated points at each iteration.
X.generator.pareto_front_history
X.generator.pareto_front_history
Out[8]:
| iteration | hypervolume | n_non_dominated | |
|---|---|---|---|
| 0 | 0 | 0.375000 | 1 |
| 1 | 1 | 0.500000 | 2 |
| 2 | 2 | 0.500000 | 2 |
| 3 | 3 | 0.500000 | 2 |
| 4 | 4 | 0.500000 | 2 |
| 5 | 5 | 0.500000 | 2 |
| 6 | 6 | 0.500000 | 2 |
| 7 | 7 | 0.802506 | 3 |
| 8 | 8 | 0.802506 | 3 |
| 9 | 9 | 0.802506 | 3 |
| 10 | 10 | 1.126531 | 4 |
| 11 | 11 | 1.173667 | 4 |
| 12 | 12 | 1.173667 | 4 |
| 13 | 13 | 1.173667 | 4 |
| 14 | 14 | 1.173667 | 4 |
| 15 | 15 | 1.173667 | 4 |
| 16 | 16 | 1.173667 | 4 |
| 17 | 17 | 1.173667 | 4 |
| 18 | 18 | 1.204036 | 5 |
| 19 | 19 | 1.204036 | 5 |
| 20 | 20 | 1.213233 | 6 |
| 21 | 21 | 1.260933 | 6 |
| 22 | 22 | 1.260933 | 6 |
| 23 | 23 | 1.260933 | 6 |
| 24 | 24 | 1.260933 | 6 |
| 25 | 25 | 1.260933 | 6 |
| 26 | 26 | 1.260933 | 6 |
| 27 | 27 | 1.274850 | 6 |
| 28 | 28 | 1.282921 | 7 |
| 29 | 29 | 1.282921 | 7 |
| 30 | 30 | 1.282921 | 7 |
| 31 | 31 | 1.292955 | 8 |
In [9]:
Copied!
# Display the full evaluation history stored on the Xopt object.
X.data
X.data
Out[9]:
| x1 | x2 | a | y1 | y2 | c1 | c2 | xopt_runtime | xopt_error | |
|---|---|---|---|---|---|---|---|---|---|
| 0 | 1.000000 | 0.750000 | dummy_constant | 1.000000 | 0.750000 | 0.626888 | 0.312500 | 0.004124 | False |
| 1 | 0.750000 | 1.000000 | dummy_constant | 0.750000 | 1.000000 | 0.626888 | 0.312500 | 0.000274 | False |
| 2 | 0.533506 | 1.467016 | dummy_constant | 0.533506 | 1.467016 | 1.360435 | 0.936242 | 0.002221 | False |
| 3 | 0.424530 | 0.575105 | dummy_constant | 0.424530 | 0.575105 | -0.415826 | 0.011336 | 0.005146 | False |
| 4 | 0.006439 | 0.027089 | dummy_constant | 0.006439 | 0.027089 | -0.916261 | 0.467247 | 0.002707 | False |
| 5 | 3.141590 | 0.000000 | dummy_constant | 3.141590 | 0.000000 | 8.769588 | 7.227998 | 0.005047 | False |
| 6 | 1.075811 | 0.054487 | dummy_constant | 1.075811 | 0.054487 | 0.091365 | 0.530040 | 0.002462 | False |
| 7 | 1.050112 | 0.077596 | dummy_constant | 1.050112 | 0.077596 | 0.070678 | 0.481048 | 0.000295 | False |
| 8 | 0.000000 | 0.709647 | dummy_constant | 0.000000 | 0.709647 | -0.596401 | 0.293952 | 0.000503 | False |
| 9 | 0.675077 | 0.719971 | dummy_constant | 0.675077 | 0.719971 | -0.112956 | 0.079039 | 0.000289 | False |
| 10 | 0.061557 | 1.029337 | dummy_constant | 0.061557 | 1.029337 | 0.005620 | 0.472430 | 0.000258 | False |
| 11 | 0.918135 | 0.547919 | dummy_constant | 0.918135 | 0.547919 | 0.211696 | 0.177133 | 0.000257 | False |
| 12 | 0.203425 | 1.067807 | dummy_constant | 0.203425 | 1.067807 | 0.280756 | 0.410362 | 0.000274 | False |
| 13 | 1.003244 | 0.000000 | dummy_constant | 1.003244 | 0.000000 | -0.093502 | 0.503254 | 0.000274 | False |
| 14 | 0.929191 | 0.433088 | dummy_constant | 0.929191 | 0.433088 | -0.025825 | 0.188682 | 0.000211 | False |
| 15 | 0.429184 | 0.918798 | dummy_constant | 0.429184 | 0.918798 | -0.047531 | 0.180406 | 0.000277 | False |
| 16 | 0.749176 | 0.711948 | dummy_constant | 0.749176 | 0.711948 | -0.023674 | 0.107011 | 0.000282 | False |
| 17 | 0.000000 | 0.040147 | dummy_constant | 0.000000 | 0.040147 | -1.098388 | 0.461464 | 0.000282 | False |
| 18 | 0.841903 | 0.601629 | dummy_constant | 0.841903 | 0.601629 | 0.158391 | 0.127226 | 0.000267 | False |
| 19 | 0.000000 | 0.998973 | dummy_constant | 0.000000 | 0.998973 | -0.102053 | 0.498974 | 0.000267 | False |
| 20 | 1.016075 | 0.277714 | dummy_constant | 1.016075 | 0.277714 | 0.152447 | 0.315745 | 0.000263 | False |
| 21 | 0.566981 | 0.846027 | dummy_constant | 0.566981 | 0.846027 | 0.137205 | 0.124221 | 0.000268 | False |
| 22 | 0.093192 | 1.059117 | dummy_constant | 0.093192 | 1.059117 | 0.113834 | 0.478105 | 0.000201 | False |
| 23 | 1.028702 | 0.043706 | dummy_constant | 1.028702 | 0.043706 | -0.017659 | 0.487730 | 0.000264 | False |
| 24 | 0.030233 | 1.022616 | dummy_constant | 0.030233 | 1.022616 | -0.042368 | 0.493809 | 0.000265 | False |
| 25 | 1.020429 | 0.028379 | dummy_constant | 1.020429 | 0.028379 | -0.048186 | 0.493273 | 0.000270 | False |
| 26 | 0.308193 | 0.960113 | dummy_constant | 0.308193 | 0.960113 | -0.008650 | 0.248494 | 0.000208 | False |
| 27 | 1.039267 | 0.052099 | dummy_constant | 1.039267 | 0.052099 | 0.013221 | 0.491424 | 0.000270 | False |
| 28 | 0.602232 | 0.812353 | dummy_constant | 0.602232 | 0.812353 | 0.093537 | 0.108016 | 0.000262 | False |
| 29 | 0.037359 | 1.024318 | dummy_constant | 0.037359 | 1.024318 | -0.032841 | 0.488946 | 0.000261 | False |
| 30 | 1.046122 | 0.059531 | dummy_constant | 1.046122 | 0.059531 | 0.036503 | 0.492262 | 0.000270 | False |
| 31 | 0.777920 | 0.655526 | dummy_constant | 0.777920 | 0.655526 | 0.014229 | 0.101428 | 0.000262 | False |