Time-Dependent Bayesian Optimization
In this example we demonstrate time-dependent optimization. Here we are not only interested in finding an optimal point in input space, but also in tracking that optimum as it drifts over time.
# set values if testing
import os
import time
import warnings
import torch
from matplotlib import pyplot as plt
from tqdm import trange
from xopt.generators.bayesian import TDUpperConfidenceBoundGenerator
from xopt.vocs import VOCS
from xopt.evaluator import Evaluator
from xopt import Xopt
SMOKE_TEST = os.environ.get("SMOKE_TEST")
N_MC_SAMPLES = 1 if SMOKE_TEST else 128
NUM_RESTARTS = 1 if SMOKE_TEST else 20
N_STEPS = 1 if SMOKE_TEST else 250
warnings.filterwarnings("ignore")
Time-dependent test problem
Optimization is carried out over a single variable x. The test function is a simple quadratic whose minimum location drifts as a function of time t.
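In symbols (matching the code below), the minimum location k(t) and the objective g(x, t) are
$$k(t) = \begin{cases} 0.25\,\sin(0.6\,t) + 10^{-3}\,t, & t < 50,\\ -1.5\times 10^{-2}\,(t - 50), & t \ge 50,\end{cases} \qquad g(x, t) = \bigl(x - k(t)\bigr)^{2}.$$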
Define test functions
# location of time dependent minimum
def k(t_):
return torch.where(
t_ < 50, 0.25 * torch.sin(t_ * 6 / 10.0) + 0.1e-2 * t_, -1.5e-2 * (t_ - 50.0)
)
# define function in time and position space
def g(x_, t_):
return (x_ - k(t_)) ** 2
# create callable function for Xopt
def f(inputs):
x_ = inputs["x"]
current_time = time.time()
t_ = current_time - start_time
y_ = g(x_, torch.tensor(t_))
return {"y": float(y_), "time": float(current_time)}
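As a quick sanity check of the test functions (a hypothetical manual call, not part of the original example): at t ≈ 0 the minimum sits at k(0) = 0, so evaluating at x = 0.5 should give y ≈ (0.5 - 0)**2 = 0.25.
start_time = time.time()  # reference time used inside f
print(f({"x": 0.5}))  # expect roughly {"y": 0.25, "time": <current unix time>}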
Define Xopt objects, including the optimization algorithm
variables = {"x": [-1, 1]}
objectives = {"y": "MINIMIZE"}
vocs = VOCS(variables=variables, objectives=objectives)
evaluator = Evaluator(function=f)
Run optimization
generator = TDUpperConfidenceBoundGenerator(
vocs=vocs,
beta=0.01,
added_time=0.1,
forgetting_time=10.0,
)
generator.n_monte_carlo_samples = N_MC_SAMPLES
generator.numerical_optimizer.n_restarts = NUM_RESTARTS
generator.max_travel_distances = [0.1]
generator.gp_constructor.use_low_noise_prior = True
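# Hedged notes on the settings above (added for clarity; not part of the original example):
# - N_MC_SAMPLES and NUM_RESTARTS are reduced under SMOKE_TEST to keep test runs fast.
# - max_travel_distances = [0.1] limits how far the proposed "x" value may move per step,
#   expressed as a fraction of its allowed range.
# - use_low_noise_prior biases the GP likelihood towards small observation noise,
#   which suits this noiseless test function.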
start_time = time.time()
X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)
X.random_evaluate(2)
for _ in trange(N_STEPS):
# note that in this example we can ignore warnings if computation
# time is greater than added time
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=RuntimeWarning)
X.step()
time.sleep(0.1)
88%|████████▊ | 219/250 [01:12<00:10, 3.03it/s]
ModelFittingError Traceback (most recent call last)
(raised during X.step(): BayesianGenerator.train_model() -> TimeDependentModelConstructor.build_model() -> ModelConstructor.build_single_task_gp() -> botorch fit_gpytorch_mll())
ModelFittingError: All attempts to fit the model have failed.
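Before visualizing the model, it can help to glance at the data collected so far; a minimal check (X.data is a pandas DataFrame with one row per evaluation, including the variable x, the objective y, and the recorded evaluation time):
print(X.data[["x", "y", "time"]].tail())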
Visualize the GP model of the objective function and plot the trajectory
data = X.data
xbounds = generator.vocs.bounds
tbounds = [data["time"].min(), data["time"].max()]
model = X.generator.model
n = 100
t = torch.linspace(*tbounds, n, dtype=torch.double)
x = torch.linspace(*xbounds.flatten(), n, dtype=torch.double)
tt, xx = torch.meshgrid(t, x)
pts = torch.hstack([ele.reshape(-1, 1) for ele in (tt, xx)]).double()
tt, xx = tt.numpy(), xx.numpy()
# NOTE: the model inputs are such that t is the last dimension
gp_pts = torch.flip(pts, dims=[-1])
gt_vals = g(gp_pts.T[0], gp_pts.T[1] - start_time)
with torch.no_grad():
post = model.posterior(gp_pts)
mean = post.mean
std = torch.sqrt(post.variance)
fig, ax = plt.subplots()
ax.set_title("model mean")
ax.set_xlabel("unix time")
ax.set_ylabel("x")
c = ax.pcolor(tt, xx, mean.reshape(n, n), rasterized=True)
ax.plot(data["time"].to_numpy(), data["x"].to_numpy(), "oC1", label="samples")
ax.plot(t, k(t - start_time), "C3--", label="ideal path", zorder=10)
ax.legend()
fig.colorbar(c)
fig2, ax2 = plt.subplots()
ax2.set_title("model uncertainty")
ax2.set_xlabel("unix time")
ax2.set_ylabel("x")
c = ax2.pcolor(tt, xx, std.reshape(n, n))
fig2.colorbar(c)
fig3, ax3 = plt.subplots()
ax3.set_title("ground truth value")
ax3.set_xlabel("unix time")
ax3.set_ylabel("x")
c = ax3.pcolor(tt, xx, gt_vals.reshape(n, n))
fig3.colorbar(c)
ax2.plot(data["time"].to_numpy(), data["x"].to_numpy(), "oC1")
ax3.plot(data["time"].to_numpy(), data["x"].to_numpy(), "oC1")
Plot the acquisition function
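For reference, the quantity plotted below is (up to Xopt's internal sign handling for minimization) the standard upper confidence bound evaluated on the time-aware GP,
$$\alpha_{\mathrm{UCB}}(x, t) = \mu(x, t) + \sqrt{\beta}\,\sigma(x, t),$$
with $\beta = 0.01$ as set above; the one-dimensional slice at the end fixes the time input, presumably at the generator's target prediction time.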
# note that target time is only updated during the generate call
target_time = X.generator.target_prediction_time
print(target_time - start_time)
my_acq_func = X.generator.get_acquisition(model)
with torch.no_grad():
acq_pts = x.unsqueeze(-1).unsqueeze(-1)
full_acq = my_acq_func.acq_func(gp_pts.unsqueeze(1))
fixed_acq = my_acq_func(acq_pts)
fig, ax = plt.subplots()
c = ax.pcolor(tt, xx, full_acq.reshape(n, n))
ax.set_xlabel("unix time")
ax.set_ylabel("x")
ax.set_title("acquisition function")
fig.colorbar(c)
fig2, ax2 = plt.subplots()
ax2.plot(x.flatten(), fixed_acq.flatten())
ax2.set_xlabel("x")
ax2.set_ylabel("acquisition function")
ax2.set_title("acquisition function at last time step")
69.80084657669067
Run Time-Dependent BO with Model Caching
Instead of retraining the GP model hyperparameters at every step, we can hold on to previously determined hyperparameters by setting
use_cached_hyperparameters=True
in the model constructor. This reduces the time needed to make decisions, giving faster feedback in time-critical
optimization tasks. However, it can come at the cost of model accuracy when the
target function changes behavior (a change in lengthscale, for example).
generator = TDUpperConfidenceBoundGenerator(
vocs=vocs,
beta=0.01,
added_time=0.1,
forgetting_time=20.0,
)
generator.n_monte_carlo_samples = N_MC_SAMPLES
generator.numerical_optimizer.n_restarts = NUM_RESTARTS
generator.max_travel_distances = [0.1]
start_time = time.time()
X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)
X.random_evaluate(2)
for i in trange(N_STEPS):
# note that in this example we can ignore warnings if computation time is greater
# than added time
if i == 50:
X.generator.gp_constructor.use_cached_hyperparameters = True
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=RuntimeWarning)
X.step()
time.sleep(0.1)
100%|██████████| 250/250 [01:08<00:00, 3.64it/s]
# plot total computation time
ax = X.generator.computation_time.sum(axis=1).plot()
ax.set_xlabel("Iteration")
ax.set_ylabel("total BO computation time (s)")
Text(0, 0.5, 'total BO computation time (s)')
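To quantify the benefit of caching (a hypothetical follow-up, not part of the original example), we can compare the average per-step computation time before and after iteration 50, where cached hyperparameters were switched on:
# mean BO computation time per generate call, before/after caching is enabled
total = X.generator.computation_time.sum(axis=1)
print("mean time, first 50 calls: ", total.iloc[:50].mean())
print("mean time, remaining calls:", total.iloc[50:].mean())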
data = X.data
xbounds = generator.vocs.bounds
tbounds = [data["time"].min(), data["time"].max()]
model = X.generator.model
n = 100
t = torch.linspace(*tbounds, n, dtype=torch.double)
x = torch.linspace(*xbounds.flatten(), n, dtype=torch.double)
tt, xx = torch.meshgrid(t, x)
pts = torch.hstack([ele.reshape(-1, 1) for ele in (tt, xx)]).double()
tt, xx = tt.numpy(), xx.numpy()
# NOTE: the model inputs are such that t is the last dimension
gp_pts = torch.flip(pts, dims=[-1])
gt_vals = g(gp_pts.T[0], gp_pts.T[1] - start_time)
with torch.no_grad():
post = model.posterior(gp_pts)
mean = post.mean
std = torch.sqrt(post.variance)
fig, ax = plt.subplots()
ax.set_title("model mean")
ax.set_xlabel("unix time")
ax.set_ylabel("x")
c = ax.pcolor(tt, xx, mean.reshape(n, n))
ax.plot(data["time"].to_numpy(), data["x"].to_numpy(), "oC1", label="samples")
ax.plot(t, k(t - start_time), "C3--", label="ideal path", zorder=10)
ax.legend()
fig.colorbar(c)
fig2, ax2 = plt.subplots()
ax2.set_title("model uncertainty")
ax2.set_xlabel("unix time")
ax2.set_ylabel("x")
c = ax2.pcolor(tt, xx, std.reshape(n, n))
fig2.colorbar(c)
fig3, ax3 = plt.subplots()
ax3.set_title("ground truth value")
ax3.set_xlabel("unix time")
ax3.set_ylabel("x")
c = ax3.pcolor(tt, xx, gt_vals.reshape(n, n))
fig3.colorbar(c)
ax2.plot(data["time"].to_numpy(), data["x"].to_numpy(), "oC1")
ax3.plot(data["time"].to_numpy(), data["x"].to_numpy(), "oC1")