Skip to content

Numerical Optimizers

GridOptimizer

Bases: NumericalOptimizer

Numerical optimizer that uses a brute-force grid search to find the optimum.

Attributes:

Name Type Description
name str

The name of the optimizer. Default is "grid".

n_grid_points PositiveInt

Number of mesh points per axis to sample. Algorithm time scales as n_grid_points^input_dimension.

Methods:

Name Description
optimize

Optimize the given function within the specified bounds.

Parameters:

Name Type Description Default
function callable

The acquisition function to be optimized.

required
bounds Tensor

The bounds within which to optimize the acquisition function. Must have shape [2, ndim].

required
n_candidates int

Number of candidates to return, default is 1.

1

Returns:

Name Type Description
candidates Tensor

The optimized candidates.

Source code in xopt/numerical_optimizer.py
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
class GridOptimizer(NumericalOptimizer):
    """
    Numerical optimizer that uses a brute-force grid search to find the optimum.

    Attributes
    ----------
    name : str
        The name of the optimizer. Default is "grid".
    n_grid_points : PositiveInt
        Number of mesh points per axis to sample. Algorithm time scales as
        `n_grid_points`^`input_dimension`.
    """

    name: str = Field("grid", frozen=True)
    n_grid_points: PositiveInt = Field(
        10, description="number of grid points per axis used for optimization"
    )

    def optimize(self, function, bounds, n_candidates=1):
        """
        Optimize the given function within the specified bounds using a brute-force grid search.

        Parameters
        ----------
        function : Callable
            The function to be optimized. Evaluated on points of shape
            [n_points, 1, ndim]; expected to return one value per point.
        bounds : Tensor
            A tensor specifying the bounds for the optimization. It must have the shape [2, ndim].
        n_candidates : int, optional
            The number of candidates to generate (default is 1).

        Returns
        -------
        candidates : Tensor
            The optimized candidates, ordered from best to worst.
        """
        assert isinstance(bounds, Tensor)
        # create mesh
        if len(bounds) != 2:
            raise ValueError("bounds must have the shape [2, ndim]")

        dim = len(bounds[0])
        # shrink the grid slightly inside the bounds to avoid evaluating
        # exactly on the boundary (eps is a small offset, not machine epsilon)
        eps = 1e-5
        linspace_list = [
            torch.linspace(
                bounds.T[i][0] + eps, bounds.T[i][1] - eps, self.n_grid_points
            )
            for i in range(dim)
        ]

        xx = torch.meshgrid(*linspace_list, indexing="ij")
        mesh_pts = torch.stack(xx).flatten(start_dim=1).T.double()

        # evaluate the function on grid points (q-batch dimension of 1)
        f_values = function(mesh_pts.unsqueeze(1))

        # rank grid points from best (largest value) to worst; flatten() keeps
        # the index tensor 1-D even when there is a single grid point, where
        # the previous squeeze()/flipud() combination would fail on a 0-d tensor
        indices = torch.argsort(f_values.flatten(), descending=True)
        return mesh_pts[indices[:n_candidates]]

optimize(function, bounds, n_candidates=1)

Optimize the given function within the specified bounds using a brute-force grid search.

Parameters:

Name Type Description Default
function Callable

The function to be optimized.

required
bounds Tensor

A tensor specifying the bounds for the optimization. It must have the shape [2, ndim].

required
n_candidates int

The number of candidates to generate (default is 1).

1

Returns:

Name Type Description
candidates Tensor

The optimized candidates.

Source code in xopt/numerical_optimizer.py
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
def optimize(self, function, bounds, n_candidates=1):
    """
    Run a brute-force grid search of ``function`` over ``bounds`` and return
    the best points found.

    Parameters
    ----------
    function : Callable
        The function to be optimized.
    bounds : Tensor
        A tensor specifying the bounds for the optimization. It must have the shape [2, ndim].
    n_candidates : int, optional
        The number of candidates to generate (default is 1).

    Returns
    -------
    candidates : Tensor
        The optimized candidates.
    """
    assert isinstance(bounds, Tensor)
    if len(bounds) != 2:
        raise ValueError("bounds must have the shape [2, ndim]")

    n_dims = len(bounds[0])
    # nudge the sample grid slightly inside the bounds
    margin = 1e-5
    axes = [
        torch.linspace(
            bounds.T[axis][0] + margin, bounds.T[axis][1] - margin, self.n_grid_points
        )
        for axis in range(n_dims)
    ]

    # build the full cartesian mesh as a [n_points, n_dims] tensor
    grids = torch.meshgrid(*axes, indexing="ij")
    grid_points = torch.stack([g.flatten() for g in grids], dim=-1).double()

    # evaluate the function at every grid point (q-batch dimension of 1)
    values = function(grid_points.unsqueeze(1))

    # ascending sort, then reverse so the best (largest) values come first
    _, order = torch.sort(values)
    ranked = grid_points[order.squeeze().flipud()]
    return ranked[:n_candidates]

yaml(**kwargs)

serialize first then dump to yaml string

Source code in xopt/pydantic.py
231
232
233
234
235
236
237
238
def yaml(self, **kwargs):
    """Serialize the model to JSON first, then dump the result as a YAML string."""
    serialized = self.to_json(**kwargs)
    return yaml.dump(json.loads(serialized))

LBFGSOptimizer

Bases: NumericalOptimizer

LBFGSOptimizer is a numerical optimizer that uses the Limited-memory Broyden–Fletcher–Goldfarb–Shanno (LBFGS) algorithm.

Attributes:

Name Type Description
n_restarts PositiveInt

Number of restarts during acquisition function optimization, default is 20.

max_iter PositiveInt

Maximum number of iterations for the optimizer, default is 2000.

max_time Optional[PositiveFloat]

Maximum time allowed for optimization, default is 5.0 seconds (set to None for no time limit).

Methods:

Name Description
optimize

Optimize the given acquisition function within the specified bounds.

Parameters:

Name Type Description Default
function callable

The acquisition function to be optimized.

required
bounds Tensor

The bounds within which to optimize the acquisition function. Must have shape [2, ndim].

required
n_candidates int

Number of candidates to return, default is 1.

1
**kwargs dict

Additional keyword arguments to pass to the optimizer.

{}

Returns:

Name Type Description
candidates Tensor

The optimized candidates.

Source code in xopt/numerical_optimizer.py
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
class LBFGSOptimizer(NumericalOptimizer):
    """
    LBFGSOptimizer is a numerical optimizer that uses the Limited-memory Broyden–Fletcher–Goldfarb–Shanno (LBFGS) algorithm.

    Attributes
    ----------
    name : str
        The name of the optimizer. Default is "LBFGS".
    n_restarts : PositiveInt
        Number of restarts during acquisition function optimization, default is 20.
    max_iter : PositiveInt
        Maximum number of iterations for the optimizer, default is 2000.
    max_time : Optional[PositiveFloat]
        Maximum time allowed for optimization in seconds, default is 5.0.
        Set to None for no time limit.
    """

    name: str = Field("LBFGS", frozen=True)
    n_restarts: PositiveInt = Field(
        20, description="number of restarts during acquisition function optimization"
    )
    max_iter: PositiveInt = Field(
        2000, description="maximum number of optimization steps"
    )
    max_time: Optional[PositiveFloat] = Field(
        5.0, description="maximum time for optimization in seconds"
    )

    def optimize(
        self,
        function: AcquisitionFunction,
        bounds: Tensor,
        n_candidates: int = 1,
        **kwargs: Any,
    ):
        """
        Optimize the given function within the specified bounds using LBFGS.

        Parameters
        ----------
        function : Callable
            The function to be optimized.
        bounds : Tensor
            A tensor specifying the bounds for the optimization. It must have the shape [2, ndim].
        n_candidates : int, optional
            The number of candidates to generate (default is 1).
        **kwargs : dict
            Additional keyword arguments to be passed to the function optimizer.

        Returns
        -------
        candidates : Tensor
            The optimized candidates.
        """

        assert isinstance(bounds, Tensor)
        if len(bounds) != 2:
            raise ValueError("bounds must have the shape [2, ndim]")

        # empirical testing showed that the max time is overrun slightly on the
        # botorch side; compensate by slightly reducing the max time passed on
        if self.max_time is not None:
            max_time = self.max_time * 0.8 - 0.01
        else:
            max_time = None

        candidates, _ = optimize_acqf(
            acq_function=function,
            bounds=bounds,
            q=n_candidates,
            raw_samples=self.n_restarts,
            num_restarts=self.n_restarts,
            timeout_sec=max_time,
            options={"maxiter": self.max_iter},
            **kwargs,
        )
        return candidates

optimize(function, bounds, n_candidates=1, **kwargs)

Optimize the given function within the specified bounds using LBFGS.

Parameters:

Name Type Description Default
function Callable

The function to be optimized.

required
bounds Tensor

A tensor specifying the bounds for the optimization. It must have the shape [2, ndim].

required
n_candidates int

The number of candidates to generate (default is 1).

1
**kwargs dict

Additional keyword arguments to be passed to the function optimizer.

{}

Returns:

Name Type Description
candidates Tensor

The optimized candidates.

Source code in xopt/numerical_optimizer.py
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
def optimize(
    self,
    function: AcquisitionFunction,
    bounds: Tensor,
    n_candidates: int = 1,
    **kwargs: Any,
):
    """
    Maximize ``function`` over ``bounds`` with botorch's LBFGS-based optimizer.

    Parameters
    ----------
    function : Callable
        The function to be optimized.
    bounds : Tensor
        A tensor specifying the bounds for the optimization. It must have the shape [2, ndim].
    n_candidates : int, optional
        The number of candidates to generate (default is 1).
    **kwargs : dict
        Additional keyword arguments to be passed to the function optimizer.

    Returns
    -------
    candidates : Tensor
        The optimized candidates.
    """
    assert isinstance(bounds, Tensor)
    if len(bounds) != 2:
        raise ValueError("bounds must have the shape [2, ndim]")

    # botorch tends to overrun the requested timeout slightly, so hand it a
    # reduced budget (empirically tuned) to stay within self.max_time
    time_budget = None if self.max_time is None else self.max_time * 0.8 - 0.01

    result, _ = optimize_acqf(
        acq_function=function,
        bounds=bounds,
        q=n_candidates,
        raw_samples=self.n_restarts,
        num_restarts=self.n_restarts,
        timeout_sec=time_budget,
        options={"maxiter": self.max_iter},
        **kwargs,
    )
    return result

yaml(**kwargs)

serialize first then dump to yaml string

Source code in xopt/pydantic.py
231
232
233
234
235
236
237
238
def yaml(self, **kwargs):
    """Serialize the model to JSON first, then dump the result as a YAML string."""
    serialized = self.to_json(**kwargs)
    return yaml.dump(json.loads(serialized))

NumericalOptimizer

Bases: XoptBaseModel, ABC

Base class for numerical optimizers.

Attributes:

Name Type Description
name str

The name of the optimizer. Default is "base_numerical_optimizer".

model_config ConfigDict

Configuration dictionary with extra fields forbidden.

Methods:

Name Description
optimize

Abstract method to optimize a function to produce a number of candidate points that minimize the function.

Source code in xopt/numerical_optimizer.py
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
class NumericalOptimizer(XoptBaseModel, ABC):
    """
    Base class for numerical optimizers.

    Subclasses implement `optimize` to find candidate points that optimize a
    given function within specified bounds.

    Attributes
    ----------
    name : str
        The name of the optimizer. Default is "base_numerical_optimizer".
    model_config : ConfigDict
        Configuration dictionary with extra fields forbidden.

    Methods
    -------
    optimize
        Abstract method to optimize a function to produce a number of candidate points that minimize the function.
    """

    @abstractmethod
    def optimize(
        self,
        function: AcquisitionFunction,
        bounds: Tensor,
        n_candidates: int = 1,
        **kwargs: Any,
    ) -> Tensor:
        """Optimize a function to produce a number of candidate points that minimize the function."""
        # concrete subclasses must override; invoking the base raises
        raise NotImplementedError

optimize(function, bounds, n_candidates=1, **kwargs) abstractmethod

Optimize a function to produce a number of candidate points that minimize the function.

Source code in xopt/numerical_optimizer.py
30
31
32
33
34
35
36
37
38
39
@abstractmethod
def optimize(
    self,
    function: AcquisitionFunction,
    bounds: Tensor,
    n_candidates: int = 1,
    **kwargs: Any,
) -> Tensor:
    """
    Optimize a function to produce a number of candidate points that minimize the function.

    Parameters
    ----------
    function : AcquisitionFunction
        The function to be optimized.
    bounds : Tensor
        The bounds within which to optimize; expected shape is [2, ndim].
    n_candidates : int, optional
        Number of candidates to return (default is 1).
    **kwargs : dict
        Additional keyword arguments for the concrete optimizer.

    Returns
    -------
    Tensor
        The optimized candidates.

    Raises
    ------
    NotImplementedError
        Always, when called on the abstract base.
    """
    raise NotImplementedError

yaml(**kwargs)

serialize first then dump to yaml string

Source code in xopt/pydantic.py
231
232
233
234
235
236
237
238
def yaml(self, **kwargs):
    """Serialize the model to JSON first, then dump the result as a YAML string."""
    serialized = self.to_json(**kwargs)
    return yaml.dump(json.loads(serialized))