Skip to content

Extremum Seeking Generator

ExtremumSeekingGenerator

Bases: SequentialGenerator

Extremum seeking algorithm.

Reference: "Extremum Seeking-Based Control System for Particle Accelerator Beam Loss Minimization," A. Scheinker, E.-C. Huang, and C. Taylor, doi: 10.1109/TCST.2021.3136133

This algorithm must be stepped serially.

Attributes:

Name Type Description
name str

The name of the generator.

k PositiveFloat

Feedback gain.

oscillation_size PositiveFloat

Oscillation size.

decay_rate PositiveFloat

Decay rate.

_nES int

Number of extremum seeking parameters.

_wES ndarray

Frequencies for extremum seeking.

_dtES float

Time step for extremum seeking.

_aES ndarray

Amplitudes for extremum seeking.

_p_ave ndarray

Average of parameter bounds.

_p_diff ndarray

Difference of parameter bounds.

_amplitude float

Amplitude of oscillation.

_i int

Evaluation counter.

_last_input ndarray

Last input values.

_last_outcome float

Last outcome value.

Methods:

Name Description
add_data

Add new data to the generator.

p_normalize

Normalize parameters.

p_un_normalize

Un-normalize parameters.

generate

Generate a specified number of candidate samples.

Source code in xopt/generators/sequential/extremumseeking.py
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
class ExtremumSeekingGenerator(SequentialGenerator):
    """
    Extremum seeking algorithm.

    Reference:
    Extremum Seeking-Based Control System for Particle Accelerator
    Beam Loss Minimization
    A. Scheinker, E. -C. Huang and C. Taylor
    doi: 10.1109/TCST.2021.3136133

    This algorithm must be stepped serially.

    Attributes
    ----------
    name : str
        The name of the generator.
    k : PositiveFloat
        Feedback gain.
    oscillation_size : PositiveFloat
        Oscillation size.
    decay_rate : PositiveFloat
        Decay rate.
    _nES : int
        Number of extremum seeking parameters.
    _wES : np.ndarray
        Frequencies for extremum seeking.
    _dtES : float
        Time step for extremum seeking.
    _aES : np.ndarray
        Amplitudes for extremum seeking.
    _p_ave : np.ndarray
        Average of parameter bounds.
    _p_diff : np.ndarray
        Difference of parameter bounds.
    _amplitude : float
        Amplitude of oscillation.
    _i : int
        Evaluation counter.
    _last_input : np.ndarray
        Last input values.
    _last_outcome : float
        Last outcome value.

    Methods
    -------
    add_data(self, new_data: pd.DataFrame)
        Add new data to the generator.
    p_normalize(self, p: np.ndarray) -> np.ndarray
        Normalize parameters.
    p_un_normalize(self, p: np.ndarray) -> np.ndarray
        Un-normalize parameters.
    generate(self, n_candidates: int) -> List[Dict[str, float]]
        Generate a specified number of candidate samples.
    """

    name = "extremum_seeking"
    k: PositiveFloat = Field(2.0, description="feedback gain")
    oscillation_size: PositiveFloat = Field(0.1, description="oscillation size")
    decay_rate: PositiveFloat = Field(1.0, description="decay rate")
    supports_single_objective: bool = True

    # Internal ES state; leading underscores keep these out of the model's
    # public (serialized) fields.
    _nES: int = 0
    _wES: np.ndarray = np.array([])
    _dtES: float = 0.0
    _aES: np.ndarray = np.array([])
    _p_ave: np.ndarray = np.array([])
    _p_diff: np.ndarray = np.array([])
    _amplitude: float = 1.0
    _i: int = -1
    _last_input: np.ndarray = None
    _last_outcome: float = None

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # One dither frequency is shared by each consecutive pair of
        # parameters (one gets cos, the other sin), hence ceil(nES / 2)
        # distinct frequencies.
        self._nES = len(self.vocs.variables)
        self._wES = np.linspace(1.0, 1.75, int(np.ceil(self._nES / 2)))
        # Time step: 10 samples per period of the fastest dither frequency.
        self._dtES = 2 * np.pi / (10 * np.max(self._wES))
        self._aES = np.zeros(self._nES)
        for n in np.arange(self._nES):
            # map parameter index -> shared frequency index
            jw = int(np.floor(n / 2))
            self._aES[n] = self._wES[jw] * (self.oscillation_size) ** 2
        # Midpoint and width of each variable's bounds, used by the
        # normalization helpers to map onto [-1, 1].
        bound_low, bound_up = np.array(self.vocs.bounds).T
        self._p_ave = (bound_up + bound_low) / 2
        self._p_diff = bound_up - bound_low

    def _reset(self):
        # Restart the ES step counter and seed the last input/outcome from
        # the most recent row of the stored data.
        self._i = 0
        self._last_input = get_variable_data(self.vocs, self.data).to_numpy()[-1]
        self._last_outcome = get_objective_data(self.vocs, self.data).to_numpy()[-1, 0]

    def _add_data(self, new_data: pd.DataFrame):
        # Only the most recent observation is needed for the serial ES update.
        self.data = new_data.iloc[-1:]
        self._last_input = self.data[self.vocs.variable_names].to_numpy()[0]

        res = get_objective_data(self.vocs, new_data).to_numpy()
        # NOTE(review): reads res[0, 0] while _reset/_set_data read [-1, 0];
        # equivalent only when new_data holds a single row — confirm callers
        # guarantee this (SequentialGenerator.add_data enforces it while the
        # generator is active).
        self._last_outcome = res[0, 0]

        self._i += 1

    def _set_data(self, data: pd.DataFrame):
        # Keep only the last row; ES state depends solely on the latest point.
        self.data = data.iloc[-1:]

        self._i = 0
        self._last_input = get_variable_data(self.vocs, self.data).to_numpy()[-1]
        self._last_outcome = get_objective_data(self.vocs, self.data).to_numpy()[-1, 0]
        # TODO: add proper reload tests

    def p_normalize(self, p: np.ndarray) -> np.ndarray:
        """
        Normalize parameters.

        Maps values from their physical bounds onto [-1, 1].

        Parameters
        ----------
        p : np.ndarray
            The parameters to normalize.

        Returns
        -------
        np.ndarray
            The normalized parameters.
        """
        p_norm = 2.0 * (p - self._p_ave) / self._p_diff
        return p_norm

    def p_un_normalize(self, p: np.ndarray) -> np.ndarray:
        """
        Un-normalize parameters.

        Maps values from [-1, 1] back to their physical bounds.

        Parameters
        ----------
        p : np.ndarray
            The parameters to un-normalize.

        Returns
        -------
        np.ndarray
            The un-normalized parameters.
        """
        p_un_norm = p * self._p_diff / 2.0 + self._p_ave
        return p_un_norm

    def _generate(self, first_gen: bool = False) -> List[Dict[str, float]]:
        """
        Generate next candidate.

        Returns
        -------
        List[Dict[str, float]]
            A list of dictionaries containing the generated samples.
        """
        if first_gen:
            self.reset()

        p_n = self.p_normalize(self._last_input)

        # ES step for each parameter
        p_next_n = np.zeros(self._nES)

        # Loop through each parameter
        for j in np.arange(self._nES):
            # Use the same frequency for each two parameters
            # Alternating Sine and Cosine
            jw = int(np.floor(j / 2))
            if not j % 2:
                # even index: cosine dither; phase is advanced by the scaled
                # last outcome (k * cost) so the dither "seeks" the optimum
                p_next_n[j] = p_n[j] + self._amplitude * self._dtES * np.cos(
                    self._dtES * self._i * self._wES[jw] + self.k * self._last_outcome
                ) * np.sqrt(self._aES[j] * self._wES[jw])
            else:
                # odd index: sine dither at the shared frequency
                p_next_n[j] = p_n[j] + self._amplitude * self._dtES * np.sin(
                    self._dtES * self._i * self._wES[jw] + self.k * self._last_outcome
                ) * np.sqrt(self._aES[j] * self._wES[jw])

            # For each new ES value, check that we stay within min/max constraints
            if p_next_n[j] < -1.0:
                p_next_n[j] = -1.0
            if p_next_n[j] > 1.0:
                p_next_n[j] = 1.0

        p_next = self.p_un_normalize(p_next_n)

        self._amplitude *= self.decay_rate  # decay the osc amplitude

        # Return the next value
        p_next = [float(ele) for ele in p_next]
        return [dict(zip(self.vocs.variable_names, p_next))]

add_data(new_data)

Add new data to the generator.

Parameters:

Name Type Description Default
new_data DataFrame

The new data to add.

required

Raises:

Type Description
ValueError

If the generator is active but no candidate was generated, or if the new data does not contain the last candidate.

Source code in xopt/generators/sequential/sequential_generator.py
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
def add_data(self, new_data: pd.DataFrame):
    """
    Add new data to the generator.

    Parameters
    ----------
    new_data : pd.DataFrame
        The new data to add.

    Raises
    ------
    ValueError
        If the generator is active but no candidate was generated, or if the new data does not contain the last candidate.
    """
    # An active generator only accepts the single point it just proposed.
    if self.is_active:
        if self._last_candidate is None:
            raise SeqGeneratorError(
                "Generator is active, but no candidate was generated. Cannot add data."
            )
        if len(new_data) > 1:
            raise SeqGeneratorError(
                "Cannot add more than one data point when generator is active."
            )
        # confirm the incoming point matches the last generated candidate
        self.validate_point(new_data.iloc[0].to_dict())

    # append to (or initialize) the stored dataset
    if self.data is None:
        self.data = new_data
    else:
        self.data = pd.concat([self.data, new_data], axis=0, ignore_index=True)

    # let the concrete generator update its internal state
    self._add_data(new_data)

generate(n_candidates=1)

Generate a new candidate point.

Parameters:

Name Type Description Default
n_candidates int

Number of candidates to generate, by default 1.

1

Returns:

Type Description
dict

A dictionary representing the candidate point.

Raises:

Type Description
ValueError

If more than one candidate is requested.

Source code in xopt/generators/sequential/sequential_generator.py
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
def generate(self, n_candidates: int = 1) -> dict:
    """
    Generate a new candidate point.

    Parameters
    ----------
    n_candidates : int, optional
        Number of candidates to generate, by default 1.

    Returns
    -------
    dict
        A dictionary representing the candidate point.

    Raises
    ------
    ValueError
        If more than one candidate is requested.
    """
    # sequential generators are strictly one-point-at-a-time
    if n_candidates > 1:
        raise SeqGeneratorError(
            "Sequential generators can only generate one candidate at a time."
        )

    # the first call activates the generator; later calls simply step it
    first_call = not self.is_active
    candidate = self._generate(True) if first_call else self._generate()
    if first_call:
        self.is_active = True

    # remember the proposal so add_data can validate against it later
    self._last_candidate = candidate

    return candidate

model_dump(*args, **kwargs)

overwrite model dump to remove faux class attrs

Source code in xopt/generator.py
152
153
154
155
156
157
158
159
160
def model_dump(self, *args: Any, **kwargs: Any) -> dict[str, Any]:
    """overwrite model dump to remove faux class attrs"""

    dumped = super().model_dump(*args, **kwargs)

    # strip capability flags: they are class-level metadata, not model state
    for faux_attr in ("supports_batch_generation", "supports_multi_objective"):
        dumped.pop(faux_attr, None)

    return dumped

p_normalize(p)

Normalize parameters.

Parameters:

Name Type Description Default
p ndarray

The parameters to normalize.

required

Returns:

Type Description
ndarray

The normalized parameters.

Source code in xopt/generators/sequential/extremumseeking.py
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
def p_normalize(self, p: np.ndarray) -> np.ndarray:
    """
    Normalize parameters.

    Maps values from their physical bounds onto [-1, 1].

    Parameters
    ----------
    p : np.ndarray
        The parameters to normalize.

    Returns
    -------
    np.ndarray
        The normalized parameters.
    """
    # shift to the bounds midpoint, then scale by half the bounds width
    centered = p - self._p_ave
    return 2.0 * centered / self._p_diff

p_un_normalize(p)

Un-normalize parameters.

Parameters:

Name Type Description Default
p ndarray

The parameters to un-normalize.

required

Returns:

Type Description
ndarray

The un-normalized parameters.

Source code in xopt/generators/sequential/extremumseeking.py
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
def p_un_normalize(self, p: np.ndarray) -> np.ndarray:
    """
    Un-normalize parameters.

    Maps values from [-1, 1] back to their physical bounds.

    Parameters
    ----------
    p : np.ndarray
        The parameters to un-normalize.

    Returns
    -------
    np.ndarray
        The un-normalized parameters.
    """
    # scale by half the bounds width, then shift back to the midpoint
    scaled = p * self._p_diff / 2.0
    return scaled + self._p_ave

reset()

Reset the generator.

Source code in xopt/generators/sequential/sequential_generator.py
167
168
169
170
171
172
173
def reset(self):
    """Return the generator to its inactive initial state."""
    # clear activation state and the pending candidate, then run the
    # generator-specific reset hook
    self.is_active, self._last_candidate = False, None
    self._reset()

set_data(data)

Set the full dataset for the generator. Typically only used when loading from a save file. This skips active generator lockout.

Parameters:

Name Type Description Default
data DataFrame

The data to set.

required
Source code in xopt/generators/sequential/sequential_generator.py
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
def set_data(self, data: pd.DataFrame):
    """
    Replace the generator's dataset wholesale.

    Typically only used when loading from a save file; bypasses the
    active-generator lockout that add_data enforces.

    Parameters
    ----------
    data : pd.DataFrame
        The data to set.
    """
    # TODO: make a flag for generator that support multiple data sets
    # only a single initialization is allowed
    if self._data_set:
        raise SeqGeneratorError(
            "Data has already been initialized for this generator."
        )

    self._set_data(data)
    self._data_set = True

validate_point(point)

determine if an input point was generated by the generator

Source code in xopt/generators/sequential/sequential_generator.py
60
61
62
63
64
65
66
67
68
69
70
71
72
def validate_point(self, point: Dict[str, float]):
    """determine if an input point was generated by the generator"""
    # compare the incoming point against the last proposed candidate,
    # variable by variable, with a small relative tolerance
    names = self.vocs.variable_names
    expected = np.array([self._last_candidate[0][name] for name in names])
    observed = np.array([point[name] for name in names]).flatten()
    if not np.allclose(expected, observed, atol=0.0, rtol=1e-6):
        raise SeqGeneratorError(
            "Cannot add data that was not generated by the generator when generator is active. "
            "Call reset() to reset the generator first in order to add data via other methods."
        )

yaml(**kwargs)

serialize first then dump to yaml string

Source code in xopt/pydantic.py
231
232
233
234
235
236
237
238
def yaml(self, **kwargs):
    """serialize first then dump to yaml string"""
    # round-trip through JSON so the model's serialization rules are
    # applied before converting to YAML
    serialized = self.to_json(
        **kwargs,
    )
    return yaml.dump(json.loads(serialized))