Skip to content

Commit 61d04aa

Browse files
author
Weslley da Silva Pereira
committed
Solve a bug in the termination of the acquisition optimizers.
- The pymoo optimization now always terminates after 5000 func evals by default. - Synced pool_size default with DEFAULT_N_MAX_EVALS_OPTIMIZER - Don't use SpaceFillingSampler to generate candidates; it is too expensive. - Use a low-dispersion sequence (LHD) to generate candidates in minimize_surrogate - Improve GP kernel - Avoid using a non-default rtol inside the algorithms
1 parent 1e26fc3 commit 61d04aa

20 files changed

+1346
-4139
lines changed

examples/gosac.ipynb

Lines changed: 486 additions & 3293 deletions
Large diffs are not rendered by default.

examples/legacy_opt/optimization_program_1.py

Lines changed: 3 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -36,17 +36,14 @@
3636

3737
from copy import deepcopy
3838
import importlib
39-
from math import sqrt
4039
from typing import Optional
4140
import numpy as np
4241
import matplotlib.pyplot as plt
4342

4443
import soogo
4544
from soogo import OptimizeResult
46-
from soogo.sampling import dds_uniform_sample
4745
from soogo.acquisition import (
4846
CoordinatePerturbation,
49-
TargetValueAcquisition,
5047
Acquisition,
5148
MinimizeSurrogate,
5249
MultipleAcquisition,
@@ -335,7 +332,7 @@ def main(config: int) -> list[OptimizeResult]:
335332
optres = read_and_run(
336333
data_file="datainput_BraninWithInteger",
337334
acquisitionFunc=CoordinatePerturbation(
338-
sampler=dds_uniform_sample,
335+
sampling_strategy="dds_uniform",
339336
pool_size=200,
340337
weightpattern=[0.3, 0.5, 0.8, 0.95],
341338
sigma=BoundedParameter(0.2, 0.2 * 0.5**5, 0.2),
@@ -351,12 +348,7 @@ def main(config: int) -> list[OptimizeResult]:
351348
elif config == 5:
352349
optres = read_and_run(
353350
data_file="datainput_BraninWithInteger",
354-
acquisitionFunc=MultipleAcquisition(
355-
(
356-
TargetValueAcquisition(),
357-
MaximizeDistance(),
358-
)
359-
),
351+
acquisitionFunc=None,
360352
maxeval=100,
361353
Ntrials=3,
362354
batchSize=1,
@@ -388,10 +380,7 @@ def main(config: int) -> list[OptimizeResult]:
388380
optres = read_and_run(
389381
data_file="datainput_BraninWithInteger",
390382
acquisitionFunc=MultipleAcquisition(
391-
(
392-
MinimizeSurrogate(100, 0.005 * sqrt(2)),
393-
MaximizeDistance(rtol=0.005 * sqrt(2)),
394-
)
383+
(MinimizeSurrogate(), MaximizeDistance())
395384
),
396385
maxeval=100,
397386
Ntrials=3,

examples/legacy_opt/optimization_program_2.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -96,7 +96,6 @@
9696
acquisitionFunc=CoordinatePerturbation(
9797
pool_size=nCand,
9898
weightpattern=[0.3, 0.5],
99-
rtol=1e-3,
10099
sigma=BoundedParameter(0.2, 0.2 * 0.5**5, 0.2),
101100
perturbation_strategy="fixed",
102101
termination=RobustCondition(UnsuccessfulImprovement(0.001), 5),

examples/optimization.ipynb

Lines changed: 654 additions & 22 deletions
Large diffs are not rendered by default.

examples/socemo.ipynb

Lines changed: 108 additions & 692 deletions
Large diffs are not rendered by default.

examples/vlse_benchmark/vlse_bench.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@
2323
__deprecated__ = False
2424

2525
import os
26-
import numpy as np
2726
import pickle
2827
import time
2928
from benchmark import (
@@ -196,8 +195,8 @@ def run_optimizer(
196195
"optimizer": optimize.surrogate_optimization,
197196
"acquisition": acquisition.MultipleAcquisition(
198197
(
199-
acquisition.MinimizeSurrogate(1, 0.005 * np.sqrt(2), seed=42),
200-
acquisition.MaximizeDistance(rtol=0.005 * np.sqrt(2), seed=42),
198+
acquisition.MinimizeSurrogate(seed=42),
199+
acquisition.MaximizeDistance(seed=42),
201200
)
202201
),
203202
}

soogo/acquisition/base.py

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@
3030
DefaultSingleObjectiveTermination,
3131
DefaultMultiObjectiveTermination,
3232
)
33+
from pymoo.util.display.multi import MultiObjectiveOutput
3334

3435
# Local imports
3536
from ..model import Surrogate
@@ -89,7 +90,7 @@ class Acquisition(ABC):
8990
DEFAULT_RTOL = 1e-6
9091

9192
#: Default maximum number of function evaluations for the acquisition
92-
DEFAULT_N_MAX_EVALS_OPTIMIZER = 1000
93+
DEFAULT_N_MAX_EVALS_OPTIMIZER = 5000
9394

9495
def __init__(
9596
self,
@@ -135,39 +136,38 @@ def default_optimizer(
135136
:return: The default optimizer.
136137
"""
137138
if not multi_objective:
139+
termination = DefaultSingleObjectiveTermination(
140+
xtol=self.rtol, n_max_evals=n_max_evals_optimizer
141+
)
138142
if not mixed_integer:
139-
return DE(
140-
termination=DefaultSingleObjectiveTermination(
141-
xtol=self.rtol, n_max_evals=n_max_evals_optimizer
142-
)
143-
)
143+
optim = DE()
144+
optim.termination = termination
145+
return optim
144146
else:
145147
return MixedVariableGA(
146148
eliminate_duplicates=ListDuplicateElimination(),
147149
mating=MixedVariableMating(
148150
eliminate_duplicates=ListDuplicateElimination()
149151
),
150-
termination=DefaultSingleObjectiveTermination(
151-
xtol=self.rtol, n_max_evals=n_max_evals_optimizer
152-
),
152+
termination=termination,
153153
)
154154
else:
155+
termination = DefaultMultiObjectiveTermination(
156+
xtol=self.rtol, n_max_evals=n_max_evals_optimizer
157+
)
155158
if not mixed_integer:
156-
return NSGA2(
157-
termination=DefaultMultiObjectiveTermination(
158-
xtol=self.rtol, n_max_evals=n_max_evals_optimizer
159-
)
160-
)
159+
optim = NSGA2()
160+
optim.termination = termination
161+
return optim
161162
else:
162163
return MixedVariableGA(
163164
eliminate_duplicates=ListDuplicateElimination(),
164165
mating=MixedVariableMating(
165166
eliminate_duplicates=ListDuplicateElimination()
166167
),
167168
survival=RankAndCrowding(),
168-
termination=DefaultMultiObjectiveTermination(
169-
xtol=self.rtol, n_max_evals=n_max_evals_optimizer
170-
),
169+
output=MultiObjectiveOutput(),
170+
termination=termination,
171171
)
172172

173173
@classmethod

soogo/acquisition/maximize_ei.py

Lines changed: 20 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -27,8 +27,9 @@
2727

2828
from .base import Acquisition
2929
from ..model import GaussianProcess
30-
from ..sampling import SpaceFillingSampler
30+
from ..sampling import random_sample
3131
from ..integrations.pymoo import PymooProblem
32+
from .utils import FarEnoughSampleFilter
3233

3334
logger = logging.getLogger(__name__)
3435

@@ -76,21 +77,18 @@ class MaximizeEI(Acquisition):
7677

7778
def __init__(
7879
self,
79-
sampler=None,
80-
pool_size: int = 100,
80+
pool_size: int = Acquisition.DEFAULT_N_MAX_EVALS_OPTIMIZER,
8181
avoid_clusters: bool = True,
82+
n_max_evals_optimizer: Optional[int] = None,
8283
seed=None,
8384
**kwargs,
8485
) -> None:
85-
super().__init__(**kwargs)
86+
super().__init__(
87+
n_max_evals_optimizer=n_max_evals_optimizer or pool_size, **kwargs
88+
)
8689
self.pool_size = pool_size
8790
self.avoid_clusters = avoid_clusters
8891
self.rng = np.random.default_rng(seed)
89-
self.sampler = (
90-
sampler
91-
if sampler is not None
92-
else SpaceFillingSampler(seed=self.rng)
93-
)
9492

9593
def optimize(
9694
self,
@@ -171,30 +169,14 @@ def optimize(
171169
xs = res.X
172170

173171
# Returns xs if n == 1
174-
if res.success and n == 1:
172+
# print(f"MaximizeEI selected point with EI = {-res.F[0]}")
173+
# print(f"At location: x = {res.X}")
174+
# print(f"Success: {res.success}")
175+
if n == 1:
175176
return np.asarray([xs])
176177

177178
# Generate the complete pool of candidates
178-
pool_size = self.pool_size
179-
exclusion_set = (
180-
np.vstack((exclusion_set, surrogateModel.X))
181-
if exclusion_set is not None
182-
else surrogateModel.X
183-
)
184-
if xs is not None:
185-
exclusion_set = np.concatenate((exclusion_set, [xs]), axis=0)
186-
pool_size -= 1
187-
if xbest is not None:
188-
exclusion_set = np.concatenate((exclusion_set, [xbest]), axis=0)
189-
pool_size -= 1
190-
x = (
191-
self.sampler.generate(
192-
self.pool_size, bounds, current_sample=exclusion_set
193-
)
194-
if pool_size > 0
195-
else np.empty((0, dim))
196-
)
197-
179+
x = random_sample(self.pool_size, bounds, seed=self.rng)
198180
if xs is not None:
199181
x = np.concatenate(([xs], x), axis=0)
200182
if xbest is not None:
@@ -287,4 +269,11 @@ def optimize(
287269
iBest[j] = np.argmax(score)
288270
eiCand[iBest[j]] = 0.0 # Remove this candidate expectancy
289271

290-
return x[iBest, :]
272+
exclusion_set = (
273+
np.vstack((exclusion_set, surrogateModel.X))
274+
if exclusion_set is not None
275+
else surrogateModel.X
276+
)
277+
return FarEnoughSampleFilter(exclusion_set, self.tol(bounds))(
278+
x[iBest, :]
279+
)

soogo/acquisition/minimize_mo_surrogate.py

Lines changed: 2 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -32,10 +32,6 @@ class MinimizeMOSurrogate(Acquisition):
3232
"""Obtain pareto-optimal sample points for the multi-objective surrogate
3333
model.
3434
35-
:param optimizer: Continuous multi-objective optimizer. If None, use
36-
NSGA2 from pymoo.
37-
:param mi_optimizer: Mixed-integer multi-objective optimizer. If None, use
38-
MixedVariableGA from pymoo with RankAndCrowding survival strategy.
3935
:param seed: Seed for random number generator.
4036
4137
.. attribute:: rng
@@ -44,12 +40,8 @@ class MinimizeMOSurrogate(Acquisition):
4440
4541
"""
4642

47-
def __init__(
48-
self, optimizer=None, mi_optimizer=None, seed=None, **kwargs
49-
) -> None:
50-
super().__init__(
51-
optimizer, mi_optimizer, multi_objective=True, **kwargs
52-
)
43+
def __init__(self, seed=None, **kwargs) -> None:
44+
super().__init__(multi_objective=True, **kwargs)
5345
self.rng = np.random.default_rng(seed)
5446

5547
def optimize(

soogo/acquisition/minimize_surrogate.py

Lines changed: 17 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
from scipy.spatial.distance import cdist
2424
from scipy.special import gamma
2525
from scipy.optimize import minimize
26+
from scipy.stats.qmc import LatinHypercube
2627
from typing import Optional
2728

2829
from .base import Acquisition
@@ -76,8 +77,6 @@ class MinimizeSurrogate(Acquisition):
7677
collected for the new sample.
7778
7879
:param pool_size: Number of uniform candidates generated each iteration.
79-
:param rtol: Minimum distance between a candidate and already selected
80-
points, relative to the domain size. Default is ``1e-3``.
8180
:param seed: Seed or random number generator.
8281
8382
.. attribute:: pool_size
@@ -98,9 +97,12 @@ class MinimizeSurrogate(Acquisition):
9897
"""
9998

10099
def __init__(
101-
self, pool_size: int, rtol: float = 1e-3, seed=None, **kwargs
100+
self,
101+
pool_size: int = Acquisition.DEFAULT_N_MAX_EVALS_OPTIMIZER,
102+
seed=None,
103+
**kwargs,
102104
) -> None:
103-
super().__init__(rtol=rtol, **kwargs)
105+
super().__init__(**kwargs)
104106
self.pool_size = pool_size
105107
self.rng = np.random.default_rng(seed)
106108

@@ -147,12 +149,15 @@ def optimize(
147149
)
148150

149151
# Local space to store information
150-
candidates = np.empty((self.pool_size * maxiter, dim))
151-
distCandidates = np.empty(
152-
(self.pool_size * maxiter, self.pool_size * maxiter)
152+
candidates = random_sample(
153+
self.pool_size,
154+
bounds,
155+
iindex=surrogateModel.iindex,
156+
seed=LatinHypercube(d=dim, seed=self.rng),
153157
)
154-
fcand = np.empty(self.pool_size * maxiter)
155-
startpID = np.full((self.pool_size * maxiter,), False)
158+
distCandidates = np.empty((self.pool_size, self.pool_size))
159+
fcand = np.empty(self.pool_size)
160+
startpID = np.full((self.pool_size,), False)
156161
selected = np.empty((n, dim))
157162

158163
# Create a KDTree with the training data points
@@ -165,9 +170,10 @@ def optimize(
165170

166171
iter = 0
167172
k = 0
173+
iter_pool_size = int(np.ceil(self.pool_size / maxiter))
168174
while iter < maxiter and k < n and remevals > 0:
169-
iStart = iter * self.pool_size
170-
iEnd = (iter + 1) * self.pool_size
175+
iStart = iter * iter_pool_size
176+
iEnd = min((iter + 1) * iter_pool_size, self.pool_size)
171177

172178
# if computational budget is exhausted, then return
173179
if remevals <= iEnd - iStart:
@@ -179,14 +185,6 @@ def optimize(
179185
# Consider only the best points to start local minimization
180186
counterLocalStart = iEnd // maxiter
181187

182-
# Choose candidate points uniformly in the search space
183-
candidates[iStart:iEnd, :] = random_sample(
184-
self.pool_size,
185-
bounds,
186-
iindex=surrogateModel.iindex,
187-
seed=self.rng,
188-
)
189-
190188
# Compute the distance between the candidate points
191189
distCandidates[iStart:iEnd, iStart:iEnd] = cdist(
192190
candidates[iStart:iEnd, :], candidates[iStart:iEnd, :]

0 commit comments

Comments
 (0)