
Commit a15f7c4

Authored by Dibyendu-IITKGP, pre-commit-ci[bot], and BradyPlanden
random_search optimiser added in the pints framework (#580)
* random_search optimiser added in the pints framework
* style: pre-commit fixes
* description texts updated
* style: pre-commit fixes
* random_search updated
* population size input in random search modified
* example updated
* unit tests added for randomsearch
* style: pre-commit fixes
* none type boundary handling modified
* style: pre-commit fixes
* updated
* updated
* unit tests updated
* unit test updated
* unit tests modified
* updated
* style: pre-commit fixes
* randomsearch modified
* style: pre-commit fixes
* boundary logic updated
* unit tests updated
* unit tests updated
* unit test changed
* style: pre-commit fixes
* fix: RandomSearch with multistart
* unit test updated
* unit test modified
* suggested changes incorporated
* suggested changes updated
* unit tests for RandomSearch added
* style: pre-commit fixes
* unit tests modified
* style: pre-commit fixes
* changelog updated
* unit tests added
* style: pre-commit fixes
* unit test modified
* Apply suggestions from code review
* Updates unit tests, upper pin to BPX, bugfix _cuckoo and _random_search for population_size setting.

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Brady Planden <[email protected]>
Co-authored-by: Brady Planden <[email protected]>
1 parent c637f62 commit a15f7c4

File tree

7 files changed: +311 −7 lines changed

CHANGELOG.md

Lines changed: 1 addition & 0 deletions

```diff
@@ -22,6 +22,7 @@

 ## Optimisations

+- [#580](https://github.com/pybop-team/PyBOP/pull/580) - Random Search optimiser is implemented.
 - [#588](https://github.com/pybop-team/PyBOP/pull/588) - Makes `minimising` a property of `BaseOptimiser` set by the cost class.
 - [#512](https://github.com/pybop-team/PyBOP/pull/513) - Refactors `LogPosterior` with attributes pointing to composed likelihood object.
 - [#551](https://github.com/pybop-team/PyBOP/pull/551) - Refactors Optimiser arguments, `population_size` and `max_iterations` as default args, improves optimiser docstrings
```
Lines changed: 73 additions & 0 deletions

```python
import numpy as np

import pybop

# Define model
parameter_set = pybop.ParameterSet.pybamm("Chen2020")
parameter_set.update(
    {
        "Negative electrode active material volume fraction": 0.7,
        "Positive electrode active material volume fraction": 0.67,
    }
)
model = pybop.lithium_ion.SPM(parameter_set=parameter_set)

# Fitting parameters
parameters = pybop.Parameters(
    pybop.Parameter(
        "Negative electrode active material volume fraction",
        bounds=[0.4, 0.75],
        initial_value=0.41,
    ),
    pybop.Parameter(
        "Positive electrode active material volume fraction",
        bounds=[0.4, 0.75],
        initial_value=0.41,
    ),
)
experiment = pybop.Experiment(
    [
        (
            "Discharge at 0.5C for 3 minutes (4 second period)",
            "Charge at 0.5C for 3 minutes (4 second period)",
        ),
    ]
)
values = model.predict(initial_state={"Initial SoC": 0.7}, experiment=experiment)

sigma = 0.002
corrupt_values = values["Voltage [V]"].data + np.random.normal(
    0, sigma, len(values["Voltage [V]"].data)
)

# Form dataset
dataset = pybop.Dataset(
    {
        "Time [s]": values["Time [s]"].data,
        "Current function [A]": values["Current [A]"].data,
        "Voltage [V]": corrupt_values,
    }
)

# Generate problem, cost function, and optimisation class
problem = pybop.FittingProblem(model, parameters, dataset)
cost = pybop.GaussianLogLikelihood(problem, sigma0=sigma * 4)
optim = pybop.Optimisation(
    cost,
    optimiser=pybop.RandomSearch,
    max_iterations=100,
)

results = optim.run()

# Plot the timeseries output
pybop.plot.quick(problem, problem_inputs=results.x, title="Optimised Comparison")

# Plot convergence
pybop.plot.convergence(optim)

# Plot the parameter traces
pybop.plot.parameters(optim)

# Plot the cost landscape with optimisation path
pybop.plot.contour(optim, steps=10)
```

pybop/__init__.py

Lines changed: 2 additions & 0 deletions

```diff
@@ -124,6 +124,7 @@
 #

 from .optimisers._cuckoo import CuckooSearchImpl
+from .optimisers._random_search import RandomSearchImpl
 from .optimisers._adamw import AdamWImpl
 from .optimisers._gradient_descent import GradientDescentImpl
 from .optimisers.base_optimiser import BaseOptimiser, OptimisationResult, MultiOptimisationResult
@@ -142,6 +143,7 @@
     SNES,
     XNES,
     CuckooSearch,
+    RandomSearch,
     AdamW,
 )
 from .optimisers.optimisation import Optimisation
```

pybop/optimisers/_cuckoo.py

Lines changed: 5 additions & 6 deletions

```diff
@@ -54,7 +54,6 @@ def __init__(self, x0, sigma0=0.05, boundaries=None, pa=0.25):
         self._dim = len(x0)

         # Population size and abandon rate
-        self._n = self._population_size
         self._pa = pa
         self.step_size = self._sigma0
         self.beta = 1.5
@@ -68,14 +67,14 @@
             self._nests = np.random.uniform(
                 low=self._boundaries.lower(),
                 high=self._boundaries.upper(),
-                size=(self._n, self._dim),
+                size=(self._population_size, self._dim),
             )
         else:
             self._nests = np.random.normal(
-                self._x0, self._sigma0, size=(self._n, self._dim)
+                self._x0, self._sigma0, size=(self._population_size, self._dim)
             )

-        self._fitness = np.full(self._n, np.inf)
+        self._fitness = np.full(self._population_size, np.inf)

         # Initialise best solutions
         self._x_best = np.copy(x0)
@@ -112,7 +111,7 @@ def tell(self, replies):
         self._iterations += 1

         # Compare cuckoos with current nests
-        for i in range(self._n):
+        for i in range(self._population_size):
             f_new = replies[i]
             if f_new < self._fitness[i]:
                 self._nests[i] = self.cuckoos[i]
@@ -122,7 +121,7 @@
                 self._x_best = self.cuckoos[i]

         # Abandon some worse nests
-        n_abandon = int(self._pa * self._n)
+        n_abandon = int(self._pa * self._population_size)
         worst_nests = np.argsort(self._fitness)[-n_abandon:]
         for idx in worst_nests:
             self.abandon_nests(idx)
```
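The `_cuckoo.py` change above removes the cached `self._n` copy of the population size so that `ask()`/`tell()` always read `self._population_size` directly; per the commit message, this is the `population_size`-setting bugfix. A minimal sketch of the pattern being fixed, using hypothetical `CachedSize`/`LiveSize` classes that are not part of the diff:

```python
# Minimal sketch (hypothetical classes, not from the diff) of the bug pattern
# removed above: caching the population size in __init__ freezes it, so a
# later set_population_size() call is silently ignored.


class CachedSize:
    def __init__(self, population_size=10):
        self._population_size = population_size
        self._n = self._population_size  # frozen copy taken at construction

    def set_population_size(self, n):
        self._population_size = n  # self._n is now stale

    def ask(self):
        return self._n  # still uses the old size


class LiveSize:
    def __init__(self, population_size=10):
        self._population_size = population_size

    def set_population_size(self, n):
        self._population_size = n

    def ask(self):
        return self._population_size  # always reflects the latest setting


cached, live = CachedSize(), LiveSize()
cached.set_population_size(20)
live.set_population_size(20)
print(cached.ask(), live.ask())  # 10 20
```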

pybop/optimisers/_random_search.py

Lines changed: 111 additions & 0 deletions

```python
import numpy as np
from pints import PopulationBasedOptimiser


class RandomSearchImpl(PopulationBasedOptimiser):
    """
    Random Search (RS) optimisation algorithm.
    This algorithm explores the parameter space by randomly sampling points.

    The algorithm does the following:
    1. Initialise a population of solutions.
    2. At each iteration, generate `n` random positions within the boundaries.
    3. Evaluate the quality/fitness of the positions.
    4. Replace the best position with an improved position, if found.

    Parameters:
        population_size (optional): Number of solutions to evaluate per iteration.

    References:
        The Random Search algorithm implemented in this work is based on principles outlined
        in "Introduction to Stochastic Search and Optimization: Estimation, Simulation, and
        Control" by Spall, J. C. (2003).

        The implementation inherits from the PINTS PopulationBasedOptimiser.
    """

    def __init__(self, x0, sigma0=0.05, boundaries=None):
        super().__init__(x0, sigma0, boundaries=boundaries)

        # Problem dimensionality
        self._dim = len(x0)

        # Initialise best solution
        self._x_best = np.copy(x0)
        self._f_best = np.inf
        self._running = False
        self._ready_for_tell = False

    def ask(self):
        """
        Returns a list of positions to evaluate in the optimiser-space.
        """
        self._ready_for_tell = True
        self._running = True

        # Generate random solutions within the boundaries, if given
        if self._boundaries:
            self._candidates = np.random.uniform(
                low=self._boundaries.lower(),
                high=self._boundaries.upper(),
                size=(self._population_size, self._dim),
            )
            return self._candidates

        # Otherwise, sample around the initial position
        self._candidates = np.random.normal(
            self._x0, self._sigma0, size=(self._population_size, self._dim)
        )
        return self.clip_candidates(self._candidates)

    def tell(self, replies):
        """
        Receives a list of cost function values from points previously specified
        by `self.ask()`, and updates the optimiser state accordingly.
        """
        if not self._ready_for_tell:
            raise RuntimeError("ask() must be called before tell().")

        # Evaluate solutions and update the best
        for i in range(self._population_size):
            f_new = replies[i]
            if f_new < self._f_best:
                self._f_best = f_new
                self._x_best = self._candidates[i]

    def running(self):
        """
        Returns ``True`` if the optimisation is in progress.
        """
        return self._running

    def x_best(self):
        """
        Returns the best parameter values found so far.
        """
        return self._x_best

    def f_best(self):
        """
        Returns the best score found so far.
        """
        return self._f_best

    def name(self):
        """
        Returns the name of the optimiser.
        """
        return "Random Search"

    def clip_candidates(self, x):
        """
        Clip the input array to the boundaries if available.
        """
        if self._boundaries:
            x = np.clip(x, self._boundaries.lower(), self._boundaries.upper())
        return x

    def _suggested_population_size(self):
        """
        Returns a suggested population size based on the dimension of the parameter space.
        """
        return 4 + int(3 * np.log(self._n_parameters))
```
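Because `RandomSearchImpl` follows the PINTS ask/tell interface, it can also be driven directly rather than through `pybop.Optimisation`. A minimal sketch under that assumption; the toy quadratic cost, the starting point, and the 100-iteration budget are illustrative choices, not from the diff:

```python
# Minimal sketch of driving RandomSearchImpl through the PINTS ask/tell loop.
# The quadratic toy cost and iteration budget are illustrative choices.
import numpy as np

import pybop

optimiser = pybop.RandomSearchImpl(x0=np.array([0.5, 0.5]), sigma0=0.1)
# With 2 parameters, _suggested_population_size() gives 4 + int(3 * ln(2)) = 6,
# so each ask() yields six candidates.

for _ in range(100):
    candidates = optimiser.ask()  # sample a fresh random population
    replies = [np.sum((c - 0.2) ** 2) for c in candidates]  # toy quadratic cost
    optimiser.tell(replies)  # keep the best candidate seen so far

print(optimiser.x_best(), optimiser.f_best())
```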

pybop/optimisers/pints_optimisers.py

Lines changed: 71 additions & 0 deletions

```diff
@@ -10,6 +10,7 @@
     BasePintsOptimiser,
     CuckooSearchImpl,
     GradientDescentImpl,
+    RandomSearchImpl,
 )


@@ -652,3 +653,73 @@ def __init__(
             parallel,
             **optimiser_kwargs,
         )
+
+
+class RandomSearch(BasePintsOptimiser):
+    """
+    Adapter for the Random Search optimiser in PyBOP.
+
+    Random Search is a simple optimisation algorithm that samples parameter sets randomly
+    within the given boundaries and identifies the best solution based on fitness.
+
+    This optimiser has been implemented for benchmarking and comparisons; in the majority
+    of cases, convergence will be better with one of the other optimisers.
+
+    Parameters
+    ----------
+    cost : callable
+        The cost function to be minimised.
+    max_iterations : int, optional
+        Maximum number of iterations for the optimisation.
+    min_iterations : int, optional (default=2)
+        Minimum number of iterations before termination.
+    max_unchanged_iterations : int, optional (default=15)
+        Maximum number of iterations without improvement before termination.
+    multistart : int, optional (default=1)
+        Number of optimiser restarts from randomly sampled positions. These positions
+        are sampled from the priors.
+    parallel : bool, optional (default=False)
+        Whether to run the optimisation in parallel.
+    **optimiser_kwargs : optional
+        Valid PINTS option keys and their values, for example:
+        x0 : array_like
+            Initial position from which optimisation will start.
+        population_size : int
+            Number of solutions to evaluate per iteration.
+        bounds : dict
+            A dictionary with 'lower' and 'upper' keys containing arrays for lower and
+            upper bounds on the parameters.
+        absolute_tolerance : float
+            Absolute tolerance for convergence checking.
+        relative_tolerance : float
+            Relative tolerance for convergence checking.
+        max_evaluations : int
+            Maximum number of function evaluations.
+        threshold : float
+            Threshold value for early termination.
+
+    See Also
+    --------
+    pybop.RandomSearchImpl : PyBOP implementation of the Random Search algorithm.
+    """
+
+    def __init__(
+        self,
+        cost,
+        max_iterations: int = None,
+        min_iterations: int = 2,
+        max_unchanged_iterations: int = 15,
+        multistart: int = 1,
+        parallel: bool = False,
+        **optimiser_kwargs,
+    ):
+        super().__init__(
+            cost,
+            RandomSearchImpl,
+            max_iterations,
+            min_iterations,
+            max_unchanged_iterations,
+            multistart,
+            parallel,
+            **optimiser_kwargs,
+        )
```
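A minimal usage sketch of the new adapter, continuing from the example script above; `cost` stands in for any PyBOP cost object (for example, the `GaussianLogLikelihood` constructed there), and the keyword values are illustrative rather than recommended defaults:

```python
# Minimal usage sketch; `cost` is the GaussianLogLikelihood from the example
# script above, and the keyword values here are illustrative choices.
optim = pybop.RandomSearch(
    cost,
    max_iterations=100,
    max_unchanged_iterations=30,
    population_size=10,  # passed through to RandomSearchImpl
)
results = optim.run()
print(results.x)  # best parameter values found
```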
