import torch
from evox.core import Algorithm, Mutable, Parameter
from evox.utils import clamp
from .utils import min_by
class PSO(Algorithm):
"""The basic Particle Swarm Optimization (PSO) algorithm.
## Class Methods
* `__init__`: Initializes the PSO algorithm with given parameters (population size, lower and upper bounds, inertia weight, cognitive weight, and social weight).
* `step`: Performs a single optimization step using Particle Swarm Optimization (PSO), updating local best positions and fitness values, and adjusting velocity and positions based on inertia, cognitive, and social components.
Note that the `evaluate` method is not defined in this class; it is a proxy for `Problem.evaluate` set by the workflow, so it can only be called inside `step` and `init_step`.
"""
def __init__(
self,
pop_size: int,
lb: torch.Tensor,
ub: torch.Tensor,
w: float = 0.6,
phi_p: float = 2.5,
phi_g: float = 0.8,
device: torch.device | None = None,
):
"""
Initialize the PSO algorithm with the given parameters.
:param pop_size: The size of the population.
:param lb: The lower bounds of the particle positions. Must be a 1D tensor.
:param ub: The upper bounds of the particle positions. Must be a 1D tensor.
:param w: The inertia weight. Defaults to 0.6.
:param phi_p: The cognitive weight. Defaults to 2.5.
:param phi_g: The social weight. Defaults to 0.8.
:param device: The device to use for the tensors. Defaults to None, in which case the current default device is used.
"""
super().__init__()
device = torch.get_default_device() if device is None else device
assert lb.shape == ub.shape and lb.ndim == 1 and ub.ndim == 1 and lb.dtype == ub.dtype
self.pop_size = pop_size
self.dim = lb.shape[0]
# Here, Parameter is used to indicate that these values are hyper-parameters
# so that they can be correctly traced and vector-mapped
self.w = Parameter(w, device=device)
self.phi_p = Parameter(phi_p, device=device)
self.phi_g = Parameter(phi_g, device=device)
# setup
lb = lb[None, :].to(device=device)
ub = ub[None, :].to(device=device)
length = ub - lb
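# positions start uniformly distributed inside the box [lb, ub]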
pop = torch.rand(self.pop_size, self.dim, device=device)
pop = length * pop + lb
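# velocities start uniformly distributed in [-(ub - lb), ub - lb]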
velocity = torch.rand(self.pop_size, self.dim, device=device)
velocity = 2 * length * velocity - length
# write to self
self.lb = lb
self.ub = ub
# mutable
self.pop = Mutable(pop)
self.velocity = Mutable(velocity)
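# fitness records start at +inf so the first evaluation always improves them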
self.fit = Mutable(torch.full((self.pop_size,), torch.inf, device=device))
self.local_best_location = Mutable(pop)
self.local_best_fit = Mutable(torch.full((self.pop_size,), torch.inf, device=device))
self.global_best_location = Mutable(pop[0])
self.global_best_fit = Mutable(torch.tensor(torch.inf, device=device))
def step(self):
"""
Perform a normal optimization step using PSO.
This function evaluates the fitness of the current population, updates the
local best positions and fitness values, and adjusts the velocity and
positions of particles based on inertia, cognitive, and social components.
It ensures that the updated positions and velocities are clamped within the
specified bounds.
The local best positions and fitness values are updated if the current
fitness is better than the recorded local best. The global best position
and fitness are determined using helper functions.
The velocity is updated based on the weighted sum of the previous velocity,
the cognitive component (personal best), and the social component (global
best). The population positions are then updated using the new velocities.
"""
compare = self.local_best_fit > self.fit
self.local_best_location = torch.where(compare[:, None], self.pop, self.local_best_location)
self.local_best_fit = torch.where(compare, self.fit, self.local_best_fit)
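# fold the population into the running global best (min_by keeps the lowest fitness)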
self.global_best_location, self.global_best_fit = min_by(
[self.global_best_location.unsqueeze(0), self.pop],
[self.global_best_fit.unsqueeze(0), self.fit],
)
rg = torch.rand(self.pop_size, self.dim, device=self.fit.device)
rp = torch.rand(self.pop_size, self.dim, device=self.fit.device)
velocity = (
self.w * self.velocity
+ self.phi_p * rp * (self.local_best_location - self.pop)
+ self.phi_g * rg * (self.global_best_location - self.pop)
)
pop = self.pop + velocity
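# keep positions inside the search box; this implementation clamps velocities to the same bounds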
self.pop = clamp(pop, self.lb, self.ub)
self.velocity = clamp(velocity, self.lb, self.ub)
self.fit = self.evaluate(self.pop)
def init_step(self):
"""Perform the first step of the PSO optimization.
See `step` for more details.
"""
self.fit = self.evaluate(self.pop)
self.local_best_fit = self.fit
self.global_best_location, self.global_best_fit = min_by([self.pop], [self.fit])
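# Usage sketch (assumptions flagged below). Normally `evaluate` is attached by an
# evox workflow; the lambda here is a hypothetical stand-in that bypasses that wiring
# purely for illustration, and whether direct attribute assignment works outside a
# workflow may depend on the evox version.
if __name__ == "__main__":
    lb = -10 * torch.ones(10)
    ub = 10 * torch.ones(10)
    algo = PSO(pop_size=50, lb=lb, ub=ub)
    # stand-in for Problem.evaluate: sphere function, one fitness value per particle
    algo.evaluate = lambda pop: (pop**2).sum(dim=1)
    algo.init_step()
    for _ in range(100):
        algo.step()
    print(algo.global_best_fit)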