From ba4ea474ef87b4cf164bbfe870419b166bfab4c4 Mon Sep 17 00:00:00 2001
From: Yash Solanki <252yash@gmail.com>
Date: Mon, 2 Jun 2025 20:00:22 +0530
Subject: [PATCH 1/3] Feat: Add COBYLA optimizer (1 / n)

- Add `COBYLA` class to `local_opt`
- Add example to `examples/optimization_algorithms`

Fixes #73
---
 examples/optimization_algorithms/cobyla.py |  26 +++
 src/gradient_free_optimizers/__init__.py   |   2 +
 .../optimizer_search/COBYLA.py             |  39 ++++
 .../optimizer_search/__init__.py           |   2 +
 .../optimizers/__init__.py                 |   2 +
 .../core_optimizer/core_optimizer.py       |   1 +
 .../optimizers/local_opt/COBYLA.py         | 174 ++++++++++++++++++
 .../optimizers/local_opt/__init__.py       |   2 +
 8 files changed, 248 insertions(+)
 create mode 100644 examples/optimization_algorithms/cobyla.py
 create mode 100644 src/gradient_free_optimizers/optimizer_search/COBYLA.py
 create mode 100644 src/gradient_free_optimizers/optimizers/local_opt/COBYLA.py

diff --git a/examples/optimization_algorithms/cobyla.py b/examples/optimization_algorithms/cobyla.py
new file mode 100644
index 00000000..922904e3
--- /dev/null
+++ b/examples/optimization_algorithms/cobyla.py
@@ -0,0 +1,26 @@
+import numpy as np
+from gradient_free_optimizers import COBYLA
+
+
+def sphere_function(para: np.ndarray):
+    x = para[0]
+    y = para[1]
+
+    return -(x * x + y * y)
+
+def constraint_1(para):
+    return para[0] > -5
+
+search_space = {
+    "x": np.arange(-10, 10, 0.1),
+    "y": np.arange(-10, 10, 0.1),
+}
+
+opt = COBYLA(
+    search_space=search_space,
+    rho_beg=1.0,
+    rho_end=0.01,
+    x_0=np.array([0.0, 0.0]),
+    constraints=[constraint_1],
+)
+opt.search(sphere_function, n_iter=10)

diff --git a/src/gradient_free_optimizers/__init__.py b/src/gradient_free_optimizers/__init__.py
index 417cc246..0f5fb7d2 100644
--- a/src/gradient_free_optimizers/__init__.py
+++ b/src/gradient_free_optimizers/__init__.py
@@ -31,6 +31,7 @@
     TreeStructuredParzenEstimators,
     ForestOptimizer,
     EnsembleOptimizer,
+    COBYLA,
 )
 
 
@@ -58,4 +59,5 @@
     "TreeStructuredParzenEstimators",
     "ForestOptimizer",
     "EnsembleOptimizer",
+    "COBYLA",
 ]

diff --git a/src/gradient_free_optimizers/optimizer_search/COBYLA.py b/src/gradient_free_optimizers/optimizer_search/COBYLA.py
new file mode 100644
index 00000000..cfd9b120
--- /dev/null
+++ b/src/gradient_free_optimizers/optimizer_search/COBYLA.py
@@ -0,0 +1,39 @@
+# Author: Simon Blanke
+# Email: simon.blanke@yahoo.com
+# License: MIT License
+
+from typing import List, Dict, Literal, Union
+
+from ..search import Search
+from ..optimizers import COBYLA as _COBYLA
+
+
+class COBYLA(_COBYLA, Search):
+    def __init__(
+        self,
+        rho_beg,
+        rho_end,
+        x_0,
+        search_space: Dict[str, list],
+        initialize: Dict[
+            Literal["grid", "vertices", "random", "warm_start"],
+            Union[int, list[dict]],
+        ] = {"grid": 4, "random": 2, "vertices": 4},
+        constraints: List[callable] = [],
+        random_state: int = None,
+        rand_rest_p: float = 0,
+        nth_process: int = None,
+    ):
+        super().__init__(
+            search_space=search_space,
+            rho_beg=rho_beg,
+            rho_end=rho_end,
+            x_0=x_0,
+            initialize=initialize,
+            constraints=constraints,
+            random_state=random_state,
+            rand_rest_p=rand_rest_p,
+            nth_process=nth_process,
+        )

diff --git a/src/gradient_free_optimizers/optimizer_search/__init__.py b/src/gradient_free_optimizers/optimizer_search/__init__.py
index 66e301b9..361c78b5 100644
--- a/src/gradient_free_optimizers/optimizer_search/__init__.py
+++ b/src/gradient_free_optimizers/optimizer_search/__init__.py
@@ -25,6 +25,7 @@
 from .tree_structured_parzen_estimators import TreeStructuredParzenEstimators
 from .forest_optimization import ForestOptimizer
 from .ensemble_optimizer import EnsembleOptimizer
+from .COBYLA import COBYLA
 
 
 __all__ = [
@@ -51,4 +52,5 @@
     "TreeStructuredParzenEstimators",
     "ForestOptimizer",
     "EnsembleOptimizer",
+    "COBYLA",
 ]

diff --git a/src/gradient_free_optimizers/optimizers/__init__.py b/src/gradient_free_optimizers/optimizers/__init__.py
index d4ba1d78..13d58152 100644
--- a/src/gradient_free_optimizers/optimizers/__init__.py
+++ b/src/gradient_free_optimizers/optimizers/__init__.py
@@ -8,6 +8,7 @@
     RepulsingHillClimbingOptimizer,
     SimulatedAnnealingOptimizer,
     DownhillSimplexOptimizer,
+    COBYLA,
 )
 
 from .global_opt import (
@@ -68,4 +69,5 @@
     "TreeStructuredParzenEstimators",
     "ForestOptimizer",
     "EnsembleOptimizer",
+    "COBYLA",
 ]

diff --git a/src/gradient_free_optimizers/optimizers/core_optimizer/core_optimizer.py b/src/gradient_free_optimizers/optimizers/core_optimizer/core_optimizer.py
index b89bee26..664912e8 100644
--- a/src/gradient_free_optimizers/optimizers/core_optimizer/core_optimizer.py
+++ b/src/gradient_free_optimizers/optimizers/core_optimizer/core_optimizer.py
@@ -37,6 +37,7 @@ def __init__(
 
         self.search_space = search_space
         self.initialize = initialize
+        print(constraints)
         self.constraints = constraints
         self.random_state = random_state
         self.rand_rest_p = rand_rest_p

diff --git a/src/gradient_free_optimizers/optimizers/local_opt/COBYLA.py b/src/gradient_free_optimizers/optimizers/local_opt/COBYLA.py
new file mode 100644
index 00000000..7bd1df55
--- /dev/null
+++ b/src/gradient_free_optimizers/optimizers/local_opt/COBYLA.py
@@ -0,0 +1,174 @@
+# copyright: hyperactive developers, MIT License (see LICENSE file)
+
+# todo: write an informative docstring for the file or module, remove the above
+
+__author__ = ["SimonBlanke", "yashnator"]
+
+from ..base_optimizer import BaseOptimizer
+
+import numpy as np
+
+class COBYLA(BaseOptimizer):
+    """COBYLA: Constrained Optimization BY Linear Approximation
+
+    Reference:
+    Powell, M.J.D. (1994). A Direct Search Optimization Method That Models the
+    Objective and Constraint Functions by Linear Interpolation. In: Gomez, S.,
+    Hennart, J.P. (eds) Advances in Optimization and Numerical Analysis.
+    Mathematics and Its Applications, vol 275. Springer, Dordrecht.
+    Source: https://doi.org/10.1007/978-94-015-8330-5_4
+
+    Parameters
+    ----------
+    rho_beg : float
+        Initial trust-region radius (large enough for coarse exploration)
+    rho_end : float
+        Final trust-region radius; the required tolerance of the solution
+    x_0 : np.ndarray
+        Initial point around which the first simplex is built
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> from gradient_free_optimizers import COBYLA
+    >>>
+    >>> def sphere_function(para: np.ndarray):
+    ...     x = para[0]
+    ...     y = para[1]
+    ...     return -(x * x + y * y)
+    >>>
+    >>> def constraint_1(para):
+    ...     return para[0] > -5
+    >>>
+    >>> search_space = {
+    ...     "x": np.arange(-10, 10, 0.1),
+    ...     "y": np.arange(-10, 10, 0.1),
+    ... }
+    >>>
+    >>> opt = COBYLA(
+    ...     search_space=search_space,
+    ...     rho_beg=1.0,
+    ...     rho_end=0.01,
+    ...     x_0=np.array([0.0, 0.0]),
+    ...     constraints=[constraint_1],
+    ...     )
+    >>> opt.search(sphere_function, n_iter=10)
+    """
+
+    _tags = {
+        "authors": ["SimonBlanke", "yashnator"],
+    }
+
+    def _generate_initial_simplex(self, x_0_initial, rho_beg):
+        n = x_0_initial.shape[0]
+        arr = np.ones((n + 1, 1)) * x_0_initial + rho_beg * np.eye(n + 1, n)
+        print(arr)
+        return arr
+
+    def _vertices_to_opposite_face_distances(self):
+        """
+        Compute the distances from each vertex of an n-dimensional simplex
+        to the opposite (n-1)-dimensional face.
+
+        For each vertex, the opposite face is obtained by removing the current
+        vertex; the vertex is then projected onto the subspace spanned by that face.
+        The distance is the L2 norm between the projection and the vertex.
+
+        Args:
+            self: instance of the current COBYLA class
+
+        Returns:
+            distances: (n+1,) array of distances from each vertex to its opposite face.
+        """
+        distances = np.zeros(self.dim + 1)
+        for j in range(0, self.dim + 1):
+            face_vertices = np.delete(self.simplex, j, axis=0)
+            start_vertex = face_vertices[0]
+            A = np.stack([v - start_vertex for v in face_vertices[1:]], axis=1)
+            b = self.simplex[j] - start_vertex
+
+            AtA = A.T @ A
+            proj_j = A @ np.linalg.solve(AtA, A.T @ b)
+            distances[j] = np.linalg.norm(b - proj_j)
+        return distances
+
+    def _is_simplex_acceptable(self):
+        eta = [np.linalg.norm(x - self.simplex[0]) for x in self.simplex]
+        eta_constraint = self.beta * self._rho
+        for eta_j in eta:
+            if eta_j > eta_constraint:
+                return False
+        sigma = self._vertices_to_opposite_face_distances()
+        sigma_constraint = self.alpha * self._rho
+        print(sigma)
+        for sigma_j in sigma:
+            if sigma_j < sigma_constraint:
+                return False
+        return True
+
+    def _eval_constraints(self, pos):
+        # TODO: evaluate constraints in an optimized way
+
+        return None
+
+    def _merit_value(self, pos):
+        # TODO: write the merit function using _eval_constraints
+        return 0
+
+    def __init__(
+        self,
+        search_space,
+        x_0: np.ndarray,
+        rho_beg: float,
+        rho_end: float,
+        initialize={"grid": 4, "random": 2, "vertices": 4},
+        constraints=[],
+        random_state=None,
+        rand_rest_p=0,
+        nth_process=None,
+        alpha=0.25,
+        beta=2.1,
+    ):
+        super().__init__(
+            search_space=search_space,
+            initialize=initialize,
+            constraints=constraints,
+            random_state=random_state,
+            rand_rest_p=rand_rest_p,
+            nth_process=nth_process,
+        )
+        self.dim = np.shape(x_0)[0]
+        self.simplex = self._generate_initial_simplex(x_0, rho_beg)
+        self.rho_beg = rho_beg
+        self.rho_end = rho_end
+        self._rho = rho_beg
+        self.state = 0
+        self.FLAG = 0
+
+        if alpha <= 0 or alpha >= 1:
+            raise ValueError("Parameter alpha must belong to the range (0, 1)")
+
+        if beta <= 1:
+            raise ValueError("Parameter beta must belong to the range (1, ∞)")
+
+        self.alpha = alpha
+        self.beta = beta
+
+    def iterate(self):
+        # TODO: Impl
+        return self.simplex[0]
+
+    def _score(self, pos):
+        # TODO: Impl
+        return 0.0
+
+    def evaluate(self, pos):
+        # TODO: Impl
+        return self._merit_value(pos)
+
+    def finish_search(self):
+        # TODO: Impl
+        return None
\ No newline at end of file

diff --git a/src/gradient_free_optimizers/optimizers/local_opt/__init__.py b/src/gradient_free_optimizers/optimizers/local_opt/__init__.py
index bbd5b19e..39c9a740 100644
--- a/src/gradient_free_optimizers/optimizers/local_opt/__init__.py
+++ b/src/gradient_free_optimizers/optimizers/local_opt/__init__.py
@@ -8,6 +8,7 @@
 from .repulsing_hill_climbing_optimizer import RepulsingHillClimbingOptimizer
 from .simulated_annealing import SimulatedAnnealingOptimizer
 from .downhill_simplex import DownhillSimplexOptimizer
+from .COBYLA import COBYLA
 
 __all__ = [
     "HillClimbingOptimizer",
@@ -15,4 +16,5 @@
     "RepulsingHillClimbingOptimizer",
    "SimulatedAnnealingOptimizer",
     "DownhillSimplexOptimizer",
+    "COBYLA",
 ]
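Review note (not part of the patch series): the geometry helpers in patch 1 are easiest to sanity-check in isolation. The sketch below mirrors `_generate_initial_simplex`, `_vertices_to_opposite_face_distances`, and the η/σ test from `_is_simplex_acceptable` as free functions; the function names, the 2-D starting point, and the α/β/ρ values are illustrative only, and nothing beyond NumPy is assumed.

import numpy as np

def generate_initial_simplex(x_0, rho_beg):
    # Rows 0..n-1 are x_0 + rho_beg * e_j; the last row of np.eye(n + 1, n)
    # is all zeros, so the final vertex is x_0 itself.
    n = x_0.shape[0]
    return np.ones((n + 1, 1)) * x_0 + rho_beg * np.eye(n + 1, n)

def opposite_face_distances(simplex):
    # Distance from each vertex to the face spanned by the other vertices,
    # computed via least-squares projection onto the face's subspace.
    m = simplex.shape[0]
    distances = np.zeros(m)
    for j in range(m):
        face = np.delete(simplex, j, axis=0)
        A = (face[1:] - face[0]).T          # directions spanning the face
        b = simplex[j] - face[0]
        proj = A @ np.linalg.solve(A.T @ A, A.T @ b)
        distances[j] = np.linalg.norm(b - proj)
    return distances

simplex = generate_initial_simplex(np.array([0.0, 0.0]), rho_beg=1.0)
print(simplex)                       # [[1. 0.], [0. 1.], [0. 0.]]

rho, alpha, beta = 1.0, 0.25, 2.1
eta = [np.linalg.norm(v - simplex[0]) for v in simplex]      # edge lengths from vertex 0
sigma = opposite_face_distances(simplex)                     # [1.0, 1.0, ~0.707]
print(max(eta) <= beta * rho and min(sigma) >= alpha * rho)  # True -> acceptable

For this unit right-triangle simplex the face distances are (1, 1, 1/√2), so the initial simplex passes both acceptability conditions, which is exactly what one would expect of a freshly generated simplex with edge length ρ.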
From d92ecaa59118cc75ab46df72ee457ae65678bfd1 Mon Sep 17 00:00:00 2001
From: Yash Solanki <252yash@gmail.com>
Date: Tue, 3 Jun 2025 00:37:43 +0530
Subject: [PATCH 2/3] Feat: Add COBYLA optimizer (2 / n)

- Add merit function
- Add docstrings
---
 examples/optimization_algorithms/cobyla.py |   2 +-
 .../core_optimizer/core_optimizer.py       |   1 -
 .../optimizers/local_opt/COBYLA.py         | 151 +++++++++++-------
 3 files changed, 93 insertions(+), 61 deletions(-)

diff --git a/examples/optimization_algorithms/cobyla.py b/examples/optimization_algorithms/cobyla.py
index 922904e3..a942a2be 100644
--- a/examples/optimization_algorithms/cobyla.py
+++ b/examples/optimization_algorithms/cobyla.py
@@ -9,7 +9,7 @@ def sphere_function(para: np.ndarray):
     return -(x * x + y * y)
 
 def constraint_1(para):
-    return para[0] > -5
+    return para[0] + 5
 
 search_space = {
     "x": np.arange(-10, 10, 0.1),

diff --git a/src/gradient_free_optimizers/optimizers/core_optimizer/core_optimizer.py b/src/gradient_free_optimizers/optimizers/core_optimizer/core_optimizer.py
index 664912e8..b89bee26 100644
--- a/src/gradient_free_optimizers/optimizers/core_optimizer/core_optimizer.py
+++ b/src/gradient_free_optimizers/optimizers/core_optimizer/core_optimizer.py
@@ -37,7 +37,6 @@ def __init__(
 
         self.search_space = search_space
         self.initialize = initialize
-        print(constraints)
         self.constraints = constraints
         self.random_state = random_state
         self.rand_rest_p = rand_rest_p

diff --git a/src/gradient_free_optimizers/optimizers/local_opt/COBYLA.py b/src/gradient_free_optimizers/optimizers/local_opt/COBYLA.py
index 7bd1df55..7012b4ac 100644
--- a/src/gradient_free_optimizers/optimizers/local_opt/COBYLA.py
+++ b/src/gradient_free_optimizers/optimizers/local_opt/COBYLA.py
@@ -1,9 +1,3 @@
-# copyright: hyperactive developers, MIT License (see LICENSE file)
-
-# todo: write an informative docstring for the file or module, remove the above
-
-__author__ = ["SimonBlanke", "yashnator"]
-
 from ..base_optimizer import BaseOptimizer
 
 import numpy as np
@@ -57,15 +51,58 @@ class COBYLA(BaseOptimizer):
     ...     )
     >>> opt.search(sphere_function, n_iter=10)
     """
+
+    name = "COBYLA"
+    _name_ = "COBYLA"
+    __name__ = "COBYLA"
 
-    _tags = {
-        "authors": ["SimonBlanke", "yashnator"],
-    }
+    optimizer_type = "local"
+    computationally_expensive = False
+
+    def __init__(
+        self,
+        search_space,
+        x_0: np.ndarray,
+        rho_beg: float,
+        rho_end: float,
+        initialize={"grid": 4, "random": 2, "vertices": 4},
+        constraints=[],
+        random_state=None,
+        rand_rest_p=0,
+        nth_process=None,
+        alpha=0.25,
+        beta=2.1,
+    ):
+        super().__init__(
+            search_space=search_space,
+            initialize=initialize,
+            constraints=constraints,
+            random_state=random_state,
+            rand_rest_p=rand_rest_p,
+            nth_process=nth_process,
+        )
+        self.dim = np.shape(x_0)[0]
+        self.simplex = self._generate_initial_simplex(x_0, rho_beg)
+        self.rho_beg = rho_beg
+        self.rho_end = rho_end
+        self._rho = rho_beg
+
+        self._mu = 0
+        self.state = 0
+        self.FLAG = 0
+
+        if alpha <= 0 or alpha >= 1:
+            raise ValueError("Parameter alpha must belong to the range (0, 1)")
+
+        if beta <= 1:
+            raise ValueError("Parameter beta must belong to the range (1, ∞)")
+
+        self.alpha = alpha
+        self.beta = beta
 
     def _generate_initial_simplex(self, x_0_initial, rho_beg):
         n = x_0_initial.shape[0]
         arr = np.ones((n + 1, 1)) * x_0_initial + rho_beg * np.eye(n + 1, n)
-        print(arr)
         return arr
 
@@ -76,9 +113,6 @@ def _vertices_to_opposite_face_distances(self):
         For each vertex, the opposite face is obtained by removing the current
         vertex; the vertex is then projected onto the subspace spanned by that face.
         The distance is the L2 norm between the projection and the vertex.
-
-        Args:
-            self: instance of the current COBYLA class
 
         Returns:
             distances: (n+1,) array of distances from each vertex to its opposite face.
@@ -96,6 +130,22 @@ def _is_simplex_acceptable(self):
+        """
+        Determine whether the current simplex is acceptable according to the COBYLA criteria.
+
+        A simplex is acceptable if:
+        1. The distances between each vertex and the first vertex (η_j) do not exceed
+           the threshold `beta * rho`, which controls the simplex edge lengths.
+        2. The distance from each vertex to its opposite (n-1)-dimensional face (σ_j)
+           is not too small, i.e. exceeds the threshold `alpha * rho`, which keeps the
+           simplex well-shaped.
+
+        Together these two geometric conditions maintain numerical stability and the
+        quality of the linear approximations.
+
+        Returns:
+            bool: True if both the η and σ conditions are satisfied, False otherwise.
+        """
         eta = [np.linalg.norm(x - self.simplex[0]) for x in self.simplex]
         eta_constraint = self.beta * self._rho
         for eta_j in eta:
@@ -103,59 +153,42 @@ def _is_simplex_acceptable(self):
                 return False
         sigma = self._vertices_to_opposite_face_distances()
         sigma_constraint = self.alpha * self._rho
-        print(sigma)
         for sigma_j in sigma:
             if sigma_j < sigma_constraint:
                 return False
         return True
 
     def _eval_constraints(self, pos):
-        # TODO: evaluate constraints in an optimized way
-
-        return None
+        """
+        Evaluate the constraint violations at the given position.
+
+        An entry is max(-c_i(pos), 0): zero when the constraint c_i(pos) >= 0
+        is satisfied, positive when it is violated.
+
+        Returns:
+            np.ndarray: array containing the constraint violations
+        """
+        return np.array([np.clip(-c(pos), 0, np.inf) for c in self.constraints])
 
-    def _merit_value(self, pos):
-        # TODO: write the merit function using _eval_constraints
-        return 0
+    def _phi(self, pos):
+        """
+        Compute the merit function Φ used in COBYLA to evaluate candidate points,
+        given by:
+
+        Φ(x) = f(x) + μ * max_i(max(-c_i(x), 0))
+
+        where a constraint is satisfied when c_i(x) >= 0.
+
+        Args:
+            pos (np.ndarray): The point in parameter space at which to evaluate the merit function.
+
+        Returns:
+            float: The value of the merit function Φ at the given point.
+        """
+        violations = self._eval_constraints(pos)
+        penalty = np.max(violations) if violations.size else 0.0
+        return self.objective_function(pos) + self._mu * penalty
 
-    def __init__(
-        self,
-        search_space,
-        x_0: np.ndarray,
-        rho_beg: float,
-        rho_end: float,
-        initialize={"grid": 4, "random": 2, "vertices": 4},
-        constraints=[],
-        random_state=None,
-        rand_rest_p=0,
-        nth_process=None,
-        alpha=0.25,
-        beta=2.1,
-    ):
-        super().__init__(
-            search_space=search_space,
-            initialize=initialize,
-            constraints=constraints,
-            random_state=random_state,
-            rand_rest_p=rand_rest_p,
-            nth_process=nth_process,
-        )
-        self.dim = np.shape(x_0)[0]
-        self.simplex = self._generate_initial_simplex(x_0, rho_beg)
-        self.rho_beg = rho_beg
-        self.rho_end = rho_end
-        self._rho = rho_beg
-        self.state = 0
-        self.FLAG = 0
-
-        if alpha <= 0 or alpha >= 1:
-            raise ValueError("Parameter alpha must belong to the range (0, 1)")
-
-        if beta <= 1:
-            raise ValueError("Parameter beta must belong to the range (1, ∞)")
-
-        self.alpha = alpha
-        self.beta = beta
+    def _rearrange_optimum_to_top(self):
+        """
+        Rearranges the simplex vertices so that the first row holds the optimal position
+        """
+        opt_idx = np.argmin([self._phi(vert) for vert in self.simplex])
+        if opt_idx != 0:
+            self.simplex[[0, opt_idx]] = self.simplex[[opt_idx, 0]]
 
     def iterate(self):
         # TODO: Impl
@@ -167,7 +200,7 @@ def _score(self, pos):
 
     def evaluate(self, pos):
         # TODO: Impl
-        return self._merit_value(pos)
+        return self._phi(self.simplex[0])
 
     def finish_search(self):
         # TODO: Impl
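Review note (not part of the patch series): the merit function added in patch 2 is the core of COBYLA's constraint handling. With the convention that a constraint is satisfied when c_i(x) ≥ 0 (the reason the example's constraint changed from a boolean to the numeric `para[0] + 5`), Φ penalizes only violations: Φ(x) = f(x) + μ · max_i max(−c_i(x), 0). A minimal standalone sketch in minimization form, matching the `np.argmin` in `_rearrange_optimum_to_top`; the functions, test points, and μ value are illustrative only.

import numpy as np

def sphere(x):
    return float(x[0] ** 2 + x[1] ** 2)   # minimization form of the example

def constraint_1(x):
    return x[0] + 5.0                      # feasible iff x[0] >= -5

def phi(x, objective, constraints, mu):
    # Violations are max(-c_i(x), 0): zero whenever a constraint is satisfied.
    violations = [max(-c(x), 0.0) for c in constraints]
    return objective(x) + mu * (max(violations) if violations else 0.0)

print(phi(np.array([1.0, 2.0]), sphere, [constraint_1], mu=10.0))   # 5.0, feasible
print(phi(np.array([-6.0, 0.0]), sphere, [constraint_1], mu=10.0))  # 36 + 10*1 = 46.0

At the feasible point the penalty term vanishes; at x = (−6, 0) the single constraint is violated by 1, so Φ = 36 + 10·1 = 46. Powell's method grows μ over the run so that feasibility eventually dominates the comparison, which is the job reserved for the `_revise_mu` stub in patch 3.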
""" distances = np.zeros(self.dim + 1) - for j in range(0, self.dim + 1): + for j in range(self.dim + 1): face_vertices = np.delete(self.simplex, j, axis=0) start_vertex = face_vertices[0] A = np.stack([v - start_vertex for v in face_vertices[1:]], axis=1) @@ -189,9 +189,62 @@ def _rearrange_optimum_to_top(self): opt_idx = np.argmin([self._phi(vert) for vert in self.simplex]) if opt_idx != 0: self.simplex[[0, opt_idx]] = self.simplex[[opt_idx, 0]] + + def _linear_approx(self, vert_index, f: callable): + """ + Builds a linear approximation of function f at simplex[vert_index], + using the other n vertices of the simplex for interpolation. + + Returns a function: approx(x) = f(x_j) + grad^T @ (x - x_j) + """ + x_j = self.simplex[vert_index] + remaining_vertices = np.delete(self.simplex, vert_index, axis=0) + + A = remaining_vertices - x_j + f_xj = f(x_j) + b = np.array([f(x_i) - f_xj for x_i in remaining_vertices]) + + # Solve for grad + # A * grad.T = grad(xi - xj) + grad = np.linalg.solve(A, b) + + return (f_xj, grad) + + def _linear_approx_at_x0(self): + func_approx = self._linear_approx(0, self.objective_function) + constraint_approx = [self._linear_approx(0, c_i) for c_i in self.constraints] + return (func_approx, constraint_approx) + + def _solve_LP(self): + (f0, grad_f), constraint_approximations = self._linear_approx_at_x0() + # print(f0) + # print(grad_f) + # print(constraint_approximations) + x0 = self.simplex[0] + c = grad_f + A_ub = [] + b_ub = [] + for (ci_val, grad_ci) in constraint_approximations: + # grad_ci^T s ≥ -ci_val => -grad_ci^T s ≤ ci_val + A_ub.append(-grad_ci) + b_ub.append(ci_val) + bounds = [(-self._rho, self._rho) for _ in range(len(x0))] + res = linprog(c=c, A_ub=A_ub, b_ub=b_ub, bounds=bounds, method='highs') + if res.success: + s = res.x + self.simplex[0] = x0 + s + else: + raise RuntimeError("LP step failed: " + res.message) + + + def _revise_mu(self): + return None def iterate(self): # TODO: Impl + # TODO: Check if simplex becomes degenerate + self._rearrange_optimum_to_top() + self._solve_LP() return self.simplex[0] def _score(self, pos):