diff --git a/.tools/envs/testenv-linux.yml b/.tools/envs/testenv-linux.yml index 398c56cce..821396f99 100644 --- a/.tools/envs/testenv-linux.yml +++ b/.tools/envs/testenv-linux.yml @@ -29,6 +29,7 @@ dependencies: - annotated-types # dev, tests - iminuit # dev, tests - cma # dev, tests + - pygad # dev, tests - pip: # dev, tests, docs - bayesian-optimization>=2.0.4 # dev, tests - DFO-LS>=1.5.3 # dev, tests diff --git a/.tools/envs/testenv-nevergrad.yml b/.tools/envs/testenv-nevergrad.yml index 874b9fa5e..bc62ac649 100644 --- a/.tools/envs/testenv-nevergrad.yml +++ b/.tools/envs/testenv-nevergrad.yml @@ -27,6 +27,7 @@ dependencies: - annotated-types # dev, tests - iminuit # dev, tests - cma # dev, tests + - pygad # dev, tests - pip: # dev, tests, docs - DFO-LS>=1.5.3 # dev, tests - Py-BOBYQA # dev, tests diff --git a/.tools/envs/testenv-numpy.yml b/.tools/envs/testenv-numpy.yml index c54dc010f..81364ac9d 100644 --- a/.tools/envs/testenv-numpy.yml +++ b/.tools/envs/testenv-numpy.yml @@ -27,6 +27,7 @@ dependencies: - annotated-types # dev, tests - iminuit # dev, tests - cma # dev, tests + - pygad # dev, tests - pip: # dev, tests, docs - bayesian-optimization>=2.0.4 # dev, tests - DFO-LS>=1.5.3 # dev, tests diff --git a/.tools/envs/testenv-others.yml b/.tools/envs/testenv-others.yml index 308d142aa..ba3b6c640 100644 --- a/.tools/envs/testenv-others.yml +++ b/.tools/envs/testenv-others.yml @@ -27,6 +27,7 @@ dependencies: - annotated-types # dev, tests - iminuit # dev, tests - cma # dev, tests + - pygad # dev, tests - pip: # dev, tests, docs - bayesian-optimization>=2.0.4 # dev, tests - DFO-LS>=1.5.3 # dev, tests diff --git a/.tools/envs/testenv-pandas.yml b/.tools/envs/testenv-pandas.yml index bccee25c6..82ea8d414 100644 --- a/.tools/envs/testenv-pandas.yml +++ b/.tools/envs/testenv-pandas.yml @@ -27,6 +27,7 @@ dependencies: - annotated-types # dev, tests - iminuit # dev, tests - cma # dev, tests + - pygad # dev, tests - pip: # dev, tests, docs - 
bayesian-optimization>=2.0.4 # dev, tests - DFO-LS>=1.5.3 # dev, tests diff --git a/.tools/envs/testenv-plotly.yml b/.tools/envs/testenv-plotly.yml index eccdf512d..abb3f83d2 100644 --- a/.tools/envs/testenv-plotly.yml +++ b/.tools/envs/testenv-plotly.yml @@ -27,6 +27,7 @@ dependencies: - annotated-types # dev, tests - iminuit # dev, tests - cma # dev, tests + - pygad # dev, tests - pip: # dev, tests, docs - bayesian-optimization>=2.0.4 # dev, tests - DFO-LS>=1.5.3 # dev, tests diff --git a/docs/source/algorithms.md b/docs/source/algorithms.md index bd8837b9a..9fbab6d56 100644 --- a/docs/source/algorithms.md +++ b/docs/source/algorithms.md @@ -4701,6 +4701,39 @@ package. To use it, you need to have - **seed**: Seed for the random number generator for reproducibility. ``` +## Pygad Optimizer + +We wrap the pygad optimizer. To use it you need to have +[pygad](https://pygad.readthedocs.io/en/latest/) installed. + +```{eval-rst} +.. dropdown:: pygad + + **How to use this algorithm:** + + .. code-block:: + + import optimagic as om + om.minimize( + ..., + algorithm=om.algos.pygad(num_generations=100, ...) + ) + + or + + .. code-block:: + + om.minimize( + ..., + algorithm="pygad", + algo_options={"num_generations": 100, ...} + ) + + **Description and available options:** + + .. 
autoclass:: optimagic.optimizers.pygad_optimizer.Pygad +``` + ## References ```{eval-rst} diff --git a/docs/source/refs.bib b/docs/source/refs.bib index f8005d2e9..485894194 100644 --- a/docs/source/refs.bib +++ b/docs/source/refs.bib @@ -1068,4 +1068,13 @@ @inproceedings{gardner2014bayesian year={2014} } +@article{gad2023pygad, + title={Pygad: An intuitive genetic algorithm python library}, + author={Gad, Ahmed Fawzy}, + journal={Multimedia Tools and Applications}, + pages={1--14}, + year={2023}, + publisher={Springer} +} + @Comment{jabref-meta: databaseType:bibtex;} diff --git a/environment.yml b/environment.yml index 6bb4f01db..bce5712b3 100644 --- a/environment.yml +++ b/environment.yml @@ -39,6 +39,7 @@ dependencies: - annotated-types # dev, tests - iminuit # dev, tests - cma # dev, tests + - pygad # dev, tests - pip: # dev, tests, docs - bayesian-optimization>=2.0.4 # dev, tests # - nevergrad # incompatible with bayesian-optimization>=2.0.4 diff --git a/pyproject.toml b/pyproject.toml index c74752252..4b6951c24 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -380,6 +380,7 @@ module = [ "pdbp", "iminuit", "nevergrad", + "pygad", "yaml", ] ignore_missing_imports = true diff --git a/src/optimagic/algorithms.py b/src/optimagic/algorithms.py index f86792478..4ca6054d7 100644 --- a/src/optimagic/algorithms.py +++ b/src/optimagic/algorithms.py @@ -53,6 +53,7 @@ NloptVAR, ) from optimagic.optimizers.pounders import Pounders +from optimagic.optimizers.pygad_optimizer import Pygad from optimagic.optimizers.pygmo_optimizers import ( PygmoBeeColony, PygmoCmaes, @@ -200,6 +201,7 @@ class BoundedGlobalGradientFreeParallelScalarAlgorithms(AlgoSelection): nevergrad_randomsearch: Type[NevergradRandomSearch] = NevergradRandomSearch nevergrad_samplingsearch: Type[NevergradSamplingSearch] = NevergradSamplingSearch nevergrad_tbpsa: Type[NevergradTBPSA] = NevergradTBPSA + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = 
PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -411,6 +413,7 @@ class BoundedGlobalGradientFreeScalarAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -459,6 +462,7 @@ class BoundedGlobalGradientFreeParallelAlgorithms(AlgoSelection): nevergrad_randomsearch: Type[NevergradRandomSearch] = NevergradRandomSearch nevergrad_samplingsearch: Type[NevergradSamplingSearch] = NevergradSamplingSearch nevergrad_tbpsa: Type[NevergradTBPSA] = NevergradTBPSA + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -527,6 +531,7 @@ class GlobalGradientFreeParallelScalarAlgorithms(AlgoSelection): nevergrad_randomsearch: Type[NevergradRandomSearch] = NevergradRandomSearch nevergrad_samplingsearch: Type[NevergradSamplingSearch] = NevergradSamplingSearch nevergrad_tbpsa: Type[NevergradTBPSA] = NevergradTBPSA + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -687,6 +692,7 @@ class BoundedGradientFreeParallelScalarAlgorithms(AlgoSelection): nevergrad_randomsearch: Type[NevergradRandomSearch] = NevergradRandomSearch nevergrad_samplingsearch: Type[NevergradSamplingSearch] = NevergradSamplingSearch nevergrad_tbpsa: Type[NevergradTBPSA] = NevergradTBPSA + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -794,6 +800,7 @@ class BoundedGlobalParallelScalarAlgorithms(AlgoSelection): nevergrad_randomsearch: Type[NevergradRandomSearch] = NevergradRandomSearch 
nevergrad_samplingsearch: Type[NevergradSamplingSearch] = NevergradSamplingSearch nevergrad_tbpsa: Type[NevergradTBPSA] = NevergradTBPSA + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -1138,6 +1145,7 @@ class BoundedGlobalGradientFreeAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -1215,6 +1223,7 @@ class GlobalGradientFreeScalarAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -1267,6 +1276,7 @@ class GlobalGradientFreeParallelAlgorithms(AlgoSelection): nevergrad_randomsearch: Type[NevergradRandomSearch] = NevergradRandomSearch nevergrad_samplingsearch: Type[NevergradSamplingSearch] = NevergradSamplingSearch nevergrad_tbpsa: Type[NevergradTBPSA] = NevergradTBPSA + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -1455,6 +1465,7 @@ class BoundedGradientFreeScalarAlgorithms(AlgoSelection): nlopt_newuoa: Type[NloptNEWUOA] = NloptNEWUOA nlopt_neldermead: Type[NloptNelderMead] = NloptNelderMead nlopt_sbplx: Type[NloptSbplx] = NloptSbplx + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -1531,6 +1542,7 
@@ class BoundedGradientFreeParallelAlgorithms(AlgoSelection): nevergrad_samplingsearch: Type[NevergradSamplingSearch] = NevergradSamplingSearch nevergrad_tbpsa: Type[NevergradTBPSA] = NevergradTBPSA pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -1624,6 +1636,7 @@ class GradientFreeParallelScalarAlgorithms(AlgoSelection): nevergrad_randomsearch: Type[NevergradRandomSearch] = NevergradRandomSearch nevergrad_samplingsearch: Type[NevergradSamplingSearch] = NevergradSamplingSearch nevergrad_tbpsa: Type[NevergradTBPSA] = NevergradTBPSA + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -1710,6 +1723,7 @@ class BoundedGlobalScalarAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -1767,6 +1781,7 @@ class BoundedGlobalParallelAlgorithms(AlgoSelection): nevergrad_randomsearch: Type[NevergradRandomSearch] = NevergradRandomSearch nevergrad_samplingsearch: Type[NevergradSamplingSearch] = NevergradSamplingSearch nevergrad_tbpsa: Type[NevergradTBPSA] = NevergradTBPSA + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -1848,6 +1863,7 @@ class GlobalParallelScalarAlgorithms(AlgoSelection): nevergrad_randomsearch: Type[NevergradRandomSearch] = NevergradRandomSearch nevergrad_samplingsearch: Type[NevergradSamplingSearch] = NevergradSamplingSearch nevergrad_tbpsa: Type[NevergradTBPSA] = NevergradTBPSA + pygad: 
Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -2095,6 +2111,7 @@ class BoundedParallelScalarAlgorithms(AlgoSelection): nevergrad_randomsearch: Type[NevergradRandomSearch] = NevergradRandomSearch nevergrad_samplingsearch: Type[NevergradSamplingSearch] = NevergradSamplingSearch nevergrad_tbpsa: Type[NevergradTBPSA] = NevergradTBPSA + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -2371,6 +2388,7 @@ class GlobalGradientFreeAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -2478,6 +2496,7 @@ class BoundedGradientFreeAlgorithms(AlgoSelection): nlopt_neldermead: Type[NloptNelderMead] = NloptNelderMead nlopt_sbplx: Type[NloptSbplx] = NloptSbplx pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -2588,6 +2607,7 @@ class GradientFreeScalarAlgorithms(AlgoSelection): nlopt_neldermead: Type[NloptNelderMead] = NloptNelderMead nlopt_praxis: Type[NloptPRAXIS] = NloptPRAXIS nlopt_sbplx: Type[NloptSbplx] = NloptSbplx + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -2672,6 +2692,7 @@ class GradientFreeParallelAlgorithms(AlgoSelection): nevergrad_samplingsearch: Type[NevergradSamplingSearch] = NevergradSamplingSearch nevergrad_tbpsa: 
Type[NevergradTBPSA] = NevergradTBPSA pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -2728,6 +2749,7 @@ class BoundedGlobalAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -2823,6 +2845,7 @@ class GlobalScalarAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -2884,6 +2907,7 @@ class GlobalParallelAlgorithms(AlgoSelection): nevergrad_randomsearch: Type[NevergradRandomSearch] = NevergradRandomSearch nevergrad_samplingsearch: Type[NevergradSamplingSearch] = NevergradSamplingSearch nevergrad_tbpsa: Type[NevergradTBPSA] = NevergradTBPSA + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -3177,6 +3201,7 @@ class BoundedScalarAlgorithms(AlgoSelection): nlopt_sbplx: Type[NloptSbplx] = NloptSbplx nlopt_tnewton: Type[NloptTNewton] = NloptTNewton nlopt_var: Type[NloptVAR] = NloptVAR + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -3276,6 +3301,7 @@ class BoundedParallelAlgorithms(AlgoSelection): nevergrad_samplingsearch: Type[NevergradSamplingSearch] = 
NevergradSamplingSearch nevergrad_tbpsa: Type[NevergradTBPSA] = NevergradTBPSA pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -3389,6 +3415,7 @@ class ParallelScalarAlgorithms(AlgoSelection): nevergrad_randomsearch: Type[NevergradRandomSearch] = NevergradRandomSearch nevergrad_samplingsearch: Type[NevergradSamplingSearch] = NevergradSamplingSearch nevergrad_tbpsa: Type[NevergradTBPSA] = NevergradTBPSA + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -3521,6 +3548,7 @@ class GradientFreeAlgorithms(AlgoSelection): nlopt_praxis: Type[NloptPRAXIS] = NloptPRAXIS nlopt_sbplx: Type[NloptSbplx] = NloptSbplx pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -3598,6 +3626,7 @@ class GlobalAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -3757,6 +3786,7 @@ class BoundedAlgorithms(AlgoSelection): nlopt_tnewton: Type[NloptTNewton] = NloptTNewton nlopt_var: Type[NloptVAR] = NloptVAR pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -3907,6 +3937,7 @@ class ScalarAlgorithms(AlgoSelection): nlopt_sbplx: Type[NloptSbplx] = NloptSbplx nlopt_tnewton: 
Type[NloptTNewton] = NloptTNewton nlopt_var: Type[NloptVAR] = NloptVAR + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -4033,6 +4064,7 @@ class ParallelAlgorithms(AlgoSelection): nevergrad_samplingsearch: Type[NevergradSamplingSearch] = NevergradSamplingSearch nevergrad_tbpsa: Type[NevergradTBPSA] = NevergradTBPSA pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -4111,6 +4143,7 @@ class Algorithms(AlgoSelection): nlopt_tnewton: Type[NloptTNewton] = NloptTNewton nlopt_var: Type[NloptVAR] = NloptVAR pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch diff --git a/src/optimagic/config.py b/src/optimagic/config.py index ce6cd4d60..fbd938fb8 100644 --- a/src/optimagic/config.py +++ b/src/optimagic/config.py @@ -39,6 +39,7 @@ def _is_installed(module_name: str) -> bool: IS_IMINUIT_INSTALLED = _is_installed("iminuit") IS_NEVERGRAD_INSTALLED = _is_installed("nevergrad") IS_BAYESOPT_INSTALLED = _is_installed("bayes_opt") +IS_PYGAD_INSTALLED = _is_installed("pygad") # ====================================================================================== diff --git a/src/optimagic/optimization/algo_options.py b/src/optimagic/optimization/algo_options.py index 2f7c6fca5..609bed519 100644 --- a/src/optimagic/optimization/algo_options.py +++ b/src/optimagic/optimization/algo_options.py @@ -108,6 +108,25 @@ """ +CONVERGENCE_TARGET_CRITERION = None +"""float or None: Stop when the criterion value is better than or equal to + this target. The definition of "better" depends on the optimization direction. 
+ + - Minimization: criterion <= target + - Maximization: criterion >= target + + Used in population-based algorithms like genetic algorithms. + To disable, set to None. +""" + +CONVERGENCE_SATURATE_GENERATIONS = None +"""int or None: Stop when the best criterion value has not improved for this + many consecutive generations. + + Used in population-based algorithms like genetic algorithms. + To disable, set to None. +""" + MAX_LINE_SEARCH_STEPS = 20 """int: Inspired by scipy L-BFGS-B.""" diff --git a/src/optimagic/optimizers/pygad/__init__.py b/src/optimagic/optimizers/pygad/__init__.py new file mode 100644 index 000000000..ff2989293 --- /dev/null +++ b/src/optimagic/optimizers/pygad/__init__.py @@ -0,0 +1,56 @@ +"""PyGAD optimizer configuration classes and utilities. + +This module provides easy access to PyGAD mutation classes and Protocols. + +Example: + # >>> import optimagic as om + # >>> mutation = om.optimizers.pygad.RandomMutation( + # ... probability=0.15, + # ... by_replacement=True, + # ... ) + # >>> result = om.minimize( + # ... ..., + # ... algorithm=om.algos.pygad(mutation=mutation), + # ... 
) + +""" + +from optimagic.optimizers.pygad_optimizer import ( + AdaptiveMutation as _AdaptiveMutation, +) +from optimagic.optimizers.pygad_optimizer import ( + CrossoverFunction, + GeneConstraintFunction, + MutationFunction, + ParentSelectionFunction, +) +from optimagic.optimizers.pygad_optimizer import ( + InversionMutation as _InversionMutation, +) +from optimagic.optimizers.pygad_optimizer import ( + RandomMutation as _RandomMutation, +) +from optimagic.optimizers.pygad_optimizer import ( + ScrambleMutation as _ScrambleMutation, +) +from optimagic.optimizers.pygad_optimizer import ( + SwapMutation as _SwapMutation, +) + +RandomMutation = _RandomMutation +AdaptiveMutation = _AdaptiveMutation +SwapMutation = _SwapMutation +InversionMutation = _InversionMutation +ScrambleMutation = _ScrambleMutation + +__all__ = [ + "RandomMutation", + "AdaptiveMutation", + "SwapMutation", + "InversionMutation", + "ScrambleMutation", + "MutationFunction", + "CrossoverFunction", + "ParentSelectionFunction", + "GeneConstraintFunction", +] diff --git a/src/optimagic/optimizers/pygad_optimizer.py b/src/optimagic/optimizers/pygad_optimizer.py new file mode 100644 index 000000000..e49e36517 --- /dev/null +++ b/src/optimagic/optimizers/pygad_optimizer.py @@ -0,0 +1,995 @@ +"""Implement PyGAD genetic algorithm optimizer.""" + +from __future__ import annotations + +import warnings +from dataclasses import dataclass +from typing import ( + Any, + Callable, + ClassVar, + Literal, + Protocol, + runtime_checkable, +) + +import numpy as np +from numpy.typing import NDArray + +from optimagic import mark +from optimagic.config import IS_PYGAD_INSTALLED +from optimagic.exceptions import NotInstalledError +from optimagic.optimization.algo_options import ( + CONVERGENCE_SATURATE_GENERATIONS, + CONVERGENCE_TARGET_CRITERION, + STOPPING_MAXITER, + get_population_size, +) +from optimagic.optimization.algorithm import Algorithm, InternalOptimizeResult +from 
optimagic.optimization.internal_optimization_problem import ( + InternalOptimizationProblem, +) +from optimagic.typing import ( + AggregationLevel, + Direction, + PositiveFloat, + PositiveInt, + ProbabilityFloat, + PyTree, +) + + +@runtime_checkable +class ParentSelectionFunction(Protocol): + """Protocol for user-defined parent selection functions. + + Args: + fitness: Array of fitness values for all solutions in the population. + num_parents: Number of parents to select. + ga_instance: The PyGAD GA instance. + + Returns: + Tuple of (selected_parents, parent_indices) where: + - selected_parents: 2D array of selected parent solutions + - parent_indices: 1D array of indices of selected parents + + """ + + def __call__( + self, fitness: NDArray[np.float64], num_parents: int, ga_instance: Any + ) -> tuple[NDArray[np.float64], NDArray[np.int_]]: ... + + +@runtime_checkable +class CrossoverFunction(Protocol): + """Protocol for user-defined crossover functions. + + Args: + parents: 2D array of parent solutions selected for mating. + offspring_size: Tuple (num_offspring, num_genes) specifying the shape + of the offspring population to be generated. + ga_instance: The PyGAD GA instance. + + Returns: + 2D array of offspring solutions generated from the parents. + + """ + + def __call__( + self, + parents: NDArray[np.float64], + offspring_size: tuple[int, int], + ga_instance: Any, + ) -> NDArray[np.float64]: ... + + +@runtime_checkable +class MutationFunction(Protocol): + """Protocol for user-defined mutation functions. + + Args: + offspring: 2D array of offspring solutions to be mutated. + ga_instance: The PyGAD GA instance. + + Returns: + 2D array of mutated offspring solutions. + + """ + + def __call__( + self, offspring: NDArray[np.float64], ga_instance: Any + ) -> NDArray[np.float64]: ... + + +@runtime_checkable +class GeneConstraintFunction(Protocol): + """Protocol for user-defined gene constraint functions. 
+ + Gene constraint functions are applied to individual genes to enforce + specific constraints on their values. Each function receives the current + solution and a list of candidate values, then returns the constrained + values. + + Args: + solution: Current solution array containing all gene values. + values: List or array of candidate values for the gene being + constrained. + + Returns: + Constrained values as a list or array, ensuring they satisfy the + gene's specific constraints. + + """ + + def __call__( + self, + solution: NDArray[np.float64], + values: list[float] | NDArray[np.float64], + ) -> list[float] | NDArray[np.float64]: ... + + +@dataclass(frozen=True) +class _BuiltinMutation: + """Base class for all built-in PyGAD mutation configurations. + + Note: + This is an internal base class. Users should not inherit from it + directly. To configure a built-in mutation, use one of its subclasses + (e.g., `RandomMutation`, `AdaptiveMutation`). To define a custom + mutation, provide a function that conforms to the `MutationFunction` + protocol. + + """ + + mutation_type: ClassVar[str] = "random" + + def to_pygad_params(self) -> dict[str, Any]: + """Convert mutation configuration to PyGAD parameters. + + Default implementation that works for simple mutations. Complex + mutations (RandomMutation, AdaptiveMutation) should override this. + + Returns: + Dictionary of PyGAD mutation parameters. + + """ + return { + "mutation_type": self.mutation_type, + "mutation_probability": None, + "mutation_percent_genes": "default", + "mutation_num_genes": None, + "mutation_by_replacement": False, + } + + +@dataclass(frozen=True) +class RandomMutation(_BuiltinMutation): + """Configuration for the random mutation in PyGAD. + + The random mutation selects a subset of genes in each solution and either + replaces each selected gene with a new random value or adds a random value + to it. 
+ + The exact behavior depends on the `by_replacement` parameter: If + `by_replacement` is True, the selected genes are replaced with new values; + if False, random values are added to the existing gene values. + + The mutation rate is determined by the mutation probability, the number of + genes, or the percentage of genes (with priority: probability > num_genes + > percent_genes). + + """ + + mutation_type: ClassVar[str] = "random" + + probability: ProbabilityFloat | None = None + """Probability of mutating each gene. + + If specified, takes precedence over num_genes and percent_genes. Range [0, 1]. + + """ + + num_genes: PositiveInt | None = None + """Number of genes to mutate per solution. + + Takes precedence over percent_genes but is ignored if probability is specified. + + """ + + percent_genes: PositiveFloat | str = "default" + """Percentage of genes to mutate in each solution. + + - "default": Uses 10% of genes (PyGAD default) + - Numeric value: Percentage (0-100) + + Ignored if probability or num_genes are specified. + + """ + + by_replacement: bool = False + """If True, replace gene values with random values. + + If False, add random values to existing gene values. + + """ + + def to_pygad_params(self) -> dict[str, Any]: + """Convert RandomMutation configuration to PyGAD parameters.""" + return { + "mutation_type": self.mutation_type, + "mutation_probability": self.probability, + "mutation_percent_genes": self.percent_genes, + "mutation_num_genes": self.num_genes, + "mutation_by_replacement": self.by_replacement, + } + + +@dataclass(frozen=True) +class SwapMutation(_BuiltinMutation): + """Configuration for the swap mutation in PyGAD. + + The swap mutation selects two random genes and exchanges their values. This + operation maintains all gene values, altering only their positions within the + chromosome. + + No additional parameters are required for this mutation type. 
+ + """ + + mutation_type: ClassVar[str] = "swap" + + +@dataclass(frozen=True) +class InversionMutation(_BuiltinMutation): + """Configuration for the inversion mutation in PyGAD. + + The inversion mutation selects a contiguous segment of genes and reverses their + order. All gene values remain unchanged; only the ordering within the selected + segment is altered. + + No additional parameters are required for this mutation type. + + """ + + mutation_type: ClassVar[str] = "inversion" + + +@dataclass(frozen=True) +class ScrambleMutation(_BuiltinMutation): + """Configuration for the scramble mutation in PyGAD. + + The scramble mutation randomly shuffles the genes within a contiguous segment. This + preserves gene values but changes their order within the chosen segment. + + No additional parameters are required for this mutation type. + + """ + + mutation_type: ClassVar[str] = "scramble" + + +@dataclass(frozen=True) +class AdaptiveMutation(_BuiltinMutation): + """Configuration for the adaptive mutation in PyGAD. + + The adaptive mutation dynamically adjusts the mutation rate based on + solution quality. Solutions whose objective value is worse than the + current population median receive a higher mutation rate to encourage + exploration, while better-than-median solutions receive a lower rate + to preserve promising traits. + + If no mutation rate parameters are specified, this mutation defaults to using + probabilities, with a 10% rate for bad solutions (`probability_bad=0.1`) + and a 5% rate for good solutions (`probability_good=0.05`). + + **Parameter Precedence:** + The mutation rate is determined by the first set of parameters found, in the + following order of priority: + 1. `probability_bad` and `probability_good` + 2. `num_genes_bad` and `num_genes_good` + 3. 
`percent_genes_bad` and `percent_genes_good` + + """ + + mutation_type: ClassVar[str] = "adaptive" + + probability_bad: ProbabilityFloat | None = None + """Probability of mutating each gene for below-average fitness solutions. + + If specified, takes precedence over num_genes_bad and percent_genes_bad. Range [0, + 1]. If no mutation rate parameters are provided at all, this defaults to + 0.1 (10% mutation rate for bad fitness solutions). + + """ + + probability_good: ProbabilityFloat | None = None + """Probability of mutating each gene for above-average fitness solutions. + + If specified, takes precedence over num_genes_good and percent_genes_good. Range [0, + 1]. If no mutation rate parameters are provided at all, this defaults to + 0.05 (5% mutation rate for good fitness solutions). + + """ + + num_genes_bad: PositiveInt | None = None + """Number of genes to mutate for below-average fitness solutions. + + Takes precedence over percent_genes_bad but is ignored if probability_bad is + specified. + + """ + + num_genes_good: PositiveInt | None = None + """Number of genes to mutate for above-average fitness solutions. + + Takes precedence over percent_genes_good but is ignored if probability_good is + specified. + + """ + + percent_genes_bad: PositiveFloat | None = None + """Percentage of genes to mutate for below-average fitness solutions. + + Ignored if probability_bad or num_genes_bad are specified. + + """ + + percent_genes_good: PositiveFloat | None = None + """Percentage of genes to mutate for above-average fitness solutions. + + Ignored if probability_good or num_genes_good are specified. + + """ + + by_replacement: bool = False + """If True, replace gene values with random values. + + If False, add random values to existing gene values. 
+ + """ + + def to_pygad_params(self) -> dict[str, Any]: + """Convert AdaptiveMutation configuration to PyGAD parameters.""" + mutation_probability: list[float] | None = None + mutation_num_genes: list[int] | None = None + mutation_percent_genes: list[float] | str | None = None + + if self.probability_bad is not None and self.probability_good is not None: + mutation_probability = [self.probability_bad, self.probability_good] + elif self.num_genes_bad is not None and self.num_genes_good is not None: + mutation_num_genes = [self.num_genes_bad, self.num_genes_good] + elif self.percent_genes_bad is not None and self.percent_genes_good is not None: + mutation_percent_genes = [self.percent_genes_bad, self.percent_genes_good] + else: + mutation_probability = [ + self.probability_bad or 0.1, + self.probability_good or 0.05, + ] + + return { + "mutation_type": self.mutation_type, + "mutation_probability": mutation_probability, + "mutation_percent_genes": mutation_percent_genes, + "mutation_num_genes": mutation_num_genes, + "mutation_by_replacement": self.by_replacement, + } + + +@mark.minimizer( + name="pygad", + solver_type=AggregationLevel.SCALAR, + is_available=IS_PYGAD_INSTALLED, + is_global=True, + needs_jac=False, + needs_hess=False, + needs_bounds=True, + supports_parallelism=True, + supports_bounds=True, + supports_infinite_bounds=False, + supports_linear_constraints=False, + supports_nonlinear_constraints=False, + disable_history=False, +) +@dataclass(frozen=True) +class Pygad(Algorithm): + """Minimize a scalar function using the PyGAD genetic algorithm. + + This optimizer wraps the PyGAD genetic algorithm package :cite:`gad2023pygad`, + a population-based evolutionary method for global optimization. It maintains a + population of candidate solutions and evolves them over generations using + biologically inspired operations: selection (choosing parents based on fitness), + crossover (combining genes from parents), and mutation (introducing random + variations). 
    population_size: PositiveInt | None = None
    """Number of solutions in each generation.

    Larger populations explore the search space more thoroughly but require
    more fitness evaluations per generation. If None, optimagic sets this to
    ``max(10, 10 * (problem_dimension + 1))``.

    """

    num_parents_mating: PositiveInt | None = 10
    """Number of parents selected for mating in each generation.

    Higher values can speed up convergence but may risk premature convergence.
    If None, defaults to ``max(2, population_size // 2)``.

    """

    num_generations: PositiveInt | None = 50
    """Number of generations to evolve the population."""

    # NOTE(review): this field is never read in _solve_internal_problem;
    # pygad.GA receives ``num_generations`` instead. Confirm which of the two
    # options is supposed to control the generation budget.
    stopping_maxiter: PositiveInt = STOPPING_MAXITER
    """Maximum number of iterations (generations) to run.

    This corresponds to PyGAD's num_generations parameter.

    """

    initial_population: list[PyTree] | None = None
    """Optional initial population as a list of parameter PyTrees.

    If None, the population is initialized randomly within parameter bounds.

    """

    parent_selection_type: (
        Literal["sss", "rws", "sus", "rank", "random", "tournament"]
        | ParentSelectionFunction
    ) = "sss"
    """Parent selection strategy used to choose parents for crossover.

    Available methods:

    * ``"sss"``: Steady-State Selection (selects the best individuals to continue)
    * ``"rws"``: Roulette Wheel Selection (probabilistic, fitness-proportional)
    * ``"sus"``: Stochastic Universal Sampling (even sampling across population)
    * ``"rank"``: Rank Selection (selects based on rank order)
    * ``"random"``: Random Selection
    * ``"tournament"``: Tournament Selection (best from K randomly chosen individuals)

    Alternatively, provide a custom function with signature
    ``(fitness, num_parents, ga_instance) -> tuple[NDArray, NDArray]``.

    """

    keep_parents: int = -1
    """Number of best parents to keep in the next generation.

    Only used if ``keep_elitism = 0``. Values:

    * ``-1``: Keep all parents in the next generation (default)
    * ``0``: Keep no parents in the next generation
    * Positive integer: Keep the specified number of best parents

    """

    keep_elitism: int = 1
    """Number of elite (best) solutions preserved each generation.

    Range: 0 to population_size. If greater than 0, takes precedence over
    ``keep_parents``. When 0, elitism is disabled and ``keep_parents``
    controls parent retention.

    """

    K_tournament: PositiveInt = 3
    """Tournament size for parent selection when
    ``parent_selection_type="tournament"``."""

    crossover_type: (
        Literal["single_point", "two_points", "uniform", "scattered"]
        | CrossoverFunction
        | None
    ) = "single_point"
    """Crossover operator for generating offspring.

    Available methods:

    * ``"single_point"``: Single-point crossover
    * ``"two_points"``: Two-point crossover
    * ``"uniform"``: Uniform crossover (randomly mixes genes)
    * ``"scattered"``: Scattered crossover (random mask)

    Or provide a custom function with signature
    ``(parents, offspring_size, ga_instance) -> NDArray``.

    """

    crossover_probability: ProbabilityFloat | None = None
    """Probability of applying crossover to selected parents.

    Range [0, 1]. If None, uses PyGAD's default.

    """

    mutation: (
        Literal["random", "swap", "inversion", "scramble", "adaptive"]
        | type[_BuiltinMutation]
        | _BuiltinMutation
        | MutationFunction
        | None
    ) = "random"
    """Mutation operator for introducing genetic diversity.

    Available options:

    **String values for default configurations:**

    * ``"random"``: Random mutation with default parameters
    * ``"swap"``: Swap mutation with default parameters
    * ``"inversion"``: Inversion mutation with default parameters
    * ``"scramble"``: Scramble mutation with default parameters
    * ``"adaptive"``: Adaptive random mutation with default parameters

    **Mutation classes for default configurations:**

    * Any mutation class (e.g., ``RandomMutation``, ``SwapMutation``,
      ``AdaptiveMutation``, etc.)
    * All classes can be used without parameters for default behavior

    **Configured mutation instances:**

    * Any mutation instance (e.g., ``RandomMutation(...)``,
      ``SwapMutation()``, etc.)
    * All mutation classes inherit from ``_BuiltinMutation``

    **Custom function:**

    * Custom function with signature ``(offspring, ga_instance) -> NDArray``

    **Disable mutation:**

    * ``None`` to disable mutation

    """

    allow_duplicate_genes: bool = True
    """If True, duplicate gene values are allowed within a solution."""

    gene_constraint: list[GeneConstraintFunction | None] | None = None
    """Optional list of per-gene constraint functions.

    Each with signature ``(solution, values) -> list[float] | NDArray``.

    """

    sample_size: PositiveInt = 100
    """Number of values to sample when enforcing uniqueness or gene constraints."""

    batch_size: PositiveInt | None = None
    """Number of solutions to evaluate in parallel batches.

    If None and ``n_cores > 1``, automatically set to ``n_cores``.

    """

    # NOTE(review): typed PositiveFloat, but a minimization target can
    # legitimately be zero or negative — confirm the intended annotation.
    # The "Default: None." claim assumes the CONVERGENCE_TARGET_CRITERION
    # constant is None; verify against its definition.
    convergence_target_criterion: PositiveFloat | None = CONVERGENCE_TARGET_CRITERION
    """Target criterion value for early stopping.

    Default: None.

    """
+ + """ + + convergence_saturate_generations: PositiveInt | None = ( + CONVERGENCE_SATURATE_GENERATIONS + ) + """Maximum generations without fitness improvement before stopping. + + Default: None. + + """ + + n_cores: PositiveInt = 1 + """Number of CPU cores for parallel fitness evaluation.""" + + seed: int | None = None + """Random seed for reproducibility.""" + + def _solve_internal_problem( + self, problem: InternalOptimizationProblem, x0: NDArray[np.float64] + ) -> InternalOptimizeResult: + if not IS_PYGAD_INSTALLED: + raise NotInstalledError( + "The 'pygad' algorithm requires the pygad package to be " + "installed. You can install it with 'pip install pygad'." + ) + + _validate_user_defined_functions( + parent_selection_type=self.parent_selection_type, + crossover_type=self.crossover_type, + gene_constraint=self.gene_constraint, + ) + import pygad + + if ( + problem.bounds.lower is None + or problem.bounds.upper is None + or not np.isfinite(problem.bounds.lower).all() + or not np.isfinite(problem.bounds.upper).all() + ): + raise ValueError("pygad requires finite bounds for all parameters.") + + # Determine effective batch_size for parallel processing + effective_batch_size = _determine_effective_batch_size( + self.batch_size, self.n_cores + ) + + if ( + effective_batch_size is not None + and effective_batch_size > 1 + and self.n_cores > 1 + ): + + def _fitness_func_batch( + _ga_instance: Any, + batch_solutions: NDArray[np.float64], + _batch_indices: list[int] | NDArray[np.int_], + ) -> list[float]: + solutions_list: list[NDArray[np.float64]] = [ + np.asarray(batch_solutions[i]) + for i in range(batch_solutions.shape[0]) + ] + batch_results = problem.batch_fun( + solutions_list, + n_cores=self.n_cores, + batch_size=effective_batch_size, + ) + + return [-float(result) for result in batch_results] + + fitness_function: Any = _fitness_func_batch + else: + + def _fitness_func_single( + _ga_instance: Any, solution: NDArray[np.float64], _solution_idx: int + ) -> 
float: + return -float(problem.fun(solution)) + + fitness_function = _fitness_func_single + + population_size = get_population_size( + population_size=self.population_size, x=x0, lower_bound=10 + ) + + num_parents_mating = ( + self.num_parents_mating + if self.num_parents_mating is not None + else max(2, population_size // 2) + ) + + if self.initial_population is not None: + initial_population = np.array( + [ + problem.converter.params_to_internal(params) + for params in self.initial_population + ] + ) + else: + num_genes = len(x0) + + initial_population = np.random.uniform( + problem.bounds.lower, + problem.bounds.upper, + size=(population_size, num_genes), + ) + + initial_population[0] = x0 + + gene_space = [ + {"low": problem.bounds.lower[i], "high": problem.bounds.upper[i]} + for i in range(len(x0)) + ] + + # Convert mutation parameter to PyGAD parameters + mutation_params = _convert_mutation_to_pygad_params(self.mutation) + + # Build stop criteria from convergence parameters + stop_criteria = _build_stop_criteria( + self.convergence_target_criterion, + self.convergence_saturate_generations, + direction=problem.direction, + ) + + ga_instance = pygad.GA( + num_generations=self.num_generations, + num_parents_mating=num_parents_mating, + fitness_func=fitness_function, + fitness_batch_size=effective_batch_size, + initial_population=initial_population, + gene_space=gene_space, + parent_selection_type=self.parent_selection_type, + keep_parents=self.keep_parents, + keep_elitism=self.keep_elitism, + K_tournament=self.K_tournament, + crossover_type=self.crossover_type, + crossover_probability=self.crossover_probability, + mutation_type=mutation_params["mutation_type"], + mutation_probability=mutation_params["mutation_probability"], + mutation_by_replacement=mutation_params["mutation_by_replacement"], + mutation_percent_genes=mutation_params["mutation_percent_genes"], + mutation_num_genes=mutation_params["mutation_num_genes"], + 
allow_duplicate_genes=self.allow_duplicate_genes, + gene_constraint=self.gene_constraint, + sample_size=self.sample_size, + stop_criteria=stop_criteria, + parallel_processing=None, + random_seed=self.seed, + ) + + ga_instance.run() + + result = _process_pygad_result(ga_instance) + + return result + + +def _convert_mutation_to_pygad_params(mutation: Any) -> dict[str, Any]: + """Convert the mutation parameter to PyGAD mutation parameters. + + Handles strings, classes, instances, and custom functions using the + new mutation dataclass system with built-in conversion methods. + + Returns: + Dictionary of PyGAD mutation parameters. + + """ + params: dict[str, Any] + + if mutation is None: + params = _get_default_mutation_params(mutation_type=None) + + elif isinstance(mutation, str): + mutation_instance = _create_mutation_from_string(mutation) + params = mutation_instance.to_pygad_params() + + elif isinstance(mutation, type) and issubclass(mutation, _BuiltinMutation): + mutation_instance = mutation() + params = mutation_instance.to_pygad_params() + + elif isinstance(mutation, _BuiltinMutation): + params = mutation.to_pygad_params() + + elif isinstance(mutation, MutationFunction): + params = _get_default_mutation_params(mutation_type=mutation) + + else: + raise ValueError(f"Unsupported mutation type: {type(mutation)}") + + return params + + +def _get_default_mutation_params(mutation_type: Any = "random") -> dict[str, Any]: + """Get default PyGAD mutation parameters.""" + return { + "mutation_type": mutation_type, + "mutation_probability": None, + "mutation_percent_genes": None if mutation_type is None else "default", + "mutation_num_genes": None, + "mutation_by_replacement": None if mutation_type is None else False, + } + + +def _create_mutation_from_string(mutation_type: str) -> _BuiltinMutation: + """Create a mutation instance from a string type. + + Args: + mutation_type: String mutation type (e.g., "random", "swap", etc.) + + Returns: + Appropriate mutation instance. 
+ + Raises: + ValueError: If mutation_type is not supported. + + """ + mutation_map = { + "random": RandomMutation, + "swap": SwapMutation, + "inversion": InversionMutation, + "scramble": ScrambleMutation, + "adaptive": AdaptiveMutation, + } + + if mutation_type not in mutation_map: + raise ValueError(f"Unsupported mutation type: {mutation_type}") + + return mutation_map[mutation_type]() + + +def _determine_effective_batch_size(batch_size: int | None, n_cores: int) -> int | None: + """Determine the effective batch_size for parallel processing. + + Behavior: + - If `batch_size` is explicitly provided: + - The value is returned unchanged. + - A warning is issued if it is less than `n_cores`, as this may + underutilize available cores. + - If `batch_size` is `None`: + - If `n_cores` > 1, defaults to `n_cores`. + - Otherwise, returns None (i.e., single-threaded evaluation). + + Args: + batch_size: User-specified batch size or None + n_cores: Number of cores for parallel processing + + Returns: + Effective batch size for PyGAD, or None for single-threaded + processing + + """ + result = None + + if batch_size is not None: + if batch_size < n_cores: + warnings.warn( + f"batch_size ({batch_size}) is smaller than " + f"n_cores ({n_cores}). This may reduce parallel efficiency. " + f"Consider setting batch_size >= n_cores." + ) + result = batch_size + elif n_cores > 1: + result = n_cores + + return result + + +def _build_stop_criteria( + target_criterion: float | None, + saturate_generations: int | None, + direction: Direction, +) -> str | list[str] | None: + """Build PyGAD stop criteria from optimagic convergence parameters. + + Args: + target_criterion: Target value that the objective function should reach. + saturate_generations: Max generations without improvement before stopping. + direction: Direction of optimization (Direction.MINIMIZE or Direction.MAXIMIZE). + + Returns: + PyGAD stop criteria string, list of strings, or None. 
+ + """ + criteria = [] + + if target_criterion is not None: + pygad_target_fitness = ( + -target_criterion if direction is Direction.MINIMIZE else target_criterion + ) + criteria.append(f"reach_{pygad_target_fitness}") + + if saturate_generations is not None: + criteria.append(f"saturate_{saturate_generations}") + + return criteria[0] if len(criteria) == 1 else (criteria or None) + + +def _validate_user_defined_functions( + parent_selection_type: str | Callable[..., object] | None, + crossover_type: str | Callable[..., object] | None, + gene_constraint: list[GeneConstraintFunction | None] | None, +) -> None: + """Validate user-provided functions for selection, crossover, and constraints.""" + + if parent_selection_type is None: + pass + elif isinstance(parent_selection_type, str): + _validate_string_choice( + parent_selection_type, + ["sss", "rws", "sus", "rank", "random", "tournament"], + "parent_selection_type", + ) + elif callable(parent_selection_type): + _validate_protocol_function( + parent_selection_type, + ParentSelectionFunction, + "parent_selection_type", + ) + else: + raise ValueError( + "parent_selection_type must be a string, callable, or None, " + f"got {type(parent_selection_type)}" + ) + + if crossover_type is None: + pass + elif isinstance(crossover_type, str): + _validate_string_choice( + crossover_type, + ["single_point", "two_points", "uniform", "scattered"], + "crossover_type", + ) + elif callable(crossover_type): + _validate_protocol_function( + crossover_type, + CrossoverFunction, + "crossover_type", + ) + else: + raise ValueError( + "crossover_type must be a string, callable, or None, " + f"got {type(crossover_type)}" + ) + + if gene_constraint is not None: + if not isinstance(gene_constraint, list): + raise ValueError( + f"gene_constraint must be a list or None, got {type(gene_constraint)}" + ) + for i, constraint_func in enumerate(gene_constraint): + if constraint_func is not None: + if not callable(constraint_func): + raise TypeError( + 
f"gene_constraint[{i}] must be callable, or None, " + f"got {type(constraint_func)}" + ) + _validate_protocol_function( + constraint_func, + GeneConstraintFunction, + f"gene_constraint[{i}]", + ) + + +def _validate_string_choice(value: str, valid_choices: list[str], name: str) -> None: + """Ensure a string parameter is one of the allowed choices.""" + if value not in valid_choices: + raise ValueError(f"{name} must be one of {valid_choices}, got '{value}'.") + + +def _validate_protocol_function( + func: Callable[..., Any], protocol: Any, name: str +) -> None: + """Ensure a callable satisfies the expected protocol interface.""" + + if not isinstance(func, protocol): + raise TypeError(f"{name} must implement {protocol.__name__}.") + + +def _process_pygad_result(ga_instance: Any) -> InternalOptimizeResult: + """Process PyGAD result into InternalOptimizeResult. + + Args: + ga_instance: The PyGAD instance after running the optimization + + Returns: + InternalOptimizeResult: Processed optimization results + + """ + best_solution, best_fitness, _ = ga_instance.best_solution() + + best_criterion = -best_fitness + + completed_generations = ga_instance.generations_completed + success = ga_instance.run_completed + if success: + message = ( + "Optimization terminated successfully.\n" + f"Generations completed: {completed_generations}" + ) + else: + message = ( + "Optimization failed to complete.\n" + f"Generations completed: {completed_generations}" + ) + + return InternalOptimizeResult( + x=best_solution, + fun=best_criterion, + success=success, + message=message, + n_fun_evals=ga_instance.generations_completed * ga_instance.pop_size[0], + ) diff --git a/src/optimagic/typing.py b/src/optimagic/typing.py index 443bad959..db03c24cc 100644 --- a/src/optimagic/typing.py +++ b/src/optimagic/typing.py @@ -122,6 +122,8 @@ def __call__( """Type alias for positive floats (greater than 0).""" NonNegativeFloat = Annotated[float, Ge(0)] """Type alias for non-negative floats (greater than 
or equal to 0).""" +ProbabilityFloat = Annotated[float, Ge(0), Le(1)] +"""Type alias for probability floats (between 0 and 1, inclusive).""" NegativeFloat = Annotated[float, Lt(0)] """Type alias for negative floats (less than 0).""" GtOneFloat = Annotated[float, Gt(1)] diff --git a/tests/optimagic/optimizers/test_pygad_optimizer.py b/tests/optimagic/optimizers/test_pygad_optimizer.py new file mode 100644 index 000000000..6e1373176 --- /dev/null +++ b/tests/optimagic/optimizers/test_pygad_optimizer.py @@ -0,0 +1,284 @@ +"""Test helper functions for PyGAD optimizer.""" + +import warnings + +import pytest + +from optimagic.optimizers.pygad_optimizer import ( + AdaptiveMutation, + InversionMutation, + RandomMutation, + ScrambleMutation, + SwapMutation, + _convert_mutation_to_pygad_params, + _create_mutation_from_string, + _determine_effective_batch_size, + _get_default_mutation_params, +) + + +@pytest.mark.parametrize( + "batch_size, n_cores, expected", + [ + (None, 1, None), + (None, 4, 4), + (10, 4, 10), + (4, 4, 4), + (2, 4, 2), + (5, 1, 5), + (0, 4, 0), + (None, 100, 100), + (1, 1, 1), + ], +) +def test_determine_effective_batch_size_return_values(batch_size, n_cores, expected): + result = _determine_effective_batch_size(batch_size, n_cores) + assert result == expected + + +@pytest.mark.parametrize( + "batch_size, n_cores, should_warn", + [ + (2, 4, True), + (1, 8, True), + (0, 4, True), + (4, 4, False), + (8, 4, False), + (None, 4, False), + (5, 1, False), + (None, 1, False), + ], +) +def test_determine_effective_batch_size_warnings(batch_size, n_cores, should_warn): + if should_warn: + warning_pattern = ( + f"batch_size \\({batch_size}\\) is smaller than " + f"n_cores \\({n_cores}\\)\\. This may reduce parallel efficiency\\. " + f"Consider setting batch_size >= n_cores\\." 
+ ) + with pytest.warns(UserWarning, match=warning_pattern): + result = _determine_effective_batch_size(batch_size, n_cores) + assert result == batch_size + else: + with warnings.catch_warnings(): + warnings.simplefilter("error") + result = _determine_effective_batch_size(batch_size, n_cores) + + +# Tests for _get_default_mutation_params +@pytest.mark.parametrize( + "mutation_type, expected", + [ + ( + "random", + { + "mutation_type": "random", + "mutation_probability": None, + "mutation_percent_genes": "default", + "mutation_num_genes": None, + "mutation_by_replacement": False, + }, + ), + ( + None, + { + "mutation_type": None, + "mutation_probability": None, + "mutation_percent_genes": None, + "mutation_num_genes": None, + "mutation_by_replacement": None, + }, + ), + ], +) +def test_get_default_mutation_params(mutation_type, expected): + result = _get_default_mutation_params(mutation_type) + assert result == expected + + +# Tests for _create_mutation_from_string +@pytest.mark.parametrize( + "mutation_type, expected_class", + [ + ("random", RandomMutation), + ("swap", SwapMutation), + ("inversion", InversionMutation), + ("scramble", ScrambleMutation), + ("adaptive", AdaptiveMutation), + ], +) +def test_create_mutation_from_string_valid(mutation_type, expected_class): + result = _create_mutation_from_string(mutation_type) + assert isinstance(result, expected_class) + + +def test_create_mutation_from_string_invalid(): + with pytest.raises(ValueError, match="Unsupported mutation type: invalid"): + _create_mutation_from_string("invalid") + + +# Tests for _convert_mutation_to_pygad_params +def test_convert_mutation_none(): + result = _convert_mutation_to_pygad_params(None) + expected = { + "mutation_type": None, + "mutation_probability": None, + "mutation_percent_genes": None, + "mutation_num_genes": None, + "mutation_by_replacement": None, + } + assert result == expected + + +@pytest.mark.parametrize( + "mutation_string", + ["random", "swap", "inversion", "scramble", 
"adaptive"], +) +def test_convert_mutation_string(mutation_string): + result = _convert_mutation_to_pygad_params(mutation_string) + assert result["mutation_type"] == mutation_string + assert "mutation_probability" in result + assert "mutation_percent_genes" in result + assert "mutation_num_genes" in result + assert "mutation_by_replacement" in result + + +@pytest.mark.parametrize( + "mutation_class", + [ + RandomMutation, + SwapMutation, + InversionMutation, + ScrambleMutation, + AdaptiveMutation, + ], +) +def test_convert_mutation_class(mutation_class): + result = _convert_mutation_to_pygad_params(mutation_class) + assert result["mutation_type"] == mutation_class.mutation_type + assert "mutation_probability" in result + assert "mutation_percent_genes" in result + assert "mutation_num_genes" in result + assert "mutation_by_replacement" in result + + +def test_convert_mutation_instance(): + # Test RandomMutation instance + mutation = RandomMutation(probability=0.2, by_replacement=True) + result = _convert_mutation_to_pygad_params(mutation) + assert result["mutation_type"] == "random" + assert result["mutation_probability"] == 0.2 + assert result["mutation_by_replacement"] is True + + # Test SwapMutation instance + mutation = SwapMutation() + result = _convert_mutation_to_pygad_params(mutation) + assert result["mutation_type"] == "swap" + + # Test AdaptiveMutation instance + mutation = AdaptiveMutation(probability_bad=0.3, probability_good=0.1) + result = _convert_mutation_to_pygad_params(mutation) + assert result["mutation_type"] == "adaptive" + assert result["mutation_probability"] == [0.3, 0.1] + + +def test_convert_mutation_custom_function(): + def custom_mutation(offspring, ga_instance): + return offspring + + result = _convert_mutation_to_pygad_params(custom_mutation) + assert result["mutation_type"] == custom_mutation + + +def test_convert_mutation_invalid_type(): + with pytest.raises(ValueError, match="Unsupported mutation type"): + 
_convert_mutation_to_pygad_params(123) + + +# Tests for mutation dataclasses +def test_random_mutation_default(): + mutation = RandomMutation() + result = mutation.to_pygad_params() + assert result["mutation_type"] == "random" + assert result["mutation_probability"] is None + assert result["mutation_percent_genes"] == "default" + assert result["mutation_num_genes"] is None + assert result["mutation_by_replacement"] is False + + +def test_random_mutation_with_parameters(): + mutation = RandomMutation( + probability=0.15, num_genes=5, percent_genes=20.0, by_replacement=True + ) + result = mutation.to_pygad_params() + assert result["mutation_type"] == "random" + assert result["mutation_probability"] == 0.15 + assert result["mutation_percent_genes"] == 20.0 + assert result["mutation_num_genes"] == 5 + assert result["mutation_by_replacement"] is True + + +@pytest.mark.parametrize( + "mutation_class, expected_type", + [ + (SwapMutation, "swap"), + (InversionMutation, "inversion"), + (ScrambleMutation, "scramble"), + ], +) +def test_simple_mutations(mutation_class, expected_type): + mutation = mutation_class() + result = mutation.to_pygad_params() + assert result["mutation_type"] == expected_type + assert result["mutation_probability"] is None + assert result["mutation_percent_genes"] == "default" + assert result["mutation_num_genes"] is None + assert result["mutation_by_replacement"] is False + + +def test_adaptive_mutation_default(): + mutation = AdaptiveMutation() + result = mutation.to_pygad_params() + assert result["mutation_type"] == "adaptive" + assert result["mutation_probability"] == [0.1, 0.05] # Default values + assert result["mutation_percent_genes"] is None + assert result["mutation_num_genes"] is None + assert result["mutation_by_replacement"] is False + + +def test_adaptive_mutation_with_probabilities(): + mutation = AdaptiveMutation(probability_bad=0.2, probability_good=0.08) + result = mutation.to_pygad_params() + assert result["mutation_type"] == 
"adaptive" + assert result["mutation_probability"] == [0.2, 0.08] + assert result["mutation_percent_genes"] is None + assert result["mutation_num_genes"] is None + assert result["mutation_by_replacement"] is False + + +def test_adaptive_mutation_with_num_genes(): + mutation = AdaptiveMutation(num_genes_bad=10, num_genes_good=5) + result = mutation.to_pygad_params() + assert result["mutation_type"] == "adaptive" + assert result["mutation_probability"] is None + assert result["mutation_num_genes"] == [10, 5] + assert result["mutation_percent_genes"] is None + assert result["mutation_by_replacement"] is False + + +def test_adaptive_mutation_with_percent_genes(): + mutation = AdaptiveMutation(percent_genes_bad=25.0, percent_genes_good=10.0) + result = mutation.to_pygad_params() + assert result["mutation_type"] == "adaptive" + assert result["mutation_probability"] is None + assert result["mutation_num_genes"] is None + assert result["mutation_percent_genes"] == [25.0, 10.0] + assert result["mutation_by_replacement"] is False + + +def test_mutation_type_class_variables(): + assert RandomMutation.mutation_type == "random" + assert SwapMutation.mutation_type == "swap" + assert InversionMutation.mutation_type == "inversion" + assert ScrambleMutation.mutation_type == "scramble" + assert AdaptiveMutation.mutation_type == "adaptive"