29 changes: 29 additions & 0 deletions README.md
@@ -85,6 +85,35 @@ from opt.classical import BFGS
 from opt import ParticleSwarm, AdamW, BFGS
 ```
 
+### Quick Demo
+
+All optimizers include a standardized demo that can be run directly or customized:
+
+```python
+from opt.demo import run_demo
+from opt.swarm_intelligence import ParticleSwarm
+
+# Run with default settings
+run_demo(ParticleSwarm)
+
+# Or customize parameters
+run_demo(
+    ParticleSwarm,
+    max_iter=200,
+    population_size=50,
+    c1=2.0,
+    c2=2.0
+)
+```
+
+You can also run demos directly from the command line:
+
+```bash
+python -m opt.swarm_intelligence.particle_swarm
+python -m opt.gradient_based.adamw
+python -m opt.classical.simulated_annealing
+```
+
 ## Project Structure
 
 Optimizers are organized into categorical subfolders:
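The `python -m` invocations work because each optimizer module keeps a `__main__` guard that now delegates to the shared runner, as the per-file diffs below show. A minimal sketch of the pattern, using `BFGS` as the example class:

```python
# Entry-point pattern each optimizer module follows after this PR:
if __name__ == "__main__":
    from opt.demo import run_demo  # imported lazily, only needed for direct execution

    run_demo(BFGS)  # the optimizer class defined in this module
```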
8 changes: 3 additions & 5 deletions opt/classical/bfgs.py
@@ -35,7 +35,6 @@
 from scipy.optimize import minimize
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -131,7 +130,6 @@ def bounded_func(x: np.ndarray) -> float:
 
 
 if __name__ == "__main__":
-    optimizer = BFGS(func=shifted_ackley, lower_bound=-2.768, upper_bound=+2.768, dim=2)
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution: {best_solution}")
-    print(f"Best fitness: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(BFGS)
10 changes: 3 additions & 7 deletions opt/classical/conjugate_gradient.py
@@ -41,7 +41,6 @@
 from scipy.optimize import minimize
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -137,9 +136,6 @@ def bounded_func(x: np.ndarray) -> float:
 
 
 if __name__ == "__main__":
-    optimizer = ConjugateGradient(
-        func=shifted_ackley, lower_bound=-2.768, upper_bound=+2.768, dim=2
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution: {best_solution}")
-    print(f"Best fitness: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(ConjugateGradient)
10 changes: 3 additions & 7 deletions opt/classical/hill_climbing.py
@@ -35,7 +35,6 @@
 import numpy as np
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -132,9 +131,6 @@ def search(self) -> tuple[np.ndarray, float]:
 
 
 if __name__ == "__main__":
-    optimizer = HillClimbing(
-        func=shifted_ackley, dim=2, lower_bound=-32.768, upper_bound=+32.768
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution found: {best_solution}")
-    print(f"Best fitness found: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(HillClimbing)
10 changes: 3 additions & 7 deletions opt/classical/lbfgs.py
@@ -35,7 +35,6 @@
 from scipy.optimize import minimize
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -126,9 +125,6 @@ def search(self) -> tuple[np.ndarray, float]:
 
 
 if __name__ == "__main__":
-    optimizer = LBFGS(
-        func=shifted_ackley, lower_bound=-2.768, upper_bound=+2.768, dim=2
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution: {best_solution}")
-    print(f"Best fitness: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(LBFGS)
10 changes: 3 additions & 7 deletions opt/classical/nelder_mead.py
@@ -38,7 +38,6 @@
 from scipy.optimize import minimize
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -134,9 +133,6 @@ def bounded_func(x: np.ndarray) -> float:
 
 
 if __name__ == "__main__":
-    optimizer = NelderMead(
-        func=shifted_ackley, lower_bound=-2.768, upper_bound=+2.768, dim=2
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution: {best_solution}")
-    print(f"Best fitness: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(NelderMead)
10 changes: 3 additions & 7 deletions opt/classical/powell.py
@@ -40,7 +40,6 @@
 from scipy.optimize import minimize
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -136,9 +135,6 @@ def bounded_func(x: np.ndarray) -> float:
 
 
 if __name__ == "__main__":
-    optimizer = Powell(
-        func=shifted_ackley, lower_bound=-2.768, upper_bound=+2.768, dim=2
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution: {best_solution}")
-    print(f"Best fitness: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(Powell)
12 changes: 2 additions & 10 deletions opt/classical/simulated_annealing.py
@@ -25,7 +25,6 @@
 import numpy as np
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -143,13 +142,6 @@ def search(self) -> tuple[np.ndarray, float]:
 
 
 if __name__ == "__main__":
-    # Create a SimulatedAnnealing object
-    optimizer = SimulatedAnnealing(
-        shifted_ackley, dim=2, lower_bound=-2.768, upper_bound=+2.768
-    )
+    from opt.demo import run_demo
 
-    # Perform the optimization
-
-    best_solution, best_cost = optimizer.search()
-    print(f"Best solution: {best_solution}")
-    print(f"Best cost: {best_cost}")
+    run_demo(SimulatedAnnealing)
10 changes: 3 additions & 7 deletions opt/classical/tabu_search.py
@@ -61,7 +61,6 @@ def objective_function(x):
 import numpy as np
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -184,9 +183,6 @@ def search(self) -> tuple[np.ndarray, float]:
 
 
 if __name__ == "__main__":
-    optimizer = TabuSearch(
-        func=shifted_ackley, dim=2, lower_bound=-2.768, upper_bound=+2.768
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution found: {best_solution}")
-    print(f"Best fitness value: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(TabuSearch)
10 changes: 3 additions & 7 deletions opt/classical/trust_region.py
@@ -40,7 +40,6 @@
 from scipy.optimize import minimize
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -155,9 +154,6 @@ def bounded_func(x: np.ndarray) -> float:
 
 
 if __name__ == "__main__":
-    optimizer = TrustRegion(
-        func=shifted_ackley, lower_bound=-2.768, upper_bound=+2.768, dim=2
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution: {best_solution}")
-    print(f"Best fitness: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(TrustRegion)
97 changes: 97 additions & 0 deletions opt/demo.py
@@ -0,0 +1,97 @@
"""Centralized demo runner for optimizer demonstrations.
This module provides a standardized way to run demonstrations for any optimizer,
ensuring consistent output formatting and reducing code duplication across the
codebase.
Example:
>>> from opt.demo import run_demo
>>> from opt.swarm_intelligence.particle_swarm import ParticleSwarm
>>> solution, fitness = run_demo(ParticleSwarm)
Running ParticleSwarm demo...
Function: shifted_ackley
Dimensions: 2
Bounds: [-2.768, 2.768]
Max iterations: 100
<BLANKLINE>
Results:
Best solution found: [...]
Best fitness value: ...
"""

from __future__ import annotations

from typing import TYPE_CHECKING
from typing import Any

import numpy as np

from opt.benchmark.functions import shifted_ackley


if TYPE_CHECKING:
from collections.abc import Callable

from opt.abstract_optimizer import AbstractOptimizer


def run_demo(
optimizer_class: type[AbstractOptimizer],
*,
func: Callable[[np.ndarray], float] = shifted_ackley,
dim: int = 2,
lower_bound: float = -2.768,
upper_bound: float = 2.768,
max_iter: int = 100,
**kwargs: Any,
) -> tuple[np.ndarray, float]:
"""Run a standardized demo for any optimizer.
Args:
optimizer_class: The optimizer class to demonstrate.
func: Benchmark function to optimize. Defaults to shifted_ackley.
dim: Dimensionality of the search space. Defaults to 2.
lower_bound: Lower bound of the search space. Defaults to -2.768.
upper_bound: Upper bound of the search space. Defaults to 2.768.
max_iter: Maximum iterations. Defaults to 100.
**kwargs: Additional optimizer-specific parameters.
Returns:
Tuple of (best_solution, best_fitness).
Example:
>>> from opt.demo import run_demo
>>> from opt.swarm_intelligence.particle_swarm import ParticleSwarm
>>> solution, fitness = run_demo(ParticleSwarm)
Running ParticleSwarm demo...
Function: shifted_ackley
Dimensions: 2
Bounds: [-2.768, 2.768]
Max iterations: 100
<BLANKLINE>
Results:
Best solution found: [...]
Best fitness value: ...
"""
print(f"Running {optimizer_class.__name__} demo...")
print(f" Function: {func.__name__}")
print(f" Dimensions: {dim}")
print(f" Bounds: [{lower_bound}, {upper_bound}]")
print(f" Max iterations: {max_iter}")

optimizer = optimizer_class(
func=func,
dim=dim,
lower_bound=lower_bound,
upper_bound=upper_bound,
max_iter=max_iter,
**kwargs,
)

best_solution, best_fitness = optimizer.search()
Comment on lines +82 to +91

Copilot AI (Dec 21, 2025):

The run_demo function does not handle cases where an optimizer might fail during instantiation or execution. Consider adding error handling to provide a more helpful error message when optimizers with incompatible signatures or parameters fail. For example, if an optimizer requires additional mandatory parameters beyond the standard set, the error message should guide users on how to provide them.

Suggested change:
-    optimizer = optimizer_class(
-        func=func,
-        dim=dim,
-        lower_bound=lower_bound,
-        upper_bound=upper_bound,
-        max_iter=max_iter,
-        **kwargs,
-    )
-    best_solution, best_fitness = optimizer.search()
+    try:
+        optimizer = optimizer_class(
+            func=func,
+            dim=dim,
+            lower_bound=lower_bound,
+            upper_bound=upper_bound,
+            max_iter=max_iter,
+            **kwargs,
+        )
+    except TypeError as exc:
+        raise TypeError(
+            f"Failed to instantiate optimizer {optimizer_class.__name__}. "
+            "This often happens when the optimizer's __init__ signature does not match "
+            "the parameters provided by run_demo. If the optimizer requires additional "
+            "mandatory arguments, pass them as keyword arguments to run_demo via **kwargs. "
+            f"Provided extra keyword arguments: {sorted(kwargs.keys())}. "
+            f"Original error: {exc}"
+        ) from exc
+    except Exception as exc:  # pragma: no cover - defensive programming
+        raise RuntimeError(
+            f"Unexpected error while instantiating optimizer {optimizer_class.__name__}: {exc}"
+        ) from exc
+    try:
+        best_solution, best_fitness = optimizer.search()
+    except Exception as exc:
+        raise RuntimeError(
+            f"Error while running search() for optimizer {optimizer_class.__name__}. "
+            "Check that all required parameters were provided and are valid. "
+            f"Original error: {exc}"
+        ) from exc

print("\nResults:")
print(f" Best solution found: {best_solution}")
print(f" Best fitness value: {best_fitness:.6f}")

return best_solution, best_fitness
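Since `run_demo` forwards `**kwargs` verbatim into the optimizer constructor, optimizer-specific settings ride along with the standard ones. A usage sketch, assuming `ParticleSwarm` accepts `population_size`, `c1`, and `c2` as the README example suggests:

```python
from opt.demo import run_demo
from opt.swarm_intelligence.particle_swarm import ParticleSwarm

# population_size, c1, c2 travel through **kwargs into ParticleSwarm.__init__;
# the returned tuple is (best_solution, best_fitness).
solution, fitness = run_demo(
    ParticleSwarm, max_iter=200, population_size=50, c1=2.0, c2=2.0
)
```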
10 changes: 3 additions & 7 deletions opt/evolutionary/cma_es.py
@@ -31,7 +31,6 @@
 from scipy.linalg import sqrtm
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -172,9 +171,6 @@ def search(self) -> tuple[np.ndarray, float]:
 
 
 if __name__ == "__main__":
-    optimizer = CMAESAlgorithm(
-        func=shifted_ackley, dim=2, lower_bound=-12.768, upper_bound=12.768
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution: {best_solution}")
-    print(f"Best fitness: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(CMAESAlgorithm)
10 changes: 3 additions & 7 deletions opt/evolutionary/cultural_algorithm.py
@@ -41,7 +41,6 @@
 import numpy as np
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -160,9 +159,6 @@ def search(self) -> tuple[np.ndarray, float]:
 
 
 if __name__ == "__main__":
-    optimizer = CulturalAlgorithm(
-        func=shifted_ackley, dim=2, lower_bound=-2.768, upper_bound=+2.768
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution found: {best_solution}")
-    print(f"Best fitness found: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(CulturalAlgorithm)
10 changes: 3 additions & 7 deletions opt/evolutionary/differential_evolution.py
@@ -38,7 +38,6 @@
 import numpy as np
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -142,9 +141,6 @@ def search(self) -> tuple[np.ndarray, float]:
 
 
 if __name__ == "__main__":
-    optimizer = DifferentialEvolution(
-        func=shifted_ackley, lower_bound=-32.768, upper_bound=+32.768, dim=2
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution found: {best_solution}")
-    print(f"Best fitness found: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(DifferentialEvolution)