
Commit c2f7488

Copilot and Anselmoo authored
Centralize optimizer demo blocks to eliminate 800+ lines of duplication (#44)
* Initial plan
* feat: add centralized demo module for optimizer demonstrations
* refactor: update 109 optimizers to use centralized demo runner
* test: add comprehensive tests for centralized demo module
* docs: add demo section to README with examples

Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: Anselmoo <13209783+Anselmoo@users.noreply.github.com>
1 parent c214713 commit c2f7488

File tree

113 files changed (+515, -1100 lines)


README.md

Lines changed: 29 additions & 0 deletions
@@ -85,6 +85,35 @@ from opt.classical import BFGS
 from opt import ParticleSwarm, AdamW, BFGS
 ```
 
+### Quick Demo
+
+All optimizers include a standardized demo that can be run directly or customized:
+
+```python
+from opt.demo import run_demo
+from opt.swarm_intelligence import ParticleSwarm
+
+# Run with default settings
+run_demo(ParticleSwarm)
+
+# Or customize parameters
+run_demo(
+    ParticleSwarm,
+    max_iter=200,
+    population_size=50,
+    c1=2.0,
+    c2=2.0
+)
+```
+
+You can also run demos directly from the command line:
+
+```bash
+python -m opt.swarm_intelligence.particle_swarm
+python -m opt.gradient_based.adamw
+python -m opt.classical.simulated_annealing
+```
+
 ## Project Structure
 
 Optimizers are organized into categorical subfolders:
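The centralized `opt.demo` module that the README and the per-file diffs below import from is added by this commit but hidden in this excerpt. Below is a minimal sketch of what such a runner could look like, assuming it reuses the defaults from the removed per-file demo blocks (the shifted Ackley benchmark, `dim=2`, bounds of -2.768 / +2.768) and forwards keyword overrides to the optimizer constructor; the exact signature, defaults, and return value are assumptions.

```python
# Hypothetical sketch of opt/demo.py; the real module is part of this commit
# but not shown in this excerpt. Defaults mirror the removed per-file demo
# blocks (shifted Ackley benchmark, dim=2, bounds -2.768 / +2.768).
from __future__ import annotations

from typing import Any

from opt.benchmark.functions import shifted_ackley


def run_demo(optimizer_cls: type, **overrides: Any) -> tuple[Any, float]:
    """Instantiate an optimizer on a benchmark, run its search, and print the result."""
    params: dict[str, Any] = {
        "func": shifted_ackley,
        "dim": 2,
        "lower_bound": -2.768,
        "upper_bound": +2.768,
    }
    # Keyword overrides (e.g. max_iter, population_size, c1, c2) are passed
    # straight through to the optimizer constructor.
    params.update(overrides)

    optimizer = optimizer_cls(**params)
    best_solution, best_fitness = optimizer.search()

    print(f"Best solution: {best_solution}")
    print(f"Best fitness: {best_fitness}")
    return best_solution, best_fitness
```

With a runner like this, each optimizer module only needs the three-line `__main__` block shown in the diffs below, which also keeps `python -m opt.<category>.<module>` working.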

opt/classical/bfgs.py

Lines changed: 3 additions & 5 deletions
@@ -35,7 +35,6 @@
 from scipy.optimize import minimize
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -131,7 +130,6 @@ def bounded_func(x: np.ndarray) -> float:
 
 
 if __name__ == "__main__":
-    optimizer = BFGS(func=shifted_ackley, lower_bound=-2.768, upper_bound=+2.768, dim=2)
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution: {best_solution}")
-    print(f"Best fitness: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(BFGS)

opt/classical/conjugate_gradient.py

Lines changed: 3 additions & 7 deletions
@@ -41,7 +41,6 @@
 from scipy.optimize import minimize
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -137,9 +136,6 @@ def bounded_func(x: np.ndarray) -> float:
 
 
 if __name__ == "__main__":
-    optimizer = ConjugateGradient(
-        func=shifted_ackley, lower_bound=-2.768, upper_bound=+2.768, dim=2
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution: {best_solution}")
-    print(f"Best fitness: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(ConjugateGradient)

opt/classical/hill_climbing.py

Lines changed: 3 additions & 7 deletions
@@ -35,7 +35,6 @@
 import numpy as np
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -132,9 +131,6 @@ def search(self) -> tuple[np.ndarray, float]:
 
 
 if __name__ == "__main__":
-    optimizer = HillClimbing(
-        func=shifted_ackley, dim=2, lower_bound=-32.768, upper_bound=+32.768
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution found: {best_solution}")
-    print(f"Best fitness found: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(HillClimbing)

opt/classical/lbfgs.py

Lines changed: 3 additions & 7 deletions
@@ -35,7 +35,6 @@
 from scipy.optimize import minimize
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -126,9 +125,6 @@ def search(self) -> tuple[np.ndarray, float]:
 
 
 if __name__ == "__main__":
-    optimizer = LBFGS(
-        func=shifted_ackley, lower_bound=-2.768, upper_bound=+2.768, dim=2
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution: {best_solution}")
-    print(f"Best fitness: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(LBFGS)

opt/classical/nelder_mead.py

Lines changed: 3 additions & 7 deletions
@@ -38,7 +38,6 @@
 from scipy.optimize import minimize
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -134,9 +133,6 @@ def bounded_func(x: np.ndarray) -> float:
 
 
 if __name__ == "__main__":
-    optimizer = NelderMead(
-        func=shifted_ackley, lower_bound=-2.768, upper_bound=+2.768, dim=2
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution: {best_solution}")
-    print(f"Best fitness: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(NelderMead)

opt/classical/powell.py

Lines changed: 3 additions & 7 deletions
@@ -40,7 +40,6 @@
 from scipy.optimize import minimize
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -136,9 +135,6 @@ def bounded_func(x: np.ndarray) -> float:
 
 
 if __name__ == "__main__":
-    optimizer = Powell(
-        func=shifted_ackley, lower_bound=-2.768, upper_bound=+2.768, dim=2
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution: {best_solution}")
-    print(f"Best fitness: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(Powell)

opt/classical/simulated_annealing.py

Lines changed: 2 additions & 10 deletions
@@ -25,7 +25,6 @@
 import numpy as np
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -143,13 +142,6 @@ def search(self) -> tuple[np.ndarray, float]:
 
 
 if __name__ == "__main__":
-    # Create a SimulatedAnnealing object
-    optimizer = SimulatedAnnealing(
-        shifted_ackley, dim=2, lower_bound=-2.768, upper_bound=+2.768
-    )
+    from opt.demo import run_demo
 
-    # Perform the optimization
-
-    best_solution, best_cost = optimizer.search()
-    print(f"Best solution: {best_solution}")
-    print(f"Best cost: {best_cost}")
+    run_demo(SimulatedAnnealing)

opt/classical/tabu_search.py

Lines changed: 3 additions & 7 deletions
@@ -61,7 +61,6 @@ def objective_function(x):
 import numpy as np
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -184,9 +183,6 @@ def search(self) -> tuple[np.ndarray, float]:
 
 
 if __name__ == "__main__":
-    optimizer = TabuSearch(
-        func=shifted_ackley, dim=2, lower_bound=-2.768, upper_bound=+2.768
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution found: {best_solution}")
-    print(f"Best fitness value: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(TabuSearch)

opt/classical/trust_region.py

Lines changed: 3 additions & 7 deletions
@@ -40,7 +40,6 @@
 from scipy.optimize import minimize
 
 from opt.abstract_optimizer import AbstractOptimizer
-from opt.benchmark.functions import shifted_ackley
 
 
 if TYPE_CHECKING:
@@ -155,9 +154,6 @@ def bounded_func(x: np.ndarray) -> float:
 
 
 if __name__ == "__main__":
-    optimizer = TrustRegion(
-        func=shifted_ackley, lower_bound=-2.768, upper_bound=+2.768, dim=2
-    )
-    best_solution, best_fitness = optimizer.search()
-    print(f"Best solution: {best_solution}")
-    print(f"Best fitness: {best_fitness}")
+    from opt.demo import run_demo
+
+    run_demo(TrustRegion)
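The commit message also mentions comprehensive tests for the demo module; those files are among the hidden content above. A minimal pytest sketch of how such a test might look, assuming only that `run_demo` accepts an optimizer class, completes with default settings, and reports the result on stdout (the test name and assertions are hypothetical):

```python
# Hypothetical test sketch; the actual tests added by this commit are not
# shown in this excerpt.
import pytest

from opt.classical import BFGS
from opt.demo import run_demo
from opt.swarm_intelligence import ParticleSwarm


@pytest.mark.parametrize("optimizer_cls", [BFGS, ParticleSwarm])
def test_run_demo_runs_and_reports(optimizer_cls, capsys):
    # Should complete without raising and print a summary of the result.
    run_demo(optimizer_cls)

    captured = capsys.readouterr()
    assert captured.out.strip()
```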
