1 change: 1 addition & 0 deletions .tools/envs/testenv-linux.yml
@@ -31,6 +31,7 @@ dependencies:
- DFO-LS>=1.5.3 # dev, tests
- Py-BOBYQA # dev, tests
- fides==0.7.4 # dev, tests
- pyensmallen
- kaleido # dev, tests
- pandas-stubs # dev, tests
- types-cffi # dev, tests
1 change: 1 addition & 0 deletions .tools/envs/testenv-numpy.yml
@@ -29,6 +29,7 @@ dependencies:
- DFO-LS>=1.5.3 # dev, tests
- Py-BOBYQA # dev, tests
- fides==0.7.4 # dev, tests
- pyensmallen
- kaleido # dev, tests
- types-cffi # dev, tests
- types-openpyxl # dev, tests
1 change: 1 addition & 0 deletions .tools/envs/testenv-others.yml
@@ -29,6 +29,7 @@ dependencies:
- DFO-LS>=1.5.3 # dev, tests
- Py-BOBYQA # dev, tests
- fides==0.7.4 # dev, tests
- pyensmallen
- kaleido # dev, tests
- pandas-stubs # dev, tests
- types-cffi # dev, tests
1 change: 1 addition & 0 deletions .tools/envs/testenv-pandas.yml
@@ -29,6 +29,7 @@ dependencies:
- DFO-LS>=1.5.3 # dev, tests
- Py-BOBYQA # dev, tests
- fides==0.7.4 # dev, tests
- pyensmallen
- kaleido # dev, tests
- types-cffi # dev, tests
- types-openpyxl # dev, tests
1 change: 1 addition & 0 deletions environment.yml
@@ -40,6 +40,7 @@ dependencies:
- DFO-LS>=1.5.3 # dev, tests
- Py-BOBYQA # dev, tests
- fides==0.7.4 # dev, tests
- pyensmallen
- kaleido # dev, tests
- pre-commit>=4 # dev
- -e . # dev
2 changes: 2 additions & 0 deletions pyproject.toml
@@ -16,6 +16,7 @@ dependencies = [
"sqlalchemy>=1.3",
"annotated-types",
"typing-extensions",
"pyensmallen",
]
dynamic = ["version"]
keywords = [
@@ -334,6 +335,7 @@ ignore_errors = true

[[tool.mypy.overrides]]
module = [
"pyensmallen",
"pybaum",
"scipy",
"scipy.linalg",
9 changes: 9 additions & 0 deletions src/optimagic/algorithms.py
@@ -36,6 +36,7 @@
NloptVAR,
)
from optimagic.optimizers.pounders import Pounders
from optimagic.optimizers.pyensmallen_optimizers import EnsmallenLBFGS
from optimagic.optimizers.pygmo_optimizers import (
PygmoBeeColony,
PygmoCmaes,
@@ -896,6 +897,7 @@ class GradientBasedLocalScalarAlgorithms(AlgoSelection):
nlopt_slsqp: Type[NloptSLSQP] = NloptSLSQP
nlopt_tnewton: Type[NloptTNewton] = NloptTNewton
nlopt_var: Type[NloptVAR] = NloptVAR
ensmallen_lbfgs: Type[EnsmallenLBFGS] = EnsmallenLBFGS
scipy_bfgs: Type[ScipyBFGS] = ScipyBFGS
scipy_conjugate_gradient: Type[ScipyConjugateGradient] = ScipyConjugateGradient
scipy_lbfgsb: Type[ScipyLBFGSB] = ScipyLBFGSB
@@ -1950,6 +1952,7 @@ class GradientBasedLocalAlgorithms(AlgoSelection):
nlopt_slsqp: Type[NloptSLSQP] = NloptSLSQP
nlopt_tnewton: Type[NloptTNewton] = NloptTNewton
nlopt_var: Type[NloptVAR] = NloptVAR
ensmallen_lbfgs: Type[EnsmallenLBFGS] = EnsmallenLBFGS
scipy_bfgs: Type[ScipyBFGS] = ScipyBFGS
scipy_conjugate_gradient: Type[ScipyConjugateGradient] = ScipyConjugateGradient
scipy_lbfgsb: Type[ScipyLBFGSB] = ScipyLBFGSB
@@ -2061,6 +2064,7 @@ class GradientBasedScalarAlgorithms(AlgoSelection):
nlopt_slsqp: Type[NloptSLSQP] = NloptSLSQP
nlopt_tnewton: Type[NloptTNewton] = NloptTNewton
nlopt_var: Type[NloptVAR] = NloptVAR
ensmallen_lbfgs: Type[EnsmallenLBFGS] = EnsmallenLBFGS
scipy_bfgs: Type[ScipyBFGS] = ScipyBFGS
scipy_basinhopping: Type[ScipyBasinhopping] = ScipyBasinhopping
scipy_conjugate_gradient: Type[ScipyConjugateGradient] = ScipyConjugateGradient
@@ -2674,6 +2678,7 @@ class LocalScalarAlgorithms(AlgoSelection):
nlopt_sbplx: Type[NloptSbplx] = NloptSbplx
nlopt_tnewton: Type[NloptTNewton] = NloptTNewton
nlopt_var: Type[NloptVAR] = NloptVAR
ensmallen_lbfgs: Type[EnsmallenLBFGS] = EnsmallenLBFGS
scipy_bfgs: Type[ScipyBFGS] = ScipyBFGS
scipy_cobyla: Type[ScipyCOBYLA] = ScipyCOBYLA
scipy_conjugate_gradient: Type[ScipyConjugateGradient] = ScipyConjugateGradient
@@ -3070,6 +3075,7 @@ class GradientBasedAlgorithms(AlgoSelection):
nlopt_slsqp: Type[NloptSLSQP] = NloptSLSQP
nlopt_tnewton: Type[NloptTNewton] = NloptTNewton
nlopt_var: Type[NloptVAR] = NloptVAR
ensmallen_lbfgs: Type[EnsmallenLBFGS] = EnsmallenLBFGS
scipy_bfgs: Type[ScipyBFGS] = ScipyBFGS
scipy_basinhopping: Type[ScipyBasinhopping] = ScipyBasinhopping
scipy_conjugate_gradient: Type[ScipyConjugateGradient] = ScipyConjugateGradient
@@ -3263,6 +3269,7 @@ class LocalAlgorithms(AlgoSelection):
nlopt_tnewton: Type[NloptTNewton] = NloptTNewton
nlopt_var: Type[NloptVAR] = NloptVAR
pounders: Type[Pounders] = Pounders
ensmallen_lbfgs: Type[EnsmallenLBFGS] = EnsmallenLBFGS
scipy_bfgs: Type[ScipyBFGS] = ScipyBFGS
scipy_cobyla: Type[ScipyCOBYLA] = ScipyCOBYLA
scipy_conjugate_gradient: Type[ScipyConjugateGradient] = ScipyConjugateGradient
@@ -3470,6 +3477,7 @@ class ScalarAlgorithms(AlgoSelection):
nlopt_sbplx: Type[NloptSbplx] = NloptSbplx
nlopt_tnewton: Type[NloptTNewton] = NloptTNewton
nlopt_var: Type[NloptVAR] = NloptVAR
ensmallen_lbfgs: Type[EnsmallenLBFGS] = EnsmallenLBFGS
pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony
pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes
pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch
@@ -3646,6 +3654,7 @@ class Algorithms(AlgoSelection):
nlopt_tnewton: Type[NloptTNewton] = NloptTNewton
nlopt_var: Type[NloptVAR] = NloptVAR
pounders: Type[Pounders] = Pounders
ensmallen_lbfgs: Type[EnsmallenLBFGS] = EnsmallenLBFGS
pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony
pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes
pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch
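The registry additions above are what make the new optimizer selectable by name. A minimal sketch of what this enables, assuming `om.algos` is an instance of the `Algorithms` selection class shown here and that `pyensmallen` is installed:

```python
import optimagic as om

# Pick the class from the registry and configure it via the option names
# defined as dataclass fields on EnsmallenLBFGS.
algo = om.algos.ensmallen_lbfgs(limited_memory_max_history=20)
```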
88 changes: 88 additions & 0 deletions src/optimagic/optimizers/pyensmallen_optimizers.py
@@ -0,0 +1,88 @@
"""Implement ensmallen optimizers."""

from dataclasses import dataclass

import numpy as np
import pyensmallen as pye
from numpy.typing import NDArray

from optimagic import mark
from optimagic.optimization.algo_options import (
CONVERGENCE_FTOL_REL,
CONVERGENCE_GTOL_ABS,
MAX_LINE_SEARCH_STEPS,
STOPPING_MAXITER,
)
from optimagic.optimization.algorithm import Algorithm, InternalOptimizeResult
from optimagic.optimization.internal_optimization_problem import (
InternalOptimizationProblem,
)
from optimagic.typing import AggregationLevel, NonNegativeFloat, PositiveInt

LIMITED_MEMORY_MAX_HISTORY = 10
"""Number of memory points to be stored (default 10)."""
MIN_LINE_SEARCH_STEPS = 1e-20
"""The minimum step of the line search."""
MAX_LINE_SEARCH_TRIALS = 50
"""The maximum number of trials for the line search (before giving up)."""
ARMIJO_CONSTANT = 1e-4
"""Controls the accuracy of the line search routine for determining the Armijo
condition."""
WOLFE_CONDITION = 0.9
"""Parameter for detecting the Wolfe condition."""


@mark.minimizer(
    name="ensmallen_lbfgs",
    solver_type=AggregationLevel.SCALAR,
    is_available=True,
    is_global=False,
    needs_jac=True,
    needs_hess=False,
    supports_parallelism=False,
    supports_bounds=False,
    supports_linear_constraints=False,
    supports_nonlinear_constraints=False,
    disable_history=False,
)
@dataclass(frozen=True)
class EnsmallenLBFGS(Algorithm):
    limited_memory_max_history: PositiveInt = LIMITED_MEMORY_MAX_HISTORY
    stopping_maxiter: PositiveInt = STOPPING_MAXITER
    armijo_constant: NonNegativeFloat = ARMIJO_CONSTANT  # needs review
    wolfe_condition: NonNegativeFloat = WOLFE_CONDITION  # needs review
    convergence_gtol_abs: NonNegativeFloat = CONVERGENCE_GTOL_ABS
    convergence_ftol_rel: NonNegativeFloat = CONVERGENCE_FTOL_REL
    max_line_search_trials: PositiveInt = MAX_LINE_SEARCH_TRIALS
    min_step_for_line_search: NonNegativeFloat = MIN_LINE_SEARCH_STEPS
    max_step_for_line_search: NonNegativeFloat = MAX_LINE_SEARCH_STEPS

    def _solve_internal_problem(
        self, problem: InternalOptimizationProblem, x0: NDArray[np.float64]
    ) -> InternalOptimizeResult:
        optimizer = pye.L_BFGS(
            numBasis=self.limited_memory_max_history,
            maxIterations=self.stopping_maxiter,
            armijoConstant=self.armijo_constant,
            wolfe=self.wolfe_condition,
            minGradientNorm=self.convergence_gtol_abs,
            factr=self.convergence_ftol_rel,
            maxLineSearchTrials=self.max_line_search_trials,
            minStep=self.min_step_for_line_search,
            maxStep=self.max_step_for_line_search,
        )

        def objective_function(
            x: NDArray[np.float64], grad: NDArray[np.float64]
        ) -> np.float64:
            # ensmallen expects the gradient to be written into `grad` in place.
            grad[:] = problem.jac(x)
            return np.float64(problem.fun(x))

        raw = optimizer.optimize(objective_function, x0)

        res = InternalOptimizeResult(
            x=raw,  # the optimizer returns only the best x
            fun=problem.fun(raw),  # the best f(x) is not returned, so re-evaluate
        )

        return res
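As a hypothetical end-to-end check (not part of this diff), the wrapper can be exercised through optimagic's public `minimize` interface; this assumes `pyensmallen` is installed and relies on optimagic deriving the gradient numerically because no `jac` is passed:

```python
import numpy as np
import optimagic as om

# Minimize a simple quadratic with the new algorithm.
res = om.minimize(
    fun=lambda x: x @ x,
    params=np.arange(1.0, 4.0),
    algorithm="ensmallen_lbfgs",
)
print(res.params)  # expected to be close to [0.0, 0.0, 0.0]
```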