Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 32 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,7 @@ pip install hyperactive
```bash
pip install hyperactive[sklearn-integration] # scikit-learn integration
pip install hyperactive[sktime-integration] # sktime/skpro integration
pip install hyperactive[lipo-integration] # lipo global optimizer
pip install hyperactive[all_extras] # Everything including Optuna
```

Expand Down Expand Up @@ -111,7 +112,7 @@ pip install hyperactive[all_extras] # Everything including Optuna
</td>
<td width="33%">
<a href="https://hyperactive.readthedocs.io/en/latest/user_guide/optimizers/optuna.html"><b>Multiple Backends</b></a><br>
<sub>GFO algorithms, Optuna samplers, and sklearn search methods through one unified API.</sub>
<sub>GFO algorithms, Optuna samplers, sklearn search methods, and lipo's parameter-free global optimizer through one unified API.</sub>
</td>
<td width="33%">
<a href="https://hyperactive.readthedocs.io/en/latest/api_reference.html"><b>Stable & Tested</b></a><br>
Expand Down Expand Up @@ -177,13 +178,13 @@ flowchart TB
GFO["GFO<br/>21 algorithms"]
OPTUNA["Optuna<br/>8 algorithms"]
SKL["sklearn<br/>2 algorithms"]
MORE["...<br/>more to come"]
LIPO["LIPO<br/>1 algorithm"]
end

OPT --> GFO
OPT --> OPTUNA
OPT --> SKL
OPT --> MORE
OPT --> LIPO
end

subgraph OUT["Output"]
Expand Down Expand Up @@ -366,6 +367,34 @@ best_params = optimizer.solve()



<details>
<summary><b>LIPO Global Optimizer</b></summary>

```python
import numpy as np
from hyperactive.opt.lipo import LIPOOptimizer

def objective(params):
x, y = params["x"], params["y"]
return -(x**2 + y**2)

search_space = {
"x": np.arange(-5, 5, 0.1),
"y": np.arange(-5, 5, 0.1),
}

optimizer = LIPOOptimizer(
search_space=search_space,
n_iter=100,
experiment=objective,
)
best_params = optimizer.solve()
```

</details>



<details>
<summary><b>Time Series Forecasting with sktime</b></summary>

Expand Down
22 changes: 22 additions & 0 deletions examples/lipo/lipo_examples.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
"""Example usage of LIPOOptimizer."""

import numpy as np

from hyperactive.opt.lipo import LIPOOptimizer


def objective(params):
    """Negated sphere function; its single peak (value 0) is at the origin."""
    return -(params["x"] ** 2 + params["y"] ** 2)


# Build the optimizer over a discrete grid: lipo searches the continuous
# box [-5, 4.9] x [-5, 4.9] and results are snapped back to the grid.
opt = LIPOOptimizer(
    search_space={
        "x": np.arange(-5, 5, 0.1),
        "y": np.arange(-5, 5, 0.1),
    },
    n_iter=100,  # number of objective evaluations
    experiment=objective,
)
print(opt.solve())  # {'x': ~0.0, 'y': ~0.0}
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,7 @@ all_extras = [
"optuna<5",
"cmaes", # Required for CmaEsOptimizer (optuna's CMA-ES sampler)
"lightning",
"lipo",
"lightgbm",
]

Expand Down
2 changes: 2 additions & 0 deletions src/hyperactive/opt/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
StochasticHillClimbing,
TreeStructuredParzenEstimators,
)
from .lipo import LIPOOptimizer
from .optuna import (
CmaEsOptimizer,
GPOptimizer,
Expand All @@ -42,6 +43,7 @@
__all__ = [
"GridSearchSk",
"RandomSearchSk",
"LIPOOptimizer",
"HillClimbing",
"RepulsingHillClimbing",
"StochasticHillClimbing",
Expand Down
59 changes: 59 additions & 0 deletions src/hyperactive/opt/lipo.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
"""LIPO optimizer integration for Hyperactive."""

import numpy as np


class LIPOOptimizer:
    """Parameter-free global optimizer via the ``lipo`` package.

    Wraps ``lipo.GlobalOptimizer`` behind Hyperactive's grid-style
    search-space interface: numeric entries (arrays of values) are converted
    to continuous lower/upper bounds, and lipo's continuous suggestions are
    snapped back onto the user-supplied grid. Lists of strings are passed
    through as categorical choices.

    Parameters
    ----------
    search_space : dict
        Maps parameter names to either an array-like of numeric values
        (treated as a grid) or a list of strings (treated as categorical).
    n_iter : int
        Number of objective-function evaluations.
    experiment : callable
        Objective taking a single ``params`` dict and returning a float.
    maximize : bool, default=True
        Whether to maximize (True) or minimize (False) the objective.
    """

    def __init__(self, search_space, n_iter, experiment, maximize=True):
        self.search_space = search_space
        self.n_iter = n_iter
        self.experiment = experiment
        self.maximize = maximize
        # Numeric grids keyed by parameter name; populated by
        # _parse_search_space and used by _snap_to_grid.
        self._grids = {}

    def _parse_search_space(self):
        """Split ``search_space`` into lipo bounds and categorical choices.

        Returns
        -------
        tuple of (dict, dict, dict)
            ``(lower_bounds, upper_bounds, categories)`` in the form
            ``lipo.GlobalOptimizer`` expects.

        Raises
        ------
        ValueError
            If a numeric search-space entry is empty.
        """
        lower, upper, cats = {}, {}, {}
        self._grids = {}  # rebuild fresh so stale keys never linger
        for key, values in self.search_space.items():
            # Categorical: a non-empty list of strings.
            if isinstance(values, list) and values and isinstance(values[0], str):
                cats[key] = values
            else:
                arr = np.asarray(values)
                if arr.size == 0:
                    raise ValueError(
                        f"search_space entry {key!r} must contain at least one value"
                    )
                lower[key] = float(arr.min())
                upper[key] = float(arr.max())
                # Keep the grid so results can be snapped back later.
                self._grids[key] = arr
        return lower, upper, cats

    def _snap_to_grid(self, params):
        """Snap lipo's continuous output to the nearest valid grid point.

        Categorical parameters (no stored grid) pass through unchanged.
        """
        snapped = {}
        for key, val in params.items():
            grid = self._grids.get(key)
            if grid is not None:
                snapped[key] = grid[np.argmin(np.abs(grid - val))]
            else:
                snapped[key] = val  # categorical, pass through
        return snapped

    def solve(self):
        """Run the optimizer and return the best parameters as a dict."""
        # Imported lazily: lipo is an optional dependency.
        from lipo import GlobalOptimizer

        lower, upper, cats = self._parse_search_space()

        def wrapped(**kwargs):
            # Evaluate on the snapped grid point so the score reported to
            # lipo matches a configuration the caller can actually use.
            return self.experiment(self._snap_to_grid(kwargs))

        opt = GlobalOptimizer(
            wrapped,
            lower_bounds=lower,
            upper_bounds=upper,
            categories=cats,
            # lipo's keyword is ``maximize_function``; passing ``maximize=``
            # raises a TypeError.
            maximize_function=self.maximize,
        )
        opt.run(self.n_iter)

        # optimum[0] is the best argument dict; snap it back to the grid.
        return self._snap_to_grid(opt.optimum[0])
55 changes: 55 additions & 0 deletions src/hyperactive/tests/test_lipo.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
"""Tests for LIPOOptimizer."""

import numpy as np

from hyperactive.opt.lipo import LIPOOptimizer


def sphere(params):
    """Negated sphere function — maximum (value 0) at the origin."""
    return -(params["x"] ** 2 + params["y"] ** 2)


def test_lipo_basic():
    """LIPOOptimizer finds a near-zero optimum on a continuous grid."""
    opt = LIPOOptimizer(
        search_space={
            "x": np.arange(-5, 5, 0.1),
            "y": np.arange(-5, 5, 0.1),
        },
        n_iter=50,
        experiment=sphere,
    )
    best = opt.solve()
    assert "x" in best and "y" in best
    # The problem is symmetric in x and y, so check both coordinates
    # converged near the origin (loose tolerance: 50 iterations).
    assert abs(best["x"]) < 1.5
    assert abs(best["y"]) < 1.5


def test_lipo_categorical():
    """LIPOOptimizer selects the best categorical value."""

    def reward(params):
        # Only the "rbf" kernel scores; all others are worthless.
        return 1.0 if params["kernel"] == "rbf" else 0.0

    optimizer = LIPOOptimizer(
        search_space={"kernel": ["linear", "rbf", "poly"]},
        n_iter=20,
        experiment=reward,
    )
    result = optimizer.solve()
    assert result["kernel"] == "rbf"


def test_lipo_snap_to_grid():
    """solve() reports parameters snapped onto the user-supplied grid."""

    def fn(p):
        return -abs(p["x"] - 3)

    opt = LIPOOptimizer(
        search_space={"x": np.array([1, 2, 3, 4, 5])},
        n_iter=30,
        experiment=fn,
    )
    best = opt.solve()
    # Wherever lipo's continuous search lands, the returned optimum must be
    # one of the discrete grid values the caller provided.
    assert best["x"] in [1, 2, 3, 4, 5]