Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 7 additions & 1 deletion packages/gds-psuu/gds_psuu/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,9 @@
from gds_psuu.errors import PsuuError, PsuuSearchError, PsuuValidationError
from gds_psuu.evaluation import EvaluationResult, Evaluator
from gds_psuu.kpi import KPI, final_state_mean, final_state_std, time_average
from gds_psuu.objective import Objective, SingleKPI, WeightedSum
from gds_psuu.optimizers.base import Optimizer
from gds_psuu.optimizers.bayesian import BayesianOptimizer
from gds_psuu.optimizers.grid import GridSearchOptimizer
from gds_psuu.optimizers.random import RandomSearchOptimizer
from gds_psuu.results import EvaluationSummary, SweepResults
Expand All @@ -22,7 +24,7 @@
from gds_psuu.types import KPIFn, KPIScores, ParamPoint

__all__ = [
"KPI",
"BayesianOptimizer",
"Constraint",
"Continuous",
"Discrete",
Expand All @@ -32,18 +34,22 @@
"FunctionalConstraint",
"GridSearchOptimizer",
"Integer",
"KPI",
"KPIFn",
"KPIScores",
"LinearConstraint",
"Objective",
"Optimizer",
"ParamPoint",
"ParameterSpace",
"PsuuError",
"PsuuSearchError",
"PsuuValidationError",
"RandomSearchOptimizer",
"SingleKPI",
"Sweep",
"SweepResults",
"WeightedSum",
"final_state_mean",
"final_state_std",
"time_average",
Expand Down
50 changes: 50 additions & 0 deletions packages/gds-psuu/gds_psuu/objective.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
"""Composable objective functions for multi-KPI optimization."""

from __future__ import annotations

from abc import ABC, abstractmethod
from typing import Self

from pydantic import BaseModel, ConfigDict, model_validator

from gds_psuu.errors import PsuuValidationError
from gds_psuu.types import KPIScores # noqa: TC001


class Objective(BaseModel, ABC):
    """Abstract base for scalarizing objectives.

    An objective collapses a mapping of KPI scores into a single float
    so that a single-target optimizer can rank parameter points.
    Subclasses implement :meth:`score`; higher values mean better.
    """

    # Frozen so objective instances are immutable and safely shareable.
    model_config = ConfigDict(frozen=True)

    @abstractmethod
    def score(self, kpi_scores: KPIScores) -> float:
        """Return the scalar objective value for *kpi_scores*."""


class SingleKPI(Objective):
    """Objective that tracks exactly one KPI by name.

    When ``maximize`` is False the raw KPI value is negated, so a
    larger objective score always indicates a better evaluation.
    """

    name: str
    maximize: bool = True

    def score(self, kpi_scores: KPIScores) -> float:
        raw = kpi_scores[self.name]
        if self.maximize:
            return raw
        return -raw


class WeightedSum(Objective):
    """Weighted linear combination of KPIs.

    Each KPI value is multiplied by its weight and the products are
    summed. Use negative weights to minimize a KPI.
    """

    weights: dict[str, float]

    @model_validator(mode="after")
    def _validate_nonempty(self) -> Self:
        # An empty weight map would silently score every point as zero.
        if not self.weights:
            raise PsuuValidationError("WeightedSum must have at least 1 weight")
        return self

    def score(self, kpi_scores: KPIScores) -> float:
        total = 0.0
        for kpi_name, weight in self.weights.items():
            total += weight * kpi_scores[kpi_name]
        return total
2 changes: 2 additions & 0 deletions packages/gds-psuu/gds_psuu/optimizers/__init__.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
"""Optimizer implementations for parameter space search."""

from gds_psuu.optimizers.base import Optimizer
from gds_psuu.optimizers.bayesian import BayesianOptimizer
from gds_psuu.optimizers.grid import GridSearchOptimizer
from gds_psuu.optimizers.random import RandomSearchOptimizer

__all__ = [
"BayesianOptimizer",
"GridSearchOptimizer",
"Optimizer",
"RandomSearchOptimizer",
Expand Down
83 changes: 46 additions & 37 deletions packages/gds-psuu/gds_psuu/optimizers/bayesian.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"""Bayesian optimizer — wraps scikit-optimize (optional dependency)."""
"""Bayesian optimizer — wraps optuna (optional dependency)."""

from __future__ import annotations

Expand All @@ -12,44 +12,44 @@
from gds_psuu.types import KPIScores, ParamPoint

try:
from skopt import Optimizer as SkoptOptimizer # type: ignore[import-untyped]
from skopt.space import Categorical, Real # type: ignore[import-untyped]
from skopt.space import Integer as SkoptInteger
import optuna

_HAS_SKOPT = True
_HAS_OPTUNA = True
except ImportError: # pragma: no cover
_HAS_SKOPT = False
_HAS_OPTUNA = False


class BayesianOptimizer(Optimizer):
"""Bayesian optimization using Gaussian process surrogate.
"""Bayesian optimization using optuna's TPE sampler.

Requires ``scikit-optimize``. Install with::
Requires ``optuna``. Install with::

pip install gds-psuu[bayesian]
uv add gds-psuu[bayesian]

Optimizes a single target KPI (by default the first one registered).
"""

def __init__(
self,
n_calls: int = 20,
n_trials: int = 20,
target_kpi: str | None = None,
maximize: bool = True,
seed: int | None = None,
) -> None:
if not _HAS_SKOPT: # pragma: no cover
if not _HAS_OPTUNA: # pragma: no cover
raise ImportError(
"scikit-optimize is required for BayesianOptimizer. "
"Install with: pip install gds-psuu[bayesian]"
"optuna is required for BayesianOptimizer. "
"Install with: uv add gds-psuu[bayesian]"
)
self._n_calls = n_calls
self._n_trials = n_trials
self._target_kpi = target_kpi
self._maximize = maximize
self._seed = seed
self._optimizer: Any = None
self._study: Any = None
self._space: ParameterSpace | None = None
self._param_names: list[str] = []
self._count: int = 0
self._current_trial: Any = None

def setup(self, space: ParameterSpace, kpi_names: list[str]) -> None:
if self._target_kpi is None:
Expand All @@ -59,38 +59,47 @@ def setup(self, space: ParameterSpace, kpi_names: list[str]) -> None:
f"Target KPI '{self._target_kpi}' not found in {kpi_names}"
)

self._space = space
self._param_names = space.dimension_names
dimensions: list[Any] = []
for dim in space.params.values():
if isinstance(dim, Continuous):
dimensions.append(Real(dim.min_val, dim.max_val))
elif isinstance(dim, Integer):
dimensions.append(SkoptInteger(dim.min_val, dim.max_val))
elif isinstance(dim, Discrete):
dimensions.append(Categorical(list(dim.values)))

self._optimizer = SkoptOptimizer(
dimensions=dimensions,
random_state=self._seed,
n_initial_points=min(5, self._n_calls),
sampler = optuna.samplers.TPESampler(seed=self._seed)
direction = "maximize" if self._maximize else "minimize"
optuna.logging.set_verbosity(optuna.logging.WARNING)
self._study = optuna.create_study(
direction=direction,
sampler=sampler,
)
self._count = 0

def suggest(self) -> ParamPoint:
assert self._optimizer is not None, "Call setup() before suggest()"
point = self._optimizer.ask()
return dict(zip(self._param_names, point, strict=True))
assert self._study is not None, "Call setup() before suggest()"
assert self._space is not None

self._current_trial = self._study.ask()
point: ParamPoint = {}
for name, dim in self._space.params.items():
if isinstance(dim, Continuous):
point[name] = self._current_trial.suggest_float(
name, dim.min_val, dim.max_val
)
elif isinstance(dim, Integer):
point[name] = self._current_trial.suggest_int(
name, dim.min_val, dim.max_val
)
elif isinstance(dim, Discrete):
point[name] = self._current_trial.suggest_categorical(
name, list(dim.values)
)
return point

def observe(self, params: ParamPoint, scores: KPIScores) -> None:
assert self._optimizer is not None
assert self._study is not None
assert self._target_kpi is not None
point = [params[name] for name in self._param_names]
assert self._current_trial is not None
value = scores[self._target_kpi]
# skopt minimizes, so negate if we want to maximize
if self._maximize:
value = -value
self._optimizer.tell(point, value)
self._study.tell(self._current_trial, value)
self._current_trial = None
self._count += 1

def is_exhausted(self) -> bool:
return self._count >= self._n_calls
return self._count >= self._n_trials
20 changes: 19 additions & 1 deletion packages/gds-psuu/gds_psuu/results.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,16 @@

from __future__ import annotations

from typing import Any
from typing import TYPE_CHECKING, Any

from pydantic import BaseModel, ConfigDict

from gds_psuu.evaluation import EvaluationResult # noqa: TC001
from gds_psuu.types import KPIScores, ParamPoint # noqa: TC001

if TYPE_CHECKING:
from gds_psuu.objective import Objective


class EvaluationSummary(BaseModel):
"""Summary of a single evaluation (without raw simulation data)."""
Expand Down Expand Up @@ -54,6 +57,21 @@ def best(self, kpi: str, *, maximize: bool = True) -> EvaluationSummary:
)
return EvaluationSummary(params=best_eval.params, scores=best_eval.scores)

def best_by_objective(self, objective: Objective) -> EvaluationSummary:
    """Return the evaluation whose objective score is highest.

    The objective reduces each evaluation's KPI scores to a single
    scalar; the evaluation with the largest scalar wins (higher is
    better). Ties keep the earliest evaluation.

    Raises:
        ValueError: If there are no evaluations to rank.
    """
    if not self.evaluations:
        raise ValueError("No evaluations to search")

    winner = self.evaluations[0]
    best_score = objective.score(winner.scores)
    for candidate in self.evaluations[1:]:
        candidate_score = objective.score(candidate.scores)
        if candidate_score > best_score:
            winner, best_score = candidate, candidate_score
    return EvaluationSummary(params=winner.params, scores=winner.scores)

def to_dataframe(self) -> Any:
"""Convert to pandas DataFrame. Requires ``pandas`` installed."""
try:
Expand Down
2 changes: 2 additions & 0 deletions packages/gds-psuu/gds_psuu/sweep.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@

from gds_psuu.evaluation import EvaluationResult, Evaluator
from gds_psuu.kpi import KPI # noqa: TC001
from gds_psuu.objective import Objective # noqa: TC001
from gds_psuu.optimizers.base import Optimizer # noqa: TC001
from gds_psuu.results import SweepResults
from gds_psuu.space import ParameterSpace # noqa: TC001
Expand All @@ -25,6 +26,7 @@ class Sweep(BaseModel):
space: ParameterSpace
kpis: list[KPI]
optimizer: Optimizer
objective: Objective | None = None
timesteps: int = 100
runs: int = 1

Expand Down
2 changes: 1 addition & 1 deletion packages/gds-psuu/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ dependencies = [

[project.optional-dependencies]
pandas = ["pandas>=2.0"]
bayesian = ["scikit-optimize>=0.10"]
bayesian = ["optuna>=4.0"]

[project.urls]
Homepage = "https://github.com/BlockScience/gds-core"
Expand Down
Loading