Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 12 additions & 1 deletion packages/gds-psuu/gds_psuu/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,21 +9,32 @@
from gds_psuu.optimizers.grid import GridSearchOptimizer
from gds_psuu.optimizers.random import RandomSearchOptimizer
from gds_psuu.results import EvaluationSummary, SweepResults
from gds_psuu.space import Continuous, Discrete, Integer, ParameterSpace
from gds_psuu.space import (
Constraint,
Continuous,
Discrete,
FunctionalConstraint,
Integer,
LinearConstraint,
ParameterSpace,
)
from gds_psuu.sweep import Sweep
from gds_psuu.types import KPIFn, KPIScores, ParamPoint

__all__ = [
"KPI",
"Constraint",
"Continuous",
"Discrete",
"EvaluationResult",
"EvaluationSummary",
"Evaluator",
"FunctionalConstraint",
"GridSearchOptimizer",
"Integer",
"KPIFn",
"KPIScores",
"LinearConstraint",
"Optimizer",
"ParamPoint",
"ParameterSpace",
Expand Down
28 changes: 25 additions & 3 deletions packages/gds-psuu/gds_psuu/optimizers/random.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,17 +5,22 @@
import random
from typing import TYPE_CHECKING

from gds_psuu.errors import PsuuSearchError
from gds_psuu.optimizers.base import Optimizer
from gds_psuu.space import Continuous, Discrete, Integer, ParameterSpace

if TYPE_CHECKING:
from gds_psuu.types import KPIScores, ParamPoint

_MAX_REJECTION_RETRIES = 1000


class RandomSearchOptimizer(Optimizer):
"""Samples parameter points uniformly at random.

Uses stdlib ``random.Random`` for reproducibility — no numpy required.
When the parameter space has constraints, uses rejection sampling
with a configurable retry limit.
"""

def __init__(self, n_samples: int = 20, seed: int | None = None) -> None:
Expand All @@ -28,8 +33,8 @@ def setup(self, space: ParameterSpace, kpi_names: list[str]) -> None:
self._space = space
self._count = 0

def suggest(self) -> ParamPoint:
assert self._space is not None, "Call setup() before suggest()"
def _sample_point(self) -> ParamPoint:
assert self._space is not None
point: ParamPoint = {}
for name, dim in self._space.params.items():
if isinstance(dim, Continuous):
Expand All @@ -38,9 +43,26 @@ def suggest(self) -> ParamPoint:
point[name] = self._rng.randint(dim.min_val, dim.max_val)
elif isinstance(dim, Discrete):
point[name] = self._rng.choice(dim.values)
self._count += 1
return point

def suggest(self) -> ParamPoint:
    """Draw the next candidate parameter point.

    Without constraints this is a single uniform draw. With constraints,
    rejection sampling is used: candidates are drawn and discarded until
    one satisfies every constraint, up to ``_MAX_REJECTION_RETRIES``
    attempts, after which a ``PsuuSearchError`` is raised.
    """
    assert self._space is not None, "Call setup() before suggest()"

    # Fast path: an unconstrained space never needs feasibility checks.
    if not self._space.constraints:
        candidate = self._sample_point()
        self._count += 1
        return candidate

    # Rejection sampling: only feasible draws count toward the budget.
    for _attempt in range(_MAX_REJECTION_RETRIES):
        candidate = self._sample_point()
        if not self._space.is_feasible(candidate):
            continue
        self._count += 1
        return candidate

    raise PsuuSearchError(
        f"Could not find a feasible point after {_MAX_REJECTION_RETRIES} "
        "random samples. The feasible region may be too small."
    )

def observe(self, params: ParamPoint, scores: KPIScores) -> None:
    """Intentionally a no-op: random search draws each point independently
    and never adapts to previously observed KPI scores."""

Expand Down
72 changes: 66 additions & 6 deletions packages/gds-psuu/gds_psuu/space.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,14 @@

import itertools
import math
from typing import TYPE_CHECKING, Any, Self
from abc import ABC, abstractmethod
from collections.abc import Callable # noqa: TC003
from typing import Any, Self

from pydantic import BaseModel, ConfigDict, model_validator

from gds_psuu.errors import PsuuValidationError

if TYPE_CHECKING:
from gds_psuu.types import ParamPoint
from gds_psuu.types import ParamPoint # noqa: TC001


class Continuous(BaseModel):
Expand Down Expand Up @@ -67,30 +67,87 @@ def _validate_values(self) -> Self:
Dimension = Continuous | Integer | Discrete


class Constraint(BaseModel, ABC):
    """Base class for parameter space constraints.

    Concrete subclasses implement :meth:`is_feasible` to decide whether a
    single parameter point satisfies the constraint.
    """

    # frozen: constraint instances are immutable value objects.
    # arbitrary_types_allowed: lets subclasses declare fields of
    # non-pydantic types (e.g. the bare Callable on FunctionalConstraint).
    model_config = ConfigDict(frozen=True, arbitrary_types_allowed=True)

    @abstractmethod
    def is_feasible(self, point: ParamPoint) -> bool:
        """Return True if the point satisfies this constraint."""


class LinearConstraint(Constraint):
    """Linear inequality constraint: sum(coeff_i * x_i) <= bound.

    A point is feasible when the weighted sum of the referenced
    parameter values stays at or below ``bound``.
    """

    # Maps parameter name -> weight in the linear combination.
    coefficients: dict[str, float]
    bound: float

    @model_validator(mode="after")
    def _validate_nonempty(self) -> Self:
        # A constraint with no coefficients would be vacuous.
        if self.coefficients:
            return self
        raise PsuuValidationError(
            "LinearConstraint must have at least 1 coefficient"
        )

    def is_feasible(self, point: ParamPoint) -> bool:
        # Accumulate the weighted sum term by term, then compare.
        total = 0.0
        for name, weight in self.coefficients.items():
            total += weight * point[name]
        return total <= self.bound


class FunctionalConstraint(Constraint):
    """Arbitrary feasibility predicate over a parameter point.

    Wraps a user-supplied callable mapping a ParamPoint to a feasibility
    verdict. Storing a bare Callable as a field relies on
    ``arbitrary_types_allowed`` in the Constraint base config.
    """

    # User-supplied predicate evaluated by is_feasible.
    fn: Callable[[ParamPoint], bool]

    def is_feasible(self, point: ParamPoint) -> bool:
        # Delegates directly; the callable's return value is passed
        # through unmodified (not coerced to bool).
        return self.fn(point)


class ParameterSpace(BaseModel):
"""Defines the searchable parameter space as a mapping of named dimensions."""

model_config = ConfigDict(frozen=True)
model_config = ConfigDict(frozen=True, arbitrary_types_allowed=True)

params: dict[str, Dimension]
constraints: tuple[Constraint, ...] = ()

@model_validator(mode="after")
def _validate_nonempty(self) -> Self:
    """Reject a space that declares no dimensions to search over."""
    if self.params:
        return self
    raise PsuuValidationError("ParameterSpace must have at least 1 parameter")

@model_validator(mode="after")
def _validate_constraint_params(self) -> Self:
    """Ensure every LinearConstraint references only declared parameters."""
    known = set(self.params)
    for constraint in self.constraints:
        # Only LinearConstraint names parameters statically; functional
        # constraints are opaque and cannot be checked here.
        if not isinstance(constraint, LinearConstraint):
            continue
        unknown = set(constraint.coefficients) - known
        if unknown:
            raise PsuuValidationError(
                f"LinearConstraint references unknown params: {unknown}"
            )
    return self

@property
def dimension_names(self) -> list[str]:
    """Ordered list of parameter names."""
    # Dict iteration yields keys in insertion order, so this reflects
    # the order in which the dimensions were declared.
    return list(self.params)

def is_feasible(self, point: ParamPoint) -> bool:
    """Check if a parameter point satisfies all constraints.

    Vacuously True when the space has no constraints; otherwise
    short-circuits on the first violated constraint.
    """
    for constraint in self.constraints:
        if not constraint.is_feasible(point):
            return False
    return True

def grid_points(self, n_steps: int) -> list[ParamPoint]:
"""Generate a grid of parameter points.

For Continuous: ``n_steps`` evenly spaced values between min and max.
For Integer: all integers in [min_val, max_val] (ignores n_steps).
For Discrete: all values.

Points that violate constraints are excluded.
"""
axes: list[list[Any]] = []
for dim in self.params.values():
Expand All @@ -106,6 +163,9 @@ def grid_points(self, n_steps: int) -> list[ParamPoint]:
elif isinstance(dim, Discrete):
axes.append(list(dim.values))
names = self.dimension_names
return [
all_points = [
dict(zip(names, combo, strict=True)) for combo in itertools.product(*axes)
]
if self.constraints:
return [p for p in all_points if self.is_feasible(p)]
return all_points
Loading