From f01be8fb221a177e28ec7b2fe956e4c8f0be4084 Mon Sep 17 00:00:00 2001 From: rohan Date: Thu, 5 Mar 2026 14:10:10 +0530 Subject: [PATCH] feat(gds-psuu): add declarative parameter space constraints Add Constraint ABC with LinearConstraint and FunctionalConstraint implementations. ParameterSpace now accepts an optional constraints tuple, validates constraint param references at construction time, and filters infeasible points from grid_points(). RandomSearchOptimizer uses rejection sampling with a 1000-retry limit. 22 new tests. Closes #113 --- packages/gds-psuu/gds_psuu/__init__.py | 13 +- .../gds-psuu/gds_psuu/optimizers/random.py | 28 +- packages/gds-psuu/gds_psuu/space.py | 72 ++++- packages/gds-psuu/tests/test_constraints.py | 259 ++++++++++++++++++ 4 files changed, 362 insertions(+), 10 deletions(-) create mode 100644 packages/gds-psuu/tests/test_constraints.py diff --git a/packages/gds-psuu/gds_psuu/__init__.py b/packages/gds-psuu/gds_psuu/__init__.py index 663a378..99df342 100644 --- a/packages/gds-psuu/gds_psuu/__init__.py +++ b/packages/gds-psuu/gds_psuu/__init__.py @@ -9,21 +9,32 @@ from gds_psuu.optimizers.grid import GridSearchOptimizer from gds_psuu.optimizers.random import RandomSearchOptimizer from gds_psuu.results import EvaluationSummary, SweepResults -from gds_psuu.space import Continuous, Discrete, Integer, ParameterSpace +from gds_psuu.space import ( + Constraint, + Continuous, + Discrete, + FunctionalConstraint, + Integer, + LinearConstraint, + ParameterSpace, +) from gds_psuu.sweep import Sweep from gds_psuu.types import KPIFn, KPIScores, ParamPoint __all__ = [ "KPI", + "Constraint", "Continuous", "Discrete", "EvaluationResult", "EvaluationSummary", "Evaluator", + "FunctionalConstraint", "GridSearchOptimizer", "Integer", "KPIFn", "KPIScores", + "LinearConstraint", "Optimizer", "ParamPoint", "ParameterSpace", diff --git a/packages/gds-psuu/gds_psuu/optimizers/random.py b/packages/gds-psuu/gds_psuu/optimizers/random.py index fee3dab..aa1fa03 
100644 --- a/packages/gds-psuu/gds_psuu/optimizers/random.py +++ b/packages/gds-psuu/gds_psuu/optimizers/random.py @@ -5,17 +5,22 @@ import random from typing import TYPE_CHECKING +from gds_psuu.errors import PsuuSearchError from gds_psuu.optimizers.base import Optimizer from gds_psuu.space import Continuous, Discrete, Integer, ParameterSpace if TYPE_CHECKING: from gds_psuu.types import KPIScores, ParamPoint +_MAX_REJECTION_RETRIES = 1000 + class RandomSearchOptimizer(Optimizer): """Samples parameter points uniformly at random. Uses stdlib ``random.Random`` for reproducibility — no numpy required. + When the parameter space has constraints, uses rejection sampling + with a fixed retry limit. """ def __init__(self, n_samples: int = 20, seed: int | None = None) -> None: @@ -28,8 +33,8 @@ def setup(self, space: ParameterSpace, kpi_names: list[str]) -> None: self._space = space self._count = 0 - def suggest(self) -> ParamPoint: - assert self._space is not None, "Call setup() before suggest()" + def _sample_point(self) -> ParamPoint: + assert self._space is not None point: ParamPoint = {} for name, dim in self._space.params.items(): if isinstance(dim, Continuous): @@ -38,9 +43,26 @@ def suggest(self) -> ParamPoint: point[name] = self._rng.randint(dim.min_val, dim.max_val) elif isinstance(dim, Discrete): point[name] = self._rng.choice(dim.values) - self._count += 1 return point + def suggest(self) -> ParamPoint: + assert self._space is not None, "Call setup() before suggest()" + if not self._space.constraints: + point = self._sample_point() + self._count += 1 + return point + + for _ in range(_MAX_REJECTION_RETRIES): + point = self._sample_point() + if self._space.is_feasible(point): + self._count += 1 + return point + + raise PsuuSearchError( + f"Could not find a feasible point after {_MAX_REJECTION_RETRIES} " + "random samples. The feasible region may be too small." 
+ ) + def observe(self, params: ParamPoint, scores: KPIScores) -> None: pass # Random search doesn't adapt diff --git a/packages/gds-psuu/gds_psuu/space.py b/packages/gds-psuu/gds_psuu/space.py index 317340e..9784871 100644 --- a/packages/gds-psuu/gds_psuu/space.py +++ b/packages/gds-psuu/gds_psuu/space.py @@ -4,14 +4,14 @@ import itertools import math -from typing import TYPE_CHECKING, Any, Self +from abc import ABC, abstractmethod +from collections.abc import Callable # noqa: TC003 +from typing import Any, Self from pydantic import BaseModel, ConfigDict, model_validator from gds_psuu.errors import PsuuValidationError - -if TYPE_CHECKING: - from gds_psuu.types import ParamPoint +from gds_psuu.types import ParamPoint # noqa: TC001 class Continuous(BaseModel): @@ -67,12 +67,51 @@ def _validate_values(self) -> Self: Dimension = Continuous | Integer | Discrete +class Constraint(BaseModel, ABC): + """Base class for parameter space constraints.""" + + model_config = ConfigDict(frozen=True, arbitrary_types_allowed=True) + + @abstractmethod + def is_feasible(self, point: ParamPoint) -> bool: + """Return True if the point satisfies this constraint.""" + + +class LinearConstraint(Constraint): + """Linear inequality constraint: sum(coeff_i * x_i) <= bound.""" + + coefficients: dict[str, float] + bound: float + + @model_validator(mode="after") + def _validate_nonempty(self) -> Self: + if not self.coefficients: + raise PsuuValidationError( + "LinearConstraint must have at least 1 coefficient" + ) + return self + + def is_feasible(self, point: ParamPoint) -> bool: + total = sum(coeff * point[name] for name, coeff in self.coefficients.items()) + return total <= self.bound + + +class FunctionalConstraint(Constraint): + """Arbitrary feasibility predicate over a parameter point.""" + + fn: Callable[[ParamPoint], bool] + + def is_feasible(self, point: ParamPoint) -> bool: + return self.fn(point) + + class ParameterSpace(BaseModel): """Defines the searchable parameter space as a 
mapping of named dimensions.""" - model_config = ConfigDict(frozen=True) + model_config = ConfigDict(frozen=True, arbitrary_types_allowed=True) params: dict[str, Dimension] + constraints: tuple[Constraint, ...] = () @model_validator(mode="after") def _validate_nonempty(self) -> Self: @@ -80,17 +119,35 @@ def _validate_nonempty(self) -> Self: raise PsuuValidationError("ParameterSpace must have at least 1 parameter") return self + @model_validator(mode="after") + def _validate_constraint_params(self) -> Self: + param_names = set(self.params.keys()) + for constraint in self.constraints: + if isinstance(constraint, LinearConstraint): + unknown = set(constraint.coefficients.keys()) - param_names + if unknown: + raise PsuuValidationError( + f"LinearConstraint references unknown params: {unknown}" + ) + return self + @property def dimension_names(self) -> list[str]: """Ordered list of parameter names.""" return list(self.params.keys()) + def is_feasible(self, point: ParamPoint) -> bool: + """Check if a parameter point satisfies all constraints.""" + return all(c.is_feasible(point) for c in self.constraints) + def grid_points(self, n_steps: int) -> list[ParamPoint]: """Generate a grid of parameter points. For Continuous: ``n_steps`` evenly spaced values between min and max. For Integer: all integers in [min_val, max_val] (ignores n_steps). For Discrete: all values. + + Points that violate constraints are excluded. 
""" axes: list[list[Any]] = [] for dim in self.params.values(): @@ -106,6 +163,9 @@ def grid_points(self, n_steps: int) -> list[ParamPoint]: elif isinstance(dim, Discrete): axes.append(list(dim.values)) names = self.dimension_names - return [ + all_points = [ dict(zip(names, combo, strict=True)) for combo in itertools.product(*axes) ] + if self.constraints: + return [p for p in all_points if self.is_feasible(p)] + return all_points diff --git a/packages/gds-psuu/tests/test_constraints.py b/packages/gds-psuu/tests/test_constraints.py new file mode 100644 index 0000000..96a0c2e --- /dev/null +++ b/packages/gds-psuu/tests/test_constraints.py @@ -0,0 +1,259 @@ +"""Tests for parameter space constraints.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import pytest + +from gds_psuu import ( + KPI, + Continuous, + Discrete, + FunctionalConstraint, + GridSearchOptimizer, + Integer, + LinearConstraint, + ParameterSpace, + PsuuSearchError, + PsuuValidationError, + RandomSearchOptimizer, + Sweep, + final_state_mean, +) + +if TYPE_CHECKING: + from gds_sim import Model + + +class TestLinearConstraint: + def test_feasible(self) -> None: + c = LinearConstraint(coefficients={"a": 1.0, "b": 1.0}, bound=10.0) + assert c.is_feasible({"a": 3.0, "b": 5.0}) is True + + def test_infeasible(self) -> None: + c = LinearConstraint(coefficients={"a": 1.0, "b": 1.0}, bound=10.0) + assert c.is_feasible({"a": 6.0, "b": 5.0}) is False + + def test_boundary(self) -> None: + c = LinearConstraint(coefficients={"a": 1.0, "b": 1.0}, bound=10.0) + assert c.is_feasible({"a": 5.0, "b": 5.0}) is True # exactly equal + + def test_empty_coefficients_rejected(self) -> None: + with pytest.raises( + (PsuuValidationError, ValueError), + match="at least 1 coefficient", + ): + LinearConstraint(coefficients={}, bound=10.0) + + def test_negative_coefficients(self) -> None: + c = LinearConstraint(coefficients={"a": 1.0, "b": -1.0}, bound=0.0) + # a - b <= 0 means a <= b + assert 
c.is_feasible({"a": 3.0, "b": 5.0}) is True + assert c.is_feasible({"a": 5.0, "b": 3.0}) is False + + +class TestFunctionalConstraint: + def test_feasible(self) -> None: + c = FunctionalConstraint(fn=lambda p: p["x"] > 0) + assert c.is_feasible({"x": 1.0}) is True + + def test_infeasible(self) -> None: + c = FunctionalConstraint(fn=lambda p: p["x"] > 0) + assert c.is_feasible({"x": -1.0}) is False + + def test_multi_param(self) -> None: + c = FunctionalConstraint(fn=lambda p: p["a"] * p["b"] < 100) + assert c.is_feasible({"a": 5, "b": 10}) is True + assert c.is_feasible({"a": 20, "b": 10}) is False + + +class TestParameterSpaceConstraints: + def test_no_constraints_default(self) -> None: + space = ParameterSpace(params={"x": Continuous(min_val=0, max_val=10)}) + assert space.constraints == () + + def test_is_feasible_no_constraints(self) -> None: + space = ParameterSpace(params={"x": Continuous(min_val=0, max_val=10)}) + assert space.is_feasible({"x": 5.0}) is True + + def test_is_feasible_with_linear(self) -> None: + space = ParameterSpace( + params={ + "a": Continuous(min_val=0, max_val=100), + "b": Continuous(min_val=0, max_val=100), + }, + constraints=( + LinearConstraint(coefficients={"a": 1.0, "b": 1.0}, bound=100.0), + ), + ) + assert space.is_feasible({"a": 40.0, "b": 50.0}) is True + assert space.is_feasible({"a": 60.0, "b": 50.0}) is False + + def test_is_feasible_with_functional(self) -> None: + space = ParameterSpace( + params={ + "x": Continuous(min_val=0, max_val=10), + "y": Continuous(min_val=0, max_val=10), + }, + constraints=(FunctionalConstraint(fn=lambda p: p["x"] < p["y"]),), + ) + assert space.is_feasible({"x": 3, "y": 5}) is True + assert space.is_feasible({"x": 5, "y": 3}) is False + + def test_multiple_constraints(self) -> None: + space = ParameterSpace( + params={ + "a": Continuous(min_val=0, max_val=100), + "b": Continuous(min_val=0, max_val=100), + }, + constraints=( + LinearConstraint(coefficients={"a": 1.0, "b": 1.0}, bound=100.0), + 
FunctionalConstraint(fn=lambda p: p["a"] >= 10), + ), + ) + # Satisfies both + assert space.is_feasible({"a": 40.0, "b": 50.0}) is True + # Fails first constraint + assert space.is_feasible({"a": 60.0, "b": 50.0}) is False + # Fails second constraint + assert space.is_feasible({"a": 5.0, "b": 10.0}) is False + + def test_linear_constraint_unknown_param_rejected(self) -> None: + with pytest.raises((PsuuValidationError, ValueError), match="unknown params"): + ParameterSpace( + params={"a": Continuous(min_val=0, max_val=10)}, + constraints=( + LinearConstraint(coefficients={"a": 1.0, "z": 1.0}, bound=10.0), + ), + ) + + def test_grid_points_filtered(self) -> None: + space = ParameterSpace( + params={ + "a": Continuous(min_val=0, max_val=10), + "b": Continuous(min_val=0, max_val=10), + }, + constraints=( + LinearConstraint(coefficients={"a": 1.0, "b": 1.0}, bound=10.0), + ), + ) + points = space.grid_points(n_steps=3) # 0, 5, 10 for each + # Valid: (0,0), (0,5), (0,10), (5,0), (5,5), (10,0) = 6 of 9 + assert len(points) == 6 + for p in points: + assert p["a"] + p["b"] <= 10.0 + + def test_grid_points_no_constraints_unchanged(self) -> None: + space = ParameterSpace(params={"x": Continuous(min_val=0, max_val=10)}) + points = space.grid_points(n_steps=3) + assert len(points) == 3 + + def test_grid_points_all_infeasible(self) -> None: + space = ParameterSpace( + params={"x": Continuous(min_val=0, max_val=10)}, + constraints=(FunctionalConstraint(fn=lambda p: False),), + ) + points = space.grid_points(n_steps=3) + assert points == [] + + +class TestGridOptimizerWithConstraints: + def test_grid_respects_constraints(self, simple_model: Model) -> None: + space = ParameterSpace( + params={ + "growth_rate": Continuous(min_val=0.01, max_val=0.1), + }, + constraints=(FunctionalConstraint(fn=lambda p: p["growth_rate"] <= 0.06),), + ) + sweep = Sweep( + model=simple_model, + space=space, + kpis=[ + KPI( + name="final_pop", + fn=lambda r: final_state_mean(r, "population"), + ) + ], + 
optimizer=GridSearchOptimizer(n_steps=3), + timesteps=5, + runs=1, + ) + results = sweep.run() + # 3 grid points: 0.01, 0.055, 0.1 — only first two are <= 0.06 + assert len(results.evaluations) == 2 + + +class TestRandomOptimizerWithConstraints: + def test_random_respects_constraints(self, simple_model: Model) -> None: + space = ParameterSpace( + params={ + "growth_rate": Continuous(min_val=0.01, max_val=0.1), + }, + constraints=(FunctionalConstraint(fn=lambda p: p["growth_rate"] <= 0.05),), + ) + sweep = Sweep( + model=simple_model, + space=space, + kpis=[ + KPI( + name="final_pop", + fn=lambda r: final_state_mean(r, "population"), + ) + ], + optimizer=RandomSearchOptimizer(n_samples=10, seed=42), + timesteps=5, + runs=1, + ) + results = sweep.run() + assert len(results.evaluations) == 10 + for ev in results.evaluations: + assert ev.params["growth_rate"] <= 0.05 + + def test_random_infeasible_raises(self) -> None: + space = ParameterSpace( + params={"x": Continuous(min_val=0, max_val=10)}, + constraints=(FunctionalConstraint(fn=lambda p: False),), + ) + opt = RandomSearchOptimizer(n_samples=1, seed=0) + opt.setup(space, ["kpi"]) + with pytest.raises(PsuuSearchError, match="feasible point"): + opt.suggest() + + def test_random_no_constraints_unchanged(self) -> None: + space = ParameterSpace(params={"x": Continuous(min_val=0, max_val=10)}) + opt = RandomSearchOptimizer(n_samples=5, seed=42) + opt.setup(space, ["kpi"]) + points = [opt.suggest() for _ in range(5)] + assert len(points) == 5 + assert opt.is_exhausted() + + +class TestConstraintWithIntegerAndDiscrete: + def test_integer_with_linear_constraint(self) -> None: + space = ParameterSpace( + params={ + "a": Integer(min_val=1, max_val=5), + "b": Integer(min_val=1, max_val=5), + }, + constraints=( + LinearConstraint(coefficients={"a": 1.0, "b": 1.0}, bound=5.0), + ), + ) + points = space.grid_points(n_steps=0) # n_steps ignored for Integer + for p in points: + assert p["a"] + p["b"] <= 5 + + def 
test_discrete_with_functional_constraint(self) -> None: + space = ParameterSpace( + params={ + "strategy": Discrete(values=("A", "B", "C")), + "x": Continuous(min_val=0, max_val=10), + }, + constraints=(FunctionalConstraint(fn=lambda p: p["strategy"] != "C"),), + ) + points = space.grid_points(n_steps=3) + # 3 discrete * 3 continuous = 9, minus 3 where strategy="C" = 6 + assert len(points) == 6 + for p in points: + assert p["strategy"] != "C"