Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
118 commits
Select commit Hold shift + click to select a range
002479b
optimization for AdaBoost
sibre28 Jun 13, 2024
5c41c51
small change
sibre28 Jun 16, 2024
89139c2
add adaboost tests
sibre28 Jun 16, 2024
701655e
add decisiontree tests and move some tests to classifier class
sibre28 Jun 16, 2024
8f1d576
add learning rate choice for adaboost and refactor
sibre28 Jun 17, 2024
3b31cb6
add gradientboosting
sibre28 Jun 17, 2024
4c4595d
adjust test file
sibre28 Jun 17, 2024
b5ecbf5
add knearestneighbors
sibre28 Jun 17, 2024
1b62a6e
add randomforest
sibre28 Jun 17, 2024
6cecab7
add svm
sibre28 Jun 17, 2024
95d693a
add svm
sibre28 Jun 17, 2024
20cc993
add ada boost regression
sibre28 Jun 18, 2024
963c6c0
add decision tree regression
sibre28 Jun 18, 2024
92859f9
add gradient boosting regression
sibre28 Jun 18, 2024
b421192
add knearestneighbors regression
sibre28 Jun 18, 2024
3dc12c5
add random forest regression
sibre28 Jun 18, 2024
20c8d10
add random forest regression
sibre28 Jun 18, 2024
facde0c
add svm regression
sibre28 Jun 18, 2024
24735a2
fix error
sibre28 Jun 18, 2024
0b2e1cc
fix error
sibre28 Jun 18, 2024
cfa19e4
add warnings for models without choices
sibre28 Jun 18, 2024
7708b4c
add cross validation
sibre28 Jun 19, 2024
bfe0d1e
add multi processing
sibre28 Jun 19, 2024
54bb3e4
Merge branch 'main' into 264-hyperparameter-optimization
sibre28 Jun 19, 2024
a63ad5e
linter fixes
sibre28 Jun 20, 2024
ae1a492
remove unnecessary parameter
sibre28 Jun 22, 2024
b282d52
Merge remote-tracking branch 'origin/264-hyperparameter-optimization'…
sibre28 Jun 22, 2024
0da15e6
remove unnecessary parameter
sibre28 Jun 22, 2024
e784d81
linter fix
sibre28 Jun 22, 2024
ada2d3d
linter fix
sibre28 Jun 22, 2024
ebd1369
linter fix
sibre28 Jun 23, 2024
fe0a906
linter fix
sibre28 Jun 23, 2024
226ef31
linter fix
sibre28 Jun 23, 2024
798e932
linter fix
sibre28 Jun 23, 2024
ca56177
linter fix
sibre28 Jun 23, 2024
603e356
linter fix
sibre28 Jun 23, 2024
45e5d47
linter fix
sibre28 Jun 23, 2024
107a222
linter fix
sibre28 Jun 23, 2024
ea0214f
linter fix
sibre28 Jun 23, 2024
674d083
linter fix
sibre28 Jun 23, 2024
7264c09
linter fix
sibre28 Jun 23, 2024
9670810
linter fix
sibre28 Jun 24, 2024
761c1c5
style: apply automated linter fixes
megalinter-bot Jun 24, 2024
f0e78fe
style: apply automated linter fixes
megalinter-bot Jun 24, 2024
84f3c43
Merge branch 'main' into 264-hyperparameter-optimization
sibre28 Jun 25, 2024
4b4d391
combine linear, lasso, ridge and elasticnet into ElasticNetRegressor
sibre28 Jun 25, 2024
8ac656a
Merge remote-tracking branch 'origin/264-hyperparameter-optimization'…
sibre28 Jun 25, 2024
6505378
Add Choice to ElasticNetRegressor
sibre28 Jun 25, 2024
1682ff7
add precondition methods to ElasticNetRegressor
sibre28 Jun 25, 2024
4d1ec98
Add tests for ElasticNetRegressor and remove tests of lasso and ridge…
sibre28 Jun 25, 2024
1a605c2
style: apply automated linter fixes
megalinter-bot Jun 25, 2024
e5faec2
style: apply automated linter fixes
megalinter-bot Jun 25, 2024
4c4481a
adjust arima test case where lassoRegression was used
sibre28 Jun 25, 2024
823743e
Merge remote-tracking branch 'origin/264-hyperparameter-optimization'…
sibre28 Jun 25, 2024
e35e4b9
style: apply automated linter fixes
megalinter-bot Jun 25, 2024
233816a
add docstrings
sibre28 Jun 25, 2024
e347832
Merge remote-tracking branch 'origin/264-hyperparameter-optimization'…
sibre28 Jun 25, 2024
041d770
add docstrings
sibre28 Jun 25, 2024
901981c
style: apply automated linter fixes
megalinter-bot Jun 25, 2024
e3b5195
change tutorial which used linearregressor
sibre28 Jun 25, 2024
d038693
Merge remote-tracking branch 'origin/264-hyperparameter-optimization'…
sibre28 Jun 25, 2024
d221241
Merge branch 'main' into 264-hyperparameter-optimization
lars-reimann Jun 27, 2024
b417348
add learner param of adaboost as choice
sibre28 Jun 27, 2024
f5613aa
Merge remote-tracking branch 'origin/264-hyperparameter-optimization'…
sibre28 Jun 27, 2024
17896ab
adjust baseline models
sibre28 Jun 27, 2024
65be84d
adjust decision tree max_depth choice to take None as well
sibre28 Jun 27, 2024
d1454ee
adjust random forest max_depth choice to take None as well
sibre28 Jun 27, 2024
f607bbf
add svm kernel choice
sibre28 Jun 27, 2024
de53187
linter fix
sibre28 Jun 27, 2024
b7fc975
linter fix
sibre28 Jun 27, 2024
ebaaa15
change elasticnet to linear regressor
sibre28 Jun 28, 2024
9aec1b0
add choices for the penalty params of linear regressor
sibre28 Jun 28, 2024
fda0af0
linter fix
sibre28 Jun 28, 2024
68810d4
linter fix
sibre28 Jun 28, 2024
8e088d7
linter fix
sibre28 Jun 29, 2024
425ceb0
linter fix
sibre28 Jun 29, 2024
aa219e3
Merge branch 'main' into 264-hyperparameter-optimization
sibre28 Jun 29, 2024
5cbc5b4
linter fix
sibre28 Jun 29, 2024
9b708e5
style: apply automated linter fixes
megalinter-bot Jun 29, 2024
54be12e
style: apply automated linter fixes
megalinter-bot Jun 29, 2024
254edf9
linter fix
sibre28 Jun 29, 2024
3721ea5
Merge remote-tracking branch 'origin/264-hyperparameter-optimization'…
sibre28 Jun 29, 2024
51da4c1
linter fix
sibre28 Jun 29, 2024
dddb2de
Merge branch 'main' into 264-hyperparameter-optimization
sibre28 Jun 30, 2024
e4e4f22
style: apply automated linter fixes
megalinter-bot Jun 30, 2024
3b74c21
add missing variable update
sibre28 Jul 2, 2024
c2de799
Merge remote-tracking branch 'origin/264-hyperparameter-optimization'…
sibre28 Jul 3, 2024
e98b1c4
change choice to remove duplicates and throw error when no args are p…
sibre28 Jul 8, 2024
fcfcf02
add EmptyChoiceError
sibre28 Jul 8, 2024
dff67a3
set context of ProcessPoolExecutor to fork
sibre28 Jul 8, 2024
415dfa2
update choice tests
sibre28 Jul 8, 2024
5374149
linter fix
sibre28 Jul 8, 2024
1daac9f
style: apply automated linter fixes
megalinter-bot Jul 8, 2024
2d0b293
set context of ProcessPoolExecutor to spawn
sibre28 Jul 8, 2024
a440661
Merge remote-tracking branch 'origin/264-hyperparameter-optimization'…
sibre28 Jul 8, 2024
f2a80ee
style: apply automated linter fixes
megalinter-bot Jul 8, 2024
1d63c2f
Merge branch 'main' into 264-hyperparameter-optimization
sibre28 Jul 9, 2024
d6e596e
set context of ProcessPoolExecutor to spawn
sibre28 Jul 10, 2024
6b705e2
Merge remote-tracking branch 'origin/264-hyperparameter-optimization'…
sibre28 Jul 10, 2024
09b5b06
style: apply automated linter fixes
megalinter-bot Jul 10, 2024
02fddbe
Merge branch 'main' into 264-hyperparameter-optimization
sibre28 Jul 14, 2024
401de4c
style: apply automated linter fixes
megalinter-bot Jul 14, 2024
319ce97
add tests
sibre28 Jul 14, 2024
7ef9d45
linter
sibre28 Jul 14, 2024
f3def3d
style: apply automated linter fixes
megalinter-bot Jul 14, 2024
c16b4df
add tests
sibre28 Jul 18, 2024
2d57870
Merge remote-tracking branch 'origin/264-hyperparameter-optimization'…
sibre28 Jul 18, 2024
d2f0d10
Merge branch 'main' into 264-hyperparameter-optimization
sibre28 Jul 18, 2024
17ce5c2
style: apply automated linter fixes
megalinter-bot Jul 18, 2024
5d92cad
Merge branch 'main' into 264-hyperparameter-optimization
sibre28 Jul 22, 2024
a9492ad
Merge branch 'main' into 264-hyperparameter-optimization
sibre28 Aug 7, 2024
9513f15
fix codecov
sibre28 Aug 29, 2024
aebd475
Merge branch 'main' into 264-hyperparameter-optimization
sibre28 Aug 29, 2024
80c99b9
fix linter
sibre28 Aug 29, 2024
61904f7
Merge remote-tracking branch 'origin/264-hyperparameter-optimization'…
sibre28 Aug 29, 2024
5c80f7f
style: apply automated linter fixes
megalinter-bot Aug 29, 2024
6c72375
fix error
sibre28 Aug 29, 2024
7346dfc
Merge remote-tracking branch 'origin/264-hyperparameter-optimization'…
sibre28 Aug 29, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 19 additions & 9 deletions src/safeds/ml/classical/_bases/_ada_boost_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

from safeds._utils import _structural_hash
from safeds._validation import _check_bounds, _ClosedBound, _OpenBound
from safeds.ml.hyperparameters import Choice

if TYPE_CHECKING:
from safeds.ml.classical import SupervisedModel
Expand All @@ -18,16 +19,25 @@ class _AdaBoostBase(ABC):
@abstractmethod
def __init__(
self,
max_learner_count: int,
learning_rate: float,
max_learner_count: int | Choice[int],
learning_rate: float | Choice[float],
) -> None:
# Validation
_check_bounds("max_learner_count", max_learner_count, lower_bound=_ClosedBound(1))
_check_bounds("learning_rate", learning_rate, lower_bound=_OpenBound(0))
if isinstance(max_learner_count, Choice):
for mlc in max_learner_count:
_check_bounds("max_learner_count", mlc, lower_bound=_ClosedBound(1))
else:
_check_bounds("max_learner_count", max_learner_count, lower_bound=_ClosedBound(1))

if isinstance(learning_rate, Choice):
for lr in learning_rate:
_check_bounds("learning_rate", lr, lower_bound=_OpenBound(0))
else:
_check_bounds("learning_rate", learning_rate, lower_bound=_OpenBound(0))

# Hyperparameters
self._max_learner_count: int = max_learner_count
self._learning_rate: float = learning_rate
self._max_learner_count: int | Choice[int] = max_learner_count
self._learning_rate: float | Choice[float] = learning_rate

def __hash__(self) -> int:
return _structural_hash(
Expand All @@ -40,16 +50,16 @@ def __hash__(self) -> int:
# ------------------------------------------------------------------------------------------------------------------

@property
def max_learner_count(self) -> int:
def max_learner_count(self) -> int | Choice[int]:
"""The maximum number of learners in the ensemble."""
return self._max_learner_count

@property
def learning_rate(self) -> float:
def learning_rate(self) -> float | Choice[float]:
"""The learning rate."""
return self._learning_rate

@property
@abstractmethod
def learner(self) -> SupervisedModel | None:
def learner(self) -> SupervisedModel | None | Choice[SupervisedModel | None]:
"""The base learner used for training the ensemble."""
29 changes: 17 additions & 12 deletions src/safeds/ml/classical/_bases/_decision_tree_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

from safeds._utils import _structural_hash
from safeds._validation import _check_bounds, _ClosedBound
from safeds.ml.hyperparameters import Choice


class _DecisionTreeBase(ABC):
Expand All @@ -14,20 +15,24 @@ class _DecisionTreeBase(ABC):
@abstractmethod
def __init__(
self,
max_depth: int | None,
min_sample_count_in_leaves: int,
max_depth: int | None | Choice[int | None],
min_sample_count_in_leaves: int | Choice[int],
) -> None:
# Validation
_check_bounds("max_depth", max_depth, lower_bound=_ClosedBound(1))
_check_bounds(
"min_sample_count_in_leaves",
min_sample_count_in_leaves,
lower_bound=_ClosedBound(1),
)
if isinstance(max_depth, Choice):
for md in max_depth:
_check_bounds("max_depth", md, lower_bound=_ClosedBound(1))
else:
_check_bounds("max_depth", max_depth, lower_bound=_ClosedBound(1))
if isinstance(min_sample_count_in_leaves, Choice):
for msc in min_sample_count_in_leaves:
_check_bounds("min_sample_count_in_leaves", msc, lower_bound=_ClosedBound(1))
else:
_check_bounds("min_sample_count_in_leaves", min_sample_count_in_leaves, lower_bound=_ClosedBound(1))

# Hyperparameters
self._max_depth: int | None = max_depth
self._min_sample_count_in_leaves: int = min_sample_count_in_leaves
self._max_depth: int | None | Choice[int | None] = max_depth
self._min_sample_count_in_leaves: int | Choice[int] = min_sample_count_in_leaves

def __hash__(self) -> int:
return _structural_hash(
Expand All @@ -40,11 +45,11 @@ def __hash__(self) -> int:
# ------------------------------------------------------------------------------------------------------------------

@property
def max_depth(self) -> int | None:
def max_depth(self) -> int | None | Choice[int | None]:
"""The maximum depth of the tree."""
return self._max_depth

@property
def min_sample_count_in_leaves(self) -> int:
def min_sample_count_in_leaves(self) -> int | Choice[int]:
"""The minimum number of samples that must remain in the leaves of the tree."""
return self._min_sample_count_in_leaves
22 changes: 16 additions & 6 deletions src/safeds/ml/classical/_bases/_gradient_boosting_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

from safeds._utils import _structural_hash
from safeds._validation import _check_bounds, _ClosedBound, _OpenBound
from safeds.ml.hyperparameters import Choice


class _GradientBoostingBase(ABC):
Expand All @@ -14,12 +15,21 @@ class _GradientBoostingBase(ABC):
@abstractmethod
def __init__(
self,
tree_count: int,
learning_rate: float,
tree_count: int | Choice[int],
learning_rate: float | Choice[float],
) -> None:
# Validation
_check_bounds("tree_count", tree_count, lower_bound=_ClosedBound(1))
_check_bounds("learning_rate", learning_rate, lower_bound=_OpenBound(0))
if isinstance(tree_count, Choice):
for tc in tree_count:
_check_bounds("tree_count", tc, lower_bound=_ClosedBound(1))
else:
_check_bounds("tree_count", tree_count, lower_bound=_ClosedBound(1))

if isinstance(learning_rate, Choice):
for lr in learning_rate:
_check_bounds("learning_rate", lr, lower_bound=_OpenBound(0))
else:
_check_bounds("learning_rate", learning_rate, lower_bound=_OpenBound(0))

# Hyperparameters
self._tree_count = tree_count
Expand All @@ -36,11 +46,11 @@ def __hash__(self) -> int:
# ------------------------------------------------------------------------------------------------------------------

@property
def tree_count(self) -> int:
def tree_count(self) -> int | Choice[int]:
"""The number of trees (estimators) in the ensemble."""
return self._tree_count

@property
def learning_rate(self) -> float:
def learning_rate(self) -> float | Choice[float]:
"""The learning rate."""
return self._learning_rate
11 changes: 8 additions & 3 deletions src/safeds/ml/classical/_bases/_k_nearest_neighbors_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

from safeds._utils import _structural_hash
from safeds._validation import _check_bounds, _ClosedBound
from safeds.ml.hyperparameters import Choice


class _KNearestNeighborsBase(ABC):
Expand All @@ -14,10 +15,14 @@ class _KNearestNeighborsBase(ABC):
@abstractmethod
def __init__(
self,
neighbor_count: int,
neighbor_count: int | Choice[int],
) -> None:
# Validation
_check_bounds("neighbor_count", neighbor_count, lower_bound=_ClosedBound(1))
if isinstance(neighbor_count, Choice):
for nc in neighbor_count:
_check_bounds("neighbor_count", nc, lower_bound=_ClosedBound(1))
else:
_check_bounds("neighbor_count", neighbor_count, lower_bound=_ClosedBound(1))

# Hyperparameters
self._neighbor_count = neighbor_count
Expand All @@ -32,6 +37,6 @@ def __hash__(self) -> int:
# ------------------------------------------------------------------------------------------------------------------

@property
def neighbor_count(self) -> int:
def neighbor_count(self) -> int | Choice[int]:
"""The number of neighbors used for interpolation."""
return self._neighbor_count
43 changes: 27 additions & 16 deletions src/safeds/ml/classical/_bases/_random_forest_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

from safeds._utils import _structural_hash
from safeds._validation import _check_bounds, _ClosedBound
from safeds.ml.hyperparameters import Choice


class _RandomForestBase(ABC):
Expand All @@ -14,23 +15,33 @@ class _RandomForestBase(ABC):
@abstractmethod
def __init__(
self,
tree_count: int,
max_depth: int | None,
min_sample_count_in_leaves: int,
tree_count: int | Choice[int],
max_depth: int | None | Choice[int | None],
min_sample_count_in_leaves: int | Choice[int],
) -> None:
# Validation
_check_bounds("tree_count", tree_count, lower_bound=_ClosedBound(1))
_check_bounds("max_depth", max_depth, lower_bound=_ClosedBound(1))
_check_bounds(
"min_sample_count_in_leaves",
min_sample_count_in_leaves,
lower_bound=_ClosedBound(1),
)
if isinstance(tree_count, Choice):
for tc in tree_count:
_check_bounds("tree_count", tc, lower_bound=_ClosedBound(1))
else:
_check_bounds("tree_count", tree_count, lower_bound=_ClosedBound(1))

if isinstance(max_depth, Choice):
for md in max_depth:
_check_bounds("max_depth", md, lower_bound=_ClosedBound(1))
else:
_check_bounds("max_depth", max_depth, lower_bound=_ClosedBound(1))

if isinstance(min_sample_count_in_leaves, Choice):
for msc in min_sample_count_in_leaves:
_check_bounds("min_sample_count_in_leaves", msc, lower_bound=_ClosedBound(1))
else:
_check_bounds("min_sample_count_in_leaves", min_sample_count_in_leaves, lower_bound=_ClosedBound(1))

# Hyperparameters
self._tree_count: int = tree_count
self._max_depth: int | None = max_depth
self._min_sample_count_in_leaves: int = min_sample_count_in_leaves
self._tree_count: int | Choice[int] = tree_count
self._max_depth: int | None | Choice[int | None] = max_depth
self._min_sample_count_in_leaves: int | Choice[int] = min_sample_count_in_leaves

def __hash__(self) -> int:
return _structural_hash(
Expand All @@ -44,16 +55,16 @@ def __hash__(self) -> int:
# ------------------------------------------------------------------------------------------------------------------

@property
def tree_count(self) -> int:
def tree_count(self) -> int | Choice[int]:
"""The number of trees used in the random forest."""
return self._tree_count

@property
def max_depth(self) -> int | None:
def max_depth(self) -> int | None | Choice[int | None]:
"""The maximum depth of each tree."""
return self._max_depth

@property
def min_sample_count_in_leaves(self) -> int:
def min_sample_count_in_leaves(self) -> int | Choice[int]:
"""The minimum number of samples that must remain in the leaves of each tree."""
return self._min_sample_count_in_leaves
19 changes: 12 additions & 7 deletions src/safeds/ml/classical/_bases/_support_vector_machine_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

from safeds._utils import _structural_hash
from safeds._validation import _check_bounds, _ClosedBound, _OpenBound
from safeds.ml.hyperparameters import Choice

if TYPE_CHECKING:
from sklearn.svm import SVC as SklearnSVC # noqa: N811
Expand Down Expand Up @@ -76,18 +77,22 @@ def sigmoid() -> _SupportVectorMachineBase.Kernel:
@abstractmethod
def __init__(
self,
c: float,
kernel: _SupportVectorMachineBase.Kernel | None,
c: float | Choice[float],
kernel: _SupportVectorMachineBase.Kernel | None | Choice[_SupportVectorMachineBase.Kernel | None],
) -> None:
if kernel is None:
kernel = _SupportVectorMachineBase.Kernel.radial_basis_function()

# Validation
_check_bounds("c", c, lower_bound=_OpenBound(0))
if isinstance(c, Choice):
for value in c:
_check_bounds("c", value, lower_bound=_OpenBound(0))
else:
_check_bounds("c", c, lower_bound=_OpenBound(0))

# Hyperparameters
self._c: float = c
self._kernel: _SupportVectorMachineBase.Kernel = kernel
self._c: float | Choice[float] = c
self._kernel: _SupportVectorMachineBase.Kernel | Choice[_SupportVectorMachineBase.Kernel | None] = kernel

def __hash__(self) -> int:
return _structural_hash(
Expand All @@ -100,14 +105,14 @@ def __hash__(self) -> int:
# ------------------------------------------------------------------------------------------------------------------

@property
def c(self) -> float:
def c(self) -> float | Choice[float]:
"""The regularization strength."""
return self._c

# This property is abstract, so subclasses must declare a public return type.
@property
@abstractmethod
def kernel(self) -> _SupportVectorMachineBase.Kernel:
def kernel(self) -> _SupportVectorMachineBase.Kernel | Choice[_SupportVectorMachineBase.Kernel | None]:
"""The type of kernel used."""


Expand Down
28 changes: 19 additions & 9 deletions src/safeds/ml/classical/_supervised_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,12 @@ def fit(self, training_set: TabularDataset) -> Self:

Raises
------
PlainTableError
If a table is passed instead of a TabularDataset.
DatasetMissesDataError
If the given training set contains no data.
FittingWithChoiceError
When trying to call this method on a model with hyperparameter choices.
LearningError
If the training data contains invalid values or if the training failed.
"""
Expand All @@ -88,7 +94,8 @@ def fit(self, training_set: TabularDataset) -> Self:
if training_set.to_table().row_count == 0:
raise DatasetMissesDataError

self._check_additional_fit_preconditions(training_set)
self._check_additional_fit_preconditions()
self._check_more_additional_fit_preconditions(training_set)

wrapped_model = self._get_sklearn_model()
_fit_sklearn_model_in_place(wrapped_model, training_set)
Expand Down Expand Up @@ -234,15 +241,14 @@ def get_target_type(self) -> DataType:
# Template methods
# ------------------------------------------------------------------------------------------------------------------

def _check_additional_fit_preconditions(self, training_set: TabularDataset) -> None: # noqa: B027
"""
Check additional preconditions for fitting the model and raise an error if any are violated.
def _check_additional_fit_preconditions(self) -> None: # noqa: B027
"""Check additional preconditions for fitting the model and raise an error if any are violated."""

Parameters
----------
training_set:
The training data containing the features and target.
"""
def _check_more_additional_fit_preconditions(self, training_set: TabularDataset) -> None: # noqa: B027
"""Check additional preconditions for fitting the model and raise an error if any are violated."""

def _check_additional_fit_by_exhaustive_search_preconditions(self) -> None: # noqa: B027
"""Check additional preconditions for fitting by exhaustive search and raise an error if any are violated."""

def _check_additional_predict_preconditions(self, dataset: Table | TabularDataset) -> None: # noqa: B027
"""
Expand All @@ -254,6 +260,10 @@ def _check_additional_predict_preconditions(self, dataset: Table | TabularDatase
The dataset containing at least the features.
"""

def _get_models_for_all_choices(self) -> list[Self]:
"""Get a list of all possible models, given the Parameter Choices."""
raise NotImplementedError # pragma: no cover

@abstractmethod
def _clone(self) -> Self:
"""
Expand Down
Loading