Skip to content
40 changes: 40 additions & 0 deletions src/safeds/ml/nn/_forward_layer.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
if TYPE_CHECKING:
from torch import Tensor, nn

from safeds._utils import _structural_hash
from safeds.exceptions import ClosedBound, OutOfBoundsError
from safeds.ml.nn._layer import _Layer

Expand Down Expand Up @@ -88,3 +89,42 @@ def _set_input_size(self, input_size: int) -> None:
if input_size < 1:
raise OutOfBoundsError(actual=input_size, name="input_size", lower_bound=ClosedBound(1))
self._input_size = input_size

def __hash__(self) -> int:
    """
    Compute a deterministic, structure-based hash for this forward layer.

    Two layers with the same input and output size produce the same value,
    which keeps `__hash__` consistent with `__eq__`.

    Returns
    -------
    hash:
        the hash value
    """
    sizes = (self._input_size, self._output_size)
    return _structural_hash(*sizes)

def __eq__(self, other: object) -> bool:
"""
Compare two forward layer instances.

Returns
-------
equals:
'True' if input and output size are equal, 'False' otherwise.
"""
if not isinstance(other, ForwardLayer):
return NotImplemented
if self is other:
return True
return self._input_size == other._input_size and self._output_size == other._output_size

def __sizeof__(self) -> int:
    """
    Return the complete size of this object.

    The size is the sum of the shallow sizes of the stored input and output
    size attributes.

    Returns
    -------
    size:
        Size of this object in bytes.
    """
    # Bug fix: the import previously preceded the triple-quoted string, which
    # demoted the intended docstring to a dead string expression (__doc__ was
    # None). The docstring must be the first statement in the function body.
    import sys

    return sys.getsizeof(self._input_size) + sys.getsizeof(self._output_size)
81 changes: 81 additions & 0 deletions tests/safeds/ml/nn/test_forward_layer.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
import sys
from typing import Any

import pytest
from safeds.exceptions import OutOfBoundsError
from safeds.ml.nn import ForwardLayer
Expand Down Expand Up @@ -70,3 +73,81 @@ def test_should_raise_if_output_size_out_of_bounds(output_size: int) -> None:
)
def test_should_raise_if_output_size_doesnt_match(output_size: int) -> None:
assert ForwardLayer(output_size=output_size, input_size=1).output_size == output_size


@pytest.mark.parametrize(
    ("layer1", "layer2", "equal"),
    [
        (ForwardLayer(input_size=1, output_size=2), ForwardLayer(input_size=1, output_size=2), True),
        (ForwardLayer(input_size=1, output_size=2), ForwardLayer(input_size=2, output_size=1), False),
    ],
    ids=["equal", "not equal"],
)
def test_should_compare_forward_layers(layer1: ForwardLayer, layer2: ForwardLayer, equal: bool) -> None:
    # Invoke __eq__ directly so the raw return value (not its truthiness) is compared.
    result = layer1.__eq__(layer2)
    assert result == equal


def test_should_assert_that_forward_layer_is_equal_to_itself() -> None:
    # A layer compared against the very same instance must be equal.
    same = ForwardLayer(input_size=1, output_size=1)
    assert same.__eq__(same)


@pytest.mark.parametrize(
    ("layer", "other"),
    [(ForwardLayer(input_size=1, output_size=1), None)],
    ids=["ForwardLayer vs. None"],
)
def test_should_return_not_implemented_if_other_is_not_forward_layer(layer: ForwardLayer, other: Any) -> None:
    # A foreign type must yield NotImplemented rather than a plain False,
    # so Python can attempt the reflected comparison.
    outcome = layer.__eq__(other)
    assert outcome is NotImplemented


@pytest.mark.parametrize(
    ("layer1", "layer2"),
    [(ForwardLayer(input_size=1, output_size=2), ForwardLayer(input_size=1, output_size=2))],
    ids=["equal"],
)
def test_should_assert_that_equal_forward_layers_have_equal_hash(layer1: ForwardLayer, layer2: ForwardLayer) -> None:
    # Equal layers must hash identically (hash/eq contract).
    first, second = layer1.__hash__(), layer2.__hash__()
    assert first == second


@pytest.mark.parametrize(
    ("layer1", "layer2"),
    [(ForwardLayer(input_size=1, output_size=2), ForwardLayer(input_size=2, output_size=1))],
    ids=["not equal"],
)
def test_should_assert_that_different_forward_layers_have_different_hash(
    layer1: ForwardLayer,
    layer2: ForwardLayer,
) -> None:
    # Layers with swapped sizes are expected to hash differently.
    first, second = layer1.__hash__(), layer2.__hash__()
    assert first != second


@pytest.mark.parametrize(
    "layer",
    [ForwardLayer(input_size=1, output_size=1)],
    ids=["one"],
)
def test_should_assert_that_layer_size_is_greater_than_normal_object(layer: ForwardLayer) -> None:
    # The custom __sizeof__ counts both stored sizes, so it must exceed
    # the footprint of a bare object().
    baseline = sys.getsizeof(object())
    assert sys.getsizeof(layer) > baseline