pyproject.toml: 4 changes (3 additions, 1 deletion)
@@ -37,7 +37,7 @@ optional-dependencies.doc = [
   "sphinx-autofixture>=0.4.1",
 ]
 optional-dependencies.full = [ "fast-array-utils[accel,dask,sparse]", "h5py", "zarr" ]
-optional-dependencies.sparse = [ "scipy>=1.11" ]
+optional-dependencies.sparse = [ "scipy>=1.13" ]
 optional-dependencies.test = [
   "anndata",
   "fast-array-utils[accel,test-min]",
@@ -102,6 +102,7 @@ overrides.matrix.resolution.features = [
 overrides.matrix.resolution.dependencies = [
   # TODO: move to min dep once this is fixed: https://github.com/tlambert03/hatch-min-requirements/issues/11
   { if = [ "lowest" ], value = "dask==2023.6.1" },
+  { if = [ "lowest" ], value = "scipy==1.13.0" },
 ]

 [[tool.hatch.envs.hatch-test.matrix]]
@@ -174,6 +175,7 @@ filterwarnings = [
   "error",
   # codspeed seems to break this dtype added by h5py
   "ignore:.*numpy[.]longdouble:UserWarning",
+  "ignore:FNV hashing is not implemented in Numba:UserWarning",
 ]
 markers = [
   "benchmark: marks tests as benchmark (to run with `--codspeed`)",
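Note (not part of the diff): the sparse extra now requires scipy>=1.13, and the "lowest" test-matrix resolution pins scipy==1.13.0 to exercise exactly that floor. A minimal sketch, only for illustration, of how a downstream check of that floor could look, using the same packaging/importlib.metadata tools the rest of this PR relies on:

    from importlib.metadata import version

    from packaging.version import Version

    # Hypothetical guard mirroring the new ``sparse`` extra floor; not repo code.
    if Version(version("scipy")) < Version("1.13"):
        raise RuntimeError("fast-array-utils[sparse] requires scipy>=1.13")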
src/testing/fast_array_utils/_array_type.py: 14 changes (11 additions, 3 deletions)
@@ -7,7 +7,7 @@
 from dataclasses import KW_ONLY, dataclass, field
 from functools import cached_property, partial
 from importlib.metadata import version
-from typing import TYPE_CHECKING, Generic, TypeVar, cast
+from typing import TYPE_CHECKING, Generic, TypedDict, TypeVar, cast

 import numpy as np
 from packaging.version import Version
@@ -378,12 +378,20 @@ def random_mat(
     m, n = shape
     return cast(
         "types.CSBase",
-        random_spmat(m, n, density=density, format=format, dtype=dtype, rng=rng)
+        random_spmat(m, n, density=density, format=format, dtype=dtype, **_rng_kw(rng))
         if container == "matrix"
-        else random_sparr(shape, density=density, format=format, dtype=dtype, rng=rng),
+        else random_sparr(shape, density=density, format=format, dtype=dtype, **_rng_kw(rng)),
     )


+class RngKw(TypedDict):
+    rng: np.random.Generator | None
+
+
+def _rng_kw(rng: np.random.Generator | None) -> RngKw:
+    return RngKw(rng=rng) if Version(version("scipy")) >= Version("1.15") else cast("RngKw", dict(random_state=rng))
+
+
 def _half_chunk_size(a: tuple[int, ...]) -> tuple[int, ...]:
     def half_rounded_up(x: int) -> int:
         div, mod = divmod(x, 2)
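Note (not part of the diff): _rng_kw exists because SciPy 1.15 switched its random sparse constructors to an rng= keyword, while older releases only accept random_state=. A self-contained sketch of the same pattern against scipy.sparse.random; the helper name rng_kw below is illustrative, not the repo's API:

    from importlib.metadata import version

    import numpy as np
    import scipy.sparse as sps
    from packaging.version import Version


    def rng_kw(rng: np.random.Generator | None) -> dict[str, np.random.Generator | None]:
        # Pick whichever seeding keyword the installed SciPy understands.
        name = "rng" if Version(version("scipy")) >= Version("1.15") else "random_state"
        return {name: rng}


    # 10×5 CSR matrix, ~20% non-zero, reproducibly seeded on either side of the SciPy 1.15 rename.
    mat = sps.random(10, 5, density=0.2, format="csr", **rng_kw(np.random.default_rng(0)))
    print(mat.nnz)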
tests/test_stats.py: 21 changes (19 additions, 2 deletions)
@@ -1,6 +1,7 @@
 # SPDX-License-Identifier: MPL-2.0
 from __future__ import annotations

+from importlib.metadata import version
 from importlib.util import find_spec
 from pathlib import Path
 from typing import TYPE_CHECKING, cast
@@ -9,6 +10,7 @@
 import pytest
 import scipy.sparse as sps
 from numpy.exceptions import AxisError
+from packaging.version import Version

 from fast_array_utils import stats, types
 from testing.fast_array_utils import SUPPORTED_TYPES, Flags
@@ -39,6 +41,11 @@
 ATS_CUPY_SPARSE = {at for at in SUPPORTED_TYPES if "cupyx.scipy" in str(at)}


+def _xfail_if_old_scipy(array_type: ArrayType[Any], ndim: Literal[1, 2]) -> pytest.MarkDecorator:
+    cond = ndim == 1 and bool(array_type.flags & Flags.Sparse) and Version(version("scipy")) < Version("1.14")
+    return pytest.mark.xfail(cond, reason="Sparse matrices don’t support 1d arrays")
+
+
 @pytest.fixture(
     scope="session",
     params=[
@@ -134,7 +141,10 @@ def pbmc64k_reduced_raw() -> sps.csr_array[np.float32]:
 @pytest.mark.array_type(skip={*ATS_SPARSE_DS, Flags.Matrix})
 @pytest.mark.parametrize("func", STAT_FUNCS)
 @pytest.mark.parametrize(("ndim", "axis"), [(1, 0), (2, 3), (2, -1)], ids=["1d-ax0", "2d-ax3", "2d-axneg"])
-def test_ndim_error(array_type: ArrayType[Array], func: StatFunNoDtype, ndim: Literal[1, 2], axis: Literal[0, 1] | None) -> None:
+def test_ndim_error(
+    request: pytest.FixtureRequest, array_type: ArrayType[Array], func: StatFunNoDtype, ndim: Literal[1, 2], axis: Literal[0, 1] | None
+) -> None:
+    request.applymarker(_xfail_if_old_scipy(array_type, ndim))
     check_ndim(array_type, ndim)
     # not using the fixture because we don’t need to test multiple dtypes
     np_arr = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.float32)
@@ -148,12 +158,15 @@ def test_ndim_error(array_type: ArrayType[Array], func: StatFunNoDtype, ndim: Li

 @pytest.mark.array_type(skip=ATS_SPARSE_DS)
 def test_sum(
+    request: pytest.FixtureRequest,
     array_type: ArrayType[CpuArray | GpuArray | DiskArray | types.DaskArray],
     dtype_in: type[DTypeIn],
     dtype_arg: type[DTypeOut] | None,
     axis: Literal[0, 1] | None,
     np_arr: NDArray[DTypeIn],
+    ndim: Literal[1, 2],
 ) -> None:
+    request.applymarker(_xfail_if_old_scipy(array_type, ndim))
     if np.dtype(dtype_arg).kind in "iu" and (array_type.flags & Flags.Gpu) and (array_type.flags & Flags.Sparse):
         pytest.skip("GPU sparse matrices don’t support int dtypes")
     arr = array_type(np_arr.copy())
@@ -209,7 +222,8 @@ def test_sum_dask_shapes(array_type: ArrayType[types.DaskArray], axis: Literal[0


 @pytest.mark.array_type(skip=ATS_SPARSE_DS)
-def test_mean(array_type: ArrayType[Array], axis: Literal[0, 1] | None, np_arr: NDArray[DTypeIn]) -> None:
+def test_mean(request: pytest.FixtureRequest, array_type: ArrayType[Array], axis: Literal[0, 1] | None, np_arr: NDArray[DTypeIn], ndim: Literal[1, 2]) -> None:
+    request.applymarker(_xfail_if_old_scipy(array_type, ndim))
     arr = array_type(np_arr)

     result = stats.mean(arr, axis=axis) # type: ignore[arg-type] # https://github.com/python/mypy/issues/16777
@@ -224,10 +238,13 @@ def test_mean(array_type: ArrayType[Array], axis: Literal[0, 1] | None, np_arr:

 @pytest.mark.array_type(skip=Flags.Disk)
 def test_mean_var(
+    request: pytest.FixtureRequest,
     array_type: ArrayType[CpuArray | GpuArray | types.DaskArray],
     axis: Literal[0, 1] | None,
     np_arr: NDArray[DTypeIn],
+    ndim: Literal[1, 2],
 ) -> None:
+    request.applymarker(_xfail_if_old_scipy(array_type, ndim))
     arr = array_type(np_arr)

     mean, var = stats.mean_var(arr, axis=axis, correction=1)
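Note (not part of the diff): these tests attach the conditional xfail at runtime with request.applymarker rather than as a decorator, so the marker can depend on the parametrized array_type and ndim values. A stripped-down sketch of that pattern; test and helper names here are illustrative, not taken from the repo:

    from importlib.metadata import version

    import pytest
    from packaging.version import Version


    def xfail_if_old_scipy(ndim: int) -> pytest.MarkDecorator:
        # xfail only the 1d cases on SciPy releases that predate 1d sparse support.
        cond = ndim == 1 and Version(version("scipy")) < Version("1.14")
        return pytest.mark.xfail(cond, reason="sparse containers lack 1d support before SciPy 1.14")


    @pytest.mark.parametrize("ndim", [1, 2])
    def test_example(request: pytest.FixtureRequest, ndim: int) -> None:
        request.applymarker(xfail_if_old_scipy(ndim))
        ...  # exercise the 1d/2d code path under test here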