
Commit bed6308

Merge pull request #3640 from alejoe91/support_numpy_2
Support numpy 2.0
2 parents ae19c2a + 6cc73f5

3 files changed: +9 −9 lines changed

.github/workflows/full-test-with-codecov.yml

Lines changed: 1 addition & 1 deletion

@@ -23,7 +23,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: actions/setup-python@v5
         with:
-          python-version: '3.10'
+          python-version: '3.12'
       - name: Get ephy_testing_data current head hash
         # the key depends on the last commit of the repo https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git
         id: vars

pyproject.toml

Lines changed: 3 additions & 3 deletions

@@ -7,7 +7,7 @@ authors = [
 ]
 description = "Python toolkit for analysis, visualization, and comparison of spike sorting output"
 readme = "README.md"
-requires-python = ">=3.9,<3.13"  # Only numpy 2.1 supported on python 3.13 for windows. We need to wait for fix on neo
+requires-python = ">=3.9,<3.13"
 classifiers = [
     "Programming Language :: Python :: 3 :: Only",
     "License :: OSI Approved :: MIT License",
@@ -20,11 +20,11 @@ classifiers = [


 dependencies = [
-    "numpy>=1.20, <2.0",  # 1.20 np.ptp, 1.26 might be necessary for avoiding pickling errors when numpy >2.0
+    "numpy>=1.20",
     "threadpoolctl>=3.0.0",
     "tqdm",
     "zarr>=2.18,<3",
-    "neo>=0.13.0",
+    "neo>=0.14.0",
     "probeinterface>=0.2.23",
     "packaging",
 ]
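
The dependency changes are the heart of the commit: the "<2.0" cap on numpy is dropped, and neo is bumped to 0.14.0, which (per the removed comment on requires-python) was the fix the pin had been waiting for. Below is a minimal sketch, not part of this commit or of SpikeInterface, of how downstream code can branch on the installed NumPy major version at runtime; it only uses the standard library and the packaging dependency already declared above.

# Minimal illustrative sketch (not from this repository): detect the installed
# NumPy major version and pick a code path accordingly.
from importlib.metadata import version

from packaging.version import Version

numpy_version = Version(version("numpy"))

if numpy_version >= Version("2.0.0"):
    # NumPy 2.x code path (stricter scalar/dtype handling, new ABI)
    print("Running against NumPy 2.x")
else:
    # Legacy NumPy 1.x code path
    print("Running against NumPy 1.x")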

src/spikeinterface/core/waveform_tools.py

Lines changed: 5 additions & 5 deletions
@@ -170,7 +170,7 @@ def allocate_waveforms_buffers(
         Dictionary to "construct" array in workers process (memmap file or sharemem)
     """

-    nsamples = nbefore + nafter
+    n_samples = nbefore + nafter

     dtype = np.dtype(dtype)
     if mode == "shared_memory":
@@ -187,11 +187,11 @@ def allocate_waveforms_buffers(
             num_chans = recording.get_num_channels()
         else:
             num_chans = np.sum(sparsity_mask[unit_ind, :])
-        shape = (n_spikes, nsamples, num_chans)
+        shape = (int(n_spikes), int(n_samples), int(num_chans))

        if mode == "memmap":
            filename = str(folder / f"waveforms_{unit_id}.npy")
-            arr = np.lib.format.open_memmap(filename, mode="w+", dtype=dtype, shape=shape)
+            arr = np.lib.format.open_memmap(filename, mode="w+", dtype=dtype.str, shape=shape)
            waveforms_by_units[unit_id] = arr
            arrays_info[unit_id] = filename
        elif mode == "shared_memory":
@@ -476,7 +476,7 @@ def extract_waveforms_to_single_buffer(
        Optionally return in case of shared_memory if copy=False.
        Dictionary to "construct" array in workers process (memmap file or sharemem info)
    """
-    nsamples = nbefore + nafter
+    n_samples = nbefore + nafter

    dtype = np.dtype(dtype)
    if mode == "shared_memory":
@@ -489,7 +489,7 @@ def extract_waveforms_to_single_buffer(
        num_chans = recording.get_num_channels()
    else:
        num_chans = int(max(np.sum(sparsity_mask, axis=1)))  # This is a numpy scalar, so we cast to int
-    shape = (num_spikes, nsamples, num_chans)
+    shape = (int(num_spikes), int(n_samples), int(num_chans))

    if mode == "memmap":
        all_waveforms = np.lib.format.open_memmap(file_path, mode="w+", dtype=dtype, shape=shape)
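
The waveform_tools.py changes follow one pattern: build the buffer shape from plain Python ints rather than NumPy integer scalars, and give open_memmap the dtype string (dtype.str) rather than the dtype object. The buffer metadata is later handed to worker processes, and, as the removed pyproject comment hints, NumPy scalar and dtype objects were a source of pickling trouble around NumPy 2.0. The following standalone sketch illustrates that pattern; it is not code from the repository, and the file name and sizes are made up for illustration.

# Minimal illustrative sketch (not from this repository): keep buffer metadata
# as plain Python types so it serializes cleanly regardless of NumPy version.
import numpy as np

sparsity_mask = np.array([[True, False, True], [True, True, True]])

n_spikes = 10
n_samples = 60
num_chans = np.sum(sparsity_mask[0, :])  # NumPy integer scalar, not a Python int

# Cast every element to a built-in int before storing or sending the shape.
shape = (int(n_spikes), int(n_samples), int(num_chans))

dtype = np.dtype("float32")
# Passing the dtype string ("<f4") instead of the dtype object keeps the
# metadata a plain string as well; open_memmap accepts either form.
arr = np.lib.format.open_memmap("waveforms_unit0.npy", mode="w+", dtype=dtype.str, shape=shape)
arr[:] = 0.0
print(arr.shape, arr.dtype)  # (10, 60, 2) float32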
