2 changes: 0 additions & 2 deletions MANIFEST.in
@@ -22,8 +22,6 @@ include mne/datasets/_fsaverage/root.txt
include mne/datasets/_fsaverage/bem.txt
include mne/datasets/_infant/*.txt
include mne/datasets/_phantom/*.txt
include mne/data/dataset_checksums.txt
include mne/data/eegbci_checksums.txt

recursive-include mne/channels/data/layouts *
recursive-include mne/channels/data/montages *
6 changes: 0 additions & 6 deletions doc/changes/latest.inc
@@ -173,12 +173,8 @@ Enhancements

- Add :func:`mne.gui.locate_ieeg` to locate intracranial electrode contacts from a CT, an MRI (with Freesurfer ``recon-all``) and the channel names from an :class:`mne.Info` object (:gh:`9586` by `Alex Rockhill`_)

- All data fetching code now relies on ``pooch``, which is an added optional requirement for dataset fetchers (:gh:`9742` by `Adam Li`_ and `Daniel McCloy`_)

- Add phantom FreeSurfer subject fetcher :func:`mne.datasets.fetch_phantom` to allow fetching the Otaniemi phantom (:gh:`9796` by `Eric Larson`_)

- Add :func:`mne.datasets.fetch_dataset` to allow packages to fetch datasets in a similar fashion to MNE (:gh:`9763` by `Adam Li`_)

- Add full ECoG dataset to MNE-misc-data and demonstrate its use in :ref:`ex-electrode-pos-2d` and :ref:`tut-ieeg-localize` (:gh:`9784` by `Alex Rockhill`_)

- Add options ``tol`` and ``accuracy`` to :func:`mne.fit_dipole` to control optimization (:gh:`9810` by `Eric Larson`_)
@@ -304,5 +300,3 @@ API changes
- The ``mne.Report.sections`` attribute has been deprecated in favor of ``mne.Report.tags``

- The ``mne.Report.fnames`` attribute has been deprecated without replacement

- The output folder name for HF_SEF datasets is now ``hf_sef`` instead of ``HF_SEF`` (:gh:`9763` by `Adam Li`_)
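As a quick illustration of the ``tol`` and ``accuracy`` options mentioned in the :func:`mne.fit_dipole` enhancement entry above, a minimal sketch; the sample-dataset file names and parameter values here are assumptions for illustration, not taken from this diff:

import os.path as op

import mne

# Hedged sketch: fit a single dipole with explicit optimizer settings.
# Assumes the MNE "sample" dataset is available (it is fetched on demand).
data_path = str(mne.datasets.sample.data_path())
fname_ave = op.join(data_path, 'MEG', 'sample', 'sample_audvis-ave.fif')
fname_cov = op.join(data_path, 'MEG', 'sample', 'sample_audvis-cov.fif')
fname_bem = op.join(data_path, 'subjects', 'sample', 'bem',
                    'sample-5120-bem-sol.fif')
fname_trans = op.join(data_path, 'MEG', 'sample',
                      'sample_audvis_raw-trans.fif')

evoked = mne.read_evokeds(fname_ave, condition='Left Auditory',
                          baseline=(None, 0))
evoked.pick('meg').crop(0.08, 0.09)  # short window around the auditory peak

# ``tol`` and ``accuracy`` are the newly exposed options; these values are
# illustrative, not recommended defaults.
dip, residual = mne.fit_dipole(evoked, fname_cov, fname_bem, fname_trans,
                               tol=1e-5, accuracy='accurate')
print(dip)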
12 changes: 6 additions & 6 deletions doc/conf.py
@@ -158,7 +158,6 @@
    'eeglabio': ('https://eeglabio.readthedocs.io/en/latest', None),
    'dipy': ('https://dipy.org/documentation/1.4.0./',
             'https://dipy.org/documentation/1.4.0./objects.inv/'),
    'pooch': ('https://www.fatiando.org/pooch/latest/', None),
}


@@ -316,14 +315,15 @@ def __call__(self, gallery_conf, fname):
        plt.ioff()
        plt.rcParams['animation.embed_limit'] = 30.
        gc.collect()
        _assert_no_instances(Brain, 'Brain')  # calls gc.collect()
        when = 'mne/conf.py:Resetter.__call__'
        _assert_no_instances(Brain, when)  # calls gc.collect()
        if Plotter is not None:
            _assert_no_instances(Plotter, 'Plotter')
            _assert_no_instances(Plotter, when)
        if BackgroundPlotter is not None:
            _assert_no_instances(BackgroundPlotter, 'BackgroundPlotter')
            _assert_no_instances(BackgroundPlotter, when)
        if vtkPolyData is not None:
            _assert_no_instances(vtkPolyData, 'vtkPolyData')
        _assert_no_instances(_Renderer, '_Renderer')
            _assert_no_instances(vtkPolyData, when)
        _assert_no_instances(_Renderer, when)
        # This will overwrite some Sphinx printing but it's useful
        # for memory timestamps
        if os.getenv('SG_STAMP_STARTS', '').lower() == 'true':
3 changes: 1 addition & 2 deletions doc/datasets.rst
@@ -13,7 +13,6 @@ Datasets
.. autosummary::
   :toctree: generated/

   fetch_dataset
   brainstorm.bst_auditory.data_path
   brainstorm.bst_resting.data_path
   brainstorm.bst_raw.data_path
@@ -42,4 +41,4 @@
   refmeg_noise.data_path
   ssvep.data_path
   erp_core.data_path
   epilepsy_ecog.data_path
   epilepsy_ecog.data_path
1 change: 0 additions & 1 deletion environment.yml
@@ -37,6 +37,5 @@ dependencies:
  - mne
  - mffpy>=0.5.7
  - ipywidgets
  - pooch
  - pip:
    - ipyvtklink
3 changes: 1 addition & 2 deletions mne/conftest.py
@@ -617,8 +617,7 @@ def _fail(*args, **kwargs):
@pytest.fixture(scope='function')
def download_is_error(monkeypatch):
"""Prevent downloading by raising an error when it's attempted."""
import pooch
monkeypatch.setattr(pooch, 'retrieve', _fail)
monkeypatch.setattr(mne.utils.fetching, '_get_http', _fail)


@pytest.fixture()
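For context, a minimal sketch of how a test could use the ``download_is_error`` fixture patched above; the test name and the broad exception check are assumptions, since the exact error raised by ``_fail`` is not shown in this diff:

import pytest

import mne


def test_no_download_attempted(download_is_error, tmp_path):
    # The patched downloader (``_fail``) raises as soon as a download is
    # attempted, so fetching into an empty directory must error out.
    with pytest.raises(Exception):
        mne.datasets.sample.data_path(path=str(tmp_path), download=True,
                                      update_path=False)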
3,058 changes: 0 additions & 3,058 deletions mne/data/eegbci_checksums.txt

This file was deleted.

4 changes: 1 addition & 3 deletions mne/datasets/__init__.py
@@ -26,7 +26,6 @@
from . import ssvep
from . import erp_core
from . import epilepsy_ecog
from ._fetch import fetch_dataset
from .utils import (_download_all_example_data, fetch_hcp_mmp_parcellation,
                    fetch_aparc_sub_parcellation)
from ._fsaverage.base import fetch_fsaverage
@@ -39,6 +38,5 @@
    'fetch_hcp_mmp_parcellation', 'fieldtrip_cmc', 'hf_sef', 'kiloword',
    'misc', 'mtrf', 'multimodal', 'opm', 'phantom_4dbti', 'sample',
    'sleep_physionet', 'somato', 'spm_face', 'ssvep', 'testing',
    'visual_92_categories', 'limo', 'erp_core', 'epilepsy_ecog',
    'fetch_dataset', 'fetch_phantom',
    'visual_92_categories', 'limo', 'erp_core', 'epilepsy_ecog'
]
9 changes: 4 additions & 5 deletions mne/datasets/_fake/_fake.py
@@ -4,17 +4,16 @@
# License: BSD Style.

from ...utils import verbose
from ..utils import (_data_path_doc, _download_mne_dataset,
from ..utils import (_data_path, _data_path_doc,
                     _get_version, _version_doc)


@verbose
def data_path(path=None, force_update=False, update_path=False,
              download=True, verbose=None):  # noqa: D103
    return _download_mne_dataset(
        name='fake', processor='untar', path=path,
        force_update=force_update, update_path=update_path,
        download=download)
    return _data_path(path=path, force_update=force_update,
                      update_path=update_path, name='fake',
                      download=download)


data_path.__doc__ = _data_path_doc.format(name='fake',
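Both the old and new private helper above back the same public calling convention. A minimal sketch of that convention, using the real ``sample`` dataset as a stand-in for the test-only ``fake`` one (the call below will download data if it is not already present):

import mne

# Hedged sketch: the ``data_path()`` / ``get_version()`` pattern exposed by
# MNE dataset fetchers, whichever private helper implements it.
path = mne.datasets.sample.data_path(download=True)  # fetch if missing
print(path)  # local directory containing the extracted dataset
print(mne.datasets.sample.get_version())  # version of the local copy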