diff --git a/doc/changes/devel/12771.apichange.rst b/doc/changes/devel/12771.apichange.rst new file mode 100644 index 00000000000..49110fff558 --- /dev/null +++ b/doc/changes/devel/12771.apichange.rst @@ -0,0 +1,4 @@ +:func:`mne.datasets.fetch_fsaverage` now returns a :class:`python:pathlib.Path` object +rather than a string. Support for string concatenation with plus (``+``) is thus +deprecated and will be removed in 1.9, use the forward-slash ``/`` operator instead, +by `Eric Larson`_. diff --git a/examples/inverse/morph_volume_stc.py b/examples/inverse/morph_volume_stc.py index 144bfd631e3..bccfcdc5b51 100644 --- a/examples/inverse/morph_volume_stc.py +++ b/examples/inverse/morph_volume_stc.py @@ -26,8 +26,6 @@ # Copyright the MNE-Python contributors. # %% -import os - import nibabel as nib from nilearn.plotting import plot_glass_brain @@ -40,15 +38,15 @@ # %% # Setup paths sample_dir_raw = sample.data_path() -sample_dir = os.path.join(sample_dir_raw, "MEG", "sample") -subjects_dir = os.path.join(sample_dir_raw, "subjects") +sample_dir = sample_dir_raw / "MEG" / "sample" +subjects_dir = sample_dir_raw / "subjects" -fname_evoked = os.path.join(sample_dir, "sample_audvis-ave.fif") -fname_inv = os.path.join(sample_dir, "sample_audvis-meg-vol-7-meg-inv.fif") +fname_evoked = sample_dir / "sample_audvis-ave.fif" +fname_inv = sample_dir / "sample_audvis-meg-vol-7-meg-inv.fif" -fname_t1_fsaverage = os.path.join(subjects_dir, "fsaverage", "mri", "brain.mgz") +fname_t1_fsaverage = subjects_dir / "fsaverage" / "mri" / "brain.mgz" fetch_fsaverage(subjects_dir) # ensure fsaverage src exists -fname_src_fsaverage = subjects_dir + "/fsaverage/bem/fsaverage-vol-5-src.fif" +fname_src_fsaverage = subjects_dir / "fsaverage" / "bem" / "fsaverage-vol-5-src.fif" # %% # Compute example data. For reference see :ref:`ex-inverse-volume`. diff --git a/mne/datasets/_fsaverage/base.py b/mne/datasets/_fsaverage/base.py index 2360bcfb60c..d027cd6920b 100644 --- a/mne/datasets/_fsaverage/base.py +++ b/mne/datasets/_fsaverage/base.py @@ -3,12 +3,12 @@ # Copyright the MNE-Python contributors. import os -import os.path as op +from pathlib import Path, PosixPath, WindowsPath -from ...utils import get_subjects_dir, set_config, verbose +from ...utils import get_subjects_dir, set_config, verbose, warn from ..utils import _get_path, _manifest_check_download -FSAVERAGE_MANIFEST_PATH = op.dirname(__file__) +FSAVERAGE_MANIFEST_PATH = Path(__file__).parent @verbose @@ -26,9 +26,12 @@ def fetch_fsaverage(subjects_dir=None, *, verbose=None): Returns ------- - fs_dir : str + fs_dir : Path The fsaverage directory. - (essentially ``subjects_dir + '/fsaverage'``). + (essentially ``subjects_dir / 'fsaverage'``). + + .. versionchanged:: 1.8 + A :class:`pathlib.Path` object is returned instead of a string. 
Notes ----- @@ -65,69 +68,67 @@ def fetch_fsaverage(subjects_dir=None, *, verbose=None): # fid.write('\n'.join(names)) # subjects_dir = _set_montage_coreg_path(subjects_dir) - subjects_dir = op.abspath(op.expanduser(subjects_dir)) - fs_dir = op.join(subjects_dir, "fsaverage") - os.makedirs(fs_dir, exist_ok=True) + subjects_dir = subjects_dir.expanduser().absolute() + fs_dir = subjects_dir / "fsaverage" + fs_dir.mkdir(parents=True, exist_ok=True) _manifest_check_download( - manifest_path=op.join(FSAVERAGE_MANIFEST_PATH, "root.txt"), - destination=op.join(subjects_dir), + manifest_path=FSAVERAGE_MANIFEST_PATH / "root.txt", + destination=subjects_dir, url="https://osf.io/3bxqt/download?version=2", hash_="5133fe92b7b8f03ae19219d5f46e4177", ) _manifest_check_download( - manifest_path=op.join(FSAVERAGE_MANIFEST_PATH, "bem.txt"), - destination=op.join(subjects_dir, "fsaverage"), + manifest_path=FSAVERAGE_MANIFEST_PATH / "bem.txt", + destination=subjects_dir / "fsaverage", url="https://osf.io/7ve8g/download?version=4", hash_="b31509cdcf7908af6a83dc5ee8f49fb1", ) - return fs_dir + return _mne_path(fs_dir) def _get_create_subjects_dir(subjects_dir): subjects_dir = get_subjects_dir(subjects_dir, raise_error=False) if subjects_dir is None: subjects_dir = _get_path(None, "MNE_DATA", "montage coregistration") - subjects_dir = op.join(subjects_dir, "MNE-fsaverage-data") - os.makedirs(subjects_dir, exist_ok=True) - else: - subjects_dir = str(subjects_dir) + subjects_dir = subjects_dir / "MNE-fsaverage-data" + subjects_dir.mkdir(parents=True, exist_ok=True) return subjects_dir def _set_montage_coreg_path(subjects_dir=None): - """Set a subject directory suitable for montage(-only) coregistration. - - Parameters - ---------- - subjects_dir : path-like | None - The path to use as the subjects directory in the MNE-Python - config file. None will use the existing config variable (i.e., - will not change anything), and if it does not exist, will use - ``~/mne_data/MNE-fsaverage-data``. - - Returns - ------- - subjects_dir : str - The subjects directory that was used. - - See Also - -------- - mne.datasets.fetch_fsaverage - mne.get_config - mne.set_config - - Notes - ----- - If you plan to only do EEG-montage based coregistrations with fsaverage - without any MRI warping, this function can facilitate the process. - Essentially it sets the default value for ``subjects_dir`` in MNE - functions to be ``~/mne_data/MNE-fsaverage-data`` (assuming it has - not already been set to some other value). - - .. versionadded:: 0.18 - """ + """Set a subject directory suitable for montage(-only) coregistration.""" subjects_dir = _get_create_subjects_dir(subjects_dir) old_subjects_dir = get_subjects_dir(None, raise_error=False) if old_subjects_dir is None: set_config("SUBJECTS_DIR", subjects_dir) return subjects_dir + + +# Adapted from pathlib.Path.__new__ +def _mne_path(path): + klass = MNEWindowsPath if os.name == "nt" else MNEPosixPath + out = klass(path) + assert isinstance(out, klass) + return out + + +class _PathAdd: + def __add__(self, other): + if isinstance(other, str): + warn( + "data_path functions now return pathlib.Path objects which " + "do not natively support the plus (+) operator, switch to " + "using forward slash (/) instead. 
Support for plus will be " "removed in 1.9.", FutureWarning, ) return f"{str(self)}{other}" raise NotImplementedError + + +class MNEWindowsPath(_PathAdd, WindowsPath):  # noqa: D101 +    pass + + +class MNEPosixPath(_PathAdd, PosixPath):  # noqa: D101 +    pass diff --git a/mne/datasets/_infant/base.py b/mne/datasets/_infant/base.py index 5a57bb4f87c..2c75cf234b1 100644 --- a/mne/datasets/_infant/base.py +++ b/mne/datasets/_infant/base.py @@ -2,8 +2,7 @@ # License: BSD-3-Clause # Copyright the MNE-Python contributors. -import os -import os.path as op +from pathlib import Path from ...utils import _check_option, _validate_type, get_subjects_dir, verbose from ..utils import _manifest_check_download @@ -27,7 +26,7 @@ 75fe37a1bc80ed6793a8abb47681d5ab ANTS7-5Months3T.zip 790f7dba0a264262e6c1c2dfdf216215 ANTS9-0Months3T.zip """ -_MANIFEST_PATH = op.dirname(__file__) +_MANIFEST_PATH = Path(__file__).parent @verbose @@ -80,15 +79,15 @@ def fetch_infant_template(age, subjects_dir=None, *, verbose=None): dash = "-5" if ".5" in age else "-0" subject = f"ANTS{first}{dash}{unit}3T" # Actually get and create the files - subj_dir = subjects_dir / subject - os.makedirs(subj_dir, exist_ok=True) + subject_dir = subjects_dir / subject + subject_dir.mkdir(parents=True, exist_ok=True) # .zip -> hash mapping orig_hashes = dict( line.strip().split()[::-1] for line in _ORIGINAL_HASHES.strip().splitlines() ) _manifest_check_download( - manifest_path=op.join(_MANIFEST_PATH, f"{subject}.txt"), - destination=subj_dir, + manifest_path=_MANIFEST_PATH / f"{subject}.txt", + destination=subject_dir, url=_ORIGINAL_URL.format(subject=subject), hash_=orig_hashes[f"{subject}.zip"], ) diff --git a/mne/datasets/_phantom/base.py b/mne/datasets/_phantom/base.py index 98eaf1cc9bd..911ac54a073 100644 --- a/mne/datasets/_phantom/base.py +++ b/mne/datasets/_phantom/base.py @@ -2,13 +2,12 @@ # License: BSD-3-Clause # Copyright the MNE-Python contributors. -import os -import os.path as op +from pathlib import Path from ...utils import _check_option, _validate_type, get_subjects_dir, verbose from ..utils import _manifest_check_download -PHANTOM_MANIFEST_PATH = op.dirname(__file__) +PHANTOM_MANIFEST_PATH = Path(__file__).parent @verbose @@ -24,7 +23,7 @@ def fetch_phantom(kind, subjects_dir=None, *, verbose=None): Returns ------- - subject_dir : str + subject_dir : pathlib.Path The resulting phantom subject directory. 
See Also @@ -54,9 +53,9 @@ def fetch_phantom(kind, subjects_dir=None, *, verbose=None): subjects_dir = get_subjects_dir(subjects_dir, raise_error=True) subject = f"phantom_{kind}" subject_dir = subjects_dir / subject - os.makedirs(subject_dir, exist_ok=True) + subject_dir.mkdir(parents=True, exist_ok=True) _manifest_check_download( - manifest_path=op.join(PHANTOM_MANIFEST_PATH, f"{subject}.txt"), + manifest_path=PHANTOM_MANIFEST_PATH / f"{subject}.txt", destination=subjects_dir, url=phantoms[kind]["url"], hash_=phantoms[kind]["hash"], diff --git a/mne/datasets/tests/test_datasets.py b/mne/datasets/tests/test_datasets.py index 7ba2795b5f7..3f429ea0f51 100644 --- a/mne/datasets/tests/test_datasets.py +++ b/mne/datasets/tests/test_datasets.py @@ -5,11 +5,12 @@ import shutil import zipfile from functools import partial -from os import path as op +from pathlib import Path import pooch import pytest +import mne.datasets._fsaverage.base from mne import datasets, read_labels_from_annot, write_labels_to_annot from mne.datasets import fetch_dataset, fetch_infant_template, fetch_phantom, testing from mne.datasets._fsaverage.base import _set_montage_coreg_path @@ -61,20 +62,17 @@ def test_datasets_basic(tmp_path, monkeypatch): assert dataset.get_version() is None assert not datasets.has_dataset(dname) print(f"{dname}: {datasets.has_dataset(dname)}") - tempdir = str(tmp_path) # Explicitly test one that isn't preset (given the config) - monkeypatch.setenv("MNE_DATASETS_SAMPLE_PATH", tempdir) + monkeypatch.setenv("MNE_DATASETS_SAMPLE_PATH", str(tmp_path)) dataset = datasets.sample assert str(dataset.data_path(download=False)) == "." assert dataset.get_version() != "" assert dataset.get_version() is None # don't let it read from the config file to get the directory, # force it to look for the default - monkeypatch.setenv("_MNE_FAKE_HOME_DIR", tempdir) + monkeypatch.setenv("_MNE_FAKE_HOME_DIR", str(tmp_path)) monkeypatch.delenv("SUBJECTS_DIR", raising=False) - assert str(datasets.utils._get_path(None, "foo", "bar")) == op.join( - tempdir, "mne_data" - ) + assert datasets.utils._get_path(None, "foo", "bar") == tmp_path / "mne_data" assert get_subjects_dir(None) is None _set_montage_coreg_path() sd = get_subjects_dir() @@ -83,12 +81,22 @@ def test_datasets_basic(tmp_path, monkeypatch): with pytest.raises(FileNotFoundError, match="as specified by MNE_DAT"): testing.data_path(download=False) + def noop(*args, **kwargs): + return + + monkeypatch.setattr(mne.datasets._fsaverage.base, "_manifest_check_download", noop) + sd_2 = datasets.fetch_fsaverage() + assert sd / "fsaverage" == sd_2 + with pytest.warns(FutureWarning, match="switch to using forward slash"): + sd_label_str = sd_2 + f"{os.sep}label" + assert sd_label_str == str(sd_2 / "label") + @requires_good_network def test_downloads(tmp_path, monkeypatch, capsys): """Test dataset URL and version handling.""" # Try actually downloading a dataset - kwargs = dict(path=str(tmp_path), verbose=True) + kwargs = dict(path=tmp_path, verbose=True) # XXX we shouldn't need to disable capsys here, but there's a pytest bug # that we're hitting (https://github.com/pytest-dev/pytest/issues/5997) # now that we use pooch @@ -99,8 +107,8 @@ def test_downloads(tmp_path, monkeypatch, capsys): datasets.utils, "_MODULES_TO_ENSURE_DOWNLOAD_IS_FALSE_IN_TESTS", () ) path = datasets._fake.data_path(update_path=False, **kwargs) - assert op.isdir(path) - assert op.isfile(op.join(path, "bar")) + assert path.is_dir() + assert (path / "bar").is_file() assert not 
datasets.has_dataset("fake") # not in the desired path assert datasets._fake.get_version() is None assert datasets.utils._get_version("fake") is None @@ -173,27 +181,23 @@ def _error_download_2(self, fname, downloader, processor): def test_fetch_parcellations(tmp_path): """Test fetching parcellations.""" pytest.importorskip("nibabel") - this_subjects_dir = str(tmp_path) - os.mkdir(op.join(this_subjects_dir, "fsaverage")) - os.mkdir(op.join(this_subjects_dir, "fsaverage", "label")) - os.mkdir(op.join(this_subjects_dir, "fsaverage", "surf")) + this_subjects_dir = tmp_path + fsaverage_dir = this_subjects_dir / "fsaverage" + (fsaverage_dir / "label").mkdir(parents=True) + (fsaverage_dir / "surf").mkdir() for hemi in ("lh", "rh"): shutil.copyfile( - op.join(subjects_dir, "fsaverage", "surf", f"{hemi}.white"), - op.join(this_subjects_dir, "fsaverage", "surf", f"{hemi}.white"), + subjects_dir / "fsaverage" / "surf" / f"{hemi}.white", + fsaverage_dir / "surf" / f"{hemi}.white", ) # speed up by prenteding we have one of them - with open( - op.join(this_subjects_dir, "fsaverage", "label", "lh.aparc_sub.annot"), "wb" - ): + with open(fsaverage_dir / "label" / "lh.aparc_sub.annot", "wb"): pass datasets.fetch_aparc_sub_parcellation(subjects_dir=this_subjects_dir) with ArgvSetter(("--accept-hcpmmp-license",)): datasets.fetch_hcp_mmp_parcellation(subjects_dir=this_subjects_dir) for hemi in ("lh", "rh"): - assert op.isfile( - op.join(this_subjects_dir, "fsaverage", "label", f"{hemi}.aparc_sub.annot") - ) + assert (fsaverage_dir / "label" / f"{hemi}.aparc_sub.annot").is_file() # test our annot round-trips here kwargs = dict( subject="fsaverage", hemi="both", sort=False, subjects_dir=this_subjects_dir ) @@ -205,9 +209,9 @@ def test_fetch_parcellations(tmp_path): table_name="./left.fsaverage164.label.gii", **kwargs, ) - orig = op.join(this_subjects_dir, "fsaverage", "label", "lh.HCPMMP1.annot") + orig = fsaverage_dir / "label" / "lh.HCPMMP1.annot" first = hashfunc(orig) - new = orig[:-6] + "_round.annot" + new = str(orig)[:-6] + "_round.annot" second = hashfunc(new) assert first == second @@ -216,7 +220,9 @@ def _fake_zip_fetch(url, path, fname, *args, **kwargs): - fname = op.join(path, fname) + path = Path(path) + assert isinstance(fname, str) + fname = path / fname with zipfile.ZipFile(fname, "w") as zipf: with zipf.open("foo/", "w"): pass @@ -229,20 +235,20 @@ def _fake_zip_fetch(url, path, fname, *args, **kwargs): def test_manifest_check_download(tmp_path, n_have, monkeypatch): """Test our manifest downloader.""" monkeypatch.setattr(pooch, "retrieve", _fake_zip_fetch) - destination = op.join(str(tmp_path), "empty") - manifest_path = op.join(str(tmp_path), "manifest.txt") + destination = tmp_path / "empty" + manifest_path = tmp_path / "manifest.txt" with open(manifest_path, "w") as fid: for fname in _zip_fnames: fid.write(f"{fname}\n") assert n_have in range(len(_zip_fnames) + 1) - assert not op.isdir(destination) + assert not destination.is_dir() if n_have > 0: - os.makedirs(op.join(destination, "foo")) - assert op.isdir(op.join(destination, "foo")) + (destination / "foo").mkdir(parents=True) + assert (destination / "foo").is_dir() for fname in _zip_fnames: - assert not op.isfile(op.join(destination, fname)) + assert not (destination / fname).is_file() for fname in _zip_fnames[:n_have]: - with open(op.join(destination, fname), "w"): + with open(destination / fname, "w"): pass with catch_logging() as log: with use_log_level(True): @@ -257,9 +263,9 @@ def 
test_manifest_check_download(tmp_path, n_have, monkeypatch): assert want in log else: assert want not in log - assert op.isdir(destination) + assert destination.is_dir() for fname in _zip_fnames: - assert op.isfile(op.join(destination, fname)) + assert (destination / fname).is_file() def _fake_mcd(manifest_path, destination, url, hash_, name=None, fake_files=False): @@ -267,7 +273,7 @@ def _fake_mcd(manifest_path, destination, url, hash_, name=None, fake_files=Fals name = url.split("/")[-1].split(".")[0] assert name in url assert name in str(destination) - assert name in manifest_path + assert name in str(manifest_path) assert len(hash_) == 32 if fake_files: with open(manifest_path) as fid: @@ -275,8 +281,8 @@ def _fake_mcd(manifest_path, destination, url, hash_, name=None, fake_files=Fals path = path.strip() if not path: continue - fname = op.join(destination, path) - os.makedirs(op.dirname(fname), exist_ok=True) + fname = destination / path + fname.parent.mkdir(parents=True, exist_ok=True) with open(fname, "wb"): pass @@ -301,7 +307,7 @@ def test_phantom(tmp_path, monkeypatch): partial(_fake_mcd, name="phantom_otaniemi", fake_files=True), ) fetch_phantom("otaniemi", subjects_dir=tmp_path) - assert op.isfile(tmp_path / "phantom_otaniemi" / "mri" / "T1.mgz") + assert (tmp_path / "phantom_otaniemi" / "mri" / "T1.mgz").is_file() @requires_good_network @@ -311,7 +317,7 @@ def test_fetch_uncompressed_file(tmp_path): dataset_name="license", url="https://raw.githubusercontent.com/mne-tools/mne-python/main/LICENSE.txt", archive_name="LICENSE.foo", - folder_name=op.join(tmp_path, "foo"), + folder_name=tmp_path / "foo", hash=None, ) fetch_dataset(dataset_dict, path=None, force_update=True) diff --git a/mne/datasets/utils.py b/mne/datasets/utils.py index 898fff6fde1..75614d0df2f 100644 --- a/mne/datasets/utils.py +++ b/mne/datasets/utils.py @@ -121,11 +121,11 @@ def _get_path(path, key, name): # 4. 
~/mne_data (but use a fake home during testing so we don't # unnecessarily create ~/mne_data) logger.info(f"Using default location ~/mne_data for {name}...") - path = op.join(os.getenv("_MNE_FAKE_HOME_DIR", op.expanduser("~")), "mne_data") - if not op.exists(path): - logger.info("Creating ~/mne_data") + path = Path(os.getenv("_MNE_FAKE_HOME_DIR", "~")).expanduser() / "mne_data" + if not path.is_dir(): + logger.info(f"Creating {path}") try: - os.mkdir(path) + path.mkdir() except OSError: raise OSError( "User does not have write permissions " @@ -134,7 +134,7 @@ def _get_path(path, key, name): "write permissions, for ex:data_path" "('/home/xyz/me2/')" ) - return Path(path).expanduser() + return path def _do_path_update(path, update_path, key, name): @@ -768,27 +768,27 @@ def _manifest_check_download(manifest_path, destination, url, hash_): with open(manifest_path) as fid: names = [name.strip() for name in fid.readlines()] - manifest_path = op.basename(manifest_path) need = list() for name in names: - if not op.isfile(op.join(destination, name)): + if not (destination / name).is_file(): need.append(name) logger.info( "%d file%s missing from %s in %s" - % (len(need), _pl(need), manifest_path, destination) + % (len(need), _pl(need), manifest_path.name, destination) ) if len(need) > 0: downloader = pooch.HTTPDownloader(**_downloader_params()) with tempfile.TemporaryDirectory() as path: logger.info("Downloading missing files remotely") - fname_path = op.join(path, "temp.zip") + path = Path(path) + fname_path = path / "temp.zip" pooch.retrieve( url=url, known_hash=f"md5:{hash_}", path=path, downloader=downloader, - fname=op.basename(fname_path), + fname=fname_path.name, ) logger.info(f"Extracting missing file{_pl(need)}") diff --git a/mne/utils/config.py b/mne/utils/config.py index 7fe36f6f353..4bd25362fc2 100644 --- a/mne/utils/config.py +++ b/mne/utils/config.py @@ -470,6 +470,8 @@ def get_subjects_dir(subjects_dir=None, raise_error=False): if subjects_dir is None: subjects_dir = get_config("SUBJECTS_DIR", raise_error=raise_error) from_config = True + if subjects_dir is not None: + subjects_dir = Path(subjects_dir) if subjects_dir is not None: # Emit a nice error or warning if their config is bad try: diff --git a/tutorials/forward/35_eeg_no_mri.py b/tutorials/forward/35_eeg_no_mri.py index 81a96ed24b3..a2deaa069b6 100644 --- a/tutorials/forward/35_eeg_no_mri.py +++ b/tutorials/forward/35_eeg_no_mri.py @@ -24,8 +24,6 @@ # License: BSD-3-Clause # Copyright the MNE-Python contributors. 
-import os.path as op - import numpy as np import mne @@ -33,13 +31,13 @@ # Download fsaverage files fs_dir = fetch_fsaverage(verbose=True) -subjects_dir = op.dirname(fs_dir) +subjects_dir = fs_dir.parent # The files live in: subject = "fsaverage" trans = "fsaverage" # MNE has a built-in fsaverage transformation -src = op.join(fs_dir, "bem", "fsaverage-ico-5-src.fif") -bem = op.join(fs_dir, "bem", "fsaverage-5120-5120-5120-bem-sol.fif") +src = fs_dir / "bem" / "fsaverage-ico-5-src.fif" +bem = fs_dir / "bem" / "fsaverage-5120-5120-5120-bem-sol.fif" ############################################################################## # Load the data @@ -110,7 +108,7 @@ # It comes with several helpful built-in files, including a 10-20 montage # in the MRI coordinate frame, which can be used to compute the # MRI<->head transform ``trans``: -fname_1020 = op.join(subjects_dir, subject, "montages", "10-20-montage.fif") +fname_1020 = subjects_dir / subject / "montages" / "10-20-montage.fif" mon = mne.channels.read_dig_fif(fname_1020) mon.rename_channels({f"EEG{ii:03d}": ch_name for ii, ch_name in enumerate(ch_names, 1)}) trans = mne.channels.compute_native_head_t(mon) @@ -120,11 +118,11 @@ ############################################################################## # There are also BEM and source spaces: -bem_dir = op.join(subjects_dir, subject, "bem") -fname_src = op.join(bem_dir, f"{subject}-oct-6-src.fif") +bem_dir = subjects_dir / subject / "bem" +fname_src = bem_dir / f"{subject}-oct-6-src.fif" src = mne.read_source_spaces(fname_src) print(src) -fname_bem = op.join(bem_dir, f"{subject}-5120-5120-5120-bem-sol.fif") +fname_bem = bem_dir / f"{subject}-5120-5120-5120-bem-sol.fif" bem = mne.read_bem_solution(fname_bem) ##############################################################################
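For reference, a minimal usage sketch of the Path-based return value this change introduces (it assumes fsaverage is, or can be, fetched to the default subjects directory; the file names are the ones used in the tutorial above):

    from mne.datasets import fetch_fsaverage

    fs_dir = fetch_fsaverage(verbose=True)   # now a pathlib.Path, not a str
    subjects_dir = fs_dir.parent             # replaces os.path.dirname(fs_dir)
    src = fs_dir / "bem" / "fsaverage-ico-5-src.fif"   # join paths with "/"
    # fs_dir + "/bem" still works for now, but emits a FutureWarning and will
    # be removed in 1.9; prefer the "/" operator shown above.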