diff --git a/doc/changes/latest.inc b/doc/changes/latest.inc
index b0de3f112a0..a39172bc546 100644
--- a/doc/changes/latest.inc
+++ b/doc/changes/latest.inc
@@ -73,7 +73,9 @@ Enhancements

 - Show multiple colors and linestyles for excluded components with :class:`mne.Evoked` in :meth:`mne.preprocessing.ICA.plot_sources` (:gh:`9444` by `Martin Schulz`_)

-- Added tutorial for how to processes image (CT and MR) files in order to localize electrode contacts for intracranial recordings :ref:`tut-ieeg-localize` (:gh`9484` by `Alex Rockhill`_)
+- Add functions for aligning MRI and CT data `mne.transforms.compute_volume_registration` and `mne.transforms.apply_volume_registration` (:gh:`9503` by `Alex Rockhill`_ and `Eric Larson`_)
+
+- Add tutorial for how to process image (CT and MR) files in order to localize electrode contacts for intracranial recordings :ref:`tut-ieeg-localize` (:gh:`9484` by `Alex Rockhill`_)

 - Add support for colormap normalization in :func:`mne.viz.plot_topomap` (:gh:`9468` by `Clemens Brunner`_)

@@ -95,8 +97,6 @@ Bugs

 - Fix bug with `mne.io.read_raw_fieldtrip` and `mne.read_epochs_fieldtrip` where channel positions were not set properly (:gh:`9447` by `Eric Larson`_)

-- Fix bug with `mne.Epochs.plot` where event lines were misplaced if epochs were overlapping in time (:gh:`9505` by `Daniel McCloy`_)
-
 - :func:`mne.concatenate_raws` now raises an exception if ``raw.info['dev_head_t']`` differs between files. This behavior can be controlled using the new ``on_mismatch`` parameter (:gh:`9438` by `Richard Höchenberger`_)

 - Fixed bug in :meth:`mne.Epochs.drop_bad` where subsequent rejections failed if they only specified thresholds for a subset of the channel types used in a previous rejection (:gh:`9485` by `Richard Höchenberger`_).

@@ -111,4 +111,4 @@ Bugs
 API changes
 ~~~~~~~~~~~

-- Nothing yet
+- In `mne.compute_source_morph`, the ``niter_affine`` and ``niter_sdr`` parameters have been replaced by ``niter`` and ``pipeline`` parameters for more consistent and finer-grained control of registration/warping steps and iteration (:gh:`9505` by `Alex Rockhill`_ and `Eric Larson`_)
\ No newline at end of file
diff --git a/doc/mri.rst b/doc/mri.rst
index 109b4d9fbe1..cd0e625828d 100644
--- a/doc/mri.rst
+++ b/doc/mri.rst
@@ -18,9 +18,11 @@ Step by step instructions for using :func:`gui.coregistration`:
    gui.coregistration
    gui.fiducials
    create_default_subject
-   marching_cubes
    scale_mri
    scale_bem
    scale_labels
   scale_source_space
+   surface.marching_cubes
+   transforms.apply_volume_registration
+   transforms.compute_volume_registration
    voxel_neighbors
diff --git a/mne/__init__.py b/mne/__init__.py
index c102536f3e0..0d28afe9d4b 100644
--- a/mne/__init__.py
+++ b/mne/__init__.py
@@ -63,7 +63,7 @@
                               extract_label_time_course, stc_near_sensors)
 from .surface import (read_surface, write_surface, decimate_surface, read_tri,
                       get_head_surf, get_meg_helmet_surf, dig_mri_distances,
-                      marching_cubes, voxel_neighbors)
+                      voxel_neighbors)
 from .morph_map import read_morph_map
 from .morph import (SourceMorph, read_source_morph, grade_to_vertices,
                     compute_source_morph)
diff --git a/mne/datasets/utils.py b/mne/datasets/utils.py
index 53ca2a83df8..468e46dcf5f 100644
--- a/mne/datasets/utils.py
+++ b/mne/datasets/utils.py
@@ -254,7 +254,7 @@ def _data_path(path=None, force_update=False, update_path=True, download=True,
     path = _get_path(path, key, name)
     # To update the testing or misc dataset, push commits, then make a new
     # release on GitHub.
Then update the "releases" variable: - releases = dict(testing='0.121', misc='0.11') + releases = dict(testing='0.121', misc='0.13') # And also update the "md5_hashes['testing']" variable below. # To update any other dataset, update the data archive itself (upload # an updated version) and update the md5 hash. @@ -345,7 +345,7 @@ def _data_path(path=None, force_update=False, update_path=True, download=True, bst_raw='fa2efaaec3f3d462b319bc24898f440c', bst_resting='70fc7bf9c3b97c4f2eab6260ee4a0430'), fake='3194e9f7b46039bb050a74f3e1ae9908', - misc='4c07fe4e3a068301417362e835c9af59', + misc='ab4be0b325f5896a28165ad177bda338', sample='12b75d1cb7df9dfb4ad73ed82f61094f', somato='32fd2f6c8c7eb0784a1de6435273c48b', spm='9f43f67150e3b694b523a21eb929ea75', diff --git a/mne/defaults.py b/mne/defaults.py index 38237176a42..f3dcb655f91 100644 --- a/mne/defaults.py +++ b/mne/defaults.py @@ -99,7 +99,14 @@ alpha=None, resolution=1., surface_alpha=None, blending='mip', silhouette_alpha=None, silhouette_linewidth=2.), prefixes={'': 1e0, 'd': 1e1, 'c': 1e2, 'm': 1e3, 'µ': 1e6, 'u': 1e6, - 'n': 1e9, 'p': 1e12, 'f': 1e15} + 'n': 1e9, 'p': 1e12, 'f': 1e15}, + transform_zooms=dict( + translation=None, rigid=None, affine=None, sdr=None), + transform_niter=dict( + translation=(100, 100, 10), + rigid=(100, 100, 10), + affine=(100, 100, 10), + sdr=(5, 5, 3)), ) diff --git a/mne/morph.py b/mne/morph.py index 8d52a6d52a6..1ed7651cb93 100644 --- a/mne/morph.py +++ b/mne/morph.py @@ -17,10 +17,9 @@ _get_ico_tris) from .source_space import SourceSpaces, _ensure_src, _grid_interp from .surface import mesh_edges, read_surface, _compute_nearest -from .transforms import _angle_between_quats, rot_to_quat from .utils import (logger, verbose, check_version, get_subjects_dir, warn as warn_, fill_doc, _check_option, _validate_type, - BunchConst, wrapped_stdout, _check_fname, warn, + BunchConst, _check_fname, warn, _ensure_int, ProgressBar, use_log_level) from .externals.h5io import read_hdf5, write_hdf5 @@ -1006,100 +1005,20 @@ def _interpolate_data(stc, morph, mri_resolution, mri_space, output): ############################################################################### # Morph for VolSourceEstimate -def _compute_r2(a, b): - return 100 * (a.ravel() @ b.ravel()) / \ - (np.linalg.norm(a) * np.linalg.norm(b)) - - def _compute_morph_sdr(mri_from, mri_to, niter_affine, niter_sdr, zooms): """Get a matrix that morphs data from one subject to another.""" - with np.testing.suppress_warnings(): - from dipy.align import imaffine, imwarp, metrics, transforms - from dipy.align.reslice import reslice - - logger.info('Computing nonlinear Symmetric Diffeomorphic Registration...') - - # reslice mri_from to zooms - mri_from_orig = mri_from - mri_from, mri_from_affine = reslice( - _get_img_fdata(mri_from_orig), mri_from_orig.affine, - mri_from_orig.header.get_zooms()[:3], zooms) - - # reslice mri_to to zooms - mri_to, affine = reslice( - _get_img_fdata(mri_to), mri_to.affine, - mri_to.header.get_zooms()[:3], zooms) - - mri_to /= mri_to.max() - mri_from /= mri_from.max() # normalize - - # compute center of mass - c_of_mass = imaffine.transform_centers_of_mass( - mri_to, affine, mri_from, mri_from_affine) - - # set up Affine Registration - affreg = imaffine.AffineRegistration( - metric=imaffine.MutualInformationMetric(nbins=32), - level_iters=list(niter_affine), - sigmas=[3.0, 1.0, 0.0], - factors=[4, 2, 1]) - - # translation - logger.info('Optimizing translation:') - with wrapped_stdout(indent=' ', cull_newlines=True): - translation = 
affreg.optimize( - mri_to, mri_from, transforms.TranslationTransform3D(), None, - affine, mri_from_affine, starting_affine=c_of_mass.affine) - mri_from_to = translation.transform(mri_from) - dist = np.linalg.norm(translation.affine[:3, 3]) - logger.info(f' Translation: {dist:6.1f} mm') - logger.info(f' R²: {_compute_r2(mri_to, mri_from_to):6.1f}%') - - # rigid body transform (translation + rotation) - logger.info('Optimizing rigid-body:') - with wrapped_stdout(indent=' ', cull_newlines=True): - rigid = affreg.optimize( - mri_to, mri_from, transforms.RigidTransform3D(), None, - affine, mri_from_affine, starting_affine=translation.affine) - mri_from_to = rigid.transform(mri_from) - dist = np.linalg.norm(rigid.affine[:3, 3]) - angle = np.rad2deg(_angle_between_quats( - np.zeros(3), rot_to_quat(rigid.affine[:3, :3]))) - - logger.info(f' Translation: {dist:6.1f} mm') - logger.info(f' Rotation: {angle:6.1f}°') - logger.info(f' R²: {_compute_r2(mri_to, mri_from_to):6.1f}%') - - # affine transform (translation + rotation + scaling) - logger.info('Optimizing full affine:') - with wrapped_stdout(indent=' ', cull_newlines=True): - pre_affine = affreg.optimize( - mri_to, mri_from, transforms.AffineTransform3D(), None, - affine, mri_from_affine, starting_affine=rigid.affine) - mri_from_to = pre_affine.transform(mri_from) - logger.info( - f' R²: {_compute_r2(mri_to, mri_from_to):6.1f}%') - - # SDR - shape = tuple(pre_affine.domain_shape) - if len(niter_sdr): - sdr = imwarp.SymmetricDiffeomorphicRegistration( - metrics.CCMetric(3), list(niter_sdr)) - logger.info('Optimizing SDR:') - with wrapped_stdout(indent=' ', cull_newlines=True): - sdr_morph = sdr.optimize(mri_to, pre_affine.transform(mri_from)) - assert shape == tuple(sdr_morph.domain_shape) # should be tuple of int - mri_from_to = sdr_morph.transform(mri_from_to) - logger.info( - f' R²: {_compute_r2(mri_to, mri_from_to):6.1f}%') - else: - sdr_morph = None - - _debug_img(mri_from_orig.dataobj, mri_from_orig.affine, 'From') - _debug_img(mri_from, affine, 'From-reslice') - _debug_img(mri_from_to, affine, 'From-reslice') - _debug_img(mri_to, affine, 'To-reslice') - return shape, zooms, affine, pre_affine, sdr_morph + from .transforms import _compute_volume_registration + from dipy.align.imaffine import AffineMap + pipeline = 'all' if niter_sdr else 'affines' + niter = dict(translation=niter_affine, rigid=niter_affine, + affine=niter_affine, + sdr=niter_sdr if niter_sdr else (1,)) + pre_affine, sdr_morph, to_shape, to_affine, from_shape, from_affine = \ + _compute_volume_registration( + mri_from, mri_to, zooms=zooms, niter=niter, pipeline=pipeline) + pre_affine = AffineMap( + pre_affine, to_shape, to_affine, from_shape, from_affine) + return to_shape, zooms, to_affine, pre_affine, sdr_morph def _compute_morph_matrix(subject_from, subject_to, vertices_from, vertices_to, diff --git a/mne/tests/test_morph.py b/mne/tests/test_morph.py index d91d81a6c5b..345fa227e3c 100644 --- a/mne/tests/test_morph.py +++ b/mne/tests/test_morph.py @@ -467,11 +467,11 @@ def test_volume_source_morph_basic(tmpdir): @testing.requires_testing_data @pytest.mark.parametrize( 'subject_from, subject_to, lower, upper, dtype, morph_mat', [ - ('sample', 'fsaverage', 5.9, 6.1, float, False), + ('sample', 'fsaverage', 10.0, 10.4, float, False), ('fsaverage', 'fsaverage', 0., 0.1, float, False), ('sample', 'sample', 0., 0.1, complex, False), ('sample', 'sample', 0., 0.1, float, True), # morph_mat - ('sample', 'fsaverage', 10, 12, float, True), # morph_mat + ('sample', 'fsaverage', 7.0, 
7.4, float, True), # morph_mat ]) def test_volume_source_morph_round_trip( tmpdir, subject_from, subject_to, lower, upper, dtype, morph_mat, @@ -554,7 +554,7 @@ def test_volume_source_morph_round_trip( # check that power is more or less preserved (labelizing messes with this) if morph_mat: if subject_to == 'fsaverage': - limits = (18, 18.5) + limits = (14.0, 14.2) else: limits = (7, 7.5) else: diff --git a/mne/tests/test_source_estimate.py b/mne/tests/test_source_estimate.py index 23a2c3b7f60..1f3d6899339 100644 --- a/mne/tests/test_source_estimate.py +++ b/mne/tests/test_source_estimate.py @@ -1803,7 +1803,7 @@ def test_scale_morph_labels(kind, scale, monkeypatch, tmpdir): min_, max_ = 0.72, 0.75 else: # min_, max_ = 0.84, 0.855 # zooms='auto' values - min_, max_ = 0.61, 0.62 + min_, max_ = 0.61, 0.63 assert min_ < corr <= max_, scale else: assert_allclose( diff --git a/mne/tests/test_transforms.py b/mne/tests/test_transforms.py index 3668c219220..265b3ba7679 100644 --- a/mne/tests/test_transforms.py +++ b/mne/tests/test_transforms.py @@ -8,6 +8,7 @@ import mne from mne.datasets import testing +from mne.fixes import _get_img_fdata from mne import read_trans, write_trans from mne.io import read_info from mne.transforms import (invert_transform, _get_trans, @@ -21,12 +22,15 @@ rotation3d_align_z_axis, _read_fs_xfm, _write_fs_xfm, _quat_real, _fit_matched_points, _quat_to_euler, _euler_to_quat, - _quat_to_affine) + _quat_to_affine, _compute_r2) +from mne.utils import requires_nibabel, requires_dipy data_path = testing.data_path(download=False) fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-trans.fif') fname_eve = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_raw-eve.fif') +subjects_dir = op.join(data_path, 'subjects') +fname_t1 = op.join(subjects_dir, 'fsaverage', 'mri', 'T1.mgz') base_dir = op.join(op.dirname(__file__), '..', 'io', 'tests', 'data') fname_trans = op.join(base_dir, 'sample-audvis-raw-trans.txt') @@ -507,3 +511,29 @@ def test_euler(quats): quat_rot = quat_to_rot(quats) euler_rot = np.array([rotation(*e)[:3, :3] for e in euler]) assert_allclose(quat_rot, euler_rot, atol=1e-14) + + +@requires_nibabel() +@requires_dipy() +@pytest.mark.slowtest +@testing.requires_testing_data +def test_volume_registration(): + """Test volume registration.""" + import nibabel as nib + from dipy.align import resample + T1 = nib.load(fname_t1) + affine = np.eye(4) + affine[0, 3] = 10 + T1_resampled = resample(moving=T1.get_fdata(), + static=T1.get_fdata(), + moving_affine=T1.affine, + static_affine=T1.affine, + between_affine=np.linalg.inv(affine)) + for pipeline in ('rigids', ('translation', 'sdr')): + reg_affine, sdr_morph = mne.transforms.compute_volume_registration( + T1_resampled, T1, pipeline=pipeline, zooms=10, niter=[5]) + assert_allclose(affine, reg_affine, atol=0.25) + T1_aligned = mne.transforms.apply_volume_registration( + T1_resampled, T1, reg_affine, sdr_morph) + r2 = _compute_r2(_get_img_fdata(T1_aligned), _get_img_fdata(T1)) + assert 99.9 < r2 diff --git a/mne/transforms.py b/mne/transforms.py index 653e4cc6d10..a94ffa16df1 100644 --- a/mne/transforms.py +++ b/mne/transforms.py @@ -13,13 +13,15 @@ import numpy as np from copy import deepcopy -from .fixes import jit, mean +from .fixes import jit, mean, _get_img_fdata from .io.constants import FIFF from .io.open import fiff_open from .io.tag import read_tag from .io.write import start_file, end_file, write_coord_trans +from .defaults import _handle_default from .utils import (check_fname, logger, verbose, 
_ensure_int, _validate_type, - _check_path_like, get_subjects_dir, fill_doc, _check_fname) + _check_path_like, get_subjects_dir, fill_doc, _check_fname, + _check_option, _require_version, wrapped_stdout) # transformation from anterior/left/superior coordinate system to @@ -1521,3 +1523,247 @@ def _euler_to_quat(euler): quat[..., 2] = cphi * ctheta * spsi - sphi * stheta * cpsi quat *= mult return quat + + +############################################################################### +# Affine Registration and SDR + +_ORDERED_STEPS = ('translation', 'rigid', 'affine', 'sdr') + + +def _validate_zooms(zooms): + _validate_type(zooms, (dict, list, tuple, 'numeric', None), 'zooms') + zooms = _handle_default('transform_zooms', zooms) + for key, val in zooms.items(): + _check_option('zooms key', key, _ORDERED_STEPS) + if val is not None: + val = tuple( + float(x) for x in np.array(val, dtype=float).ravel()) + _check_option(f'len(zooms[{repr(key)})', len(val), (1, 3)) + if len(val) == 1: + val = val * 3 + for this_zoom in val: + if this_zoom <= 1: + raise ValueError(f'Zooms must be > 1, got {this_zoom}') + zooms[key] = val + return zooms + + +def _validate_niter(niter): + _validate_type(niter, (dict, list, tuple, None), 'niter') + niter = _handle_default('transform_niter', niter) + for key, value in niter.items(): + _check_option('niter key', key, _ORDERED_STEPS) + _check_option(f'len(niter[{repr(key)}])', len(value), (1, 2, 3)) + return niter + + +def _validate_pipeline(pipeline): + _validate_type(pipeline, (str, list, tuple), 'pipeline') + pipeline_defaults = dict( + all=_ORDERED_STEPS, + rigids=_ORDERED_STEPS[:_ORDERED_STEPS.index('rigid') + 1], + affines=_ORDERED_STEPS[:_ORDERED_STEPS.index('affine') + 1]) + if isinstance(pipeline, str): # use defaults + _check_option('pipeline', pipeline, ('all', 'rigids', 'affines'), + extra='when str') + pipeline = pipeline_defaults[pipeline] + for ii, step in enumerate(pipeline): + name = f'pipeline[{ii}]' + _validate_type(step, str, name) + _check_option(name, step, _ORDERED_STEPS) + ordered_pipeline = tuple(sorted( + pipeline, key=lambda x: _ORDERED_STEPS.index(x))) + if pipeline != ordered_pipeline: + raise ValueError( + f'Steps in pipeline are out of order, expected {ordered_pipeline} ' + f'but got {pipeline} instead') + if len(set(pipeline)) != len(pipeline): + raise ValueError('Steps in pipeline should not be repeated') + return tuple(pipeline) + + +def _compute_r2(a, b): + return 100 * (a.ravel() @ b.ravel()) / \ + (np.linalg.norm(a) * np.linalg.norm(b)) + + +def _reslice_normalize(img, zooms): + from dipy.align.reslice import reslice + img_zooms = img.header.get_zooms()[:3] + img_affine = img.affine + img = _get_img_fdata(img) + if zooms is not None: + img, img_affine = reslice(img, img_affine, img_zooms, zooms) + img /= img.max() # normalize + return img, img_affine + + +@verbose +def compute_volume_registration(moving, static, pipeline='all', zooms=None, + niter=None, verbose=None): + """Align two volumes using an affine and, optionally, SDR. + + Parameters + ---------- + %(moving)s + %(static)s + %(pipeline)s + zooms : float | tuple | dict | None + The voxel size of volume for each spatial dimension in mm. + If None (default), MRIs won't be resliced (slow, but most accurate). + Can be a tuple to provide separate zooms for each dimension (X/Y/Z), + or a dict with keys ``['translation', 'rigid', 'affine', 'sdr']`` + (each with values that are float`, tuple, or None) to provide separate + reslicing/accuracy for the steps. 
+ %(niter)s + %(verbose)s + + Returns + ------- + %(reg_affine)s + %(sdr_morph)s + + Notes + ----- + This function is heavily inspired by and extends + :func:`dipy.align.affine_registration + `. + + .. versionadded:: 0.24 + """ + return _compute_volume_registration( + moving, static, pipeline, zooms, niter)[:2] + + +def _compute_volume_registration(moving, static, pipeline, zooms, niter): + _require_version('nibabel', 'SDR morph', '2.1.0') + _require_version('dipy', 'SDR morph', '0.10.1') + import nibabel as nib + with np.testing.suppress_warnings(): + from dipy.align import (affine_registration, center_of_mass, + translation, rigid, affine, resample, + imwarp, metrics) + + # input validation + _validate_type(moving, nib.spatialimages.SpatialImage, 'moving') + _validate_type(static, nib.spatialimages.SpatialImage, 'static') + zooms = _validate_zooms(zooms) + niter = _validate_niter(niter) + pipeline = _validate_pipeline(pipeline) + + logger.info('Computing registration...') + + # affine optimizations + moving_orig = moving + static_orig = static + out_affine = np.eye(4) + sdr_morph = None + pipeline_options = dict(translation=[center_of_mass, translation], + rigid=[rigid], affine=[affine]) + sigmas = [3.0, 1.0, 0.0] + factors = [4, 2, 1] + for i, step in enumerate(pipeline): + # reslice image with zooms + if i == 0 or zooms[step] != zooms[pipeline[i - 1]]: + if zooms[step] is not None: + logger.info(f'Reslicing to zooms={zooms[step]} for {step} ...') + else: + logger.info(f'Using original zooms for {step} ...') + static_zoomed, static_affine = _reslice_normalize( + static, zooms[step]) + # must be resliced every time because it is adjusted at every step + moving_zoomed, moving_affine = _reslice_normalize(moving, zooms[step]) + logger.info(f'Optimizing {step}:') + if step == 'sdr': # happens last + sdr = imwarp.SymmetricDiffeomorphicRegistration( + metrics.CCMetric(3), niter[step]) + with wrapped_stdout(indent=' ', cull_newlines=True): + sdr_morph = sdr.optimize(static_zoomed, moving_zoomed, + static_affine, moving_affine) + moving_zoomed = sdr_morph.transform(moving_zoomed) + else: + with wrapped_stdout(indent=' ', cull_newlines=True): + moving_zoomed, reg_affine = affine_registration( + moving_zoomed, static_zoomed, moving_affine, static_affine, + nbins=32, metric='MI', pipeline=pipeline_options[step], + level_iters=niter[step], sigmas=sigmas, factors=factors) + + # update the overall alignment affine + out_affine = np.dot(out_affine, reg_affine) + + # apply the current affine to the full-resolution data + moving = resample(np.asarray(moving_orig.dataobj), + np.asarray(static_orig.dataobj), + moving_orig.affine, static_orig.affine, + out_affine) + + # report some useful information + if step in ('translation', 'rigid'): + dist = np.linalg.norm(reg_affine[:3, 3]) + angle = np.rad2deg(_angle_between_quats( + np.zeros(3), rot_to_quat(reg_affine[:3, :3]))) + logger.info(f' Translation: {dist:6.1f} mm') + if step == 'rigid': + logger.info(f' Rotation: {angle:6.1f}°') + assert moving_zoomed.shape == static_zoomed.shape, step + r2 = _compute_r2(static_zoomed, moving_zoomed) + logger.info(f' R²: {r2:6.1f}%') + return (out_affine, sdr_morph, static_zoomed.shape, static_affine, + moving_zoomed.shape, moving_affine) + + +@verbose +def apply_volume_registration(moving, static, reg_affine, sdr_morph=None, + interpolation='linear', verbose=None): + """Apply volume registration. + + Uses registration parameters computed by + :func:`~mne.transforms.compute_volume_registration`. 
+
+    Parameters
+    ----------
+    %(moving)s
+    %(static)s
+    %(reg_affine)s
+    %(sdr_morph)s
+    interpolation : str
+        Interpolation to be used when applying the registration.
+        Can be "linear" (default) or "nearest".
+    %(verbose)s
+
+    Returns
+    -------
+    reg_img : instance of SpatialImage
+        The image after affine (and SDR, if provided) registration.
+
+    Notes
+    -----
+    .. versionadded:: 0.24
+    """
+    _require_version('nibabel', 'SDR morph', '2.1.0')
+    _require_version('dipy', 'SDR morph', '0.10.1')
+    from nibabel.spatialimages import SpatialImage
+    from dipy.align.imwarp import DiffeomorphicMap
+    from dipy.align.imaffine import AffineMap
+    _validate_type(moving, SpatialImage, 'moving')
+    _validate_type(static, SpatialImage, 'static')
+    _validate_type(reg_affine, np.ndarray, 'reg_affine')
+    _check_option('reg_affine.shape', reg_affine.shape, ((4, 4),))
+    _validate_type(sdr_morph, (DiffeomorphicMap, None), 'sdr_morph')
+    logger.info('Applying affine registration ...')
+    moving, moving_affine = np.asarray(moving.dataobj), moving.affine
+    static, static_affine = np.asarray(static.dataobj), static.affine
+    affine_map = AffineMap(reg_affine,
+                           static.shape, static_affine,
+                           moving.shape, moving_affine)
+    reg_data = affine_map.transform(moving, interpolation=interpolation)
+    if sdr_morph is not None:
+        logger.info('Applying SDR warp ...')
+        reg_data = sdr_morph.transform(
+            reg_data, interpolation=interpolation,
+            image_world2grid=np.linalg.inv(static_affine),
+            out_shape=static.shape, out_grid2world=static_affine)
+    reg_img = SpatialImage(reg_data, static_affine)
+    logger.info('[done]')
+    return reg_img
diff --git a/mne/utils/docs.py b/mne/utils/docs.py
index 4127f119b83..fed60d58344 100644
--- a/mne/utils/docs.py
+++ b/mne/utils/docs.py
@@ -2379,6 +2379,80 @@
 and its data is set to 0. This is useful for later re-referencing.
 """

+# Morphing
+docdict['reg_affine'] = """
+reg_affine : ndarray of float, shape (4, 4)
+    The affine that registers one volume to another.
+"""
+docdict['sdr_morph'] = """
+sdr_morph : instance of dipy.align.DiffeomorphicMap
+    The class that applies the symmetric diffeomorphic registration
+    (SDR) morph.
+"""
+docdict['moving'] = """
+moving : instance of SpatialImage
+    The image to morph ("from" volume).
+"""
+docdict['static'] = """
+static : instance of SpatialImage
+    The image to align with ("to" volume).
+"""
+docdict['niter'] = """
+niter : dict | tuple | None
+    For each phase of the volume registration, ``niter`` is the number of
+    iterations per successive stage of optimization. If a tuple is
+    provided, it will be used for all steps (except center of mass, which
+    does not iterate). It should have length 3 to
+    correspond to ``sigmas=[3.0, 1.0, 0.0]`` and ``factors=[4, 2, 1]`` in
+    the pipeline (see :func:`dipy.align.affine_registration
+    <dipy.align._public.affine_registration>` for details).
+    If a dictionary is provided, the number of iterations can be set for
+    each step by using the step name as a key. Steps not in the dictionary
+    will use the default value. The default (None) is equivalent to:
+
+        niter=dict(translation=(100, 100, 10),
+                   rigid=(100, 100, 10),
+                   affine=(100, 100, 10),
+                   sdr=(5, 5, 3))
+"""
+docdict['pipeline'] = """
+pipeline : str | tuple
+    The volume registration steps to perform (a ``str`` for a single step,
+    or ``tuple`` for a set of sequential steps). The following steps can be
+    performed, and they do so by matching mutual information between the
+    images (unless otherwise noted):
+
+    ``'translation'``
+        Translation.
+
+    ``'rigid'``
+        Rigid-body, i.e., rotation and translation.
+ + ``'affine'`` + A full affine transformation, which includes translation, rotation, + scaling, and shear. + + ``'sdr'`` + Symmetric diffeomorphic registration :footcite:`AvantsEtAl2008`, a + non-linear similarity-matching algorithm. + + The following string shortcuts can also be used: + + ``'all'`` (default) + All steps will be performed above in the order above, i.e., + ``('translation', 'rigid', 'affine', 'sdr')``. + + ``'rigids'`` + The rigid steps (first two) will be performed, which registers + the volume without distorting its underlying structure, i.e., + ``('translation', 'rigid')``. This is useful for + example when registering images from the same subject, such as + CT and MR images. + + ``'affines'`` + The affine steps (first three) will be performed, i.e., omitting + the SDR step. +""" docdict_indented = {} diff --git a/tutorials/clinical/10_ieeg_localize.py b/tutorials/clinical/10_ieeg_localize.py index 62b397223bf..64a2731081c 100644 --- a/tutorials/clinical/10_ieeg_localize.py +++ b/tutorials/clinical/10_ieeg_localize.py @@ -29,9 +29,8 @@ import matplotlib.pyplot as plt import nibabel as nib -from dipy.align import (affine_registration, center_of_mass, translation, - rigid, affine, resample) -from dipy.align.reslice import reslice +import nilearn.plotting +from dipy.align import resample import mne from mne.datasets import fetch_fsaverage @@ -71,7 +70,7 @@ # ``256 x 256 x 256`` voxels. # # .. note:: -# Using the ``--deface`` flag will create a defaced, anonymized T1 image +# Using the ``-deface`` flag will create a defaced, anonymized T1 image # located at ``$MY_DATA_DIRECTORY/$SUBJECT/mri/orig_defaced.mgz``, # which is helpful for when you publish your data. You can also use # :func:`mne_bids.write_anat` and pass ``deface=True``. @@ -89,11 +88,11 @@ def plot_overlay(image, compare, title, thresh=None): """Define a helper function for comparing plots.""" image = nib.orientations.apply_orientation( - image.get_fdata().copy(), nib.orientations.axcodes2ornt( - nib.orientations.aff2axcodes(image.affine))) + np.asarray(image.dataobj), nib.orientations.axcodes2ornt( + nib.orientations.aff2axcodes(image.affine))).astype(np.float32) compare = nib.orientations.apply_orientation( - compare.get_fdata().copy(), nib.orientations.axcodes2ornt( - nib.orientations.aff2axcodes(compare.affine))) + np.asarray(compare.dataobj), nib.orientations.axcodes2ornt( + nib.orientations.aff2axcodes(compare.affine))).astype(np.float32) if thresh is not None: compare[compare < np.quantile(compare, thresh)] = np.nan fig, axes = plt.subplots(1, 3, figsize=(12, 4)) @@ -111,90 +110,35 @@ def plot_overlay(image, compare, title, thresh=None): T1 = nib.load(op.join(misc_path, 'seeg', 'sample_seeg_T1.mgz')) CT_orig = nib.load(op.join(misc_path, 'seeg', 'sample_seeg_CT.mgz')) -# resample to T1 shape -CT_resampled = resample(moving=CT_orig.get_fdata(), - static=T1.get_fdata(), +# resample to T1's definition of world coordinates +CT_resampled = resample(moving=np.asarray(CT_orig.dataobj), + static=np.asarray(T1.dataobj), moving_affine=CT_orig.affine, - static_affine=T1.affine, - between_affine=None) + static_affine=T1.affine) plot_overlay(T1, CT_resampled, 'Unaligned CT Overlaid on T1', thresh=0.95) +del CT_resampled ############################################################################### # Now we need to align our CT image to the T1 image. # -# .. 
code-block:: python +# We want this to be a rigid transformation (just rotation + translation), +# so we don't do a full affine registration (that includes shear) here. +# This takes a while (~10 minutes) to execute so we skip actually running it +# here:: # -# # normalize intensities -# mri_to = T1.get_fdata().copy() -# mri_to /= mri_to.max() -# ct_from = CT_orig.get_fdata().copy() -# ct_from /= ct_from.max() +# reg_affine, _ = mne.transforms.compute_volume_registration( +# CT_orig, T1, pipeline='rigids', verbose=True) # -# # downsample for speed -# zooms = (5, 5, 5) -# mri_to, affine_to = reslice( -# mri_to, affine=T1.affine, -# zooms=T1.header.get_zooms()[:3], new_zooms=zooms) -# ct_from, affine_from = reslice( -# ct_from, affine=CT_orig.affine, -# zooms=CT_orig.header.get_zooms()[:3], new_zooms=zooms) -# -# # first optimize the translation on the zoomed images using -# # ``factors`` which looks at the image at different scales -# reg_affine = affine_registration( -# moving=ct_from, -# static=mri_to, -# moving_affine=affine_from, -# static_affine=affine_to, -# nbins=32, -# metric='MI', -# pipeline=[center_of_mass, translation], -# level_iters=[100, 100, 10], -# sigmas=[3.0, 1.0, 0.0], -# factors=[4, 2, 1])[1] -# -# CT_translated = resample(moving=CT_orig.get_fdata(), -# static=T1.get_fdata(), -# moving_affine=CT_orig.affine, -# static_affine=T1.affine, -# between_affine=reg_affine) -# -# # Now, fine-tune the registration -# reg_affine = affine_registration( -# moving=CT_translated.get_fdata(), -# static=T1.get_fdata(), -# moving_affine=CT_translated.affine, -# static_affine=T1.affine, -# nbins=32, -# metric='MI', -# pipeline=[rigid], -# level_iters=[100, 100, 10], -# sigmas=[3.0, 1.0, 0.0], -# factors=[4, 2, 1])[1] -# -# CT_aligned = resample(moving=CT_translated.get_fdata(), -# static=T1.get_fdata(), -# moving_affine=CT_translated.affine, -# static_affine=T1.affine, -# between_affine=reg_affine) - - -############################################################################### -# The previous section takes several minutes to execute so the results are -# presented here pre-computed for convenience. +# And instead we just hard-code the resulting 4x4 matrix: -alignment_affine = np.array([ - [0.99235816, -0.03412124, 0.11857915, -133.22262329], - [0.04601133, 0.99402046, -0.09902669, -97.64542095], - [-0.11449119, 0.10372593, 0.98799428, -84.39915646], +reg_affine = np.array([ + [0.99270756, -0.03243313, 0.11610254, -133.094156], + [0.04374389, 0.99439665, -0.09623816, -97.58320673], + [-0.11233068, 0.10061512, 0.98856381, -84.45551601], [0., 0., 0., 1.]]) -CT_aligned = resample(moving=CT_orig.get_fdata(), - static=T1.get_fdata(), - moving_affine=CT_orig.affine, - static_affine=T1.affine, - between_affine=alignment_affine) - +CT_aligned = mne.transforms.apply_volume_registration(CT_orig, T1, reg_affine) plot_overlay(T1, CT_aligned, 'Aligned CT Overlaid on T1', thresh=0.95) +del CT_orig ############################################################################### # We can now see how the CT image looks properly aligned to the T1 image. 
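###############################################################################
# .. note:: A hypothetical variation on the call above (not executed here and
#           not part of this changeset): the rigid registration can be sped up
#           considerably, at some cost in accuracy, by reslicing to coarser
#           ``zooms`` and running fewer iterations per step, e.g.::
#
#               reg_affine, _ = mne.transforms.compute_volume_registration(
#                   CT_orig, T1, pipeline='rigids', zooms=5,
#                   niter=dict(translation=(50, 50, 5), rigid=(50, 50, 5)))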
@@ -207,15 +151,17 @@ def plot_overlay(image, compare, title, thresh=None): # make low intensity parts of the CT transparent for easier visualization CT_data = CT_aligned.get_fdata().copy() CT_data[CT_data < np.quantile(CT_data, 0.95)] = np.nan +T1_data = np.asarray(T1.dataobj) fig, axes = plt.subplots(1, 3, figsize=(12, 6)) for ax in axes: ax.axis('off') -axes[0].imshow(T1.get_fdata()[T1.shape[0] // 2], cmap='gray') +axes[0].imshow(T1_data[T1.shape[0] // 2], cmap='gray') axes[0].set_title('MR') -axes[1].imshow(CT_aligned.get_fdata()[CT_aligned.shape[0] // 2], cmap='gray') +axes[1].imshow(np.asarray(CT_aligned.dataobj)[CT_aligned.shape[0] // 2], + cmap='gray') axes[1].set_title('CT') -axes[2].imshow(T1.get_fdata()[T1.shape[0] // 2], cmap='gray') +axes[2].imshow(T1_data[T1.shape[0] // 2], cmap='gray') axes[2].imshow(CT_data[CT_aligned.shape[0] // 2], cmap='gist_heat', alpha=0.5) for ax in (axes[0], axes[2]): ax.annotate('Subcutaneous fat', (110, 52), xytext=(100, 30), @@ -227,6 +173,7 @@ def plot_overlay(image, compare, title, thresh=None): color='white', arrowprops=dict(facecolor='white')) axes[2].set_title('CT aligned to MR') fig.tight_layout() +del CT_data, T1 ############################################################################### # Marking the Location of Each Electrode Contact @@ -249,7 +196,7 @@ def plot_overlay(image, compare, title, thresh=None): # .. code-block:: bash # # $ freeview $MISC_PATH/seeg/sample_seeg_T1.mgz \ -# $MISC_PATH/seeg/sample_seeg_CT.mgz +# $MISC_PATH/seeg/sample_seeg_CT.mgz # # Now, we'll need the subject's brain segmented out from the rest of the T1 # image from the freesurfer ``recon-all`` reconstruction. This is so that @@ -258,7 +205,7 @@ def plot_overlay(image, compare, title, thresh=None): # # Let's plot the electrode contact locations on the subject's brain. -# Load electrode positions from file +# load electrode positions from file elec_df = pd.read_csv(op.join(misc_path, 'seeg', 'sample_seeg_electrodes.tsv'), sep='\t', header=0, index_col=None) ch_names = elec_df['name'].tolist() @@ -267,8 +214,9 @@ def plot_overlay(image, compare, title, thresh=None): # load the subject's brain subject_brain = nib.load(op.join(misc_path, 'seeg', 'sample_seeg_brain.mgz')) -# Make brain surface from T1 -verts, triangles = mne.marching_cubes(subject_brain.get_fdata(), level=100) +# make brain surface from T1 +verts, triangles = mne.surface.marching_cubes( + np.asarray(subject_brain.dataobj), level=100) # transform from voxels to surface RAS verts = mne.transforms.apply_trans( subject_brain.header.get_vox2ras_tkr(), verts) / 1000. # to meters @@ -279,7 +227,7 @@ def plot_overlay(image, compare, title, thresh=None): opacity=0.05, representation='surface') for ch_coord in ch_coords: renderer.sphere(center=tuple(ch_coord / 1000.), color='y', scale=0.005) -view_kwargs = dict(azimuth=40, elevation=60) +view_kwargs = dict(azimuth=60, elevation=100) mne.viz.set_3d_view(renderer.figure, focalpoint=(0, 0, 0), distance=0.3, **view_kwargs) renderer.show() @@ -309,63 +257,23 @@ def plot_overlay(image, compare, title, thresh=None): # Now, we'll register the affine of the subject's brain to the template brain. # This aligns the two brains, preparing the subject's brain to be warped # to the template. +# +# .. warning:: Here we use ``zooms=5`` just for speed, in general we recommend +# using ``zooms=None``` (default) for highest accuracy. To deal +# with this coarseness, we also use a threshold of 0.8 for the CT +# electrodes rather than 0.95. 
This coarse zoom and low threshold +# is useful for getting a quick view of the data, but finalized +# pipelines should use ``zooms=None`` instead! -# normalize intensities -mri_to = template_brain.get_fdata().copy() -mri_to /= mri_to.max() -mri_from = subject_brain.get_fdata().copy() -mri_from /= mri_from.max() - -# downsample for speed -zooms = (5, 5, 5) -mri_to, affine_to = reslice( - mri_to, affine=template_brain.affine, - zooms=template_brain.header.get_zooms()[:3], new_zooms=zooms) -mri_from, affine_from = reslice( - mri_from, affine=subject_brain.affine, - zooms=subject_brain.header.get_zooms()[:3], new_zooms=zooms) - -reg_affine = affine_registration( - moving=mri_from, - static=mri_to, - moving_affine=affine_from, - static_affine=affine_to, - nbins=32, - metric='MI', - pipeline=[center_of_mass, translation, rigid, affine], - level_iters=[100, 100, 10], - sigmas=[3.0, 1.0, 0.0], - factors=[4, 2, 1])[1] - -# Apply the transform to the subject brain to plot it -subject_brain_aligned = resample(moving=subject_brain.get_fdata(), - static=template_brain.get_fdata(), - moving_affine=subject_brain.affine, - static_affine=template_brain.affine, - between_affine=reg_affine) -plot_overlay(template_brain, subject_brain_aligned, - 'Alignment with fsaverage after Affine Registration') +CT_thresh = 0.8 # 0.95 is better for zooms=None! +reg_affine, sdr_morph = mne.transforms.compute_volume_registration( + subject_brain, template_brain, zooms=5, verbose=True) +subject_brain_sdr = mne.transforms.apply_volume_registration( + subject_brain, template_brain, reg_affine, sdr_morph) -############################################################################### -# Next, we'll compute the symmetric diffeomorphic registration. This accounts -# for differences in the shape and size of the subject's brain areas -# compared to the template brain. -# -# This takes several minutes, so it won't actually be executed in the -# tutorial, instead, we'll just use pre-computed results for convenience. -# -# .. code-block:: python -# -# from dipy.align.metrics import CCMetric -# from dipy.align.imwarp import SymmetricDiffeomorphicRegistration -# # Compute registration -# sdr = SymmetricDiffeomorphicRegistration( -# metric=CCMetric(3), level_iters=[10, 10, 5]) -# mapping = sdr.optimize(static=template_brain.get_fdata(), -# moving=subject_brain.get_fdata(), -# static_grid2world=template_brain.affine, -# moving_grid2world=subject_brain.affine, -# prealign=reg_affine) +# apply the transform to the subject brain to plot it +plot_overlay(template_brain, subject_brain_sdr, + 'Alignment with fsaverage after SDR Registration') ############################################################################### # Finally, we'll apply the registrations to the electrode contact coordinates. @@ -377,46 +285,68 @@ def plot_overlay(image, compare, title, thresh=None): # the SDR transform. We can finally recover a position by averaging the # positions of all the voxels that had the contact's lookup number in # the warped image. -# -# .. 
code-block:: python -# -# # convert electrode positions from surface RAS to voxels -# ch_coords = mne.transforms.apply_trans( -# np.linalg.inv(subject_brain.header.get_vox2ras_tkr()), ch_coords) -# -# # Take channel coordinates and use the CT to transform them -# # into a 3D image where all the voxels over a threshold nearby -# # are labeled with an index -# CT_data = CT_aligned.get_fdata() -# thresh = np.quantile(CT_data, 0.95) -# elec_image = np.zeros(subject_brain.shape, dtype=int) -# for i, ch_coord in enumerate(ch_coords): -# # this looks up to a voxel away, it may be marked imperfectly -# volume = mne.voxel_neighbors(ch_coord, CT_data, thresh) -# for voxel in volume: -# if elec_image[voxel] != 0: -# # some voxels ambiguous because the contacts are bridged on -# # the CT so assign the voxel to the nearest contact location -# dist_old = np.sqrt( -# (ch_coords[elec_image[voxel] - 1] - voxel)**2).sum() -# dist_new = np.sqrt((ch_coord - voxel)**2).sum() -# if dist_new < dist_old: -# elec_image[voxel] = i + 1 -# else: -# elec_image[voxel] = i + 1 -# -# # Apply the mapping -# warped_elec_image = mapping.transform(elec_image, -# interpolation='nearest') -# -# # Recover the electrode contact positions as the center of mass -# for i in range(ch_coords.shape[0]): -# ch_coords[i] = np.array( -# np.where(warped_elec_image == i + 1)).mean(axis=1) -# -# # Convert back to surface RAS but to the template surface RAS this time -# ch_coords = mne.transforms.apply_trans( -# template_brain.header.get_vox2ras_tkr(), ch_coords) + +# convert electrode positions from surface RAS to voxels +ch_coords = mne.transforms.apply_trans( + np.linalg.inv(subject_brain.header.get_vox2ras_tkr()), ch_coords) +# take channel coordinates and use the CT to transform them +# into a 3D image where all the voxels over a threshold nearby +# are labeled with an index +CT_data = CT_aligned.get_fdata() +thresh = np.quantile(CT_data, CT_thresh) +elec_image = np.zeros(subject_brain.shape, dtype=int) +for i, ch_coord in enumerate(ch_coords): + # this looks up to a voxel away, it may be marked imperfectly + volume = mne.voxel_neighbors(ch_coord, CT_data, thresh) + for voxel in volume: + if elec_image[voxel] != 0: + # some voxels ambiguous because the contacts are bridged on + # the CT so assign the voxel to the nearest contact location + dist_old = np.sqrt( + (ch_coords[elec_image[voxel] - 1] - voxel)**2).sum() + dist_new = np.sqrt((ch_coord - voxel)**2).sum() + if dist_new < dist_old: + elec_image[voxel] = i + 1 + else: + elec_image[voxel] = i + 1 + +# apply the mapping +elec_image = nib.Nifti1Image(elec_image, subject_brain.affine) + +# ensure that each electrode contact was marked in at least one voxel +assert set(np.arange(1, ch_coords.shape[0] + 1)).difference( + set(np.unique(elec_image.get_fdata()))) == set() + +del subject_brain, CT_aligned, CT_data # not used anymore + +############################################################################### +# Warp and plot the result. 
+ +warped_elec_image = mne.transforms.apply_volume_registration( + elec_image, template_brain, reg_affine, sdr_morph, interpolation='nearest') + +# ensure that all the electrode contacts were warped to the template +assert set(np.arange(1, ch_coords.shape[0] + 1)).difference( + set(np.unique(warped_elec_image.get_fdata()))) == set() + +fig, axes = plt.subplots(2, 1, figsize=(8, 8)) +nilearn.plotting.plot_glass_brain(elec_image, axes=axes[0], cmap='Dark2') +fig.text(0.1, 0.65, 'Subject T1', rotation='vertical') +nilearn.plotting.plot_glass_brain(warped_elec_image, axes=axes[1], + cmap='Dark2') +fig.text(0.1, 0.25, 'fsaverage', rotation='vertical') +fig.suptitle('Electrodes warped to fsaverage') + +# recover the electrode contact positions as the center of mass +warped_elec_data = warped_elec_image.get_fdata() +for val, ch_coord in enumerate(ch_coords, 1): + ch_coord[:] = np.array( + np.where(warped_elec_data == val), float).mean(axis=1) + +# convert back to surface RAS but to the template surface RAS this time +ch_coords = mne.transforms.apply_trans( + template_brain.header.get_vox2ras_tkr(), ch_coords) +del template_brain ############################################################################### # We can now plot the result. You can compare this to the plot in @@ -426,13 +356,7 @@ def plot_overlay(image, compare, title, thresh=None): # SDR to warp the positions of the electrode contacts, the position in the # template brain is able to be more accurately estimated. -# sphinx_gallery_thumbnail_number = 7 - -# load pre-computed warped values -elec_df = pd.read_csv( - op.join(misc_path, 'seeg', 'sample_seeg_electrodes_fsaverage.tsv'), - sep='\t', header=0, index_col=None) -ch_coords = elec_df[['R', 'A', 'S']].to_numpy(dtype=float) +# sphinx_gallery_thumbnail_number = 8 # load electrophysiology data raw = mne.io.read_raw(op.join(misc_path, 'seeg', 'sample_seeg_ieeg.fif')) @@ -441,7 +365,7 @@ def plot_overlay(image, compare, title, thresh=None): 'fsaverage', subjects_dir=subjects_dir) lpa, nasion, rpa = lpa['r'], nasion['r'], rpa['r'] -# Create a montage with our new points +# create a montage with our new points ch_pos = dict(zip(ch_names, ch_coords / 1000)) # mm -> m montage = mne.channels.make_dig_montage( ch_pos, coord_frame='mri', nasion=nasion, lpa=lpa, rpa=rpa)
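For reference, the new public API introduced in this changeset can be exercised end to end along the following lines. This is a minimal sketch rather than part of the patch: the file paths are placeholders, and ``nibabel`` and ``dipy`` must be installed for these functions to run:

    import nibabel as nib
    import mne

    # load the volume to align and the reference volume (placeholder paths)
    moving = nib.load('sub-01_CT.mgz')
    static = nib.load('sub-01_T1.mgz')

    # rigid-only registration (translation + rotation); with this pipeline
    # no SDR step is run, so sdr_morph is returned as None
    reg_affine, sdr_morph = mne.transforms.compute_volume_registration(
        moving, static, pipeline='rigids')

    # resample the moving image onto the static image's grid
    aligned = mne.transforms.apply_volume_registration(
        moving, static, reg_affine, sdr_morph)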