Limit current adaptive mask method to brain mask (#1060)
* Limit adaptive mask calculation to brain mask.

Expand on logic of first adaptive mask method.

Update tedana/utils.py

Improve docstring.

Update test.

Add decreasing-signal-based adaptive mask.

Keep removing.

Co-Authored-By: Dan Handwerker <7406227+handwerkerd@users.noreply.github.com>

* Use `compute_epi_mask` in t2smap workflow.

* Try fixing the tests.

* Fix make_adaptive_mask.

* Update test_utils.py

* Update test_utils.py

* Improve docstring.

* Update utils.py

* Update test_utils.py

* Revert "Update test_utils.py"

This reverts commit 259b002.

* Don't take absolute value of echo means.

* Log echo-wise thresholds in adaptive mask.

* Add comment about non-zero voxels.

* Update utils.py

* Update test_utils.py

* Update test_utils.py

* Update test_utils.py

* Log the thresholds again.

* Address review.

* Fix test.

---------

Co-authored-by: Dan Handwerker <7406227+handwerkerd@users.noreply.github.com>
tsalo and handwerkerd authored Apr 12, 2024
1 parent 41dafe5 commit 62e15ab
Showing 5 changed files with 83 additions and 59 deletions.
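As context for the diffs below, here is a minimal sketch of the updated calling convention: make_adaptive_mask now requires an explicit brain mask instead of deriving one from the data. The file paths are placeholders that mirror the test fixtures changed in this commit.

import numpy as np

from tedana import io, utils

tes = np.array([14.5, 38.5, 62.5])  # echo times in ms
in_files = [f"echo{i + 1}.nii.gz" for i in range(3)]  # placeholder echo files
mask_file = "mask.nii.gz"  # placeholder binary brain mask

data, _ = io.load_data(in_files, n_echos=len(tes))
mask, adaptive_mask = utils.make_adaptive_mask(
    data,
    mask=mask_file,  # now a required argument
    threshold=1,
    methods=["dropout", "decay"],
)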
7 changes: 6 additions & 1 deletion tedana/tests/test_decay.py
@@ -15,8 +15,13 @@
def testdata1():
tes = np.array([14.5, 38.5, 62.5])
in_files = [op.join(get_test_data_path(), f"echo{i + 1}.nii.gz") for i in range(3)]
mask_file = op.join(get_test_data_path(), "mask.nii.gz")
data, _ = io.load_data(in_files, n_echos=len(tes))
mask, adaptive_mask = utils.make_adaptive_mask(data, methods=["dropout", "decay"])
mask, adaptive_mask = utils.make_adaptive_mask(
data,
mask=mask_file,
methods=["dropout", "decay"],
)
fittype = "loglin"
data_dict = {
"data": data,
7 changes: 6 additions & 1 deletion tedana/tests/test_metrics.py
@@ -16,8 +16,13 @@ def testdata1():
"""Data used for tests of the metrics module."""
tes = np.array([14.5, 38.5, 62.5])
in_files = [op.join(get_test_data_path(), f"echo{i + 1}.nii.gz") for i in range(3)]
mask_file = op.join(get_test_data_path(), "mask.nii.gz")
data_cat, ref_img = io.load_data(in_files, n_echos=len(tes))
_, adaptive_mask = utils.make_adaptive_mask(data_cat, methods=["dropout", "decay"])
_, adaptive_mask = utils.make_adaptive_mask(
data_cat,
mask=mask_file,
methods=["dropout", "decay"],
)
data_optcom = np.mean(data_cat, axis=1)
mixing = np.random.random((data_optcom.shape[1], 50))
io_generator = io.OutputGenerator(ref_img)
65 changes: 36 additions & 29 deletions tedana/tests/test_utils.py
@@ -77,69 +77,78 @@ def test_reshape_niimg():
def test_make_adaptive_mask():
"""Test tedana.utils.make_adaptive_mask with different methods."""
# load data make masks
mask_file = pjoin(datadir, "mask.nii.gz")
data = io.load_data(fnames, n_echos=len(tes))[0]

# Just dropout method
mask, masksum = utils.make_adaptive_mask(data, threshold=1, methods=["dropout"])
mask, masksum = utils.make_adaptive_mask(
data,
mask=mask_file,
threshold=1,
methods=["dropout"],
)

assert mask.shape == masksum.shape == (64350,)
assert np.allclose(mask, (masksum >= 1).astype(bool))
assert mask.sum() == 50786
assert mask.sum() == 49376
vals, counts = np.unique(masksum, return_counts=True)
assert np.allclose(vals, np.array([0, 1, 2, 3]))
assert np.allclose(counts, np.array([13564, 3977, 5060, 41749]))
assert np.allclose(counts, np.array([14974, 3682, 5128, 40566]))

# Just decay method
mask, masksum = utils.make_adaptive_mask(data, threshold=1, methods=["decay"])
mask, masksum = utils.make_adaptive_mask(
data,
mask=mask_file,
threshold=1,
methods=["decay"],
)

assert mask.shape == masksum.shape == (64350,)
assert np.allclose(mask, (masksum >= 1).astype(bool))
assert mask.sum() == 64350 # This method can't flag first echo as bad
assert mask.sum() == 60985 # This method can't flag first echo as bad
vals, counts = np.unique(masksum, return_counts=True)
assert np.allclose(vals, np.array([1, 2, 3]))
assert np.allclose(counts, np.array([5666, 6552, 52132]))
assert np.allclose(vals, np.array([0, 1, 2, 3]))
assert np.allclose(counts, np.array([3365, 4365, 5971, 50649]))

# Dropout and decay methods combined
mask, masksum = utils.make_adaptive_mask(data, threshold=1, methods=["dropout", "decay"])
mask, masksum = utils.make_adaptive_mask(
data,
mask=mask_file,
threshold=1,
methods=["dropout", "decay"],
)

assert mask.shape == masksum.shape == (64350,)
assert np.allclose(mask, (masksum >= 1).astype(bool))
assert mask.sum() == 50786
assert mask.sum() == 49376
vals, counts = np.unique(masksum, return_counts=True)
assert np.allclose(vals, np.array([0, 1, 2, 3]))
assert np.allclose(counts, np.array([13564, 4959, 5349, 40478]))
assert np.allclose(counts, np.array([14974, 4386, 5604, 39386]))

# Adding "none" should have no effect
mask, masksum = utils.make_adaptive_mask(
data, threshold=1, methods=["dropout", "decay", "none"]
data,
mask=mask_file,
threshold=1,
methods=["dropout", "decay", "none"],
)

assert mask.shape == masksum.shape == (64350,)
assert np.allclose(mask, (masksum >= 1).astype(bool))
assert mask.sum() == 50786
assert mask.sum() == 49376
vals, counts = np.unique(masksum, return_counts=True)
assert np.allclose(vals, np.array([0, 1, 2, 3]))
assert np.allclose(counts, np.array([13564, 4959, 5349, 40478]))
assert np.allclose(counts, np.array([14974, 4386, 5604, 39386]))

# Just "none"
mask, masksum = utils.make_adaptive_mask(data, threshold=1, methods=["none"])
mask, masksum = utils.make_adaptive_mask(data, mask=mask_file, threshold=1, methods=["none"])

assert mask.shape == masksum.shape == (64350,)
assert np.allclose(mask, (masksum >= 1).astype(bool))
assert mask.sum() == 64350
assert mask.sum() == 60985
vals, counts = np.unique(masksum, return_counts=True)
assert np.allclose(vals, np.array([3]))
assert np.allclose(counts, np.array([64350]))

# test user-defined mask
# TODO: Add mask file with no bad voxels to test against
mask, masksum = utils.make_adaptive_mask(
data,
mask=pjoin(datadir, "mask.nii.gz"),
threshold=3,
methods=["dropout", "decay"],
)
assert np.allclose(mask, (masksum >= 3).astype(bool))
assert np.allclose(vals, np.array([0, 1, 2, 3]))
assert np.allclose(counts, np.array([3365, 1412, 1195, 58378]))


# SMOKE TESTS
@@ -178,8 +187,6 @@ def test_smoke_make_adaptive_mask():
data = np.random.random((n_samples, n_echos, n_times))
mask = np.random.randint(2, size=n_samples)

assert utils.make_adaptive_mask(data, methods=["dropout", "decay"]) is not None
# functions with mask
assert utils.make_adaptive_mask(data, mask=mask, methods=["dropout", "decay"]) is not None


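The updated assertions above count voxels at each adaptive-mask level. As a reminder of what those levels encode, here is a toy illustration (made-up values, not the test data): each voxel's value is the number of echoes with "good" signal, and the binary mask keeps voxels with at least threshold good echoes.

import numpy as np

masksum = np.array([0, 1, 2, 3, 3, 2, 0, 3])  # toy adaptive-mask values
threshold = 1

vals, counts = np.unique(masksum, return_counts=True)
print(dict(zip(vals.tolist(), counts.tolist())))  # {0: 2, 1: 1, 2: 2, 3: 3}

# The binary mask retains voxels with at least `threshold` good echoes.
mask = (masksum >= threshold).astype(bool)
print(mask.sum())  # 6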
54 changes: 28 additions & 26 deletions tedana/utils.py
@@ -49,16 +49,18 @@ def reshape_niimg(data):
return fdata


def make_adaptive_mask(data, mask=None, threshold=1, methods=["dropout"]):
def make_adaptive_mask(data, mask, threshold=1, methods=["dropout"]):
"""Make map of `data` specifying longest echo a voxel can be sampled with.
Parameters
----------
data : (S x E x T) array_like
Multi-echo data array, where `S` is samples, `E` is echos, and `T` is time.
mask : :obj:`str` or img_like, optional
Binary mask for voxels to consider in TE Dependent ANAlysis. Default is
to generate mask from data with good signal across echoes
mask : :obj:`str` or img_like
Binary mask for voxels to consider in TE Dependent ANAlysis.
This must be provided, as the mask is used to identify exemplar voxels.
Without a mask limiting the voxels to consider,
the adaptive mask will generally select voxels outside the brain as exemplars.
threshold : :obj:`int`, optional
Minimum echo count to retain in the mask. Default is 1, which is
equivalent to not thresholding.
@@ -81,6 +83,7 @@ def make_adaptive_mask(data, mask=None, threshold=1, methods=["dropout"]):
Dropout
Remove voxels with relatively low mean magnitudes from the mask.
This method uses distributions of values across the mask.
Therefore, it is sensitive to the quality of the mask.
A bad mask may result in a bad adaptive mask.
@@ -121,6 +124,9 @@ def make_adaptive_mask(data, mask=None, threshold=1, methods=["dropout"]):
f"An adaptive mask was then generated using the {'+'.join(methods)} method(s), "
"in which each voxel's value reflects the number of echoes with 'good' data."
)
mask = reshape_niimg(mask).astype(bool)
data = data[mask, :, :]

if (methods is None) or (len(methods) == 1 and methods[0].lower() == "none"):
LGR.warning(
"No methods provided for adaptive mask generation. "
@@ -139,9 +145,8 @@ def make_adaptive_mask(data, mask=None, threshold=1, methods=["dropout"]):
n_samples, n_echos, _ = data.shape
adaptive_masks = []

# Generate a base adaptive mask that flags any NaNs or negative values
# TODO When masking is moved before dropout calc, change to "data <= 0"
bad_data_vals = np.isnan(data) + (data < 0)
# Generate a base adaptive mask that flags any NaN, zero, or negative values
bad_data_vals = np.isnan(data) + (data <= 0)
good_vox_echoes = 1 - np.any(bad_data_vals, axis=-1).astype(int)
base_adaptive_mask = np.zeros(n_samples, dtype=int)
for echo_idx in range(n_echos):
@@ -180,6 +185,8 @@ def make_adaptive_mask(data, mask=None, threshold=1, methods=["dropout"]):
if lthrs.ndim > 1:
lthrs = lthrs[:, lthrs.sum(axis=0).argmax()]

LGR.info("Echo-wise intensity thresholds for adaptive mask: %s", lthrs)

# determine samples where absolute value is greater than echo-specific thresholds
# and count # of echos that pass criterion
dropout_adaptive_mask = (np.abs(echo_means) > lthrs).sum(axis=-1)
@@ -196,26 +203,21 @@ def make_adaptive_mask(data, mask=None, threshold=1, methods=["dropout"]):
# Retain the most conservative of the selected adaptive mask estimates
adaptive_mask = np.minimum.reduce(adaptive_masks)

if mask is None:
# make it a boolean mask to (where we have at least `threshold` echoes with good signal)
mask = (adaptive_mask >= threshold).astype(bool)
# TODO: Use visual report to make checking the reduced mask easier
if np.any(adaptive_mask < threshold):
n_bad_voxels = np.sum(adaptive_mask < threshold)
LGR.warning(
f"{n_bad_voxels} voxels in user-defined mask do not have good signal. "
"Removing voxels from mask."
)
adaptive_mask[adaptive_mask < threshold] = 0
else:
# if the user has supplied a binary mask
mask = reshape_niimg(mask).astype(bool)
adaptive_mask = adaptive_mask * mask
# reduce mask based on adaptive_mask
# TODO: Use visual report to make checking the reduced mask easier
if np.any(adaptive_mask[mask] < threshold):
n_bad_voxels = np.sum(adaptive_mask[mask] < threshold)
LGR.warning(
f"{n_bad_voxels} voxels in user-defined mask do not have good "
"signal. Removing voxels from mask."
)
adaptive_mask[adaptive_mask < threshold] = 0
mask = adaptive_mask.astype(bool)

return mask, adaptive_mask

modified_mask = adaptive_mask.astype(bool)

adaptive_mask = unmask(adaptive_mask, mask)
modified_mask = unmask(modified_mask, mask)

return modified_mask, adaptive_mask


def unmask(data, mask):
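The structural change in make_adaptive_mask above is that the brain mask is applied up front, all calculations run on in-mask voxels only, and the results are projected back to the full volume at the end. A numpy-only sketch of that pattern follows; the "good echo" criterion here is a toy stand-in, not tedana's dropout/decay logic.

import numpy as np


def unmask_1d(values, mask):
    """Place per-voxel values back into a full-length vector (zeros elsewhere)."""
    out = np.zeros(mask.shape, dtype=values.dtype)
    out[mask] = values
    return out


n_samples, n_echos, n_times = 10, 3, 5
data = np.random.random((n_samples, n_echos, n_times))
brain_mask = np.random.randint(2, size=n_samples).astype(bool)

# 1. Restrict all calculations to in-mask voxels.
masked_data = data[brain_mask, :, :]

# 2. Count "good" echoes per voxel on the masked data
#    (toy criterion standing in for the dropout/decay methods).
adaptive_mask_in_brain = (masked_data.mean(axis=-1) > 0.5).sum(axis=-1)

# 3. Project results back to the full volume so outputs keep the original shape.
adaptive_mask = unmask_1d(adaptive_mask_in_brain, brain_mask)
modified_mask = adaptive_mask.astype(bool)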
9 changes: 7 additions & 2 deletions tedana/workflows/t2smap.py
@@ -7,6 +7,7 @@
import sys

import numpy as np
from nilearn.masking import compute_epi_mask
from scipy import stats
from threadpoolctl import threadpool_limits

@@ -284,11 +285,15 @@ def t2smap_workflow(
config="auto",
make_figures=False,
)
n_samp, n_echos, n_vols = catd.shape
n_echos = catd.shape[1]
LGR.debug(f"Resulting data shape: {catd.shape}")

if mask is None:
LGR.info("Computing adaptive mask")
LGR.info(
"Computing initial mask from the first echo using nilearn's compute_epi_mask function."
)
first_echo_img = io.new_nii_like(io_generator.reference_img, catd[:, 0, :])
mask = compute_epi_mask(first_echo_img)
else:
LGR.info("Using user-defined mask")
mask, masksum = utils.make_adaptive_mask(
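For reference, when no mask is supplied, the t2smap workflow now derives an initial mask from the first echo with nilearn's compute_epi_mask. A standalone sketch of that step (file names are placeholders):

import nibabel as nib
from nilearn.masking import compute_epi_mask

first_echo_img = nib.load("echo1.nii.gz")  # placeholder first-echo image
mask_img = compute_epi_mask(first_echo_img)  # EPI-based brain mask (nibabel image)
mask_img.to_filename("initial_mask.nii.gz")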
