Add --masktype option to control adaptive mask method #1057

Merged
merged 29 commits, Apr 8, 2024
Changes from 24 commits
29 commits
5683d32
Remove getsum parameter.
tsalo Mar 10, 2024
772620e
Keep removing.
tsalo Mar 10, 2024
9b852bd
Add decreasing-signal-based adaptive mask.
tsalo Mar 10, 2024
b595dff
Update test.
tsalo Mar 10, 2024
bb1eb79
Improve docstring.
tsalo Mar 10, 2024
ac4114e
Update tedana/utils.py
tsalo Mar 13, 2024
8f5bda2
Expand on logic of first adaptive mask method.
tsalo Mar 13, 2024
b037d6c
Merge remote-tracking branch 'upstream/main' into fix-adaptive-mask
tsalo Mar 29, 2024
ca8c1b9
Add masktype option.
tsalo Mar 29, 2024
ae59d8b
Update utils.py
tsalo Mar 29, 2024
ce618b3
Add "none" option.
tsalo Mar 29, 2024
33c4c34
Make dropout (old behavior) the default.
tsalo Mar 29, 2024
a7b5cb1
Update utils.py
tsalo Mar 30, 2024
823aa5c
Improve test.
tsalo Apr 2, 2024
c02b9d9
Fix.
tsalo Apr 2, 2024
e7b29a1
Update test_utils.py
tsalo Apr 2, 2024
2432f3e
Update test_utils.py
tsalo Apr 2, 2024
e356347
Update test_utils.py
tsalo Apr 3, 2024
7a8d7bd
Try updating RTD Python version.
tsalo Apr 3, 2024
116480c
Apply suggestions from code review
tsalo Apr 4, 2024
9973e45
Update docstring.
tsalo Apr 4, 2024
8e1dd4e
Replace "none" adaptive mask with base one.
tsalo Apr 4, 2024
4eb3c8f
Update test_integration.py
tsalo Apr 4, 2024
ff863fb
Update utils.py
tsalo Apr 4, 2024
386178f
Update tedana/utils.py
tsalo Apr 5, 2024
8da1e22
Update tedana/workflows/t2smap.py
tsalo Apr 5, 2024
bb5c947
Update tedana/workflows/tedana.py
tsalo Apr 5, 2024
783bd01
Update tedana/utils.py
tsalo Apr 5, 2024
a0d5f79
Update utils.py
tsalo Apr 5, 2024
2 changes: 1 addition & 1 deletion .readthedocs.yml
@@ -6,7 +6,7 @@ sphinx:
build:
os: ubuntu-22.04
tools:
python: "3.8"
python: "3.10"
jobs:
post_checkout:
- git fetch --unshallow
40 changes: 20 additions & 20 deletions docs/notebooks/plot_approach_figures.ipynb

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion tedana/tests/test_decay.py
@@ -16,7 +16,7 @@ def testdata1():
tes = np.array([14.5, 38.5, 62.5])
in_files = [op.join(get_test_data_path(), f"echo{i + 1}.nii.gz") for i in range(3)]
data, _ = io.load_data(in_files, n_echos=len(tes))
mask, adaptive_mask = utils.make_adaptive_mask(data, getsum=True)
mask, adaptive_mask = utils.make_adaptive_mask(data, methods=["dropout", "decay"])
fittype = "loglin"
data_dict = {
"data": data,
1 change: 1 addition & 0 deletions tedana/tests/test_integration.py
@@ -669,6 +669,7 @@ def test_integration_t2smap(skip_integration):
+ ["-e"]
+ [str(te) for te in echo_times]
+ ["--out-dir", out_dir, "--fittype", "curvefit"]
+ ["--masktype", "dropout", "decay"]
)
t2smap_cli._main(args)

2 changes: 1 addition & 1 deletion tedana/tests/test_metrics.py
@@ -17,7 +17,7 @@ def testdata1():
tes = np.array([14.5, 38.5, 62.5])
in_files = [op.join(get_test_data_path(), f"echo{i + 1}.nii.gz") for i in range(3)]
data_cat, ref_img = io.load_data(in_files, n_echos=len(tes))
_, adaptive_mask = utils.make_adaptive_mask(data_cat, getsum=True)
_, adaptive_mask = utils.make_adaptive_mask(data_cat, methods=["dropout", "decay"])
data_optcom = np.mean(data_cat, axis=1)
mixing = np.random.random((data_optcom.shape[1], 50))
io_generator = io.OutputGenerator(ref_img)
65 changes: 54 additions & 11 deletions tedana/tests/test_utils.py
@@ -75,26 +75,69 @@ def test_reshape_niimg():


def test_make_adaptive_mask():
"""Test tedana.utils.make_adaptive_mask with different methods."""
# load data make masks
data = io.load_data(fnames, n_echos=len(tes))[0]
mask, masksum = utils.make_adaptive_mask(data, getsum=True, threshold=1)

# getsum doesn't change mask values
assert np.allclose(mask, utils.make_adaptive_mask(data))
# shapes are all the same
# Just dropout method
mask, masksum = utils.make_adaptive_mask(data, threshold=1, methods=["dropout"])

assert mask.shape == masksum.shape == (64350,)
assert np.allclose(mask, (masksum >= 1).astype(bool))
# mask has correct # of entries
assert mask.sum() == 50786
# masksum has correct values
vals, counts = np.unique(masksum, return_counts=True)
assert np.allclose(vals, np.array([0, 1, 2, 3]))
assert np.allclose(counts, np.array([13564, 3977, 5060, 41749]))

# Just decay method
mask, masksum = utils.make_adaptive_mask(data, threshold=1, methods=["decay"])

assert mask.shape == masksum.shape == (64350,)
assert np.allclose(mask, (masksum >= 1).astype(bool))
assert mask.sum() == 64350 # This method can't flag first echo as bad
vals, counts = np.unique(masksum, return_counts=True)
assert np.allclose(vals, np.array([1, 2, 3]))
assert np.allclose(counts, np.array([5666, 6552, 52132]))

# Dropout and decay methods combined
mask, masksum = utils.make_adaptive_mask(data, threshold=1, methods=["dropout", "decay"])

assert mask.shape == masksum.shape == (64350,)
assert np.allclose(mask, (masksum >= 1).astype(bool))
assert mask.sum() == 50786
vals, counts = np.unique(masksum, return_counts=True)
assert np.allclose(vals, np.array([0, 1, 2, 3]))
assert np.allclose(counts, np.array([13564, 4959, 5349, 40478]))

# Adding "none" should have no effect
mask, masksum = utils.make_adaptive_mask(
data, threshold=1, methods=["dropout", "decay", "none"]
)

assert mask.shape == masksum.shape == (64350,)
assert np.allclose(mask, (masksum >= 1).astype(bool))
assert mask.sum() == 50786
vals, counts = np.unique(masksum, return_counts=True)
assert np.allclose(vals, np.array([0, 1, 2, 3]))
assert np.allclose(counts, np.array([13564, 4959, 5349, 40478]))

# Just "none"
mask, masksum = utils.make_adaptive_mask(data, threshold=1, methods=["none"])

assert mask.shape == masksum.shape == (64350,)
assert np.allclose(mask, (masksum >= 1).astype(bool))
assert mask.sum() == 64350
vals, counts = np.unique(masksum, return_counts=True)
assert np.allclose(vals, np.array([3]))
assert np.allclose(counts, np.array([64350]))

# test user-defined mask
# TODO: Add mask file with no bad voxels to test against
mask, masksum = utils.make_adaptive_mask(
data, mask=pjoin(datadir, "mask.nii.gz"), getsum=True, threshold=3
data,
mask=pjoin(datadir, "mask.nii.gz"),
threshold=3,
methods=["dropout", "decay"],
)
assert np.allclose(mask, (masksum >= 3).astype(bool))

@@ -127,17 +127,17 @@ def test_smoke_make_adaptive_mask():

in the correct format.

Note: make_adaptive_mask has optional paramters - mask and getsum.
Note: make_adaptive_mask has optional parameters - mask and threshold.
"""
n_samples = 100
n_echos = 5
n_times = 20
data = np.random.random((n_samples, n_echos, n_times))
mask = np.random.randint(2, size=n_samples)

assert utils.make_adaptive_mask(data) is not None
assert utils.make_adaptive_mask(data, mask=mask) is not None # functions with mask
assert utils.make_adaptive_mask(data, getsum=True) is not None # functions when getsumis true
assert utils.make_adaptive_mask(data, methods=["dropout", "decay"]) is not None
# functions with mask
assert utils.make_adaptive_mask(data, mask=mask, methods=["dropout", "decay"]) is not None


def test_smoke_unmask():
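As a quick orientation to the API exercised by the updated tests above, here is a minimal illustrative sketch (not part of the diff) of calling the revised function on synthetic data. The shapes and values are invented; only the keywords introduced in this PR (threshold and methods) and the two-value return are assumed.

import numpy as np

from tedana import utils

# Synthetic multi-echo data: 100 voxels x 4 echoes x 20 volumes.
data = np.random.random((100, 4, 20))

# Combine the dropout and decay heuristics; each voxel's adaptive mask value
# is the most conservative (smallest) "good echo" count across the methods.
mask, adaptive_mask = utils.make_adaptive_mask(data, threshold=1, methods=["dropout", "decay"])

# mask flags voxels with at least `threshold` good echoes; adaptive_mask
# stores the per-voxel count of good echoes (0 through n_echos).
assert mask.shape == adaptive_mask.shape == (100,)
assert np.array_equal(mask, adaptive_mask >= 1)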
177 changes: 124 additions & 53 deletions tedana/utils.py
@@ -49,88 +49,159 @@ def reshape_niimg(data):
return fdata


def make_adaptive_mask(data, mask=None, getsum=False, threshold=1):
"""
Make map of `data` specifying longest echo a voxel can be sampled with.
def make_adaptive_mask(data, mask=None, threshold=1, methods=["dropout"]):
"""Make map of `data` specifying longest echo a voxel can be sampled with.

Parameters
----------
data : (S x E x T) array_like
Multi-echo data array, where `S` is samples, `E` is echos, and `T` is
time
Multi-echo data array, where `S` is samples, `E` is echos, and `T` is time.
mask : :obj:`str` or img_like, optional
Binary mask for voxels to consider in TE Dependent ANAlysis. Default is
to generate mask from data with good signal across echoes
getsum : :obj:`bool`, optional
Return `masksum` in addition to `mask`. Default: False
threshold : :obj:`int`, optional
Minimum echo count to retain in the mask. Default is 1, which is
equivalent to not thresholding.
methods : :obj:`list`, optional
List of methods to use for adaptive mask generation. Default is ["dropout"].
Valid methods are "decay", "dropout", and "none".

Returns
-------
mask : (S,) :obj:`numpy.ndarray`
Boolean array of voxels that have sufficient signal in at least one
echo
masksum : (S,) :obj:`numpy.ndarray`
Valued array indicating the number of echos with sufficient signal in a
given voxel. Only returned if `getsum = True`
Boolean array of voxels that have sufficient signal in at least ``threshold`` echos.
adaptive_mask : (S,) :obj:`numpy.ndarray`
Valued array indicating the number of echos with sufficient signal in a given voxel.

Notes
-----
The adaptive mask can flag "bad" echoes via two methods: dropout and decay.
Either or both methods are applied to the mean magnitude across time for each voxel and echo.

Dropout

Remove voxels with relatively low mean magnitudes from the mask.
This method uses distributions of values across the mask.
Therefore, it is sensitive to the quality of the mask.
A bad mask may result in a bad adaptive mask.

This method is implemented as follows:

a. Calculate the 33rd percentile of values in the first echo,
based on voxel-wise mean over time.
b. Identify the voxel where the first echo's mean value is equal to the 33rd percentile.
Basically, this identifies an "exemplar" voxel reflecting the 33rd percentile.

- The 33rd percentile is arbitrary.
- If more than one voxel has a value exactly equal to the 33rd percentile,
keep all of them.
c. For the exemplar voxel from the first echo, calculate 1/3 of the mean value for each echo.

- This is the threshold for "good" data.
- The 1/3 value is arbitrary.
- If there was more than one exemplar voxel, retain the highest value for each echo.
d. For each voxel, count the number of echoes that have a mean value greater than the
corresponding echo's threshold.

Decay

Determine the echo at which the signal stops decreasing for each voxel.
If a voxel's signal stops decreasing as echo time increases, then we can infer that the
voxel has either fully dephased (i.e., "bottomed out") or been contaminated by noise.
This essentially identifies the last echo with "good" data.
For a scan that collects many echoes for T2* estimation or has a relatively short echo
spacing, it is possible that a later echo will have a higher value,
but the overall trend still shows a decay.
This method should not be used in those situations.

The element-wise minimum value between any selected methods is used to construct the adaptive
mask.
"""
RepLGR.info(
"An adaptive mask was then generated, in which each voxel's "
"value reflects the number of echoes with 'good' data."
f"An adaptive mask was then generated using the {'+'.join(methods)} method(s), "
"in which each voxel's value reflects the number of echoes with 'good' data."
)

# take temporal mean of echos and extract non-zero values in first echo
echo_means = data.mean(axis=-1) # temporal mean of echos
first_echo = echo_means[echo_means[:, 0] != 0, 0]

# get 33rd %ile of `first_echo` and find corresponding index
# NOTE: percentile is arbitrary
# TODO: "interpolation" param changed to "method" in numpy 1.22.0
# confirm method="higher" is the same as interpolation="higher"
# Current minimum version for numpy in tedana is 1.16 where
# there is no "method" parameter. Either wait until we bump
# our minimum numpy version to 1.22 or add a version check
# or try/catch statement.
perc = np.percentile(first_echo, 33, interpolation="higher")
perc_val = echo_means[:, 0] == perc

# extract values from all echos at relevant index
# NOTE: threshold of 1/3 voxel value is arbitrary
lthrs = np.squeeze(echo_means[perc_val].T) / 3

# if multiple samples were extracted per echo, keep the one w/the highest signal
if lthrs.ndim > 1:
lthrs = lthrs[:, lthrs.sum(axis=0).argmax()]

# determine samples where absolute value is greater than echo-specific thresholds
# and count # of echos that pass criterion
masksum = (np.abs(echo_means) > lthrs).sum(axis=-1)
assert methods, "No methods provided for adaptive mask generation."
assert all([method.lower() in ["decay", "dropout", "none"] for method in methods])

n_samples, n_echos, _ = data.shape
adaptive_masks = []

# Generate a base adaptive mask that flags any NaNs or negative values
base_adaptive_mask = np.full(n_samples, n_echos, dtype=int)
for i_voxel in range(n_samples):
voxel_data = data[i_voxel, :, :]
nonnan_echoes = ~np.any(np.isnan(voxel_data), axis=1)
nonneg_echoes = ~np.any(voxel_data < 0, axis=1)
good_echoes = nonnan_echoes * nonneg_echoes
# Find the index of the last True element in a 1D boolean array
last_good_echo = (np.where(good_echoes)[0][-1] + 1) if np.any(good_echoes) else 0
base_adaptive_mask[i_voxel] = last_good_echo

adaptive_masks.append(base_adaptive_mask)

if ("dropout" in methods) or ("decay" in methods):
echo_means = data.mean(axis=-1) # temporal mean of echos

if "dropout" in methods:
# take temporal mean of echos and extract non-zero values in first echo
first_echo = echo_means[echo_means[:, 0] != 0, 0]

# get 33rd %ile of `first_echo` and find corresponding index
# NOTE: percentile is arbitrary
# TODO: "interpolation" param changed to "method" in numpy 1.22.0
# confirm method="higher" is the same as interpolation="higher"
# Current minimum version for numpy in tedana is 1.16 where
# there is no "method" parameter. Either wait until we bump
# our minimum numpy version to 1.22 or add a version check
# or try/catch statement.
perc = np.percentile(first_echo, 33, interpolation="higher")
perc_val = echo_means[:, 0] == perc

# extract values from all echos at relevant index
# NOTE: threshold of 1/3 voxel value is arbitrary
lthrs = np.squeeze(echo_means[perc_val].T) / 3

# if multiple samples were extracted per echo, keep the one w/the highest signal
if lthrs.ndim > 1:
lthrs = lthrs[:, lthrs.sum(axis=0).argmax()]

# determine samples where absolute value is greater than echo-specific thresholds
# and count # of echos that pass criterion
dropout_adaptive_mask = (np.abs(echo_means) > lthrs).sum(axis=-1)
adaptive_masks.append(dropout_adaptive_mask)

if "decay" in methods:
# Determine where voxels stop decreasing in signal from echo to echo
echo_diffs = np.hstack((np.full((n_samples, 1), -1), np.diff(echo_means, axis=1)))
diff_mask = echo_diffs >= 0 # flag where signal is not decreasing
last_decreasing_echo = diff_mask.argmax(axis=1)
last_decreasing_echo[last_decreasing_echo == 0] = n_echos # if no increase, set to n_echos
adaptive_masks.append(last_decreasing_echo)

# Retain the most conservative of the selected adaptive mask estimates
adaptive_mask = np.minimum.reduce(adaptive_masks)

if mask is None:
# make it a boolean mask (where we have at least `threshold` echoes with good signal)
mask = (masksum >= threshold).astype(bool)
masksum[masksum < threshold] = 0
mask = (adaptive_mask >= threshold).astype(bool)
adaptive_mask[adaptive_mask < threshold] = 0
else:
# if the user has supplied a binary mask
mask = reshape_niimg(mask).astype(bool)
masksum = masksum * mask
# reduce mask based on masksum
adaptive_mask = adaptive_mask * mask
# reduce mask based on adaptive_mask
# TODO: Use visual report to make checking the reduced mask easier
if np.any(masksum[mask] < threshold):
n_bad_voxels = np.sum(masksum[mask] < threshold)
if np.any(adaptive_mask[mask] < threshold):
n_bad_voxels = np.sum(adaptive_mask[mask] < threshold)
LGR.warning(
f"{n_bad_voxels} voxels in user-defined mask do not have good "
"signal. Removing voxels from mask."
)
masksum[masksum < threshold] = 0
mask = masksum.astype(bool)

if getsum:
return mask, masksum
adaptive_mask[adaptive_mask < threshold] = 0
mask = adaptive_mask.astype(bool)

return mask
return mask, adaptive_mask


def unmask(data, mask):
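To make the dropout and decay logic described in the docstring above more concrete, the following is a small standalone sketch (not part of the diff) that mimics both heuristics for a single toy voxel. The numbers and per-echo thresholds are invented purely for illustration; this is not tedana's implementation.

import numpy as np

# Mean signal over time for one voxel at four echo times (toy values).
echo_means = np.array([100.0, 60.0, 35.0, 40.0])

# Dropout-style count: echoes whose mean exceeds an echo-specific threshold.
# In tedana the thresholds come from a 33rd-percentile "exemplar" voxel in the
# first echo, divided by 3; here they are simply made up.
dropout_thresholds = np.array([30.0, 25.0, 20.0, 15.0])
dropout_count = int(np.sum(np.abs(echo_means) > dropout_thresholds))  # 4

# Decay-style count: echoes before the signal first stops decreasing.
diffs = np.diff(echo_means)  # [-40., -25., 5.]
not_decreasing = np.where(diffs >= 0)[0]
decay_count = int(not_decreasing[0] + 1) if not_decreasing.size else len(echo_means)  # 3

# The adaptive mask keeps the most conservative (smallest) count per voxel,
# mirroring the element-wise minimum across methods in make_adaptive_mask.
adaptive_value = min(dropout_count, decay_count)  # 3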
20 changes: 19 additions & 1 deletion tedana/workflows/t2smap.py
@@ -84,6 +84,16 @@ def _get_parser():
help=("Filenaming convention. bids will use the latest BIDS derivatives version."),
default="bids",
)
optional.add_argument(
"--masktype",
dest="masktype",
required=False,
action="store",
nargs="+",
help="Method(s) by which to define the adaptive mask.",
choices=["dropout", "decay", "none"],
default=["dropout"],
)
optional.add_argument(
"--fittype",
dest="fittype",
@@ -152,6 +162,7 @@ def t2smap_workflow(
mask=None,
prefix="",
convention="bids",
masktype=["dropout"],
fittype="loglin",
fitmode="all",
combmode="t2s",
@@ -176,6 +187,8 @@ def t2smap_workflow(
mask : :obj:`str`, optional
Binary mask of voxels to include in TE Dependent ANAlysis. Must be spatially
aligned with `data`.
masktype : {'dropout', 'decay', 'none'} or :obj:`list`, optional
Method(s) by which to define the adaptive mask. Default is ["dropout"].
fittype : {'loglin', 'curvefit'}, optional
Monoexponential fitting method.
'loglin' means to use the the default linear fit to the log of
@@ -278,7 +291,12 @@ def t2smap_workflow(
LGR.info("Computing adaptive mask")
else:
LGR.info("Using user-defined mask")
mask, masksum = utils.make_adaptive_mask(catd, mask=mask, getsum=True, threshold=1)
mask, masksum = utils.make_adaptive_mask(
catd,
mask=mask,
threshold=1,
methods=masktype,
)

LGR.info("Computing adaptive T2* map")
if fitmode == "all":
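For completeness, a hedged sketch of how the new option might be used from Python rather than from the command line. The file names, echo times, and output directory are placeholders, and the positional data and echo-time arguments are assumed to match the workflow's existing signature.

from tedana.workflows import t2smap_workflow

# masktype mirrors the CLI's --masktype choices added in this PR.
t2smap_workflow(
    ["echo1.nii.gz", "echo2.nii.gz", "echo3.nii.gz"],
    [14.5, 38.5, 62.5],
    out_dir="t2smap_out",
    fittype="curvefit",
    masktype=["dropout", "decay"],
)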