diff --git a/get_version.py b/get_version.py
deleted file mode 100644
index 65fb10d7..00000000
--- a/get_version.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python
-"""Read current version."""
-import sys
-import os.path as op
-
-
-def main():
-    sys.path.insert(0, op.abspath("."))
-    from nibabies._version import get_versions
-
-    print(get_versions()["version"])
-
-
-if __name__ == "__main__":
-    main()
\ No newline at end of file
diff --git a/nibabies/__about__.py b/nibabies/__about__.py
index ff65e704..54176d01 100644
--- a/nibabies/__about__.py
+++ b/nibabies/__about__.py
@@ -5,15 +5,15 @@
 try:
     from ._version import __version__
 except ImportError:
-    __version__ = "0+unknown"
+    __version__ = '0+unknown'
 
-__org__ = "nipreps"
-__packagename__ = "nibabies"
-__copyright__ = "Copyright 2023, Center for Reproducible Neuroscience, Stanford University"
+__org__ = 'nipreps'
+__packagename__ = 'nibabies'
+__copyright__ = 'Copyright 2023, Center for Reproducible Neuroscience, Stanford University'
 __credits__ = (
-    "Contributors: please check the ``.zenodo.json`` file at the top-level folder"
-    "of the repository"
+    'Contributors: please check the ``.zenodo.json`` file at the top-level folder '
+    'of the repository'
 )
-__url__ = f"https://github.com/{__org__}/{__packagename__}"
+__url__ = f'https://github.com/{__org__}/{__packagename__}'
 
-DOWNLOAD_URL = f"https://github.com/{__org__}/{__packagename__}/archive/{__version__}.tar.gz"
+DOWNLOAD_URL = f'https://github.com/{__org__}/{__packagename__}/archive/{__version__}.tar.gz'
diff --git a/nibabies/__init__.py b/nibabies/__init__.py
index 0325cfb2..aa4b8275 100644
--- a/nibabies/__init__.py
+++ b/nibabies/__init__.py
@@ -1 +1 @@
-from .__about__ import __version__
+from .__about__ import __version__  # noqa: F401
diff --git a/nibabies/_warnings.py b/nibabies/_warnings.py
index 88bdad5d..ef4677f0 100644
--- a/nibabies/_warnings.py
+++ b/nibabies/_warnings.py
@@ -3,7 +3,7 @@
 import logging
 import warnings
 
-_wlog = logging.getLogger("py.warnings")
+_wlog = logging.getLogger('py.warnings')
 _wlog.addHandler(logging.NullHandler())
 
 
@@ -11,9 +11,9 @@ def _warn(message, category=None, stacklevel=1, source=None):
     """Redefine the warning function."""
     if category is not None:
         category = type(category).__name__
-        category = category.replace("type", "WARNING")
+        category = category.replace('type', 'WARNING')
 
-    logging.getLogger("py.warnings").warning(f"{category or 'WARNING'}: {message}")
+    logging.getLogger('py.warnings').warning(f"{category or 'WARNING'}: {message}")
 
 
 def _showwarning(message, category, filename, lineno, file=None, line=None):
diff --git a/nibabies/cli/mcribs.py b/nibabies/cli/mcribs.py
index 9fb73324..d57c504f 100644
--- a/nibabies/cli/mcribs.py
+++ b/nibabies/cli/mcribs.py
@@ -8,7 +8,7 @@
 
 
 def _parser():
-    parser = ArgumentParser(description="Test script for MCRIBS surfaces")
+    parser = ArgumentParser(description='Test script for MCRIBS surfaces')
     parser.add_argument('subject', help='Subject ID')
     parser.add_argument('t2w', type=os.path.abspath, help='Input T2w (radioisotropic)')
     parser.add_argument(
diff --git a/nibabies/cli/run.py b/nibabies/cli/run.py
index 047f7e0a..d666b3cc 100755
--- a/nibabies/cli/run.py
+++ b/nibabies/cli/run.py
@@ -85,7 +85,7 @@
 
     config.loggers.workflow.log(
         15,
-        '\n'.join(['nibabies config:'] + ['\t\t%s' % s for s in config.dumps().splitlines()]),
+        '\n'.join(['nibabies config:'] + [f'\t\t{s}' for s in config.dumps().splitlines()]),
     )
 
     config.loggers.workflow.log(25, 'nibabies started!')
diff --git a/nibabies/cli/version.py b/nibabies/cli/version.py
index 7e52f3c1..44badb8c 100644
--- a/nibabies/cli/version.py
+++ b/nibabies/cli/version.py
@@ -2,7 +2,8 @@
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """Version CLI helpers."""
 
-from datetime import datetime
+from contextlib import suppress
+from datetime import datetime, timezone
 from pathlib import Path
 
 import requests
@@ -10,7 +11,7 @@
 from .. import __version__
 
 RELEASE_EXPIRY_DAYS = 14
-DATE_FMT = "%Y%m%d"
+DATE_FMT = '%Y%m%d'
 
 
 def check_latest():
@@ -20,7 +21,8 @@
     latest = None
    date = None
    outdated = None
-    cachefile = Path.home() / ".cache" / "nibabies" / "latest"
+    now = datetime.now(tz=timezone.utc)
+    cachefile = Path.home() / '.cache' / 'nibabies' / 'latest'
    try:
        cachefile.parent.mkdir(parents=True, exist_ok=True)
    except OSError:
@@ -28,27 +30,26 @@
 
     if cachefile and cachefile.exists():
         try:
-            latest, date = cachefile.read_text().split("|")
-        except Exception:
+            latest, date = cachefile.read_text().split('|')
+        except Exception:  # noqa: S110, BLE001
             pass
         else:
             try:
                 latest = Version(latest)
-                date = datetime.strptime(date, DATE_FMT)
+                date = datetime.strptime(date, DATE_FMT).astimezone(timezone.utc)
             except (InvalidVersion, ValueError):
                 latest = None
             else:
-                if abs((datetime.now() - date).days) > RELEASE_EXPIRY_DAYS:
+                if abs((now - date).days) > RELEASE_EXPIRY_DAYS:
                     outdated = True
 
     if latest is None or outdated is True:
-        try:
-            response = requests.get(url="https://pypi.org/pypi/nibabies/json", timeout=1.0)
-        except Exception:
-            response = None
+        response = None
+        with suppress(Exception):
+            response = requests.get(url='https://pypi.org/pypi/nibabies/json', timeout=1.0)
         if response and response.status_code == 200:
-            versions = [Version(rel) for rel in response.json()["releases"].keys()]
+            versions = [Version(rel) for rel in response.json()['releases'].keys()]
             versions = [rel for rel in versions if not rel.is_prerelease]
             if versions:
                 latest = sorted(versions)[-1]
@@ -56,10 +57,8 @@
             latest = None
 
     if cachefile is not None and latest is not None:
-        try:
-            cachefile.write_text("|".join(("%s" % latest, datetime.now().strftime(DATE_FMT))))
-        except Exception:
-            pass
+        with suppress(OSError):
+            cachefile.write_text(f'{latest}|{now.strftime(DATE_FMT)}')
 
     return latest
 
@@ -67,18 +66,17 @@
 def is_flagged():
     """Check whether current version is flagged."""
     # https://raw.githubusercontent.com/nipreps/fmriprep/master/.versions.json
-    flagged = tuple()
-    try:
+    flagged = ()
+    response = None
+    with suppress(Exception):
         response = requests.get(
             url="""\
 https://raw.githubusercontent.com/nipreps/nibabies/master/.versions.json""",
             timeout=1.0,
         )
-    except Exception:
-        response = None
 
     if response and response.status_code == 200:
-        flagged = response.json().get("flagged", {}) or {}
+        flagged = response.json().get('flagged', {}) or {}
 
     if __version__ in flagged:
         return True, flagged[__version__]
diff --git a/nibabies/conftest.py b/nibabies/conftest.py
index 8070b17c..a9b789b2 100644
--- a/nibabies/conftest.py
+++ b/nibabies/conftest.py
@@ -8,22 +8,22 @@
 from nibabies.data import load as load_data
 
 FILES = (
-    "functional.nii",
-    "anatomical.nii",
-    "func.dlabel.nii",
-    "func.dtseries.nii",
-    "epi.nii",
-    "T1w.nii",
-    "func_to_struct.mat",
-    "atlas.nii",
-    "label_list.txt",
-    "sub-01_run-01_echo-1_bold.nii.gz",
-    "sub-01_run-01_echo-2_bold.nii.gz",
-    "sub-01_run-01_echo-3_bold.nii.gz",
+    'functional.nii',
+    'anatomical.nii',
+    'func.dlabel.nii',
+    'func.dtseries.nii',
+    'epi.nii',
+    'T1w.nii',
+    'func_to_struct.mat',
+    'atlas.nii',
+    'label_list.txt',
+    'sub-01_run-01_echo-1_bold.nii.gz',
+    'sub-01_run-01_echo-2_bold.nii.gz',
+    'sub-01_run-01_echo-3_bold.nii.gz',
 )
 
 
-@pytest.fixture(scope="package")
+@pytest.fixture(scope='package')
 def data_dir():
     with TemporaryDirectory() as tmpdir:
         tmp_path = Path(tmpdir)
@@ -33,7 +33,7 @@
 
 
 @pytest.fixture(autouse=True)
-def set_namespace(doctest_namespace, data_dir):
-    doctest_namespace["data_dir"] = data_dir
-    doctest_namespace["test_data"] = load_data.cached('../tests/data')
-    doctest_namespace["Path"] = Path
+def _populate_namespace(doctest_namespace, data_dir):
+    doctest_namespace['data_dir'] = data_dir
+    doctest_namespace['test_data'] = load_data.cached('../tests/data')
+    doctest_namespace['Path'] = Path
diff --git a/nibabies/data/__init__.py b/nibabies/data/__init__.py
index 557c96e2..9dbc26f2 100644
--- a/nibabies/data/__init__.py
+++ b/nibabies/data/__init__.py
@@ -1,183 +1,3 @@
-"""Data file retrieval
-
-.. autofunction:: load
-
-.. automethod:: load.readable
-
-.. automethod:: load.as_path
-
-.. automethod:: load.cached
-
-.. autoclass:: Loader
-"""
-
-from __future__ import annotations
-
-import atexit
-import os
-from contextlib import AbstractContextManager, ExitStack
-from functools import cached_property
-from pathlib import Path
-from types import ModuleType
-from typing import Union
-
-try:
-    from functools import cache
-except ImportError:  # PY38
-    from functools import lru_cache as cache
-
-try:  # Prefer backport to leave consistency to dependency spec
-    from importlib_resources import as_file, files
-except ImportError:
-    from importlib.resources import as_file, files  # type: ignore
-
-try:  # Prefer stdlib so Sphinx can link to authoritative documentation
-    from importlib.resources.abc import Traversable
-except ImportError:
-    from importlib_resources.abc import Traversable
-
-__all__ = ["load"]
-
-
-class Loader:
-    """A loader for package files relative to a module
-
-    This class wraps :mod:`importlib.resources` to provide a getter
-    function with an interpreter-lifetime scope. For typical packages
-    it simply passes through filesystem paths as :class:`~pathlib.Path`
-    objects. For zipped distributions, it will unpack the files into
-    a temporary directory that is cleaned up on interpreter exit.
-
-    This loader accepts a fully-qualified module name or a module
-    object.
-
-    Expected usage::
-
-        '''Data package
-
-        .. autofunction:: load_data
-
-        .. automethod:: load_data.readable
-
-        .. automethod:: load_data.as_path
-
-        .. automethod:: load_data.cached
-        '''
-
-        from nibabies.data import Loader
-
-        load_data = Loader(__package__)
-
-    :class:`~Loader` objects implement the :func:`callable` interface
-    and generate a docstring, and are intended to be treated and documented
-    as functions.
-
-    For greater flexibility and improved readability over the ``importlib.resources``
-    interface, explicit methods are provided to access resources.
-
-    +---------------+----------------+------------------+
-    | On-filesystem | Lifetime       | Method           |
-    +---------------+----------------+------------------+
-    | `True`        | Interpreter    | :meth:`cached`   |
-    +---------------+----------------+------------------+
-    | `True`        | `with` context | :meth:`as_path`  |
-    +---------------+----------------+------------------+
-    | `False`       | n/a            | :meth:`readable` |
-    +---------------+----------------+------------------+
-
-    It is also possible to use ``Loader`` directly::
-
-        from nibabies.data import Loader
-
-        Loader(other_package).readable('data/resource.ext').read_text()
-
-        with Loader(other_package).as_path('data') as pkgdata:
-            # Call function that requires full Path implementation
-            func(pkgdata)
-
-        # contrast to
-
-        from importlib_resources import files, as_file
-
-        files(other_package).joinpath('data/resource.ext').read_text()
-
-        with as_file(files(other_package) / 'data') as pkgdata:
-            func(pkgdata)
-
-    .. automethod:: readable
-
-    .. automethod:: as_path
-
-    .. automethod:: cached
-    """
-
-    def __init__(self, anchor: Union[str, ModuleType]):
-        self._anchor = anchor
-        self.files = files(anchor)
-        self.exit_stack = ExitStack()
-        atexit.register(self.exit_stack.close)
-        # Allow class to have a different docstring from instances
-        self.__doc__ = self._doc
-
-    @cached_property
-    def _doc(self):
-        """Construct docstring for instances
-
-        Lists the public top-level paths inside the location, where
-        non-public means has a `.` or `_` prefix or is a 'tests'
-        directory.
-        """
-        top_level = sorted(
-            os.path.relpath(p, self.files) + "/"[: p.is_dir()]
-            for p in self.files.iterdir()
-            if p.name[0] not in (".", "_") and p.name != "tests"
-        )
-        doclines = [
-            f"Load package files relative to ``{self._anchor}``.",
-            "",
-            "This package contains the following (top-level) files/directories:",
-            "",
-            *(f"* ``{path}``" for path in top_level),
-        ]
-
-        return "\n".join(doclines)
-
-    def readable(self, *segments) -> Traversable:
-        """Provide read access to a resource through a Path-like interface.
-
-        This file may or may not exist on the filesystem, and may be
-        efficiently used for read operations, including directory traversal.
-
-        This result is not cached or copied to the filesystem in cases where
-        that would be necessary.
-        """
-        return self.files.joinpath(*segments)
-
-    def as_path(self, *segments) -> AbstractContextManager[Path]:
-        """Ensure data is available as a :class:`~pathlib.Path`.
-
-        This method generates a context manager that yields a Path when
-        entered.
-
-        This result is not cached, and any temporary files that are created
-        are deleted when the context is exited.
-        """
-        return as_file(self.files.joinpath(*segments))
-
-    @cache
-    def cached(self, *segments) -> Path:
-        """Ensure data is available as a :class:`~pathlib.Path`.
-
-        Any temporary files that are created remain available throughout
-        the duration of the program, and are deleted when Python exits.
-
-        Results are cached so that multiple calls do not unpack the same
-        data multiple times, but the cache is sensitive to the specific
-        argument(s) passed.
- """ - return self.exit_stack.enter_context(as_file(self.files.joinpath(*segments))) - - __call__ = cached - +from acres import Loader load = Loader(__package__) diff --git a/nibabies/interfaces/__init__.py b/nibabies/interfaces/__init__.py index fa8eb4a0..63b63cb4 100644 --- a/nibabies/interfaces/__init__.py +++ b/nibabies/interfaces/__init__.py @@ -3,4 +3,4 @@ # TODO: Set default as default in niworkflows class DerivativesDataSink(_DDS): - out_path_base = "" + out_path_base = '' diff --git a/nibabies/interfaces/freesurfer.py b/nibabies/interfaces/freesurfer.py index 99fd3266..42e54124 100644 --- a/nibabies/interfaces/freesurfer.py +++ b/nibabies/interfaces/freesurfer.py @@ -19,46 +19,46 @@ class InfantReconAllInputSpec(CommandLineInputSpec): subjects_dir = Directory( exists=True, hash_files=False, - desc="path to subjects directory", + desc='path to subjects directory', ) subject_id = traits.Str( - "recon_all", - argstr="--subject %s", - desc="subject name", + 'recon_all', + argstr='--subject %s', + desc='subject name', required=True, ) t1_file = File( exists=True, - desc="path to T1w file", + desc='path to T1w file', ) age = traits.Range( low=0, - argstr="--age %d", - desc="Subject age in months", + argstr='--age %d', + desc='Subject age in months', ) outdir = Directory( - argstr="--outdir %s", - desc="Output directory where the reconall results are written." - "The default location is /", + argstr='--outdir %s', + desc='Output directory where the reconall results are written.' + 'The default location is /', ) mask_file = traits.File( - argstr="--masked %s", - desc="Skull-stripped and INU-corrected T1 (skips skullstripping step)", + argstr='--masked %s', + desc='Skull-stripped and INU-corrected T1 (skips skullstripping step)', ) newborn = traits.Bool( - xor=["age"], - argstr="--newborn", - help="Use newborns from set", + xor=['age'], + argstr='--newborn', + help='Use newborns from set', ) aseg_file = File( - argstr="--segfile %s", - desc="Pre-computed segmentation file", + argstr='--segfile %s', + desc='Pre-computed segmentation file', ) class InfantReconAllOutputSpec(TraitedSpec): - outdir = Directory(exists=True, desc="Output directory.") - subject_id = traits.Str(desc="Subject name for whom to retrieve data") + outdir = Directory(exists=True, desc='Output directory.') + subject_id = traits.Str(desc='Subject name for whom to retrieve data') class InfantReconAll(CommandLine): @@ -66,7 +66,7 @@ class InfantReconAll(CommandLine): Runs the infant recon all pipeline """ - _cmd = "infant_recon_all" + _cmd = 'infant_recon_all' input_spec = InfantReconAllInputSpec output_spec = InfantReconAllOutputSpec _no_run = False @@ -76,15 +76,15 @@ def cmdline(self): cmd = super().cmdline # check if previously run if isdefined(self.inputs.outdir): - logdir = Path(self.inputs.outdir) / "log" + logdir = Path(self.inputs.outdir) / 'log' if logdir.exists(): try: - log = sorted(list(logdir.glob("summary.*.log")))[0] - self._no_run = "Successfully finished infant_recon_all" in log.read_text() + log = sorted(logdir.glob('summary.*.log'))[0] + self._no_run = 'Successfully finished infant_recon_all' in log.read_text() except IndexError: pass if self._no_run: - return "echo infant_recon_all: nothing to do" + return 'echo infant_recon_all: nothing to do' return cmd def _run_interface(self, runtime): @@ -96,34 +96,34 @@ def _run_interface(self, runtime): self.inputs.outdir = str(subjdir) try: subjdir.mkdir(parents=True, exist_ok=True) - except OSError: + except OSError as err: raise OSError( - f"Current 
-                f"Current SUBJECTS_DIR <{subjdir}> cannot be written to. To fix this,"
-                "either define the input or unset the environmental variable."
-            )
+                f'Current SUBJECTS_DIR <{subjdir}> cannot be written to. To fix this, '
+                'either define the input or unset the environmental variable.'
+            ) from err
 
         # T1 image is expected to be in a specific location if no mask is present
-        if not (subjdir / "mprage.nii.gz").exists() and not (subjdir / "mprage.mgz").exists():
+        if not (subjdir / 'mprage.nii.gz').exists() and not (subjdir / 'mprage.mgz').exists():
             if isdefined(self.inputs.t1_file):
-                Path(self.inputs.t1_file).symlink_to(subjdir / "mprage.nii.gz")
+                Path(self.inputs.t1_file).symlink_to(subjdir / 'mprage.nii.gz')
             elif not isdefined(self.inputs.mask_file):
-                raise RuntimeError("Neither T1 or mask present!")
+                raise RuntimeError('Neither T1 nor mask present!')
 
         # warn users that this might fail...
         if not check_total_memory(recommended_gb=20):
-            logging.getLogger("nipype.interface").warning(
-                f"For best results, run {self._cmd} with at least 20GB available RAM."
+            logging.getLogger('nipype.interface').warning(
+                f'For best results, run {self._cmd} with at least 20GB available RAM.'
             )
 
         return super()._run_interface(runtime)
 
     def _list_outputs(self):
         outputs = self._outputs().get()
-        outputs["subject_id"] = self.inputs.subject_id
-        outputs["outdir"] = self.inputs.outdir
+        outputs['subject_id'] = self.inputs.subject_id
+        outputs['outdir'] = self.inputs.outdir
         return outputs
 
 
 def _set_subjects_dir():
-    subjdir = os.getenv("SUBJECTS_DIR")
+    subjdir = os.getenv('SUBJECTS_DIR')
     if not subjdir:
         subjdir = os.getcwd()
-    os.environ["SUBJECTS_DIR"] = subjdir
+    os.environ['SUBJECTS_DIR'] = subjdir
     return subjdir
diff --git a/nibabies/interfaces/gifti.py b/nibabies/interfaces/gifti.py
index 7d1abb9b..e86bed2e 100644
--- a/nibabies/interfaces/gifti.py
+++ b/nibabies/interfaces/gifti.py
@@ -15,18 +15,18 @@
 
 
 class _MaskGiftiInputSpec(BaseInterfaceInputSpec):
-    in_file = File(exists=True, mandatory=True, desc="Input GIFTI (n-darrays)")
-    mask_file = File(exists=True, mandatory=True, desc="Input mask (single binary darray)")
+    in_file = File(exists=True, mandatory=True, desc='Input GIFTI (n-darrays)')
+    mask_file = File(exists=True, mandatory=True, desc='Input mask (single binary darray)')
     threshold = traits.Float(
-        desc="If mask is probabilistic, inclusion limit",
+        desc='If mask is probabilistic, inclusion limit',
     )
     metadata = traits.Dict(
-        desc="Metadata to insert into GIFTI",
+        desc='Metadata to insert into GIFTI',
     )
 
 
 class _MaskGiftiOutputSpec(TraitedSpec):
-    out_file = File(desc="Masked file")
+    out_file = File(desc='Masked file')
 
 
 class MaskGifti(SimpleInterface):
@@ -36,7 +36,7 @@ class MaskGifti(SimpleInterface):
     output_spec = _MaskGiftiOutputSpec
 
     def _run_interface(self, runtime):
-        self._results["out_file"] = _mask_gifti(
+        self._results['out_file'] = _mask_gifti(
             self.inputs.in_file,
             self.inputs.mask_file,
             threshold=self.inputs.threshold or None,
@@ -63,8 +63,8 @@ def _mask_gifti(in_file, mask_file, *, threshold=None, metadata=None, newpath=None):
     if isinstance(data, tuple):
         try:
             data = np.vstack(data)
-        except Exception:
-            raise NotImplementedError(f"Tricky GIFTI: {in_file} not supported.")
+        except Exception as err:  # noqa: BLE001
+            raise NotImplementedError(f'Tricky GIFTI: {in_file} not supported.') from err
     else:
         data = data.T
     masked = data[:, indices]
@@ -77,7 +77,7 @@ def _mask_gifti(in_file, mask_file, *, threshold=None, metadata=None, newpath=None):
 
     # Finalize by adding additional metadata to file
     metad = {
f"MaskGifti (NiBabies-{__version__})"}, + **{'CreatedBy': f'MaskGifti (NiBabies-{__version__})'}, **metadata, } if int(nb.__version__[0]) >= 4: # API will change in 4.0.0 @@ -91,6 +91,6 @@ def _mask_gifti(in_file, mask_file, *, threshold=None, metadata=None, newpath=No if newpath is None: newpath = Path() - out_file = str((Path(newpath) / f"masked_{Path(in_file).name}").absolute()) + out_file = str((Path(newpath) / f'masked_{Path(in_file).name}').absolute()) nb.save(img, out_file) return out_file diff --git a/nibabies/interfaces/maths.py b/nibabies/interfaces/maths.py index bf604f62..62fde540 100644 --- a/nibabies/interfaces/maths.py +++ b/nibabies/interfaces/maths.py @@ -8,16 +8,16 @@ class ClipInputSpec(TraitedSpec): - in_file = File(exists=True, mandatory=True, desc="Input imaging file") - out_file = File(desc="Output file name") + in_file = File(exists=True, mandatory=True, desc='Input imaging file') + out_file = File(desc='Output file name') minimum = traits.Float( - -np.inf, usedefault=True, desc="Values under minimum are set to minimum" + -np.inf, usedefault=True, desc='Values under minimum are set to minimum' ) - maximum = traits.Float(np.inf, usedefault=True, desc="Values over maximum are set to maximum") + maximum = traits.Float(np.inf, usedefault=True, desc='Values over maximum are set to maximum') class ClipOutputSpec(TraitedSpec): - out_file = File(desc="Output file name") + out_file = File(desc='Output file name') class Clip(SimpleInterface): @@ -42,24 +42,24 @@ def _run_interface(self, runtime): if np.any((data < self.inputs.minimum) | (data > self.inputs.maximum)): if not out_file: out_file = fname_presuffix( - self.inputs.in_file, suffix="_clipped", newpath=runtime.cwd + self.inputs.in_file, suffix='_clipped', newpath=runtime.cwd ) np.clip(data, self.inputs.minimum, self.inputs.maximum, out=data) img.__class__(data, img.affine, img.header).to_filename(out_file) elif not out_file: out_file = self.inputs.in_file - self._results["out_file"] = out_file + self._results['out_file'] = out_file return runtime class Label2MaskInputSpec(TraitedSpec): - in_file = File(exists=True, mandatory=True, desc="Input label file") - label_val = traits.Int(mandatory=True, dec="Label value to create mask from") + in_file = File(exists=True, mandatory=True, desc='Input label file') + label_val = traits.Int(mandatory=True, dec='Label value to create mask from') class Label2MaskOutputSpec(TraitedSpec): - out_file = File(desc="Output file name") + out_file = File(desc='Output file name') class Label2Mask(SimpleInterface): @@ -77,9 +77,9 @@ def _run_interface(self, runtime): out_img = img.__class__(mask, img.affine, img.header) out_img.set_data_dtype(np.uint8) - out_file = fname_presuffix(self.inputs.in_file, suffix="_mask", newpath=runtime.cwd) + out_file = fname_presuffix(self.inputs.in_file, suffix='_mask', newpath=runtime.cwd) out_img.to_filename(out_file) - self._results["out_file"] = out_file + self._results['out_file'] = out_file return runtime diff --git a/nibabies/interfaces/multiecho.py b/nibabies/interfaces/multiecho.py index cc8cf293..79381bd4 100644 --- a/nibabies/interfaces/multiecho.py +++ b/nibabies/interfaces/multiecho.py @@ -117,7 +117,7 @@ class T2SMap(CommandLine): def _format_arg(self, name, trait_spec, value): if name == 'echo_times': value = [te * 1000 for te in value] - return super(T2SMap, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = self._outputs().get() diff --git 
diff --git a/nibabies/interfaces/nibabel.py b/nibabies/interfaces/nibabel.py
index c7e64f1e..1ba475df 100644
--- a/nibabies/interfaces/nibabel.py
+++ b/nibabies/interfaces/nibabel.py
@@ -10,17 +10,17 @@
 
 
 class ReorientImageInputSpec(BaseInterfaceInputSpec):
-    in_file = File(exists=True, mandatory=True, desc="Moving file")
+    in_file = File(exists=True, mandatory=True, desc='Moving file')
     target_file = File(
-        exists=True, xor=["target_orientation"], desc="Reference file to reorient to"
+        exists=True, xor=['target_orientation'], desc='Reference file to reorient to'
     )
     target_orientation = traits.Str(
-        xor=["target_file"], desc="Axis codes of coordinate system to reorient to"
+        xor=['target_file'], desc='Axis codes of coordinate system to reorient to'
     )
 
 
 class ReorientImageOutputSpec(TraitedSpec):
-    out_file = File(desc="Reoriented file")
+    out_file = File(desc='Reoriented file')
@@ -28,7 +28,7 @@ class ReorientImage(SimpleInterface):
     output_spec = ReorientImageOutputSpec
 
     def _run_interface(self, runtime):
-        self._results["out_file"] = reorient_image(
+        self._results['out_file'] = reorient_image(
             self.inputs.in_file,
             target_file=self.inputs.target_file,
             target_ornt=self.inputs.target_orientation,
@@ -67,6 +67,6 @@ def reorient_image(
 
     if newpath is None:
         newpath = Path()
-    out_file = str((Path(newpath) / "reoriented.nii.gz").absolute())
+    out_file = str((Path(newpath) / 'reoriented.nii.gz').absolute())
     reoriented.to_filename(out_file)
     return out_file
diff --git a/nibabies/reports/core.py b/nibabies/reports/core.py
index 58f5661a..2b0c142e 100644
--- a/nibabies/reports/core.py
+++ b/nibabies/reports/core.py
@@ -37,7 +37,7 @@ def generate_reports(
     """Execute run_reports on a list of subjects."""
     reportlets_dir = None
     if work_dir is not None:
-        reportlets_dir = Path(work_dir) / "reportlets"
+        reportlets_dir = Path(work_dir) / 'reportlets'
 
     report_errors = []
     for subject, session in sub_ses_list:
@@ -56,13 +56,15 @@ def generate_reports(
     if errno:
         import logging
 
-        logger = logging.getLogger("cli")
-        error_list = ", ".join(
-            "%s (%d)" % (subid, err) for subid, err in zip(sub_ses_list, report_errors) if err
+        logger = logging.getLogger('cli')
+        error_list = ', '.join(
+            '%s (%d)' % (subid, err)
+            for subid, err in zip(sub_ses_list, report_errors, strict=False)
+            if err
         )
         logger.error(
-            "Preprocessing did not finish successfully. Errors occurred while processing "
-            "data from participants: %s. Check the HTML reports for details.",
+            'Preprocessing did not finish successfully. Errors occurred while processing '
+            'data from participants: %s. Check the HTML reports for details.',
             error_list,
         )
     return errno
diff --git a/nibabies/utils/bids.py b/nibabies/utils/bids.py
index a407984a..228440d1 100644
--- a/nibabies/utils/bids.py
+++ b/nibabies/utils/bids.py
@@ -196,7 +196,7 @@ def validate_input_dir(exec_env, bids_dir, participant_label):
         ignored_subs = all_subs.difference(selected_subs)
         if ignored_subs:
             for sub in ignored_subs:
-                validator_config_dict['ignoredFiles'].append('/sub-%s/**' % sub)
+                validator_config_dict['ignoredFiles'].append(f'/sub-{sub}/**')
     with tempfile.NamedTemporaryFile(mode='w+', suffix='.json') as temp:
         temp.write(json.dumps(validator_config_dict))
         temp.flush()
@@ -266,7 +266,7 @@ def _get_age_from_tsv(
     try:
         # extract age value from row
         age = int(df.loc[mask, age_col].values[0])
-    except Exception:
+    except Exception:  # noqa: BLE001
         age = None
 
     return age
@@ -275,5 +275,5 @@ def _verify_age_json(bids_json: Path) -> bool:
     try:
         data = json.loads(bids_json.read_text())
         return data['age']['Units'].lower() == 'months'
-    except Exception:
+    except Exception:  # noqa: BLE001
         return False
diff --git a/nibabies/utils/confounds.py b/nibabies/utils/confounds.py
index eef9371a..a2729211 100644
--- a/nibabies/utils/confounds.py
+++ b/nibabies/utils/confounds.py
@@ -91,7 +91,8 @@ def acompcor_masks(in_files, is_aseg=False, zooms=None):
     from scipy.ndimage import binary_dilation
     from skimage.morphology import ball
 
-    assert len(in_files) == 3, f"Expected GM, WM, and CSF files. Got {in_files}"
+    if len(in_files) != 3:
+        raise ValueError(f'Expected GM, WM, and CSF files. Got {in_files}')
     csf_file = in_files[2]  # BIDS labeling (CSF=2; last of list)
     # Load PV maps (fast) or segments (recon-all)
@@ -113,7 +114,7 @@ def acompcor_masks(in_files, is_aseg=False, zooms=None):
         csf_file = mask2vf(
             csf_file,
             zooms=zooms,
-            out_file=str(Path("acompcor_csf.nii.gz").absolute()),
+            out_file=str(Path('acompcor_csf.nii.gz').absolute()),
         )
         csf_data = nb.load(csf_file).get_fdata()
         wm_data = mask2vf(in_files[1], zooms=zooms)
@@ -125,8 +126,8 @@ def acompcor_masks(in_files, is_aseg=False, zooms=None):
         gm_data = binary_dilation(gm_data, structure=ball(3))
 
     # Output filenames
-    wm_file = str(Path("acompcor_wm.nii.gz").absolute())
-    combined_file = str(Path("acompcor_wmcsf.nii.gz").absolute())
+    wm_file = str(Path('acompcor_wm.nii.gz').absolute())
+    combined_file = str(Path('acompcor_wmcsf.nii.gz').absolute())
 
     # Prepare WM mask
     wm_data[gm_data] = 0  # Make sure voxel does not contain GM
diff --git a/nibabies/utils/filtering.py b/nibabies/utils/filtering.py
index 7303ab06..0af51c44 100644
--- a/nibabies/utils/filtering.py
+++ b/nibabies/utils/filtering.py
@@ -4,7 +4,7 @@
 def truncation(
     in_file,
     clip_max=99.9,
-    dtype="int16",
+    dtype='int16',
     out_file=None,
     out_max=1000,
     out_min=0,
@@ -41,7 +41,7 @@ def truncation(
     data = np.clip(data, 0, np.percentile(data.reshape(-1), clip_max))
 
     if out_file is None:
-        out_file = fname_presuffix(Path(in_file).name, suffix="_trunc")
+        out_file = fname_presuffix(Path(in_file).name, suffix='_trunc')
     out_file = str(Path(out_file).absolute())
 
     img.__class__(data.astype(dtype), img.affine, hdr).to_filename(out_file)
@@ -58,7 +58,7 @@ def gaussian_filter(in_file, sigma=None, out_file=None):
     from scipy.ndimage import gaussian_filter
 
     if out_file is None:
-        out_file = fname_presuffix(Path(in_file).name, suffix="_gauss")
+        out_file = fname_presuffix(Path(in_file).name, suffix='_gauss')
     out_file = str(Path(out_file).absolute())
 
     img = nb.load(in_file)
diff --git a/nibabies/utils/telemetry.py b/nibabies/utils/telemetry.py
index 15ea4f70..6cd165de 100644
--- a/nibabies/utils/telemetry.py
+++ b/nibabies/utils/telemetry.py
@@ -2,7 +2,7 @@
 
 from .. import __version__, config
 
-migas = optional_package("migas")[0]
+migas = optional_package('migas')[0]
 
 
 def setup_migas(init_ping: bool = True, exit_ping: bool = True) -> None:
@@ -33,4 +33,4 @@ def send_crumb(**kwargs) -> dict:
     """
     Communicate with the migas telemetry server. This requires `migas.setup()` to be called.
     """
-    return migas.add_breadcrumb("nipreps/nibabies", __version__, **kwargs)
+    return migas.add_breadcrumb('nipreps/nibabies', __version__, **kwargs)
diff --git a/nibabies/utils/viz.py b/nibabies/utils/viz.py
index 8433ab48..f593e42d 100644
--- a/nibabies/utils/viz.py
+++ b/nibabies/utils/viz.py
@@ -18,7 +18,7 @@
 class fMRIPlot:
     """Generates the fMRI Summary Plot."""
 
-    __slots__ = ("func_file", "mask_data", "tr", "seg_data", "confounds", "spikes", "sort_carpet")
+    __slots__ = ('func_file', 'mask_data', 'tr', 'seg_data', 'confounds', 'spikes', 'sort_carpet')
 
     def __init__(
         self,
@@ -44,7 +44,7 @@ def __init__(
         if not isinstance(func_img, nb.Cifti2Image):
             self.mask_data = nb.fileslice.strided_scalar(func_img.shape[:3], np.uint8(1))
             if mask_file:
-                self.mask_data = np.asanyarray(nb.load(mask_file).dataobj).astype("uint8")
+                self.mask_data = np.asanyarray(nb.load(mask_file).dataobj).astype('uint8')
             if seg_file:
                 self.seg_data = np.asanyarray(nb.load(seg_file).dataobj)
@@ -54,14 +54,14 @@ def __init__(
         vlines = {}
         self.confounds = {}
         if data is None and conf_file:
-            data = pd.read_csv(conf_file, sep=r"[\t\s]+", usecols=usecols, index_col=False)
+            data = pd.read_csv(conf_file, sep=r'[\t\s]+', usecols=usecols, index_col=False)
 
         if data is not None:
             for name in data.columns.ravel():
                 self.confounds[name] = {
-                    "values": data[[name]].values.ravel().tolist(),
-                    "units": units.get(name),
-                    "cutoff": vlines.get(name),
+                    'values': data[[name]].values.ravel().tolist(),
+                    'units': units.get(name),
+                    'cutoff': vlines.get(name),
                 }
 
         self.spikes = []
@@ -73,8 +73,8 @@ def plot(self, figure=None):
         """Main plotter"""
         import seaborn as sns
 
-        sns.set_style("whitegrid")
-        sns.set_context("paper", font_scale=0.8)
+        sns.set_style('whitegrid')
+        sns.set_context('paper', font_scale=0.8)
 
         if figure is None:
             figure = plt.gcf()
@@ -96,10 +96,10 @@ def plot(self, figure=None):
         if self.confounds:
             from seaborn import color_palette
 
-            palette = color_palette("husl", nconfounds)
+            palette = color_palette('husl', nconfounds)
 
         for i, (name, kwargs) in enumerate(self.confounds.items()):
-            tseries = kwargs.pop("values")
+            tseries = kwargs.pop('values')
             confoundplot(tseries, grid[grid_id], tr=self.tr, color=palette[i], name=name, **kwargs)
             grid_id += 1
@@ -126,7 +126,7 @@ def plot_carpet(
     legend=False,
     tr=None,
     lut=None,
-    sort_rows="ward",
+    sort_rows='ward',
 ):
     """
     Plot an image representation of voxel intensities across time also know
@@ -176,35 +176,36 @@ def plot_carpet(
     img = nb.load(func) if isinstance(func, str) else func
 
     if isinstance(img, nb.Cifti2Image):
-        assert img.nifti_header.get_intent()[0] == "ConnDenseSeries", "Not a dense timeseries"
+        if img.nifti_header.get_intent()[0] != 'ConnDenseSeries':
+            raise ValueError('Not a dense timeseries')
 
         data = img.get_fdata().T
         matrix = img.header.matrix
         struct_map = {
-            "LEFT_CORTEX": 1,
-            "RIGHT_CORTEX": 2,
-            "SUBCORTICAL": 3,
-            "CEREBELLUM": 4,
+            'LEFT_CORTEX': 1,
+            'RIGHT_CORTEX': 2,
+            'SUBCORTICAL': 3,
+            'CEREBELLUM': 4,
         }
-        seg = np.zeros((data.shape[0],), dtype="uint32")
+        seg = np.zeros((data.shape[0],), dtype='uint32')
         for bm in matrix.get_index_map(1).brain_models:
-            if "CORTEX" in bm.brain_structure:
-                lidx = (1, 2)["RIGHT" in bm.brain_structure]
-            elif "CEREBELLUM" in bm.brain_structure:
+            if 'CORTEX' in bm.brain_structure:
+                lidx = (1, 2)['RIGHT' in bm.brain_structure]
+            elif 'CEREBELLUM' in bm.brain_structure:
                 lidx = 4
             else:
                 lidx = 3
             index_final = bm.index_offset + bm.index_count
             seg[bm.index_offset : index_final] = lidx
-        assert len(seg[seg < 1]) == 0, "Unassigned labels"
+        if len(seg[seg < 1]) != 0:
+            raise ValueError('Unassigned labels')
 
         # Decimate data
         data, seg = _decimate_data(data, seg, size)
 
-        cmap = ListedColormap([cm.get_cmap("Paired").colors[i] for i in (1, 0, 7, 3)])
-        assert len(cmap.colors) == len(
-            struct_map
-        ), "Mismatch between expected # of structures and colors"
+        cmap = ListedColormap([cm.get_cmap('Paired').colors[i] for i in (1, 0, 7, 3)])
+        if len(cmap.colors) != len(struct_map):
+            raise ValueError('Mismatch between expected # of structures and colors')
 
         # ensure no legend for CIFTI
         legend = False
@@ -215,7 +216,7 @@ def plot_carpet(
 
         img_nii = check_niimg_4d(
             img,
-            dtype="auto",
+            dtype='auto',
         )
         func_data = _safe_get_data(img_nii, ensure_finite=True)
         func_data = func_data[..., nskip:]
@@ -225,7 +226,7 @@ def plot_carpet(
 
         # Map segmentation
         if lut is None:
-            lut = np.zeros((256,), dtype="uint32")
+            lut = np.zeros((256,), dtype='uint32')
             lut[1:11] = 4
             lut[255] = 3
             lut[30:99] = 2
@@ -237,13 +238,13 @@ def plot_carpet(
         data, seg = _decimate_data(data, seg, size)
 
         # Set colormap
-        cmap = ListedColormap(cm.get_cmap("tab10").colors[:4])
+        cmap = ListedColormap(cm.get_cmap('tab10').colors[:4])
 
         if legend:
             epiavg = func_data.mean(3)
             epinii = nb.Nifti1Image(epiavg, img_nii.affine, img_nii.header)
             segnii = nb.Nifti1Image(lut[atlaslabels.astype(int)], epinii.affine, epinii.header)
-            segnii.set_data_dtype("uint8")
+            segnii.set_data_dtype('uint8')
             nslices = epiavg.shape[-1]
 
     return _carpet(
@@ -275,7 +276,7 @@ def _carpet(
     epinii=None,
     segnii=None,
     nslices=None,
-    sort_rows="ward",
+    sort_rows='ward',
 ):
     """Common carpetplot building code for volumetric / CIFTI plots"""
@@ -297,14 +298,14 @@ def _carpet(
         from scipy.cluster.hierarchy import dendrogram, linkage
         from sklearn.cluster import ward_tree
 
-        order = np.zeros(len(seg), dtype="uint32")
+        order = np.zeros(len(seg), dtype='uint32')
         roi_start = 0
         for i in np.unique(seg):
             roi_mask = seg == i
             roi = data[roi_mask]
-            if isinstance(sort_rows, str) and sort_rows.lower() == "linkage":
+            if isinstance(sort_rows, str) and sort_rows.lower() == 'linkage':
                 linkage_matrix = linkage(
-                    roi, method="average", metric="euclidean", optimal_ordering=True
+                    roi, method='average', metric='euclidean', optimal_ordering=True
                 )
             else:
                 children, _, n_leaves, _, distances = ward_tree(roi, return_distance=True)
@@ -313,7 +314,7 @@ def _carpet(
             dn = dendrogram(linkage_matrix, no_plot=True)
             nreg = len(roi)
             order[roi_start : roi_start + nreg] = np.argwhere(roi_mask).squeeze()[
-                np.array(dn["leaves"])
+                np.array(dn['leaves'])
             ]
             roi_start += nreg
     else:
@@ -337,20 +338,20 @@ def _carpet(
     ax0 = plt.subplot(gs[0])
     ax0.set_yticks([])
     ax0.set_xticks([])
-    ax0.imshow(seg[order, np.newaxis], interpolation="none", aspect="auto", cmap=cmap)
+    ax0.imshow(seg[order, np.newaxis], interpolation='none', aspect='auto', cmap=cmap)
     ax0.grid(False)
-    ax0.spines["left"].set_visible(False)
-    ax0.spines["bottom"].set_color("none")
-    ax0.spines["bottom"].set_visible(False)
+    ax0.spines['left'].set_visible(False)
+    ax0.spines['bottom'].set_color('none')
+    ax0.spines['bottom'].set_visible(False)
 
     # Carpet plot
     ax1 = plt.subplot(gs[1])
     ax1.imshow(
         data[order],
-        interpolation="nearest",
-        aspect="auto",
-        cmap="gray",
+        interpolation='nearest',
+        aspect='auto',
+        cmap='gray',
         vmin=v[0],
         vmax=v[1],
     )
@@ -363,28 +364,28 @@ def _carpet(
     interval = max((int(data.shape[-1] + 1) // 10, int(data.shape[-1] + 1) // 5, 1))
     xticks = list(range(0, data.shape[-1])[::interval])
     if notr:
-        xlabel = "time-points (index)"
+        xlabel = 'time-points (index)'
         xticklabels = [round(xtick) for xtick in xticks]
     else:
-        xlabel = "time (s)"
-        xticklabels = ["%.02f" % (tr * xtick) for xtick in xticks]
+        xlabel = 'time (s)'
+        xticklabels = ['%.02f' % (tr * xtick) for xtick in xticks]
     ax1.set_xticks(xticks)
     ax1.set_xlabel(xlabel)
     ax1.set_xticklabels(xticklabels)
 
     # Remove and redefine spines
-    for side in ["top", "right"]:
+    for side in ['top', 'right']:
         # Toggle the spine objects
-        ax0.spines[side].set_color("none")
+        ax0.spines[side].set_color('none')
         ax0.spines[side].set_visible(False)
-        ax1.spines[side].set_color("none")
+        ax1.spines[side].set_color('none')
         ax1.spines[side].set_visible(False)
 
-    ax1.yaxis.set_ticks_position("left")
-    ax1.xaxis.set_ticks_position("bottom")
-    ax1.spines["bottom"].set_visible(False)
-    ax1.spines["left"].set_color("none")
-    ax1.spines["left"].set_visible(False)
+    ax1.yaxis.set_ticks_position('left')
+    ax1.xaxis.set_ticks_position('bottom')
+    ax1.spines['bottom'].set_visible(False)
+    ax1.spines['left'].set_color('none')
+    ax1.spines['left'].set_visible(False)
 
     if title:
         ax1.set_title(title)
@@ -400,17 +401,17 @@ def _carpet(
             segnii,
             bg_img=epinii,
             axes=ax2,
-            display_mode="z",
+            display_mode='z',
             annotate=False,
             cut_coords=[c],
             threshold=0.1,
             cmap=cmap,
-            interpolation="nearest",
+            interpolation='nearest',
         )
 
     if output_file is not None:
         figure = plt.gcf()
-        figure.savefig(output_file, bbox_inches="tight")
+        figure.savefig(output_file, bbox_inches='tight')
         plt.close(figure)
         figure = None
         return output_file
@@ -424,9 +425,9 @@ def spikesplot(
     tr=None,
     zscored=True,
     spike_thresh=6.0,
-    title="Spike plot",
+    title='Spike plot',
     ax=None,
-    cmap="viridis",
+    cmap='viridis',
     hide_x=True,
     nskip=0,
 ):
@@ -463,9 +464,9 @@ def spikesplot(
             ax.plot(ts_z[sl, :], color=colors[sl], lw=0.5)
         else:
             markerline, stemlines, baseline = ax.stem(ts_z[sl, :])
-            plt.setp(markerline, "markerfacecolor", colors[sl])
-            plt.setp(baseline, "color", colors[sl], "linewidth", 1)
-            plt.setp(stemlines, "color", colors[sl], "linewidth", 1)
+            plt.setp(markerline, 'markerfacecolor', colors[sl])
+            plt.setp(baseline, 'color', colors[sl], 'linewidth', 1)
+            plt.setp(stemlines, 'color', colors[sl], 'linewidth', 1)
 
     # Handle X, Y axes
     ax.grid(False)
@@ -478,15 +479,15 @@ def spikesplot(
 
     if not hide_x:
         if tr is None:
-            ax.set_xlabel("time (frame #)")
+            ax.set_xlabel('time (frame #)')
         else:
-            ax.set_xlabel("time (s)")
-            ax.set_xticklabels(["%.02f" % t for t in (tr * np.array(xticks)).tolist()])
+            ax.set_xlabel('time (s)')
+            ax.set_xticklabels([f'{t:.2f}' for t in (tr * np.array(xticks)).tolist()])
 
     # Handle Y axis
-    ylabel = "slice-wise noise average on background"
+    ylabel = 'slice-wise noise average on background'
     if zscored:
-        ylabel += " (z-scored)"
+        ylabel += ' (z-scored)'
         zs_max = np.abs(ts_z).max()
         ax.set_ylim(
             (
@@ -502,13 +503,13 @@ def spikesplot(
         # yticks.insert(0, ts_z.min())
         # yticks += [ts_z.max()]
         for val in ytick_vals:
-            ax.plot((0, ntsteps - 1), (-val, -val), "k:", alpha=0.2)
-            ax.plot((0, ntsteps - 1), (val, val), "k:", alpha=0.2)
+            ax.plot((0, ntsteps - 1), (-val, -val), 'k:', alpha=0.2)
+            ax.plot((0, ntsteps - 1), (val, val), 'k:', alpha=0.2)
 
         # Plot spike threshold
         if zs_max < spike_thresh:
-            ax.plot((0, ntsteps - 1), (-spike_thresh, -spike_thresh), "k:")
-            ax.plot((0, ntsteps - 1), (spike_thresh, spike_thresh), "k:")
+            ax.plot((0, ntsteps - 1), (-spike_thresh, -spike_thresh), 'k:')
+            ax.plot((0, ntsteps - 1), (spike_thresh, spike_thresh), 'k:')
     else:
         yticks = [
             ts_z[:, nskip:].min(),
@@ -522,20 +523,20 @@ def spikesplot(
         ax.annotate(
             ylabel,
             xy=(0.0, 0.7),
-            xycoords="axes fraction",
+            xycoords='axes fraction',
             xytext=(0, 0),
-            textcoords="offset points",
-            va="center",
-            ha="left",
-            color="gray",
+            textcoords='offset points',
+            va='center',
+            ha='left',
+            color='gray',
             size=4,
             bbox={
-                "boxstyle": "round",
-                "fc": "w",
-                "ec": "none",
-                "color": "none",
"lw": 0, - "alpha": 0.8, + 'boxstyle': 'round', + 'fc': 'w', + 'ec': 'none', + 'color': 'none', + 'lw': 0, + 'alpha': 0.8, }, ) - for side in ["top", "right"]: - ax_ts.spines[side].set_color("none") + for side in ['top', 'right']: + ax_ts.spines[side].set_color('none') ax_ts.spines[side].set_visible(False) if not hide_x: - ax_ts.spines["bottom"].set_position(("outward", 20)) - ax_ts.xaxis.set_ticks_position("bottom") + ax_ts.spines['bottom'].set_position(('outward', 20)) + ax_ts.xaxis.set_ticks_position('bottom') else: - ax_ts.spines["bottom"].set_color("none") - ax_ts.spines["bottom"].set_visible(False) + ax_ts.spines['bottom'].set_color('none') + ax_ts.spines['bottom'].set_visible(False) # ax_ts.spines["left"].set_position(('outward', 30)) - ax_ts.spines["left"].set_color("none") - ax_ts.spines["left"].set_visible(False) + ax_ts.spines['left'].set_color('none') + ax_ts.spines['left'].set_visible(False) # ax_ts.yaxis.set_ticks_position('left') ax_ts.set_yticks([]) @@ -707,56 +708,56 @@ def confoundplot( p95 = 0 stats_label = ( - r"max: {max:.3f}{units} $\bullet$ mean: {mean:.3f}{units} " - r"$\bullet$ $\sigma$: {sigma:.3f}" - ).format(max=maxv, mean=mean, units=units or "", sigma=stdv) + r'max: {max:.3f}{units} $\bullet$ mean: {mean:.3f}{units} ' + r'$\bullet$ $\sigma$: {sigma:.3f}' + ).format(max=maxv, mean=mean, units=units or '', sigma=stdv) ax_ts.annotate( stats_label, xy=(0.98, 0.7), - xycoords="axes fraction", + xycoords='axes fraction', xytext=(0, 0), - textcoords="offset points", - va="center", - ha="right", + textcoords='offset points', + va='center', + ha='right', color=color, size=4, bbox={ - "boxstyle": "round", - "fc": "w", - "ec": "none", - "color": "none", - "lw": 0, - "alpha": 0.8, + 'boxstyle': 'round', + 'fc': 'w', + 'ec': 'none', + 'color': 'none', + 'lw': 0, + 'alpha': 0.8, }, ) # Annotate percentile 95 - ax_ts.plot((0, ntsteps - 1), [p95] * 2, linewidth=0.1, color="lightgray") + ax_ts.plot((0, ntsteps - 1), [p95] * 2, linewidth=0.1, color='lightgray') ax_ts.annotate( - "%.2f" % p95, + f'{p95:.2f}', xy=(0, p95), xytext=(-1, 0), - textcoords="offset points", - va="center", - ha="right", - color="lightgray", + textcoords='offset points', + va='center', + ha='right', + color='lightgray', size=3, ) if cutoff is None: cutoff = [] - for i, thr in enumerate(cutoff): - ax_ts.plot((0, ntsteps - 1), [thr] * 2, linewidth=0.2, color="dimgray") + for thr in cutoff: + ax_ts.plot((0, ntsteps - 1), [thr] * 2, linewidth=0.2, color='dimgray') ax_ts.annotate( - "%.2f" % thr, + f'{thr:.2f}', xy=(0, thr), xytext=(-1, 0), - textcoords="offset points", - va="center", - ha="right", - color="dimgray", + textcoords='offset points', + va='center', + ha='right', + color='dimgray', size=3, ) @@ -766,7 +767,7 @@ def confoundplot( if gs_dist is not None: ax_dist = plt.subplot(gs_dist) sns.displot(tseries, vertical=True, ax=ax_dist) - ax_dist.set_xlabel("Timesteps") + ax_dist.set_xlabel('Timesteps') ax_dist.set_ylim(ax_ts.get_ylim()) ax_dist.set_yticklabels([]) @@ -812,28 +813,28 @@ def compcor_variance_plot( metadata = {} if metadata_sources is None: if len(metadata_files) == 1: - metadata_sources = ["CompCor"] + metadata_sources = ['CompCor'] else: - metadata_sources = ["Decomposition {:d}".format(i) for i in range(len(metadata_files))] - for file, source in zip(metadata_files, metadata_sources): - metadata[source] = pd.read_csv(str(file), sep=r"\s+") - metadata[source]["source"] = source + metadata_sources = [f'Decomposition {i:d}' for i in range(len(metadata_files))] + for file, source in 
+    for file, source in zip(metadata_files, metadata_sources, strict=False):
+        metadata[source] = pd.read_csv(str(file), sep=r'\s+')
+        metadata[source]['source'] = source
     metadata = pd.concat(list(metadata.values()))
 
     bbox_txt = {
-        "boxstyle": "round",
-        "fc": "white",
-        "ec": "none",
-        "color": "none",
-        "linewidth": 0,
-        "alpha": 0.8,
+        'boxstyle': 'round',
+        'fc': 'white',
+        'ec': 'none',
+        'color': 'none',
+        'linewidth': 0,
+        'alpha': 0.8,
     }
 
     decompositions = []
-    data_sources = list(metadata.groupby(["source", "mask"]).groups.keys())
+    data_sources = list(metadata.groupby(['source', 'mask']).groups.keys())
     for source, mask in data_sources:
         if not np.isnan(
-            metadata.loc[(metadata["source"] == source) & (metadata["mask"] == mask)][
-                "singular_value"
+            metadata.loc[(metadata['source'] == source) & (metadata['mask'] == mask)][
+                'singular_value'
             ].values[0]
         ):
             decompositions.append((source, mask))
@@ -846,62 +847,62 @@ def compcor_variance_plot(
         ax = [plt.axes()]
 
     for m, (source, mask) in enumerate(decompositions):
-        components = metadata[(metadata["mask"] == mask) & (metadata["source"] == source)]
+        components = metadata[(metadata['mask'] == mask) & (metadata['source'] == source)]
         if len([m for s, m in decompositions if s == source]) > 1:
-            title_mask = " ({} mask)".format(mask)
+            title_mask = f' ({mask} mask)'
         else:
-            title_mask = ""
-        fig_title = "{}{}".format(source, title_mask)
+            title_mask = ''
+        fig_title = f'{source}{title_mask}'
 
         ax[m].plot(
             np.arange(components.shape[0] + 1),
-            [0] + list(100 * components["cumulative_variance_explained"]),
-            color="purple",
+            [0] + list(100 * components['cumulative_variance_explained']),
+            color='purple',
             linewidth=2.5,
         )
         ax[m].grid(False)
-        ax[m].set_xlabel("number of components in model")
-        ax[m].set_ylabel("cumulative variance explained (%)")
+        ax[m].set_xlabel('number of components in model')
+        ax[m].set_ylabel('cumulative variance explained (%)')
         ax[m].set_title(fig_title)
 
         varexp = {}
         for i, thr in enumerate(varexp_thresh):
             varexp[thr] = (
-                np.atleast_1d(np.searchsorted(components["cumulative_variance_explained"], thr))
+                np.atleast_1d(np.searchsorted(components['cumulative_variance_explained'], thr))
                 + 1
             )
-            ax[m].axhline(y=100 * thr, color="lightgrey", linewidth=0.25)
-            ax[m].axvline(x=varexp[thr], color="C{}".format(i), linewidth=2, linestyle=":")
+            ax[m].axhline(y=100 * thr, color='lightgrey', linewidth=0.25)
+            ax[m].axvline(x=varexp[thr], color=f'C{i}', linewidth=2, linestyle=':')
             ax[m].text(
                 0,
                 100 * thr,
-                "{:.0f}".format(100 * thr),
-                fontsize="x-small",
+                f'{100 * thr:.0f}',
+                fontsize='x-small',
                 bbox=bbox_txt,
             )
             ax[m].text(
                 varexp[thr][0],
                 25,
-                "{} components explain\n{:.0f}% of variance".format(varexp[thr][0], 100 * thr),
+                f'{varexp[thr][0]} components explain\n{100 * thr:.0f}% of variance',
                 rotation=90,
-                horizontalalignment="center",
-                fontsize="xx-small",
+                horizontalalignment='center',
+                fontsize='xx-small',
                 bbox=bbox_txt,
             )
 
         ax[m].set_yticks([])
         ax[m].set_yticklabels([])
         for tick in ax[m].xaxis.get_major_ticks():
-            tick.label.set_fontsize("x-small")
-            tick.label.set_rotation("vertical")
-        for side in ["top", "right", "left"]:
-            ax[m].spines[side].set_color("none")
+            tick.label.set_fontsize('x-small')
+            tick.label.set_rotation('vertical')
+        for side in ['top', 'right', 'left']:
+            ax[m].spines[side].set_color('none')
             ax[m].spines[side].set_visible(False)
 
     if output_file is not None:
         figure = plt.gcf()
-        figure.savefig(output_file, bbox_inches="tight")
+        figure.savefig(output_file, bbox_inches='tight')
         plt.close(figure)
         figure = None
         return output_file
@@ -914,7 +915,7 @@ def confounds_correlation_plot(
     figure=None,
     max_dim=20,
     output_file=None,
-    reference="global_signal",
+    reference='global_signal',
 ):
     """
     Generate a bar plot with the correlation of confounds.
@@ -956,7 +957,7 @@ def confounds_correlation_plot(
     if columns:
         columns = set(columns)  # Drop duplicates
         columns.add(reference)  # Make sure the reference is included
-        confounds_data = confounds_data[[el for el in columns]]
+        confounds_data = confounds_data[list(columns)]
 
     confounds_data = confounds_data.loc[
         :, np.logical_not(np.isclose(confounds_data.var(skipna=True), 0))
@@ -964,9 +965,9 @@ def confounds_correlation_plot(
     corr = confounds_data.corr()
 
     gscorr = corr.copy()
-    gscorr["index"] = gscorr.index
+    gscorr['index'] = gscorr.index
     gscorr[reference] = np.abs(gscorr[reference])
-    gs_descending = gscorr.sort_values(by=reference, ascending=False)["index"]
+    gs_descending = gscorr.sort_values(by=reference, ascending=False)['index']
     n_vars = corr.shape[0]
     max_dim = min(n_vars, max_dim)
@@ -983,40 +984,40 @@ def confounds_correlation_plot(
     mask = np.zeros_like(corr, dtype=np.bool)
     mask[np.triu_indices_from(mask)] = True
 
-    sns.heatmap(corr, linewidths=0.5, cmap="coolwarm", center=0, square=True, ax=ax0)
-    ax0.tick_params(axis="both", which="both", width=0)
+    sns.heatmap(corr, linewidths=0.5, cmap='coolwarm', center=0, square=True, ax=ax0)
+    ax0.tick_params(axis='both', which='both', width=0)
 
     for tick in ax0.xaxis.get_major_ticks():
-        tick.label.set_fontsize("small")
+        tick.label.set_fontsize('small')
     for tick in ax0.yaxis.get_major_ticks():
-        tick.label.set_fontsize("small")
+        tick.label.set_fontsize('small')
 
     sns.barplot(
         data=gscorr,
-        x="index",
+        x='index',
         y=reference,
         ax=ax1,
         order=gs_descending,
-        palette="Reds_d",
+        palette='Reds_d',
         saturation=0.5,
     )
 
-    ax1.set_xlabel("Confound time series")
-    ax1.set_ylabel("Magnitude of correlation with {}".format(reference))
-    ax1.tick_params(axis="x", which="both", width=0)
-    ax1.tick_params(axis="y", which="both", width=5, length=5)
+    ax1.set_xlabel('Confound time series')
+    ax1.set_ylabel(f'Magnitude of correlation with {reference}')
+    ax1.tick_params(axis='x', which='both', width=0)
+    ax1.tick_params(axis='y', which='both', width=5, length=5)
 
     for tick in ax1.xaxis.get_major_ticks():
-        tick.label.set_fontsize("small")
-        tick.label.set_rotation("vertical")
+        tick.label.set_fontsize('small')
+        tick.label.set_rotation('vertical')
     for tick in ax1.yaxis.get_major_ticks():
-        tick.label.set_fontsize("small")
-    for side in ["top", "right", "left"]:
-        ax1.spines[side].set_color("none")
+        tick.label.set_fontsize('small')
+    for side in ['top', 'right', 'left']:
+        ax1.spines[side].set_color('none')
         ax1.spines[side].set_visible(False)
 
     if output_file is not None:
         figure = plt.gcf()
-        figure.savefig(output_file, bbox_inches="tight")
+        figure.savefig(output_file, bbox_inches='tight')
         plt.close(figure)
         figure = None
         return output_file
@@ -1025,8 +1026,8 @@ def confounds_correlation_plot(
 
 def cifti_surfaces_plot(
     in_cifti,
-    density="32k",
-    surface_type="inflated",
+    density='32k',
+    surface_type='inflated',
     clip_range=(0, None),
     output_file=None,
     **splt_kwargs,
@@ -1063,34 +1064,34 @@ def cifti_surfaces_plot(
     def get_surface_meshes(density, surface_type):
         import templateflow.api as tf
 
-        lh, rh = tf.get("fsLR", density=density, suffix=surface_type, extension=[".surf.gii"])
+        lh, rh = tf.get('fsLR', density=density, suffix=surface_type, extension=['.surf.gii'])
         return str(lh), str(rh)
 
-    if density != "32k":
NotImplementedError("Only 32k density is currently supported.") + if density != '32k': + raise NotImplementedError('Only 32k density is currently supported.') img = nb.cifti2.load(in_cifti) - if img.nifti_header.get_intent()[0] != "ConnDenseSeries": - raise TypeError(f"{in_cifti} is not a dense timeseries CIFTI file") + if img.nifti_header.get_intent()[0] != 'ConnDenseSeries': + raise TypeError(f'{in_cifti} is not a dense timeseries CIFTI file') geo = img.header.get_index_map(1) left_cortex, right_cortex = None, None for bm in geo.brain_models: - if bm.brain_structure == "CIFTI_STRUCTURE_CORTEX_LEFT": + if bm.brain_structure == 'CIFTI_STRUCTURE_CORTEX_LEFT': left_cortex = bm - elif bm.brain_structure == "CIFTI_STRUCTURE_CORTEX_RIGHT": + elif bm.brain_structure == 'CIFTI_STRUCTURE_CORTEX_RIGHT': right_cortex = bm if left_cortex is None or right_cortex is None: - raise RuntimeError("CIFTI is missing cortex information") + raise RuntimeError('CIFTI is missing cortex information') # calculate an average of the BOLD data, excluding the first 5 volumes # as potential nonsteady states data = img.dataobj[5:20].mean(axis=0) cortex_data = _concat_brain_struct_data((left_cortex, right_cortex), data) - if density == "32k" and len(cortex_data) != 59412: - raise ValueError("Cortex data is not in fsLR space") + if density == '32k' and len(cortex_data) != 59412: + raise ValueError('Cortex data is not in fsLR space') # medial wall needs to be added back in cortex_data = add_fslr_medial_wall(cortex_data) if clip_range: @@ -1101,13 +1102,13 @@ def get_surface_meshes(density, surface_type): # Build the figure lh_mesh, rh_mesh = get_surface_meshes(density, surface_type) p = splt.Plot( - surf_lh=lh_mesh, surf_rh=rh_mesh, layout=splt_kwargs.pop("layout", "row"), **splt_kwargs + surf_lh=lh_mesh, surf_rh=rh_mesh, layout=splt_kwargs.pop('layout', 'row'), **splt_kwargs ) p.add_layer({'left': lh_data, 'right': rh_data}, cmap='YlOrRd_r') figure = p.build() # figsize - leave default? 
 
     if output_file is not None:
-        figure.savefig(output_file, bbox_inches="tight")
+        figure.savefig(output_file, bbox_inches='tight')
         plt.close(figure)
         figure = None
         return output_file
@@ -1124,7 +1125,7 @@ def _get_tr(img):
         return img.header.matrix.get_index_map(0).series_step
     except AttributeError:
         return img.header.get_zooms()[-1]
-    raise RuntimeError("Could not extract TR - unknown data structure type")
+    raise RuntimeError('Could not extract TR - unknown data structure type')
 
 
 def _decimate_data(data, seg, size):
diff --git a/nibabies/workflows/anatomical/resampling.py b/nibabies/workflows/anatomical/resampling.py
index c687a050..16e2870c 100644
--- a/nibabies/workflows/anatomical/resampling.py
+++ b/nibabies/workflows/anatomical/resampling.py
@@ -13,11 +13,11 @@
 
 
 def init_anat_fsLR_resampling_wf(
-    grayord_density: ty.Literal["91k"], mcribs: bool, name="anat_fsLR_resampling_wf"
+    grayord_density: ty.Literal['91k'], mcribs: bool, name='anat_fsLR_resampling_wf'
 ) -> LiterateWorkflow:
     """Resample the surfaces into fsLR space"""
     workflow = LiterateWorkflow(name=name)
-    fslr_density = "32k" if grayord_density == "91k" else "59k"
+    fslr_density = '32k' if grayord_density == '91k' else '59k'
 
     workflow.__desc__ = """\
 The BOLD time-series were resampled onto the left/right-symmetric template
@@ -79,8 +79,8 @@ def init_anat_fsLR_resampling_wf(
 
     # Line 393 of FreeSurfer2CaretConvertAndRegisterNonlinear.sh
     downsampled_midthickness = pe.Node(
-        SurfaceResample(method="BARYCENTRIC"),
-        name="downsampled_midthickness",
+        SurfaceResample(method='BARYCENTRIC'),
+        name='downsampled_midthickness',
         mem_gb=DEFAULT_MEMORY_MIN_GB,
     )
@@ -96,11 +96,11 @@ def init_anat_fsLR_resampling_wf(
         # fmt:off
         workflow.connect([
             (inputnode, morph_grayords_wf, [
-                ("morphometrics", "inputnode.morphometrics"),
-                ("surfaces", "inputnode.surfaces"),
-                ("sphere_reg_fsLR", "inputnode.sphere_reg")]),
+                ('morphometrics', 'inputnode.morphometrics'),
+                ('surfaces', 'inputnode.surfaces'),
+                ('sphere_reg_fsLR', 'inputnode.sphere_reg')]),
             (joinnode, morph_grayords_wf, [
-                ("midthickness_fsLR", "inputnode.midthickness_fsLR")]),
+                ('midthickness_fsLR', 'inputnode.midthickness_fsLR')]),
         ])
         # fmt:on
     else:
@@ -109,23 +109,23 @@ def init_anat_fsLR_resampling_wf(
     # fmt:off
     workflow.connect([
         (inputnode, select_surfaces, [
-            ("surfaces", "surfaces"),
-            ("sphere_reg_fsLR", "spherical_registrations")]),
-        (itersource, select_surfaces, [("hemi", "hemi")]),
+            ('surfaces', 'surfaces'),
+            ('sphere_reg_fsLR', 'spherical_registrations')]),
+        (itersource, select_surfaces, [('hemi', 'hemi')]),
         # Downsample midthickness to fsLR density
         (select_surfaces, downsampled_midthickness, [
-            ("midthickness", "surface_in"),
-            ("sphere_reg", "current_sphere"),
-            ("template_sphere", "new_sphere")]),
-        (downsampled_midthickness, joinnode, [("surface_out", "midthickness_fsLR")]),
-        (joinnode, outputnode, [("midthickness_fsLR", "midthickness_fsLR")]),
+            ('midthickness', 'surface_in'),
+            ('sphere_reg', 'current_sphere'),
+            ('template_sphere', 'new_sphere')]),
+        (downsampled_midthickness, joinnode, [('surface_out', 'midthickness_fsLR')]),
+        (joinnode, outputnode, [('midthickness_fsLR', 'midthickness_fsLR')]),
         # resample morphometrics to fsLR 32k
         (inputnode, morph_grayords_wf, [
-            ("subject_id", "inputnode.subject_id"),
-            ("subjects_dir", "inputnode.subjects_dir")]),
+            ('subject_id', 'inputnode.subject_id'),
+            ('subjects_dir', 'inputnode.subjects_dir')]),
         (morph_grayords_wf, outputnode, [
-            ("outputnode.cifti_morph", "cifti_morph"),
"cifti_metadata")]), + ('outputnode.cifti_morph', 'cifti_morph'), + ('outputnode.cifti_metadata', 'cifti_metadata')]), ]) # fmt:on return workflow @@ -133,7 +133,7 @@ def init_anat_fsLR_resampling_wf( def init_mcribs_morph_grayords_wf( grayord_density: ty.Literal['91k'], # Only 91k supported ATM - name: str = "morph_grayords_wf", + name: str = 'morph_grayords_wf', ): """ Sample Grayordinates files onto the fsLR atlas. @@ -189,54 +189,54 @@ def init_mcribs_morph_grayords_wf( inputnode = pe.Node( niu.IdentityInterface( fields=[ - "subject_id", - "subjects_dir", - "surfaces", - "morphometrics", - "sphere_reg", - "midthickness_fsLR", + 'subject_id', + 'subjects_dir', + 'surfaces', + 'morphometrics', + 'sphere_reg', + 'midthickness_fsLR', ] ), - name="inputnode", + name='inputnode', ) outputnode = pe.Node( - niu.IdentityInterface(fields=["cifti_morph", "cifti_metadata"]), - name="outputnode", + niu.IdentityInterface(fields=['cifti_morph', 'cifti_metadata']), + name='outputnode', ) surfmorph_list = pe.Node( niu.Merge(3, ravel_inputs=True), - name="surfmorph_list", + name='surfmorph_list', run_without_submitting=True, ) subject_midthickness = pe.Node( niu.Function(function=_get_surf), - name="get_midthickness", + name='get_midthickness', run_without_submitting=True, ) - subject_midthickness.inputs.name = "midthickness" + subject_midthickness.inputs.name = 'midthickness' subject_midthickness.inputs.mult = 3 - template_midthickness = subject_midthickness.clone("get_new_midthickness") + template_midthickness = subject_midthickness.clone('get_new_midthickness') # Create Vertex Areas from midthickness surfaces - subject_va = pe.MapNode(SurfaceVertexAreas(), iterfield="in_file", name="subject_va") - template_va = subject_va.clone("template_va") + subject_va = pe.MapNode(SurfaceVertexAreas(), iterfield='in_file', name='subject_va') + template_va = subject_va.clone('template_va') # Setup Workbench command. LR ordering for hemi can be assumed, as it is imposed # by the iterfield of the MapNode in the surface sampling workflow above. 
     resample = pe.MapNode(
-        MetricResample(method="ADAP_BARY_AREA", area_metrics=True),
-        name="resample",
+        MetricResample(method='ADAP_BARY_AREA', area_metrics=True),
+        name='resample',
         iterfield=[
-            "in_file",
-            "out_file",
-            "new_sphere",
-            "new_area",
-            "current_sphere",
-            "current_area",
+            'in_file',
+            'out_file',
+            'new_sphere',
+            'new_area',
+            'current_sphere',
+            'current_area',
         ],
     )
@@ -246,10 +246,10 @@ def init_mcribs_morph_grayords_wf(
         str(atlases / 'tpl-dHCP_space-fsLR_hemi-R_den-32k_desc-week42_sphere.surf.gii'),
     ] * 3
     resample.inputs.out_file = [
-        f"space-fsLR_hemi-{h}_den-{grayord_density}_{morph}.shape.gii"
+        f'space-fsLR_hemi-{h}_den-{grayord_density}_{morph}.shape.gii'
         # Order: curv-L, curv-R, sulc-L, sulc-R, thickness-L, thickness-R
         for morph in ('curv', 'sulc', 'thickness')
-        for h in "LR"
+        for h in 'LR'
     ]

     gen_cifti = pe.MapNode(
@@ -257,29 +257,29 @@ def init_mcribs_morph_grayords_wf(
             grayordinates=grayord_density,
         ),
         iterfield=['scalar_name', 'scalar_surfs'],
-        name="gen_cifti",
+        name='gen_cifti',
     )
     gen_cifti.inputs.scalar_name = ['curv', 'sulc', 'thickness']

     # fmt: off
     workflow.connect([
-        (inputnode, resample, [(("sphere_reg", _triple), "current_sphere")]),
-        (inputnode, subject_midthickness, [("surfaces", "surfaces")]),
-        (inputnode, template_midthickness, [("midthickness_fsLR", "surfaces")]),
-        (subject_midthickness, subject_va, [("out", "in_file")]),
-        (template_midthickness, template_va, [("out", "in_file")]),
-        (subject_va, resample, [("out_file", "current_area")]),
-        (template_va, resample, [("out_file", "new_area")]),
+        (inputnode, resample, [(('sphere_reg', _triple), 'current_sphere')]),
+        (inputnode, subject_midthickness, [('surfaces', 'surfaces')]),
+        (inputnode, template_midthickness, [('midthickness_fsLR', 'surfaces')]),
+        (subject_midthickness, subject_va, [('out', 'in_file')]),
+        (template_midthickness, template_va, [('out', 'in_file')]),
+        (subject_va, resample, [('out_file', 'current_area')]),
+        (template_va, resample, [('out_file', 'new_area')]),
         (inputnode, surfmorph_list, [
-            (('morphometrics', _get_surf, "curv"), "in1"),
-            (('morphometrics', _get_surf, "sulc"), "in2"),
-            (('morphometrics', _get_surf, "thickness"), "in3"),
+            (('morphometrics', _get_surf, 'curv'), 'in1'),
+            (('morphometrics', _get_surf, 'sulc'), 'in2'),
+            (('morphometrics', _get_surf, 'thickness'), 'in3'),
         ]),
         (surfmorph_list, resample, [('out', 'in_file')]),
         (resample, gen_cifti, [
-            (("out_file", _collate), "scalar_surfs")]),
-        (gen_cifti, outputnode, [("out_file", "cifti_morph"),
-                                 ("out_metadata", "cifti_metadata")]),
+            (('out_file', _collate), 'scalar_surfs')]),
+        (gen_cifti, outputnode, [('out_file', 'cifti_morph'),
+                                 ('out_metadata', 'cifti_metadata')]),
     ])
     # fmt: on

     return workflow
@@ -288,7 +288,7 @@ def init_mcribs_morph_grayords_wf(
 def _get_surf(surfaces, name, mult=1):
     from smriprep.workflows.surfaces import _sorted_by_basename

-    "Select a specific surface by name, and optionally multiple it."
+    'Select a specific surface by name, and optionally multiply it.'
     if not surfaces:
         return surfaces
     return [surf for surf in _sorted_by_basename(surfaces) if name in surf] * mult
diff --git a/pyproject.toml b/pyproject.toml
index f3931c97..1c0861e9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -131,6 +131,7 @@ extend-exclude = [
   "wrapper/**",
   ".maint/**",
   "scripts/**",
+  "docs/**",
 ]

 [tool.ruff.lint]
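
Note on the dense-timeseries handling touched earlier in this diff: the plotting helper reads the CIFTI-2 geometry index map, picks out the left/right cortex BrainModels, averages a few non-initial volumes, and verifies the 59412-vertex fsLR 32k cortex before re-adding the medial wall. A minimal sketch of that pattern against nibabel's public CIFTI-2 API follows; the function name and error messages are illustrative only, and nibabies performs the equivalent steps with its own helpers (_concat_brain_struct_data, add_fslr_medial_wall).

    # Sketch only: mirrors the steps in the diff; names here are illustrative
    # and not part of the nibabies API.
    import nibabel as nb
    import numpy as np

    def extract_fslr_cortex(in_cifti):
        """Average a dtseries CIFTI over time and return its cortex samples."""
        img = nb.cifti2.load(in_cifti)
        if img.nifti_header.get_intent()[0] != 'ConnDenseSeries':
            raise TypeError(f'{in_cifti} is not a dense timeseries CIFTI file')
        # Index map 1 describes the columns (grayordinates); each BrainModel
        # records the offset and count of its structure in the data array.
        geo = img.header.get_index_map(1)
        structs = {
            bm.brain_structure: (bm.index_offset, bm.index_count)
            for bm in geo.brain_models
        }
        # Skip the first volumes as potential nonsteady states, as above.
        data = np.asarray(img.dataobj[5:20]).mean(axis=0)
        parts = []
        for struct in ('CIFTI_STRUCTURE_CORTEX_LEFT', 'CIFTI_STRUCTURE_CORTEX_RIGHT'):
            if struct not in structs:
                raise RuntimeError('CIFTI is missing cortex information')
            offset, count = structs[struct]
            parts.append(data[offset:offset + count])
        cortex_data = np.concatenate(parts)
        # fsLR 32k: 59412 cortical vertices with the medial wall excluded;
        # plotting then requires reinserting medial-wall vertices.
        if len(cortex_data) != 59412:
            raise ValueError('Cortex data is not in fsLR space')
        return cortex_data

Keying the extraction on each BrainModel's index_offset/index_count, rather than assuming hemisphere order, keeps the sketch robust to CIFTI files whose BrainModels are not stored left-then-right.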