diff --git a/.gitattributes b/.gitattributes index f7e1fcc..034837f 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1 +1 @@ -bids/reports/_version.py export-subst +bids/ext/reports/_version.py export-subst diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..7fa2757 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,14 @@ +--- +# Documentation +# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file +version: 2 +updates: +- package-ecosystem: github-actions + directory: / + schedule: + interval: monthly + +- package-ecosystem: gitsubmodule + directory: / + schedule: + interval: monthly diff --git a/.github/workflows/package.yml b/.github/workflows/package.yml deleted file mode 100644 index d4e77ff..0000000 --- a/.github/workflows/package.yml +++ /dev/null @@ -1,58 +0,0 @@ -name: Packaging - -on: - push: - branches: - - main - - maint/* - - rel/* - tags: - - '*' - -defaults: - run: - shell: bash - -jobs: - package: - # Build packages and upload - runs-on: ${{ matrix.os }} - strategy: - matrix: - include: - - os: ubuntu-latest - python-version: 3.8 - steps: - - uses: actions/checkout@v2 - with: - submodules: recursive - fetch-depth: 0 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Display Python version - run: python -c "import sys; print(sys.version)" - - name: Create virtual environment - run: tools/ci/create_venv.sh - - name: Build sdist - run: tools/ci/build_archive.sh - env: - INSTALL_TYPE: sdist - - name: Build wheel - run: tools/ci/build_archive.sh - env: - INSTALL_TYPE: wheel - - name: Test PyPI upload - uses: pypa/gh-action-pypi-publish@master - with: - user: __token__ - password: ${{ secrets.TEST_PYPI_API_TOKEN }} - repository_url: https://test.pypi.org/legacy/ - skip_existing: true - - name: Upload to PyPI (on tags) - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') - uses: pypa/gh-action-pypi-publish@master - with: - user: __token__ - password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.github/workflows/pypi-publish.yml b/.github/workflows/pypi-publish.yml new file mode 100644 index 0000000..17d2dc1 --- /dev/null +++ b/.github/workflows/pypi-publish.yml @@ -0,0 +1,31 @@ +# This workflow will upload a Python Package using Twine when a release is created +# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries + +name: Upload Python Package + +on: + release: + types: [published] + +jobs: + deploy: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.7' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install setuptools wheel twine + - name: Build and publish + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} + run: | + python setup.py sdist bdist_wheel + twine upload dist/* diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml new file mode 100644 index 0000000..9275a39 --- /dev/null +++ b/.github/workflows/testing.yml @@ -0,0 +1,61 @@ +name: "Run Tests" + +on: + push: + branches: + - "main" + pull_request: + branches: + - '*' + +concurrency: + group: environment-${{ github.ref }} + cancel-in-progress: true + +jobs: + # Determine if tests should be run 
based on commit message. + check_skip: + runs-on: ubuntu-latest + outputs: + skip: ${{ steps.result_step.outputs.ci-skip }} + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - id: result_step + uses: mstachniuk/ci-skip@master + with: + commit-filter: '[skip ci];[ci skip];[skip github]' + commit-filter-separator: ';' + + run_tests: + needs: check_skip + if: ${{ needs.check_skip.outputs.skip == 'false' }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: ["ubuntu-latest", "macos-latest"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + name: ${{ matrix.os }} with Python ${{ matrix.python-version }} + defaults: + run: + shell: bash + steps: + - uses: actions/checkout@v2 + - name: 'Set up python' + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: 'Display Python version' + shell: bash {0} + run: python -c "import sys; print(sys.version)" + - name: 'Install pybids-reports' + shell: bash {0} + run: pip install -e .[tests] + - name: 'Run tests' + shell: bash {0} + run: python -m pytest --pyargs bids/ext/reports --cov=bids/ext/reports + - name: 'Upload coverage to CodeCov' + uses: codecov/codecov-action@v1 + if: success() diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..dd07984 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,24 @@ +--- +repos: + +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + + +- repo: https://github.com/psf/black + rev: 22.12.0 + hooks: + - id: black + +- repo: https://github.com/pycqa/flake8 + rev: 6.0.0 + hooks: + - id: flake8 + +# - repo: https://github.com/pre-commit/mirrors-mypy +# rev: v0.991 +# hooks: +# - id: mypy diff --git a/MANIFEST.in b/MANIFEST.in index e0418dd..d453a12 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,4 @@ include versioneer.py -include bids/reports/_version.py -recursive-include bids/reports/config *.json -recursive-include bids/reports/tests/data * +include bids/ext/reports/_version.py +recursive-include bids/ext/reports/config *.json +recursive-include bids/ext/reports/tests/data * diff --git a/bids/ext/__init__.py b/bids/ext/__init__.py new file mode 100644 index 0000000..8db66d3 --- /dev/null +++ b/bids/ext/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/bids/reports/README.md b/bids/ext/reports/README.md similarity index 100% rename from bids/reports/README.md rename to bids/ext/reports/README.md diff --git a/bids/reports/__init__.py b/bids/ext/reports/__init__.py similarity index 75% rename from bids/reports/__init__.py rename to bids/ext/reports/__init__.py index 17349ae..63d5e21 100644 --- a/bids/reports/__init__.py +++ b/bids/ext/reports/__init__.py @@ -1,9 +1,9 @@ """pybids-reports: A tool for building methods sections for BIDS datasets.""" -from . import _version +from . 
import _version, parameters, parsing, report from .due import Doi, due from .report import BIDSReport -__all__ = ["BIDSReport"] +__all__ = ["BIDSReport", "parameters", "parsing", "report"] due.cite( Doi("10.1038/sdata.2016.44"), diff --git a/bids/reports/_version.py b/bids/ext/reports/_version.py similarity index 100% rename from bids/reports/_version.py rename to bids/ext/reports/_version.py diff --git a/bids/reports/config/converters.json b/bids/ext/reports/config/converters.json similarity index 100% rename from bids/reports/config/converters.json rename to bids/ext/reports/config/converters.json diff --git a/bids/reports/due.py b/bids/ext/reports/due.py similarity index 94% rename from bids/reports/due.py rename to bids/ext/reports/due.py index a0517a1..46b326d 100644 --- a/bids/reports/due.py +++ b/bids/ext/reports/due.py @@ -63,9 +63,7 @@ def _donothing_func(*args, **kwargs): if type(e).__name__ not in ("ImportError", "ModuleNotFoundError"): import logging - logging.getLogger("duecredit").error( - "Failed to import duecredit due to %s" % str(e) - ) + logging.getLogger("duecredit").error("Failed to import duecredit due to %s" % str(e)) # Initiate due stub due = InactiveDueCreditCollector() BibTeX = Doi = Url = Text = _donothing_func diff --git a/bids/reports/parameters.py b/bids/ext/reports/parameters.py similarity index 91% rename from bids/reports/parameters.py rename to bids/ext/reports/parameters.py index 1644a6f..d1758aa 100644 --- a/bids/reports/parameters.py +++ b/bids/ext/reports/parameters.py @@ -7,15 +7,14 @@ import nibabel as nib from num2words import num2words -from bids.reports.utils import list_to_str, num_to_str, remove_duplicates +from .utils import list_to_str, num_to_str, remove_duplicates logging.basicConfig() -LOGGER = logging.getLogger("reports.parsing") +LOGGER = logging.getLogger("pybids-reports.parameters") def describe_slice_timing(img, metadata: dict) -> str: """Generate description of slice timing from metadata.""" - if "SliceTiming" in metadata: slice_order = " in {0} order".format(get_slice_info(metadata["SliceTiming"])) n_slices = len(metadata["SliceTiming"]) @@ -23,9 +22,7 @@ def describe_slice_timing(img, metadata: dict) -> str: slice_order = "" n_slices = img.shape[2] - return "{n_slices} slices{slice_order}".format( - n_slices=n_slices, slice_order=slice_order - ) + return "{n_slices} slices{slice_order}".format(n_slices=n_slices, slice_order=slice_order) def describe_repetition_time(metadata: dict): @@ -63,9 +60,9 @@ def describe_duration(files) -> str: n_vols = n_vols[0] dur_str = describe_func_duration(n_vols, tr) - dur_str = ( - "Run duration was {0} minutes, during which {1} volumes were acquired." - ).format(dur_str, n_vols) + dur_str = ("Run duration was {0} minutes, during which {1} volumes were acquired.").format( + dur_str, n_vols + ) return dur_str @@ -93,7 +90,6 @@ def describe_echo_times(files): me_str : str Whether the data are multi-echo or single-echo. """ - echo_times = [f.get_metadata()["EchoTime"] for f in files] echo_times = sorted(list(set(echo_times))) if len(echo_times) > 1: @@ -108,7 +104,7 @@ def describe_echo_times(files): def describe_echo_times_fmap(files): - """Generate description of echo times from metadata field for fmaps + """Generate description of echo times from metadata field for fmaps. 
Parameters ---------- @@ -160,9 +156,7 @@ def describe_image_size(img): def describe_inplane_accel(metadata: dict) -> str: """Generate description of in-plane acceleration factor, if any.""" return ( - "in-plane acceleration factor={}".format( - metadata["ParallelReductionFactorInPlane"] - ) + "in-plane acceleration factor={}".format(metadata["ParallelReductionFactorInPlane"]) if metadata.get("ParallelReductionFactorInPlane", 1) > 1 else "" ) @@ -184,9 +178,7 @@ def describe_bvals(bval_file) -> str: with open(bval_file, "r") as file_object: raw_bvals = file_object.read().splitlines() # Flatten list of space-separated values - bvals = [ - item for sublist in [line.split(" ") for line in raw_bvals] for item in sublist - ] + bvals = [item for sublist in [line.split(" ") for line in raw_bvals] for item in sublist] bvals = sorted([int(v) for v in set(bvals)]) bvals = [num_to_str(v) for v in bvals] bval_str = list_to_str(bvals) @@ -210,9 +202,7 @@ def describe_intendedfor_targets(metadata: dict, layout) -> str: for scan in scans: fn = op.basename(scan) - if_file = [ - f for f in layout.get(extension=[".nii", ".nii.gz"]) if fn in f.path - ][0] + if_file = [f for f in layout.get(extension=[".nii", ".nii.gz"]) if fn in f.path][0] run_num = int(if_file.run) target_type = if_file.entities["suffix"].upper() @@ -229,9 +219,7 @@ def describe_intendedfor_targets(metadata: dict, layout) -> str: run_dict[target_type_str].append(run_num) for scan in run_dict.keys(): - run_dict[scan] = [ - num2words(r, ordinal=True) for r in sorted(run_dict[scan]) - ] + run_dict[scan] = [num2words(r, ordinal=True) for r in sorted(run_dict[scan])] out_list = [] @@ -320,8 +308,7 @@ def describe_sequence(metadata: dict, config: dict): seqs += " ({0})".format(os.path.sep.join(seq_abbrs)) variants = [ - config["seqvar"].get(var, var) - for var in metadata.get("SequenceVariant", "").split("_") + config["seqvar"].get(var, var) for var in metadata.get("SequenceVariant", "").split("_") ] variants = list_to_str(variants) diff --git a/bids/reports/parsing.py b/bids/ext/reports/parsing.py similarity index 93% rename from bids/reports/parsing.py rename to bids/ext/reports/parsing.py index 658be84..422f1d3 100644 --- a/bids/reports/parsing.py +++ b/bids/ext/reports/parsing.py @@ -5,11 +5,11 @@ import nibabel as nib from num2words import num2words -from bids.reports import parameters -from bids.reports.utils import collect_associated_files +from . import parameters +from .utils import collect_associated_files logging.basicConfig() -LOGGER = logging.getLogger("reports.parsing") +LOGGER = logging.getLogger("pybids-reports.parsing") def func_info(layout, files, config): @@ -266,14 +266,11 @@ def fmap_info(layout, files, config): for_str = parameters.describe_intendedfor_targets(metadata, layout) - desc = ( - "A {variants} {seqs} field map ({parameters_str}) was " - "acquired{for_str}.".format( - variants=variants, - seqs=seqs, - for_str=for_str, - parameters_str=parameters_str, - ) + desc = "A {variants} {seqs} field map ({parameters_str}) was acquired{for_str}.".format( + variants=variants, + seqs=seqs, + for_str=for_str, + parameters_str=parameters_str, ) return desc @@ -293,13 +290,10 @@ def general_acquisition_info(metadata): out_str : :obj:`str` Output string with scanner information. 
""" - out_str = ( - "MR data were acquired using a {tesla}-Tesla {manu} {model} MRI " - "scanner.".format( - tesla=metadata.get("MagneticFieldStrength", "UNKNOWN"), - manu=metadata.get("Manufacturer", "MANUFACTURER"), - model=metadata.get("ManufacturersModelName", "MODEL"), - ) + out_str = "MR data were acquired using a {tesla}-Tesla {manu} {model} MRI scanner.".format( + tesla=metadata.get("MagneticFieldStrength", "UNKNOWN"), + manu=metadata.get("Manufacturer", "MANUFACTURER"), + model=metadata.get("ManufacturersModelName", "MODEL"), ) return out_str @@ -318,7 +312,7 @@ def final_paragraph(metadata): Output string with scanner information. """ # Imported here to avoid a circular import - from bids.reports import __version__ + from . import __version__ if "ConversionSoftware" in metadata.keys(): soft = metadata["ConversionSoftware"] @@ -367,9 +361,9 @@ def parse_files(layout, data_files, sub, config): if group[0].entities["datatype"] == "func": group_description = func_info(layout, group, config) - elif (group[0].entities["datatype"] == "anat") and group[0].entities[ - "suffix" - ].endswith("w"): + elif (group[0].entities["datatype"] == "anat") and group[0].entities["suffix"].endswith( + "w" + ): group_description = anat_info(layout, group, config) elif group[0].entities["datatype"] == "dwi": diff --git a/bids/reports/report.py b/bids/ext/reports/report.py similarity index 92% rename from bids/reports/report.py rename to bids/ext/reports/report.py index 1ea896e..49dc401 100644 --- a/bids/reports/report.py +++ b/bids/ext/reports/report.py @@ -1,9 +1,13 @@ """Generate publication-quality data acquisition methods section from BIDS dataset.""" import json +import logging import os.path as op from collections import Counter -from bids.reports import parsing, utils +from . import parsing, utils + +logging.basicConfig() +LOGGER = logging.getLogger("pybids-reports.report") class BIDSReport(object): @@ -51,8 +55,7 @@ def __init__(self, layout, config=None): if not isinstance(config, dict): raise ValueError( - "Input config must be None, dict, or path to " - "json file containing dict." + "Input config must be None, dict, or path to json file containing dict." ) self.config = config @@ -99,9 +102,7 @@ def generate_from_files(self, files): subject_files = [f for f in files if f.get_entities().get("subject") == sub] description_list = [] for ses in sessions: - data_files = [ - f for f in subject_files if f.get_entities().get("session") == ses - ] + data_files = [f for f in subject_files if f.get_entities().get("session") == ses] if data_files: ses_description = parsing.parse_files( @@ -110,9 +111,7 @@ def generate_from_files(self, files): sub, self.config, ) - ses_description[0] = ( - "In session {0}, ".format(ses) + ses_description[0] - ) + ses_description[0] = "In session {0}, ".format(ses) + ses_description[0] description_list += ses_description metadata = self.layout.get_metadata(data_files[0].path) else: @@ -121,9 +120,7 @@ def generate_from_files(self, files): # Assume all data were converted the same way and use the last nifti # file's json for conversion information. if "metadata" not in vars(): - raise Exception( - "No valid jsons found. Cannot generate final paragraph." - ) + raise Exception("No valid jsons found. 
Cannot generate final paragraph.") description = "\n\t".join(description_list) description += "\n\n{0}".format(parsing.final_paragraph(metadata)) @@ -201,9 +198,7 @@ def _report_subject(self, subject, **kwargs): """ description_list = [] # Remove session from kwargs if provided, else set session as all available - sessions = kwargs.pop( - "session", self.layout.get_sessions(subject=subject, **kwargs) - ) + sessions = kwargs.pop("session", self.layout.get_sessions(subject=subject, **kwargs)) if not sessions: sessions = [None] elif not isinstance(sessions, list): diff --git a/bids/reports/tests/__init__.py b/bids/ext/reports/tests/__init__.py similarity index 100% rename from bids/reports/tests/__init__.py rename to bids/ext/reports/tests/__init__.py diff --git a/bids/ext/reports/tests/conftest.py b/bids/ext/reports/tests/conftest.py new file mode 100644 index 0000000..ff06b94 --- /dev/null +++ b/bids/ext/reports/tests/conftest.py @@ -0,0 +1,69 @@ +"""Fixtures for tests.""" +import json +from os.path import abspath, join + +import nibabel as nib +import pytest +from bids.tests import get_test_data_path + +from bids import BIDSLayout + + +@pytest.fixture +def testlayout(): + """A BIDSLayout for testing.""" + data_dir = join(get_test_data_path(), "synthetic") + return BIDSLayout(data_dir) + + +@pytest.fixture +def testimg(testlayout): + """A Nifti1Image for testing.""" + func_files = testlayout.get( + subject="01", + session="01", + task="nback", + run="01", + extension=[".nii.gz"], + ) + return nib.load(func_files[0].path) + + +@pytest.fixture +def testdiffimg(testlayout): + """A Nifti1Image for testing.""" + dwi_files = testlayout.get( + subject="01", + session="01", + datatype="dwi", + extension=[".nii.gz"], + ) + return nib.load(dwi_files[0].path) + + +@pytest.fixture +def testconfig(): + """The standard config file for testing.""" + config_file = abspath(join(get_test_data_path(), "../../reports/config/converters.json")) + with open(config_file, "r") as fobj: + config = json.load(fobj) + return config + + +@pytest.fixture +def testmeta(): + """A small metadata dictionary for testing.""" + return { + "RepetitionTime": 2.0, + "MultibandAccelerationFactor": 2, + "ParallelReductionFactorInPlane": 2, + "FlipAngle": 90, + "PhaseEncodingDirection": "i", + "SliceTiming": [0, 1, 2, 3], + } + + +@pytest.fixture +def testmeta_light(): + """An even smaller metadata dictionary for testing.""" + return {"RepetitionTime": 2.0} diff --git a/bids/reports/tests/test_parameters.py b/bids/ext/reports/tests/test_parameters.py similarity index 82% rename from bids/reports/tests/test_parameters.py rename to bids/ext/reports/tests/test_parameters.py index bc43878..16e9160 100644 --- a/bids/reports/tests/test_parameters.py +++ b/bids/ext/reports/tests/test_parameters.py @@ -1,71 +1,7 @@ +"""Tests for bids.reports.parameters module.""" import pytest -import json -import nibabel as nib -from os.path import abspath, join - -from bids.layout import BIDSLayout -from bids.reports import parameters -from bids.tests import get_test_data_path - - -@pytest.fixture -def testlayout(): - """A BIDSLayout for testing.""" - data_dir = join(get_test_data_path(), "synthetic") - return BIDSLayout(data_dir) - - -@pytest.fixture -def testimg(testlayout): - - func_files = testlayout.get( - subject="01", - session="01", - task="nback", - run="01", - extension=[".nii.gz"], - ) - return nib.load(func_files[0].path) - - -@pytest.fixture -def testdiffimg(testlayout): - - dwi_files = testlayout.get( - subject="01", - session="01", - 
datatype="dwi", - extension=[".nii.gz"], - ) - return nib.load(dwi_files[0].path) - - -@pytest.fixture -def testconfig(): - config_file = abspath( - join(get_test_data_path(), "../../reports/config/converters.json") - ) - with open(config_file, "r") as fobj: - config = json.load(fobj) - return config - - -@pytest.fixture -def testmeta(): - return { - "RepetitionTime": 2.0, - "MultibandAccelerationFactor": 2, - "ParallelReductionFactorInPlane": 2, - "FlipAngle": 90, - "PhaseEncodingDirection": "i", - "SliceTiming": [0, 1, 2, 3], - } - - -@pytest.fixture -def testmeta_light(): - return {"RepetitionTime": 2.0} +from ext.reports import parameters @pytest.mark.parametrize( diff --git a/bids/reports/tests/test_parsing.py b/bids/ext/reports/tests/test_parsing.py similarity index 80% rename from bids/reports/tests/test_parsing.py rename to bids/ext/reports/tests/test_parsing.py index 61fdab3..2f74354 100644 --- a/bids/reports/tests/test_parsing.py +++ b/bids/ext/reports/tests/test_parsing.py @@ -1,34 +1,5 @@ """Tests for bids.reports.parsing.""" -import json -from os.path import abspath, join - -import pytest -from bids.layout import BIDSLayout -from bids.reports import parsing -from bids.tests import get_test_data_path - - -@pytest.fixture -def testlayout(): - """A BIDSLayout for testing.""" - data_dir = join(get_test_data_path(), "synthetic") - return BIDSLayout(data_dir) - - -@pytest.fixture -def testconfig(): - config_file = abspath( - join(get_test_data_path(), "../../reports/config/converters.json") - ) - with open(config_file, "r") as fobj: - config = json.load(fobj) - return config - - -@pytest.fixture -def testmeta(): - metadata = {"RepetitionTime": 2.0} - return metadata +from ext.reports import parsing def test_anat_info_smoke(testlayout, testconfig): diff --git a/bids/reports/tests/test_report.py b/bids/ext/reports/tests/test_report.py similarity index 68% rename from bids/reports/tests/test_report.py rename to bids/ext/reports/tests/test_report.py index df8943f..a00723a 100644 --- a/bids/reports/tests/test_report.py +++ b/bids/ext/reports/tests/test_report.py @@ -3,17 +3,9 @@ from collections import Counter from os.path import abspath, join -import pytest -from bids.layout import BIDSLayout -from bids.reports import BIDSReport from bids.tests import get_test_data_path - -@pytest.fixture -def testlayout(): - """A BIDSLayout for testing.""" - data_dir = join(get_test_data_path(), "synthetic") - return BIDSLayout(data_dir) +from ext.reports import BIDSReport def test_report_init(testlayout): @@ -30,9 +22,7 @@ def test_report_gen(testlayout): def test_report_gen_from_files(testlayout): - """Report generation from file list should return a counter of unique - descriptions in the dataset. - """ + """Test that a report from a file list returns a Counter of unique descs in the dataset.""" report = BIDSReport(testlayout) files = testlayout.get(extension=[".nii.gz", ".nii"]) descriptions = report.generate_from_files(files) @@ -40,8 +30,9 @@ def test_report_gen_from_files(testlayout): def test_report_subject(testlayout): - """Generating a report for one subject should only return one subject's - description (i.e., one pattern with a count of one). + """Test that a report for one subject return one subject's description. + + This should be one pattern with a counter of one in the Counter. 
""" report = BIDSReport(testlayout) descriptions = report.generate(subject="01") @@ -49,9 +40,7 @@ def test_report_subject(testlayout): def test_report_session(testlayout): - """Generating a report for one session should mean that no other sessions - appear in any of the unique descriptions. - """ + """Test that a report for one session doesn't show other sessions in any descriptions.""" report = BIDSReport(testlayout) descriptions = report.generate(session="01") assert "session 02" not in " ".join(descriptions.keys()) @@ -59,9 +48,7 @@ def test_report_session(testlayout): def test_report_file_config(testlayout): """Report initialization should take in a config file and use that if provided.""" - config_file = abspath( - join(get_test_data_path(), "../../reports/config/converters.json") - ) + config_file = abspath(join(get_test_data_path(), "../../reports/config/converters.json")) report = BIDSReport(testlayout, config=config_file) descriptions = report.generate() assert isinstance(descriptions, Counter) @@ -69,9 +56,7 @@ def test_report_file_config(testlayout): def test_report_dict_config(testlayout): """Report initialization should take in a config dict and use that if provided.""" - config_file = abspath( - join(get_test_data_path(), "../../reports/config/converters.json") - ) + config_file = abspath(join(get_test_data_path(), "../../reports/config/converters.json")) with open(config_file, "r") as fobj: config = json.load(fobj) report = BIDSReport(testlayout, config=config) diff --git a/bids/reports/utils.py b/bids/ext/reports/utils.py similarity index 90% rename from bids/reports/utils.py rename to bids/ext/reports/utils.py index ba6060d..7fa7dc5 100644 --- a/bids/reports/utils.py +++ b/bids/ext/reports/utils.py @@ -6,7 +6,7 @@ import logging logging.basicConfig() -LOGGER = logging.getLogger("reports.utils") +LOGGER = logging.getLogger("pybids-reports.utils") def collect_associated_files(layout, files, extra_entities=()): @@ -32,9 +32,7 @@ def collect_associated_files(layout, files, extra_entities=()): collected_files = [] for f in files: - if len(collected_files) and any( - f in filegroup for filegroup in collected_files - ): + if len(collected_files) and any(f in filegroup for filegroup in collected_files): continue ents = f.get_entities() ents = {k: v for k, v in ents.items() if k not in MULTICONTRAST_ENTITIES} @@ -54,10 +52,7 @@ def collect_associated_files(layout, files, extra_entities=()): def reminder(): """Remind users about things they need to do after generating the report.""" - return ( - "Remember to double-check everything and to replace with " - "a degree symbol." - ) + return "Remember to double-check everything and to replace with a degree symbol." 
def remove_duplicates(seq): diff --git a/pyproject.toml b/pyproject.toml index d93cdfa..e62edb4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,8 +18,8 @@ exclude = ''' | dist )/ | versioneer.py - | bids/reports/_version.py - | bids/reports/due.py + | bids/ext/reports/_version.py + | bids/ext/reports/due.py ) ''' diff --git a/setup.cfg b/setup.cfg index f423a1e..72ee7eb 100644 --- a/setup.cfg +++ b/setup.cfg @@ -20,14 +20,20 @@ classifiers = Topic :: Scientific/Engineering [options] -python_requires = >=3.6 +python_requires = >=3.7 install_requires = + pybids >= 0.15 nibabel num2words - pybids -packages = find: + +packages = find_namespace: include_package_data = False +[options.packages.find] +include = + bids.ext.reports + bids.ext.reports.* + [options.extras_require] doc = sphinx >=2.2 @@ -36,16 +42,16 @@ doc = docs = %(doc)s test = + codecov flake8-black flake8-docstrings flake8-isort pytest >=3.3 + pytest-cov tests = %(test)s ci_tests = %(test)s - codecov - pytest-cov pytest-xdist dev = %(doc)s @@ -58,8 +64,8 @@ dev = [versioneer] VCS = git style = pep440 -versionfile_source = bids/reports/_version.py -versionfile_build = bids/reports/_version.py +versionfile_source = bids/ext/reports/_version.py +versionfile_build = bids/ext/reports/_version.py tag_prefix = parentdir_prefix = diff --git a/versioneer.py b/versioneer.py index e283ecb..11c3fa7 100644 --- a/versioneer.py +++ b/versioneer.py @@ -1370,13 +1370,9 @@ def versions_from_file(filename): contents = f.read() except EnvironmentError: raise NotThisMethod("unable to read _version.py") - mo = re.search( - r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S - ) + mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: - mo = re.search( - r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S - ) + mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: raise NotThisMethod("no version_json in _version.py") return json.loads(mo.group(1)) @@ -1648,9 +1644,7 @@ def get_versions(verbose=False): handlers = HANDLERS.get(cfg.VCS) assert handlers, "unrecognized VCS '%s'" % cfg.VCS verbose = verbose or cfg.verbose - assert ( - cfg.versionfile_source is not None - ), "please set versioneer.versionfile_source" + assert cfg.versionfile_source is not None, "please set versioneer.versionfile_source" assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" versionfile_abs = os.path.join(root, cfg.versionfile_source) @@ -1924,9 +1918,7 @@ def make_release_tree(self, base_dir, files): # updated value target_versionfile = os.path.join(base_dir, cfg.versionfile_source) print("UPDATING %s" % target_versionfile) - write_to_version_file( - target_versionfile, self._versioneer_generated_versions - ) + write_to_version_file(target_versionfile, self._versioneer_generated_versions) cmds["sdist"] = cmd_sdist @@ -2061,10 +2053,7 @@ def do_setup(): else: print(" 'versioneer.py' already in MANIFEST.in") if cfg.versionfile_source not in simple_includes: - print( - " appending versionfile_source ('%s') to MANIFEST.in" - % cfg.versionfile_source - ) + print(" appending versionfile_source ('%s') to MANIFEST.in" % cfg.versionfile_source) with open(manifest_in, "a") as f: f.write("include %s\n" % cfg.versionfile_source) else:
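For reference, after this restructuring the reporting tools are imported from the bids.ext namespace package rather than bids.reports. A minimal usage sketch, assuming an already-converted BIDS dataset (the path below is a placeholder; the synthetic test dataset bundled with pybids works the same way, as in the tests above):

    from bids import BIDSLayout
    from bids.ext.reports import BIDSReport

    # Index a BIDS dataset (placeholder path).
    layout = BIDSLayout("/path/to/bids/dataset")

    # BIDSReport.generate() returns a Counter mapping each unique
    # methods-section description to the number of subjects it covers.
    report = BIDSReport(layout)
    descriptions = report.generate()
    print(descriptions.most_common())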