diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index ce49863..0000000 --- a/.coveragerc +++ /dev/null @@ -1,5 +0,0 @@ -[run] -source = - morphapi - */morphapi - */site-packages/morphapi diff --git a/.github/workflows/build_and_publish.yml b/.github/workflows/build_and_publish.yml deleted file mode 100644 index 7ce725d..0000000 --- a/.github/workflows/build_and_publish.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: Publish sdist tarball to PyPi - -on: - push: - tags: - - '[0-9]+.[0-9]+.[0-9]+' - -jobs: - build-n-publish: - name: Build and publish on PyPI - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.8 - uses: actions/setup-python@v4 - with: - python-version: 3.8 - - name: Install dependencies - run: | - python -m pip install --upgrade wheel - - name: Build a source tarball and a wheel - run: | - python setup.py sdist bdist_wheel - - name: Publish distribution package to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 - with: - user: __token__ - password: ${{ secrets.PYPI_API_TOKEN }} - verbose: true diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml deleted file mode 100644 index ea66f73..0000000 --- a/.github/workflows/run-tests.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: Run tests - -on: - pull_request: - push: - branches: - - master - -jobs: - tests: - - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip setuptools - pip install tox-gh-actions - - name: Run tox - run: | - tox diff --git a/.github/workflows/test_and_deploy.yml b/.github/workflows/test_and_deploy.yml new file mode 100644 index 0000000..241341b --- /dev/null +++ b/.github/workflows/test_and_deploy.yml @@ -0,0 +1,66 @@ +name: tests + +on: + push: + branches: + - '*' + tags: + - '*' + pull_request: + +jobs: + linting: + runs-on: ubuntu-latest + steps: + - uses: neuroinformatics-unit/actions/lint@v2 + + manifest: + name: Check Manifest + runs-on: ubuntu-latest + steps: + - uses: neuroinformatics-unit/actions/check_manifest@v2 + + test: + needs: [linting, manifest] + name: ${{ matrix.os }} py${{ matrix.python-version }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + # Run all supported Python versions on linux + python-version: ["3.9", "3.10", "3.11"] + os: [ubuntu-latest] + # Include one windows and macos run + include: + - os: macos-latest + python-version: "3.10" + - os: windows-latest + python-version: "3.10" + + steps: + # Run tests + - uses: neuroinformatics-unit/actions/test@v2 + with: + python-version: ${{ matrix.python-version }} + + build_sdist_wheels: + name: Build source distribution + needs: [test] + if: github.event_name == 'push' && github.ref_type == 'tag' + runs-on: ubuntu-latest + steps: + - uses: neuroinformatics-unit/actions/build_sdist_wheels@v2 + + + upload_all: + name: Publish build distributions + needs: [build_sdist_wheels] + runs-on: ubuntu-latest + steps: + - uses: actions/download-artifact@v3 + with: + name: artifact + path: dist + - uses: pypa/gh-action-pypi-publish@v1.5.0 + with: + user: __token__ + password: ${{ secrets.TWINE_API_KEY }} diff --git a/.gitignore b/.gitignore index fee4272..aedc8d7 100644 --- a/.gitignore +++ b/.gitignore @@ -1,45 +1,3 @@ -# Ignored file types -*.ckpt -*.vscode -*.json -*.xml 
-*.idea -*.pyc -*.vtk -*.obj -*.stl -*.nrrd -*.svg -*.tdms - -# These file formats are tracked -# *.avi -# *.png -# *.mp4 -# *.jpg - - -# User folder where they can save stuff without it being tracked by git -User/* - -# Visual studio stuff -*/tempCodeRunnerFile.py -*/.ipynb_checkpoints -.ipynb_checkpoints -*/__pycache__ -__pycache__ -.DS_Store -Users/ - -# Test scripts and hidden files -workspace.py -workspace.ipynb -playground.py -secrets - -# Custom config files -*.conf.custom - # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] @@ -50,6 +8,7 @@ __pycache__/ # Distribution / packaging .Python +env/ build/ develop-eggs/ dist/ @@ -61,11 +20,9 @@ lib64/ parts/ sdist/ var/ -wheels/ *.egg-info/ .installed.cfg *.egg -MANIFEST # PyInstaller # Usually these files are written by a python script from a template @@ -85,9 +42,8 @@ htmlcov/ .cache nosetests.xml coverage.xml -*.cover +*,cover .hypothesis/ -.pytest_cache/ # Translations *.mo @@ -96,59 +52,32 @@ coverage.xml # Django stuff: *.log local_settings.py -db.sqlite3 -# Flask stuff: +# Flask instance folder instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy # Sphinx documentation -doc/build/ +docs/_build/ -# pydocmd -_build/ -mkdocs.yml +# MkDocs documentation +/site/ # PyBuilder target/ -# pyenv -.python-version - -# celery beat schedule file -celerybeat-schedule - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ +# Pycharm and VSCode +.idea/ venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site +.vscode/ -# mypy -.mypy_cache/ +# IPython Notebook +.ipynb_checkpoints -.idea/ +# pyenv +.python-version -*.~lock.* +# OS +.DS_Store -# Test reports -reports +# written by setuptools_scm +**/_version.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8d6fc63..b610cb4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,17 +1,42 @@ + + # Configuring https://pre-commit.ci/ ci: autoupdate_schedule: monthly - + repos: -- repo: https://github.com/psf/black - rev: 23.11.0 - hooks: - - id: black - pass_filenames: true -- repo: https://github.com/PyCQA/flake8 - rev: 6.1.0 - hooks: - - id: flake8 - pass_filenames: true - # this seems to need to be here in addition to setup.cfg - exclude: __init__.py + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: check-docstring-first + - id: check-executables-have-shebangs + - id: check-merge-conflict + - id: check-toml + - id: end-of-file-fixer + - id: mixed-line-ending + args: [--fix=lf] + - id: requirements-txt-fixer + - id: trailing-whitespace + - repo: https://github.com/charliermarsh/ruff-pre-commit + rev: v0.0.272 + hooks: + - id: ruff + - repo: https://github.com/psf/black + rev: 23.3.0 + hooks: + - id: black + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.3.0 + hooks: + - id: mypy + additional_dependencies: + - types-setuptools + - types-requests + - types-PyYAML + - types-retry + - repo: https://github.com/mgedmin/check-manifest + rev: "0.49" + hooks: + - id: check-manifest + args: [--no-build-isolation] + additional_dependencies: [setuptools-scm] diff --git a/LICENSE b/LICENSE index fd8fb01..f219a6b 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2019 BrancoLab +Copyright (c) 2023 University College London Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 
"Software"), to deal diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..25dc798 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,9 @@ +include LICENSE +include README.md +exclude .pre-commit-config.yaml + +recursive-exclude * __pycache__ +recursive-exclude * *.py[co] +recursive-exclude docs * +recursive-exclude tests * +recursive-exclude examples * diff --git a/README.md b/README.md index 91a8ef9..2fe5de2 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ ## Overview Morphapi is a lightweight python package for downloading neurons -morphological reconstructions from publicly available datasets. +morphological reconstructions from publicly available datasets. Please see the [documentation](https://brainglobe.info/documentation/morphapi/index.html) diff --git a/examples/download/allen_morphology_api.py b/examples/download/allen_morphology_api.py index 0159ad2..51238ef 100644 --- a/examples/download/allen_morphology_api.py +++ b/examples/download/allen_morphology_api.py @@ -15,7 +15,7 @@ # Download some neurons neurons = am.download_neurons(neurons[:5].id.values) -# ------------------------------- Visualisation ------------------------------ # +# ----------------------- Visualisation --------------------- # print("creating meshes") neurons = [neuron.create_mesh()[1] for neuron in neurons] diff --git a/examples/download/mouselight_api.py b/examples/download/mouselight_api.py index 249bf70..eb8bda5 100644 --- a/examples/download/mouselight_api.py +++ b/examples/download/mouselight_api.py @@ -1,8 +1,13 @@ +""" +mlapi.download_neurons returns a list of instances of the class Neuron +from morphapi.morphology.morphology. +""" + from vedo import Plotter from morphapi.api.mouselight import MouseLightAPI -# ---------------------------- Downloading neurons --------------------------- # +# ---------------------------- Downloading neurons ------------------------- # mlapi = MouseLightAPI() # Fetch metadata for neurons with soma in the secondary motor cortex @@ -14,13 +19,7 @@ neurons = mlapi.download_neurons(neurons_metadata[0]) -""" - mlapi.download_neurons returns a list of instances of the class Neuron - from morphapi.morphology.morphology. -""" - - -# ------------------------------- Visualisation ------------------------------ # +# ------------------------------- Visualisation ---------------------------- # print("creating meshes") neurons = [neuron.create_mesh()[1] for neuron in neurons] diff --git a/examples/download/mpin_api.py b/examples/download/mpin_api.py index 363af05..0346e18 100644 --- a/examples/download/mpin_api.py +++ b/examples/download/mpin_api.py @@ -1,14 +1,15 @@ +# ----------------------------- Download dataset -------------------------- # +""" + If it's the first time using this API, you'll have to download the dataset + with all of the neurons' data. +""" from vedo import Plotter from morphapi.api.mpin_celldb import MpinMorphologyAPI api = MpinMorphologyAPI() -# ----------------------------- Download dataset ----------------------------- # -""" - If it's the first time using this API, you'll have to download the dataset - with all of the neurons' data. 
-""" + api.download_dataset() @@ -18,7 +19,7 @@ # and load a few neurons neurons = api.load_neurons(list(api.neurons_df.index[:10])) -# ------------------------------- Visualisation ------------------------------ # +# ------------------------------- Visualisation --------------------------- # print("creating meshes") neurons = [neuron.create_mesh()[1] for neuron in neurons] diff --git a/examples/download/neuromorpho_api.py b/examples/download/neuromorpho_api.py index 9a2b4ab..908786e 100644 --- a/examples/download/neuromorpho_api.py +++ b/examples/download/neuromorpho_api.py @@ -4,26 +4,27 @@ api = NeuroMorpOrgAPI() -# ---------------------------- Downloading metadata --------------------------- # +# ---------------------------- Downloading metadata ------------------------- # # Get metadata for pyramidal neurons from the mouse cortex. metadata, _ = api.get_neurons_metadata( - size=10, # Can get the metadata for up to 500 neurons at the time + size=10, # Can get the metadata for up to 500 neurons at a time species="mouse", cell_type="pyramidal", brain_region="neocortex", ) # To get a list of available query fields: print(api.fields) -# To get a list of valid values for a field: print(api.get_fields_values(field)) +# To get a list of valid values for a field: +# print(api.get_fields_values(field)) print("Neurons metadata:") print(metadata[0]) -# ---------------------------- Download morphology --------------------------- # +# ---------------------------- Download morphology ------------------------- # neurons = api.download_neurons(metadata[5]) -# ------------------------------- Visualisation ------------------------------ # +# ------------------------------- Visualisation ---------------------------- # print("creating meshes") neurons = [neuron.create_mesh()[1] for neuron in neurons] diff --git a/examples/example_files/example1.swc b/examples/example_files/example1.swc index 2e7158e..2bd4faf 100644 --- a/examples/example_files/example1.swc +++ b/examples/example_files/example1.swc @@ -1,10 +1,10 @@ -# Generated 2019/08/21. +# Generated 2019/08/21. # Please consult Terms-of-Use at https://mouselight.janelia.org when referencing this reconstruction. # DOI: 10.25378/janelia.7780856 # Neuron Id: AA0905 # Sample Date: Mon, 02 Jul 2018 18:52:28 GMT # Sample Strain: C57BL/6N -# Label Virus: AAV2/1 Syn-iCre + AAV2/1 CAG-Flex GFP +# Label Virus: AAV2/1 Syn-iCre + AAV2/1 CAG-Flex GFP # Label Fluorophore: anti-GFP 1 1 6957.015539 2478.262394 3250.181397 1.000000 -1 2 2 6958.226485 2477.661115 3249.171194 1.000000 1 diff --git a/examples/example_files/example2.swc b/examples/example_files/example2.swc index 5ed2ad9..971b360 100644 --- a/examples/example_files/example2.swc +++ b/examples/example_files/example2.swc @@ -1,10 +1,10 @@ -# Generated 2019/08/21. +# Generated 2019/08/21. # Please consult Terms-of-Use at https://mouselight.janelia.org when referencing this reconstruction. # DOI: 10.25378/janelia.7742807 # Neuron Id: AA0887 # Sample Date: Mon, 02 Jul 2018 18:52:28 GMT # Sample Strain: C57BL/6N -# Label Virus: AAV2/1 Syn-iCre + AAV2/1 CAG-Flex GFP +# Label Virus: AAV2/1 Syn-iCre + AAV2/1 CAG-Flex GFP # Label Fluorophore: anti-GFP 1 1 7260.333612 2427.763200 2983.276215 1.000000 -1 2 3 7268.124721 2438.788217 2968.109407 1.000000 1 diff --git a/examples/example_files/example3.swc b/examples/example_files/example3.swc index dc17a29..415be6f 100644 --- a/examples/example_files/example3.swc +++ b/examples/example_files/example3.swc @@ -1,10 +1,10 @@ -# Generated 2019/08/21. +# Generated 2019/08/21. 
# Please consult Terms-of-Use at https://mouselight.janelia.org when referencing this reconstruction. # DOI: 10.25378/janelia.7742810 # Neuron Id: AA0888 # Sample Date: Mon, 02 Jul 2018 18:52:28 GMT # Sample Strain: C57BL/6N -# Label Virus: AAV2/1 Syn-iCre + AAV2/1 CAG-Flex GFP +# Label Virus: AAV2/1 Syn-iCre + AAV2/1 CAG-Flex GFP # Label Fluorophore: anti-GFP 1 1 6327.335938 1856.482422 3459.851563 1.000000 -1 2 2 6324.710938 1855.660156 3459.824218 1.000000 1 diff --git a/examples/visualise/visualise_swc.py b/examples/visualise/visualise_swc.py index 8a10834..cc9800c 100644 --- a/examples/visualise/visualise_swc.py +++ b/examples/visualise/visualise_swc.py @@ -1,13 +1,15 @@ +""" +This example shows how to use vedo to visualise a 3d reconstruction of a +neuron. + +However, the recommended way to visualise neurons is with brainrender: +https://github.com/brainglobe/brainrender +""" + from vedo import Plotter from morphapi.morphology.morphology import Neuron -""" - This example shows how to use vedo to visualise a 3d reconstruction of a neuron. - However, the reccomended way to visualise neurons is with brainrender: - https://github.com/BrancoLab/BrainRender -""" - fp = "examples/example_files/example1.swc" # Create vedo actors from the .swc file diff --git a/media/exampleneuron.png b/media/exampleneuron.png deleted file mode 100644 index 4ff8db8..0000000 Binary files a/media/exampleneuron.png and /dev/null differ diff --git a/morphapi/__init__.py b/morphapi/__init__.py index 8b13789..e495d52 100644 --- a/morphapi/__init__.py +++ b/morphapi/__init__.py @@ -1 +1,7 @@ +from importlib.metadata import PackageNotFoundError, version +try: + __version__ = version("morphapi") +except PackageNotFoundError: + # package is not installed + pass diff --git a/morphapi/api/allenmorphology.py b/morphapi/api/allenmorphology.py index e1b9622..f1aa49b 100644 --- a/morphapi/api/allenmorphology.py +++ b/morphapi/api/allenmorphology.py @@ -8,7 +8,8 @@ from allensdk.core.cell_types_cache import CellTypesCache except ModuleNotFoundError: raise ModuleNotFoundError( - 'You need to install the allen sdk package to use AllenMorphology: "pip install allensdk"' + "You need to install the allen sdk package to use " + 'AllenMorphology: "pip install allensdk"' ) from morphapi.morphology.morphology import Neuron @@ -19,15 +20,18 @@ class AllenMorphology(Paths): - """Handles the download of neuronal morphology data from the Allen database.""" + """Handles the download of neuronal morphology data from the + Allen database.""" def __init__(self, *args, **kwargs): """ - Initialise API interaction and fetch metadata of neurons in the Allen Database. + Initialise API interaction and fetch metadata of neurons in the + Allen Database. 
""" if not connected_to_internet(): raise ConnectionError( - "You will need to be connected to the internet to use the AllenMorphology class to download neurons" + "You will need to be connected to the internet to use the " + "AllenMorphology class to download neurons" ) Paths.__init__(self, *args, **kwargs) @@ -39,7 +43,8 @@ def __init__(self, *args, **kwargs): ) ) - # Get a list of cell metadata for neurons with reconstructions, download if necessary + # Get a list of cell metadata for neurons with reconstructions, + # download if necessary self.neurons = pd.DataFrame( self.ctc.get_cells(require_reconstruction=True) ) @@ -47,7 +52,8 @@ def __init__(self, *args, **kwargs): if not self.n_neurons: raise ValueError( - "Something went wrong and couldn't get neurons metadata from Allen" + "Something went wrong and couldn't get neurons metadata " + "from Allen" ) self.downloaded_neurons = self.get_downloaded_neurons() @@ -96,7 +102,8 @@ def download_neurons(self, ids, load_neurons=True, **kwargs): self.ctc.get_reconstruction(neuron_id, file_name=neuron_file) except Exception as exc: logger.error( - "Could not fetch the neuron %s for the following reason: %s", + "Could not fetch the neuron %s " + "for the following reason: %s", neuron_id, str(exc), ) diff --git a/morphapi/api/mouselight.py b/morphapi/api/mouselight.py index b65e076..d1fd0c9 100644 --- a/morphapi/api/mouselight.py +++ b/morphapi/api/mouselight.py @@ -1,3 +1,11 @@ +""" +Collections of functions to query https://ml-neuronbrowser.janelia.org/ +and get data about either the status of the API, the brain regions or the +neurons available. +Queries are sent by sending POST requests to +https://ml-neuronbrowser.janelia.org/graphql with a string query. +""" + import logging from collections import namedtuple @@ -8,24 +16,15 @@ from morphapi.api.neuromorphorg import NeuroMorpOrgAPI from morphapi.morphology.morphology import Neuron from morphapi.paths_manager import Paths -from morphapi.utils.data_io import flatten_list -from morphapi.utils.data_io import is_any_item_in_list -from morphapi.utils.webqueries import mouselight_base_url -from morphapi.utils.webqueries import post_mouselight +from morphapi.utils.data_io import flatten_list, is_any_item_in_list +from morphapi.utils.webqueries import mouselight_base_url, post_mouselight logger = logging.getLogger(__name__) -""" - Collections of functions to query https://ml-neuronbrowser.janelia.org/ and get data about either the status of the API, - the brain regions or the neurons available. - Queries are sent by sending POST requests to https://ml-neuronbrowser.janelia.org/graphql - with a string query. -""" - -# ---------------------------------------------------------------------------- # -# QUERY UTILS # -# ---------------------------------------------------------------------------- # +# -------------------------------------------------------------------------- # +# QUERY UTILS # +# -------------------------------------------------------------------------- # def mouselight_api_info(): @@ -50,7 +49,8 @@ def mouselight_api_info(): def mouselight_get_brainregions(): """ - Get metadata about the brain brain regions as they are known by Janelia's Mouse Light. + Get metadata about the brain regions as they are known by + Janelia's Mouse Light. IDs and Names sometimes differ from Allen's CCF. 
""" @@ -85,7 +85,8 @@ def mouselight_get_brainregions(): def mouselight_structures_identifiers(): """ - When the data are downloaded as SWC, each node has a structure identifier ID to tell if it's soma, axon or dendrite. + When the data are downloaded as SWC, each node has a structure + identifier ID to tell if it's soma, axon or dendrite. This function returns the ID number --> structure table. """ @@ -114,11 +115,15 @@ def mouselight_structures_identifiers(): def make_query(filterby=None, filter_regions=None, invert=False): """ - Constructs the strings used to submit graphql queries to the mouse light api + Constructs the strings used to submit graphql queries to the mouse + light api - :param filterby: str, soma, axon on dendrite. Search by neurite structure (Default value = None) - :param filter_regions: list, tuple. list of strings. Acronyms of brain regions to use for query (Default value = None) - :param invert: If true the inverse of the query is return (i.e. the neurons NOT in a brain region) (Default value = False) + :param filterby: str, soma, axon on dendrite. Search by neurite + structure (Default value = None) + :param filter_regions: list, tuple. list of strings. Acronyms of brain + regions to use for query (Default value = None) + :param invert: If true the inverse of the query is return (i.e., the + neurons NOT in a brain region) (Default value = False) """ searchneurons = """ @@ -254,18 +259,21 @@ def fetch_atlas(atlas_name="allen_mouse_25um"): return BrainGlobeAtlas(atlas_name) -# ---------------------------------------------------------------------------- # -# MAIN CLASS # -# ---------------------------------------------------------------------------- # +# -------------------------------------------------------------------------- # +# MAIN CLASS # +# -------------------------------------------------------------------------- # class MouseLightAPI(Paths): def __init__(self, base_dir=None, **kwargs): """ - Handles the download of neurons morphology data from the Mouse Light project + Handles the download of neurons morphology data from the + Mouse Light project - :param base_dir: path to directory to use for saving data (default value None) - :param kwargs: can be used to pass path to individual data folders. See morphapi/utils /paths_manager.py + :param base_dir: path to directory to use for saving data + (default value None) + :param kwargs: can be used to pass path to individual + data folders. See morphapi/utils /paths_manager.py """ Paths.__init__(self, base_dir=base_dir, **kwargs) @@ -273,12 +281,17 @@ def fetch_neurons_metadata( self, filterby=None, filter_regions=None, **kwargs ): """ - Download neurons metadata and data from the API. The downloaded metadata can be filtered to keep only - the neurons whose soma is in a list of user selected brain regions. - - :param filterby: Accepted values: "soma". If it's "soma", neurons are kept only when their soma - is in the list of brain regions defined by filter_regions (Default value = None) - :param filter_regions: List of brain regions acronyms. If filtering neurons, these specify the filter criteria. (Default value = None) + Download neurons metadata and data from the API. The + downloaded metadata can be filtered to keep only + the neurons whose soma is in a list of user-selected brain regions. + + :param filterby: Accepted values: "soma". 
If it's "soma", + neurons are kept only when their soma is in the + list of brain regions defined by filter_regions (Default + value = None) + :param filter_regions: List of brain regions acronyms. + If filtering neurons, these specify the filter + criteria. (Default value = None) :param **kwargs: """ @@ -296,7 +309,8 @@ def fetch_neurons_metadata( round(res["queryTime"] / 1000, 2), ) - # Process neurons to clean up the results and make them easier to handle + # Process neurons to clean up the results and make them + # easier to handle neurons = res["neurons"] node = namedtuple("node", "x y z r area_acronym sample_n parent_n") tracing_structure = namedtuple( @@ -384,15 +398,18 @@ def filter_neurons_metadata( atlas=None, ): """ - Filter metadata to keep only the neurons whose soma is in a given list of brain regions. - - :param filterby: Accepted values: "soma". If it's "soma", neurons are kept only when their - soma is in the list of brain regions defined by filter_regions (Default - value = None) - :param filter_regions: List of brain regions acronyms. If filtering neurons, these specify - the filter criteria. (Default value = None) - :param atlas: A `bg_atlasapi.BrainGlobeAtlas` object. If not provided, load the - default atlas. + Filter metadata to keep only the neurons whose soma is + in a given list of brain regions. + + :param filterby: Accepted values: "soma". If it's "soma", + neurons are kept only when their + soma is in the list of brain regions defined by + filter_regions (Defaultvalue = None) + :param filter_regions: List of brain regions acronyms. + If filtering neurons, these specify + the filter criteria. (Default value = None) + :param atlas: A `bg_atlasapi.BrainGlobeAtlas` object. + If not provided, load the default atlas. """ # Filter neurons to keep only those matching the search criteria @@ -404,7 +421,8 @@ def filter_neurons_metadata( if filter_regions is None: raise ValueError( - "If filtering neuron by region, you need to pass a list of filter regions to use" + "If filtering neuron by region, you need " + "to pass a list of filter regions to use" ) # get brain globe atlas @@ -428,7 +446,8 @@ def filter_neurons_metadata( # ignore if region is not found continue - # If any of the ancestors or itself are in the allowed regions, keep neuron. + # If any of the ancestors or itself are in the allowed + # regions, keep neuron. 
if is_any_item_in_list( filter_regions, neuron_region_ancestors ): @@ -470,7 +489,8 @@ def download_neurons(self, neurons_metadata, load_neurons=True, **kwargs): nrn = nmapi.get_neuron_by_name(neuron["idString"]) except ValueError as exc: logger.error( - "Could not fetch the neuron %s for the following reason: %s", + "Could not fetch the neuron %s for the " + "following reason: %s", neuron["idString"], str(exc), ) diff --git a/morphapi/api/mpin_celldb.py b/morphapi/api/mpin_celldb.py index ff091e9..80f1e5e 100644 --- a/morphapi/api/mpin_celldb.py +++ b/morphapi/api/mpin_celldb.py @@ -57,7 +57,8 @@ def fix_mpin_swgfile(file_path, fixed_file_path=None): class MpinMorphologyAPI(Paths): - """Handles the download of neuronal morphology data from the MPIN database.""" + """Handles the download of neuronal morphology + data from the MPIN database.""" def __init__(self, *args, **kwargs): Paths.__init__(self, *args, **kwargs) @@ -137,7 +138,10 @@ def download_dataset(self): ) SOURCE_DATA_DIR = "MPIN-Atlas__Kunst_et_al__neurons_all" - REMOTE_URL = "https://fishatlas.neuro.mpg.de/neurons/download/download_all_neurons_aligned" + REMOTE_URL = ( + "https://fishatlas.neuro.mpg.de/neurons/download/" + "download_all_neurons_aligned" + ) # # Download folder with all data: download_zip_path = Path(self.mpin_morphology) / "data.zip" @@ -162,9 +166,11 @@ def download_dataset(self): shutil.rmtree(extracted_data_path.parent) - # # 2/1900 neurons still have a little bug, hopefully fixed in the future + # # 2/1900 neurons still have a little bug, hopefully + # fixed in the future # try: # return Neuron(data_file=fixed_file_path) - # except: # Ideally in the next iteration this except won't be necessary + # except: # Ideally in the next iteration this except won't + # be necessary # print(f"Unfixable problem while opening {file_path.name}") # return diff --git a/morphapi/api/neuromorphorg.py b/morphapi/api/neuromorphorg.py index 0df335d..d4e072c 100644 --- a/morphapi/api/neuromorphorg.py +++ b/morphapi/api/neuromorphorg.py @@ -3,8 +3,7 @@ from morphapi.morphology.morphology import Neuron from morphapi.paths_manager import Paths -from morphapi.utils.webqueries import connected_to_internet -from morphapi.utils.webqueries import request +from morphapi.utils.webqueries import connected_to_internet, request logger = logging.getLogger(__name__) @@ -17,7 +16,8 @@ class NeuroMorpOrgAPI(Paths): def __init__(self, *args, **kwargs): if not connected_to_internet(): raise ConnectionError( - "You will need to be connected to the internet to use the NeuroMorpOrgAPI class to download neurons" + "You will need to be connected to the internet to " + "use the NeuroMorpOrgAPI class to download neurons" ) Paths.__init__(self, *args, **kwargs) @@ -35,7 +35,8 @@ def __init__(self, *args, **kwargs): @property def fields(self): """ - Fields contains the types of fields that can be used to restrict queries + Fields contains the types of fields that can be used to + restrict queries """ if self._fields is None: self._fields = request(self._base_url + "/fields").json()[ @@ -69,20 +70,26 @@ def get_neurons_metadata(self, size=100, page=0, **criteria): Neuromorpho.org paginates it's requests so not all neurons metadata can be returned at once - :param size: int in range [0, 500]. Number of neurons whose metadata can be returned at the same time - :param page: int > 0. Page number. 
The number of pages depends on size and on how many neurons match the criteria - :param criteria: use keywords to restrict the query to neurons that match given criteria. - keywords should be pass as "field=value". Then only neuron's whose 'field' - attribute has value 'value' will be returned. + :param size: int in range [0, 500]. Number of neurons whose + metadata can be returned at the same time + :param page: int > 0. Page number. The number of pages depends + on size and on how many neurons match the criteria + :param criteria: use keywords to restrict the query to neurons + that match given criteria. + keywords should be pass as "field=value". + Then only neuron's whose 'field' + attribute has value 'value' will be returned. """ if size < 0 or size > 500: raise ValueError( - f"Invalid size argument: {size}. Size should be an integer between 0 and 500" + f"Invalid size argument: {size}. Size should be an " + f"integer between 0 and 500" ) if page < 0: raise ValueError( - f"Invalid page argument: {page}. Page should be an integer >= 0" + f"Invalid page argument: {page}. Page should be an " + f"integer >= 0" ) url = self._base_url + "/select?q=" @@ -109,12 +116,14 @@ def get_neurons_metadata(self, size=100, page=0, **criteria): for crit, val in criteria.items(): if crit not in self.fields: raise ValueError( - f"Query criteria {crit} not in available fields: {self.fields}" + f"Query criteria {crit} not in " + f"available fields: {self.fields}" ) field_values = self.get_fields_values(crit) if val not in field_values: raise ValueError( - f"Query criteria value {val} for field {crit} not valid." + f"Query criteria value {val} for " + f"field {crit} not valid." + f"Valid values include: {field_values}" ) @@ -125,8 +134,10 @@ def get_neurons_metadata(self, size=100, page=0, **criteria): neurons = neurons["_embedded"]["neuronResources"] logger.info( - f"Found metadata for {page['totalElements']} neurons [{page['totalPages']} pages in total]. " - f"Returning metadata about {len(neurons)} neurons from page {page['number']}" + f"Found metadata for {page['totalElements']} neurons " + f"[{page['totalPages']} pages in total]. " + f"Returning metadata about {len(neurons)} neurons " + f"from page {page['number']}" ) return neurons, page @@ -159,15 +170,18 @@ def download_neurons( ): """ Downloads neuronal morphological data and saves it to .swc files. - It then returns a list of Neuron instances with morphological data for each neuron. + It then returns a list of Neuron instances with + morphological data for each neuron. 
- :param neurons: list of neurons metadata (as returned by one of the functions - used to fetch metadata) - :param _name: used internally to save cached neurons with a different prefix when the + :param neurons: list of neurons metadata (as + returned by one of the functions used to fetch metadata) + :param _name: used internally to save cached neurons + with a different prefix when the class is used to download neurons for other APIs :param load_neurons: if set to True, the neurons are loaded into a `morphapi.morphology.morphology.Neuron` object and returned - :param use_neuron_names: if set to True, the filenames use the names of the neurons instead + :param use_neuron_names: if set to True, the filenames + use the names of the neurons instead of their IDs """ if not isinstance(neurons, (list, tuple)): @@ -195,9 +209,15 @@ class is used to download neurons for other APIs if not os.path.isfile(filepath): # Download and write to file if self._version == "CNG version": - url = f"https://neuromorpho.org/dableFiles/{neuron['archive'].lower()}/CNG version/{neuron['neuron_name']}.CNG.swc" + url = ( + f"https://neuromorpho.org/dableFiles/{neuron['archive'].lower()}/" + f"CNG version/{neuron['neuron_name']}.CNG.swc" + ) else: - url = f"https://neuromorpho.org/dableFiles/{neuron['archive'].lower()}/{self._version}/{neuron['neuron_name']}.swc" + url = ( + f"https://neuromorpho.org/dableFiles/{neuron['archive'].lower()}/" + f"{self._version}/{neuron['neuron_name']}.swc" + ) try: req = request(url) @@ -205,7 +225,8 @@ class is used to download neurons for other APIs f.write(req.content.decode("utf-8")) except ValueError as exc: logger.error( - "Could not fetch the neuron %s for the following reason: %s", + "Could not fetch the neuron %s for the " + "following reason: %s", neuron["neuron_name"], str(exc), ) diff --git a/morphapi/morphology/cache.py b/morphapi/morphology/cache.py index bc281fa..47d7a72 100644 --- a/morphapi/morphology/cache.py +++ b/morphapi/morphology/cache.py @@ -1,13 +1,9 @@ import os -from vedo import Mesh -from vedo import load -from vedo import merge -from vedo import write +from vedo import Mesh, load, merge, write from morphapi.paths_manager import Paths -from morphapi.utils.data_io import load_yaml -from morphapi.utils.data_io import save_yaml +from morphapi.utils.data_io import load_yaml, save_yaml class NeuronCache(Paths): @@ -21,7 +17,8 @@ class NeuronCache(Paths): def __init__(self, **kwargs): """ - Initialise API interaction and fetch metadata of neurons in the Allen Database. + Initialise API interaction and fetch metadata of neurons + in the Allen Database. """ super().__init__(**kwargs) # path to data caches @@ -100,7 +97,8 @@ def write_neuron_to_cache(self, neuron_name, neuron, _params): write(actor, fname[0]) else: # Get a single actor for each neuron component. - # If there's no data for the component create an empty actor + # If there's no data for the component + # create an empty actor if not isinstance(actor, Mesh): if isinstance(actor, (list, tuple)): if len(actor) == 1: @@ -110,9 +108,10 @@ def write_neuron_to_cache(self, neuron_name, neuron, _params): else: try: actor = merge(actor) - except: + except: # noqa: E722 raise ValueError( - f"{key} actor should be a mesh or a list of 1 mesh not {actor}" + f"{key} actor should be a mesh or a " + f"list of 1 mesh not {actor}" ) if actor is None: @@ -124,5 +123,6 @@ def write_neuron_to_cache(self, neuron_name, neuron, _params): write(actor, fname[0]) else: raise ValueError( - f"No filename found for {key}. 
Filenames {file_names}" + f"No filename found for {key}. " + f"Filenames {file_names}" ) diff --git a/morphapi/morphology/morphology.py b/morphapi/morphology/morphology.py index 59beb05..e0d0182 100644 --- a/morphapi/morphology/morphology.py +++ b/morphapi/morphology/morphology.py @@ -6,8 +6,7 @@ from neurom.core.dataformat import COLS from vedo import merge from vedo.colors import color_map -from vedo.shapes import Sphere -from vedo.shapes import Tube +from vedo.shapes import Sphere, Tube try: # For NeuroM >= 3 @@ -198,7 +197,8 @@ def create_mesh( ): if self.points is None: logger.warning( - "No data loaded, you can use the 'load_from_file' method to try to load the file." + "No data loaded, you can use the 'load_from_file' " + "method to try to load the file." ) return @@ -217,11 +217,13 @@ def create_mesh( or not neurite_radius > 0 ): raise ValueError( - "Invalid value for parameter neurite_radius, should be a float > 0" + "Invalid value for parameter neurite_radius, " + "should be a float > 0" ) if not isinstance(soma_radius, (int, float)) or not soma_radius > 0: raise ValueError( - "Invalid value for parameter soma_radius, should be a float > 0" + "Invalid value for parameter soma_radius, " + "should be a float > 0" ) # prepare params dict for caching _params = dict(neurite_radius=neurite_radius, soma_radius=soma_radius) diff --git a/morphapi/paths_manager.py b/morphapi/paths_manager.py index 7116482..0d27f92 100644 --- a/morphapi/paths_manager.py +++ b/morphapi/paths_manager.py @@ -1,11 +1,11 @@ -from pathlib import Path - """ - Class to create and store paths to a number of folders uesed to save/load data +Class to create and store paths to a number of folders uesed to save/load data """ +from pathlib import Path -# Default paths for Data Folders (store stuff like object meshes, neurons morphology data etc) +# Default paths for Data Folders (store stuff like object meshes, +# neurons morphology data etc) default_paths = dict( # APIs caches allen_morphology_cache="Data/allen_morphology_cache", @@ -21,10 +21,13 @@ class Paths: def __init__(self, base_dir=None, **kwargs): """ - Parses a YAML file to get data folders paths. Stores paths to a number of folders used throughtout morphapi. + Parses a YAML file to get data folders paths. Stores paths to a + number of folders used throughtout morphapi. - :param base_dir: str with path to directory to use to save data. If none the user's base directiry is used. - :param kwargs: use the name of a folder as key and a path as argument to specify the path of individual subfolders + :param base_dir: str with path to directory to use to save data. + If none the user's base directiry is used. + :param kwargs: use the name of a folder as key and a path as + argument to specify the path of individual subfolders """ # Get and make base directory @@ -36,7 +39,8 @@ def __init__(self, base_dir=None, **kwargs): self.base_dir.mkdir(exist_ok=True) for fld_name, folder in default_paths.items(): - # Check if user provided a path for this folder, otherwise use default + # Check if user provided a path for this folder, + # otherwise use default path = self.base_dir / kwargs.pop(fld_name, folder) diff --git a/morphapi/utils/data_io.py b/morphapi/utils/data_io.py index d75ba87..3e09bf7 100644 --- a/morphapi/utils/data_io.py +++ b/morphapi/utils/data_io.py @@ -6,7 +6,8 @@ def listdir(fld): """ - List the files into a folder with the coplete file path instead of the relative file path like os.listdir. 
+ List the files into a folder with the coplete file path instead of + the relative file path like os.listdir. :param fld: string, folder path @@ -22,7 +23,7 @@ def get_file_name(filepath): return os.path.splitext(os.path.basename(filepath))[0] -# ------------------------------ Load/Save data ------------------------------ # +# ------------------------------ Load/Save data ---------------------------- # def save_yaml(filepath, content, append=False, topcomment=None): @@ -63,12 +64,13 @@ def load_yaml(filepath): return yaml.load(open(filepath), Loader=yaml.FullLoader) -# ----------------------------- Internet queries ----------------------------- # +# ------------------------- Internet queries ------------------------- # def connected_to_internet(url="https://www.google.com/", timeout=5): """ Check that there is an internet connection - :param url: url to use for testing (Default value = 'https://www.google.com/') + :param url: url to use for testing + (Default value = 'https://www.google.com/') :param timeout: timeout to wait for [in seconds] (Default value = 5) """ @@ -80,9 +82,9 @@ def connected_to_internet(url="https://www.google.com/", timeout=5): return False -# ---------------------------------------------------------------------------- # -# Data manipulation # -# ---------------------------------------------------------------------------- # +# ------------------------------------------------------------------------ # +# Data manipulation # +# ------------------------------------------------------------------------ # def flatten_list(lst): """ Flattens a list of lists diff --git a/morphapi/utils/webqueries.py b/morphapi/utils/webqueries.py index 3b5709c..7fd2ce1 100644 --- a/morphapi/utils/webqueries.py +++ b/morphapi/utils/webqueries.py @@ -75,11 +75,13 @@ def query_mouselight(query): def post_mouselight(url, query=None, clean=False, attempts=3): """ - sends a POST request to a user URL. Query can be either a string (in which case clean should be False) or a dictionary. + sends a POST request to a user URL. Query can be either a string + (in which case clean should be False) or a dictionary. :param url: :param query: string or dictionary with query (Default value = None) - :param clean: if not clean, the query is assumed to be in JSON format (Default value = False) + :param clean: if not clean, the query is assumed to be in + JSON format (Default value = False) :param attempts: number of attempts (Default value = 3) """ @@ -112,9 +114,8 @@ def post_mouselight(url, query=None, clean=False, attempts=3): if request is None: raise ConnectionError( "\n\nMouseLight API query failed with error message:\n{}.\ - \nPerhaps the server is down, visit '{}' to find out.".format( - exception, mouselight_base_url - ) + \nPerhaps the server is down, visit '{}' " + "to find out.".format(exception, mouselight_base_url) ) else: raise NotImplementedError @@ -127,7 +128,8 @@ def post_mouselight(url, query=None, clean=False, attempts=3): return jreq else: raise Exception( - "Query failed to run by returning code of {}. {} -- \n\n{}".format( + "Query failed to run by returning code " + "of {}. 
{} -- \n\n{}".format( request.status_code, query, request.text ) ) diff --git a/pyproject.toml b/pyproject.toml index c0de9f5..69cc457 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,27 +1,116 @@ +[project] +name = "morphapi" +authors = [{name = "Federico Claudi", email= "hello@brainglobe.info"}] +description = "A lightweight python package to download neuronal morphologies" +readme = "README.md" +requires-python = ">=3.9.0" +dynamic = ["version"] + +dependencies = [ + "bg_atlasapi", + "imagecodecs; python_version>='3.9'", + "neurom>=3,<4", + "numpy", + "pandas", + "pyyaml>=5.3", + "retry", + "rich", + "vedo>=2023.5.0", + "vtk", +] + +license = {text = "MIT"} + +classifiers = [ + "Development Status :: 2 - Pre-Alpha", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "License :: OSI Approved :: MIT License", +] + +[project.urls] +"Homepage" = "https://github.com/brainglobe/morphapi" +"Bug Tracker" = "https://github.com/brainglobe/morphapi/issues" +"Documentation" = "https://github.com/brainglobe/morphapi" +"Source Code" = "https://github.com/brainglobe/morphapi" +"User Support" = "https://github.com/brainglobe/morphapi/issues" + +[project.optional-dependencies] +dev = [ + "pytest", + "pytest-cov", + "coverage", + "tox", + "black", + "mypy", + "pre-commit", + "ruff", + "setuptools_scm", + "pytest-sugar", + "allensdk", +] + +nb = ["jupyter", "k3d"] + [build-system] requires = [ - "setuptools", - "setuptools_scm[toml]", + "setuptools>=45", "wheel", + "setuptools_scm[toml]>=6.2", ] +build-backend = "setuptools.build_meta" + +[tool.setuptools] +include-package-data = true + +[tool.setuptools.packages.find] +include = ["morphapi*"] +exclude = ["tests*", "examples*"] + +[tool.pytest.ini_options] +addopts = "--cov=morphapi" [tool.black] -target-version = ['py37', 'py38', 'py39', 'py310'] +target-version = ['py39', 'py310', 'py311'] skip-string-normalization = false line-length = 79 -exclude = ''' -( - /( - \.eggs - | \.git - | \.hg - | \.mypy_cache - | \.tox - | \.venv - | _build - | buck-out - | build - | dist - )/ -) -''' + +[tool.setuptools_scm] + +[tool.check-manifest] +ignore = [ + ".yaml", + "tox.ini", + "tests/", + "tests/test_unit/", + "tests/test_integration/", +] + +[tool.ruff] +line-length = 79 +exclude = ["__init__.py","build",".eggs"] +select = ["I", "E", "F"] +fix = true + +[tool.tox] +legacy_tox_ini = """ +[tox] +envlist = py{39,310,311} +isolated_build = True + +[gh-actions] +python = + 3.9: py39 + 3.10: py310 + 3.11: py311 + +[testenv] +extras = + dev +commands = + pytest -v --color=yes --cov=morphapi --cov-report=xml +""" diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 9ee791e..0000000 --- a/pytest.ini +++ /dev/null @@ -1,6 +0,0 @@ -[pytest] -addopts = --cov=morphapi - -filterwarnings = - ignore:distutils Version classes are deprecated:DeprecationWarning - ignore:The hookimpl CovPlugin.pytest_.* uses old-style configuration options:pytest.PytestDeprecationWarning diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 2984b6d..0000000 --- a/setup.cfg +++ /dev/null @@ -1,13 +0,0 @@ -[flake8] -# Ignores - https://lintlyci.github.io/Flake8Rules -# W291 trailing whitespace -# E731 do not assign a lambda expression, use a def -# W293 blank line contains whitespace -# E203 Whitespace before ':' (sometimes conflicts with black) -# E501 line too long (84 
> 79 characters) (sometimes too annoying) -# W503 Line break occurred before a binary operator -# C901 McCabe complexity test. Would be nice to reenable, but takes work -ignore = E203,W503,E501,E731,C901,W291,W293,E402,E722 -max-line-length = 79 -max-complexity = 18 -exclude = __init__.py \ No newline at end of file diff --git a/setup.py b/setup.py deleted file mode 100644 index 2c0feef..0000000 --- a/setup.py +++ /dev/null @@ -1,53 +0,0 @@ -from os import path - -from setuptools import find_namespace_packages -from setuptools import setup - -this_directory = path.abspath(path.dirname(__file__)) -with open(path.join(this_directory, "README.md"), encoding="utf-8") as f: - long_description = f.read() - -requirements = [ - "bg_atlasapi", - "imagecodecs; python_version>='3.9'", - "neurom>=3,<4", - "numpy", - "pandas", - "pyyaml>=5.3", - "retry", - "rich", - "vedo>=2023.5.0", - "vtk", -] - -setup( - name="morphapi", - description="A lightweight python package to download neuronal morphologies", - long_description=long_description, - long_description_content_type="text/markdown", - use_scm_version=True, - setup_requires=[ - "setuptools_scm", - ], - install_requires=requirements, - extras_require={ - "nb": ["jupyter", "k3d"], - "dev": [ - "pytest-cov", - "pytest", - "pytest-html", - "coveralls", - "coverage<=4.5.4", - "pytest-sugar", - "allensdk", - ], - }, - python_requires=">=3.8", - packages=find_namespace_packages( - exclude=("Installation", "Meshes", "Metadata", "Screenshots") - ), - include_package_data=True, - url="https://github.com/brainglobe/morphapi", - author="Federico Claudi", - zip_safe=False, -) diff --git a/tests/data/example1.swc b/tests/data/example1.swc index 2e7158e..2bd4faf 100644 --- a/tests/data/example1.swc +++ b/tests/data/example1.swc @@ -1,10 +1,10 @@ -# Generated 2019/08/21. +# Generated 2019/08/21. # Please consult Terms-of-Use at https://mouselight.janelia.org when referencing this reconstruction. # DOI: 10.25378/janelia.7780856 # Neuron Id: AA0905 # Sample Date: Mon, 02 Jul 2018 18:52:28 GMT # Sample Strain: C57BL/6N -# Label Virus: AAV2/1 Syn-iCre + AAV2/1 CAG-Flex GFP +# Label Virus: AAV2/1 Syn-iCre + AAV2/1 CAG-Flex GFP # Label Fluorophore: anti-GFP 1 1 6957.015539 2478.262394 3250.181397 1.000000 -1 2 2 6958.226485 2477.661115 3249.171194 1.000000 1 diff --git a/tests/data/example2.swc b/tests/data/example2.swc index 8a6f2d6..473e3f5 100644 --- a/tests/data/example2.swc +++ b/tests/data/example2.swc @@ -1,10 +1,10 @@ -# Generated 2019/08/21. +# Generated 2019/08/21. # Please consult Terms-of-Use at https://mouselight.janelia.org when referencing this reconstruction. # DOI: 10.25378/janelia.7742807 # Neuron Id: AA0887 # Sample Date: Mon, 02 Jul 2018 18:52:28 GMT # Sample Strain: C57BL/6N -# Label Virus: AAV2/1 Syn-iCre + AAV2/1 CAG-Flex GFP +# Label Virus: AAV2/1 Syn-iCre + AAV2/1 CAG-Flex GFP # Label Fluorophore: anti-GFP 1 1 7260.333612 2427.763200 2983.276215 1.000000 -1 2 3 7268.124721 2438.788217 2968.109407 1.000000 1 diff --git a/tests/data/example3.swc b/tests/data/example3.swc index b0c8bda..976adf0 100644 --- a/tests/data/example3.swc +++ b/tests/data/example3.swc @@ -1,10 +1,10 @@ -# Generated 2019/08/21. +# Generated 2019/08/21. # Please consult Terms-of-Use at https://mouselight.janelia.org when referencing this reconstruction. 
# DOI: 10.25378/janelia.7742810 # Neuron Id: AA0888 # Sample Date: Mon, 02 Jul 2018 18:52:28 GMT # Sample Strain: C57BL/6N -# Label Virus: AAV2/1 Syn-iCre + AAV2/1 CAG-Flex GFP +# Label Virus: AAV2/1 Syn-iCre + AAV2/1 CAG-Flex GFP # Label Fluorophore: anti-GFP 1 1 6327.335938 1856.482422 3459.851563 1.000000 -1 2 2 6324.710938 1855.660156 3459.824218 1.000000 1 diff --git a/tests/test_download.py b/tests/test_download.py index 0f242c9..17bdf90 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -2,6 +2,7 @@ from pathlib import Path import pytest + from morphapi.api.allenmorphology import AllenMorphology from morphapi.api.mouselight import MouseLightAPI from morphapi.api.neuromorphorg import NeuroMorpOrgAPI @@ -18,7 +19,8 @@ def test_neuromorpho_download(tmpdir): with pytest.raises( ValueError, match=re.escape( - f"Query criteria UNKNOWN_FIELD not in available fields: {api.fields}" + f"Query criteria UNKNOWN_FIELD not in " + f"available fields: {api.fields}" ), ): api.get_neurons_metadata(UNKNOWN_FIELD=0) @@ -33,7 +35,7 @@ def test_neuromorpho_download(tmpdir): api.get_neurons_metadata(strain="UNKNOWN_VALUE") metadata, _ = api.get_neurons_metadata( - size=2, # Can get the metadata for up to 500 neurons at the time + size=2, # Can get the metadata for up to 500 neurons at a time species="mouse", cell_type="pyramidal", brain_region="neocortex", diff --git a/tests/test_neuron.py b/tests/test_neuron.py index ba824dd..3ad76e6 100644 --- a/tests/test_neuron.py +++ b/tests/test_neuron.py @@ -65,7 +65,8 @@ def test_empty_neuron(caplog): neuron.create_mesh() assert caplog.messages == [ - "No data loaded, you can use the 'load_from_file' method to try to load the file." + "No data loaded, you can use the 'load_from_file' method to " + "try to load the file." ] neuron.load_from_file() diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 9e47461..0000000 --- a/tox.ini +++ /dev/null @@ -1,26 +0,0 @@ -[tox] -envlist = - py{38,39,310,311} - -[testenv] -extras = - dev -deps: - py39: numpy # This is required during installation -commands = pytest \ - --basetemp={envtmpdir} \ - --cov=morphapi \ - --cov-report term-missing \ - --cov-report html:reports/coverage-{envname} \ - --cov-report xml:reports/coverage-{envname}.xml \ - --html reports/pytest-{envname}.html \ - --junit-xml=reports/pytest-{envname}.xml \ - --self-contained-html \ - {posargs} - -[gh-actions] -python = - 3.8: py38 - 3.9: py39 - 3.10: py310 - 3.11: py311
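The changeset above migrates packaging from `setup.py`/`setup.cfg` to a single `pyproject.toml` (with the version supplied by `setuptools_scm`), exposes a runtime `__version__` in `morphapi/__init__.py`, and moves CI to the shared neuroinformatics-unit actions covering Python 3.9 to 3.11. A quick smoke test of an install from this branch might look like the sketch below. It is not part of the changeset: it simply mirrors `examples/download/neuromorpho_api.py`, so the query values (`species="mouse"`, `cell_type="pyramidal"`, `brain_region="neocortex"`) are borrowed from that example, and it assumes a working internet connection.

```python
"""Smoke-test sketch for the repackaged morphapi (not part of the changeset)."""

import morphapi
from morphapi.api.neuromorphorg import NeuroMorpOrgAPI

# __version__ is now resolved from installed package metadata
# (importlib.metadata + setuptools_scm), so it is only set after installation.
print("morphapi version:", getattr(morphapi, "__version__", "unknown"))

api = NeuroMorpOrgAPI()

# Same query as examples/download/neuromorpho_api.py, capped at 10 results;
# neuromorpho.org returns at most 500 records per page.
metadata, _ = api.get_neurons_metadata(
    size=10,
    species="mouse",
    cell_type="pyramidal",
    brain_region="neocortex",
)
print(f"Found metadata for {len(metadata)} neurons")

# Download one reconstruction and build its mesh, as the examples do.
neurons = api.download_neurons(metadata[0])
meshes = [neuron.create_mesh()[1] for neuron in neurons]
print(f"Created {len(meshes)} meshes")
```

For the full suite, `pip install .[dev]` followed by `tox` runs pytest with coverage under Python 3.9, 3.10 and 3.11, matching the `[tool.tox]` configuration added to `pyproject.toml`.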