From 3ac64d18ec0312174e9e27f06e99bc298d34c145 Mon Sep 17 00:00:00 2001 From: Ryan Kingsbury Date: Tue, 14 May 2024 07:09:01 -0400 Subject: [PATCH 1/6] drop python 3.8 support --- .github/workflows/testing.yml | 2 +- .github/workflows/upgrade-dependencies.yml | 2 +- pyproject.toml | 2 +- requirements/macos-latest_py3.8.txt | 179 ------- requirements/macos-latest_py3.8_extras.txt | 487 -------------------- requirements/ubuntu-latest_py3.8.txt | 179 ------- requirements/ubuntu-latest_py3.8_extras.txt | 485 ------------------- setup.py | 2 +- 8 files changed, 4 insertions(+), 1334 deletions(-) delete mode 100644 requirements/macos-latest_py3.8.txt delete mode 100644 requirements/macos-latest_py3.8_extras.txt delete mode 100644 requirements/ubuntu-latest_py3.8.txt delete mode 100644 requirements/ubuntu-latest_py3.8_extras.txt diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 423b0a2fc..c83a38927 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -51,7 +51,7 @@ jobs: max-parallel: 6 matrix: os: [ubuntu-latest] - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.9", "3.10", "3.11", "3.12"] runs-on: ${{ matrix.os }} diff --git a/.github/workflows/upgrade-dependencies.yml b/.github/workflows/upgrade-dependencies.yml index 21c8984bf..78d95db29 100644 --- a/.github/workflows/upgrade-dependencies.yml +++ b/.github/workflows/upgrade-dependencies.yml @@ -16,7 +16,7 @@ jobs: matrix: os: ['ubuntu-latest', 'macos-latest'] package: ["."] - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v4 with: diff --git a/pyproject.toml b/pyproject.toml index d37fe8ed7..ca3ff6201 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [project] name = "maggma" dynamic = ["version", "readme", "scripts", "classifiers", "dependencies","optional-dependencies", "license"] -requires-python = ">=3.8" +requires-python = ">=3.9" 
description="Framework to develop datapipelines from files on disk to full dissemenation API" authors =[ {name = "The Materials Project", email = "feedback@materialsproject.org"} diff --git a/requirements/macos-latest_py3.8.txt b/requirements/macos-latest_py3.8.txt deleted file mode 100644 index 94423daba..000000000 --- a/requirements/macos-latest_py3.8.txt +++ /dev/null @@ -1,179 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --output-file=requirements/macos-latest_py3.8.txt -# -aioitertools==0.11.0 - # via maggma (setup.py) -annotated-types==0.6.0 - # via pydantic -anyio==3.7.1 - # via - # fastapi - # starlette -attrs==23.1.0 - # via - # jsonschema - # referencing -bcrypt==4.1.1 - # via paramiko -blinker==1.7.0 - # via flask -boto3==1.33.11 - # via maggma (setup.py) -botocore==1.33.11 - # via - # boto3 - # s3transfer -certifi==2023.11.17 - # via requests -cffi==1.16.0 - # via - # cryptography - # pynacl -charset-normalizer==3.3.2 - # via requests -click==8.1.7 - # via - # flask - # mongogrant - # uvicorn -cryptography==42.0.4 - # via paramiko -dnspython==2.6.1 - # via - # maggma (setup.py) - # pymongo -exceptiongroup==1.2.0 - # via anyio -fastapi==0.104.1 - # via maggma (setup.py) -flask==3.0.0 - # via mongogrant -h11==0.14.0 - # via uvicorn -idna==3.7 - # via - # anyio - # requests -importlib-metadata==7.0.0 - # via flask -importlib-resources==6.1.1 - # via - # jsonschema - # jsonschema-specifications -itsdangerous==2.1.2 - # via flask -jinja2==3.1.4 - # via flask -jmespath==1.0.1 - # via - # boto3 - # botocore -jsonschema==4.20.0 - # via maggma (setup.py) -jsonschema-specifications==2023.11.2 - # via jsonschema -markupsafe==2.1.3 - # via - # jinja2 - # werkzeug -mongogrant==0.3.3 - # via maggma (setup.py) -mongomock==4.1.2 - # via maggma (setup.py) -monty==2023.9.25 - # via maggma (setup.py) -msgpack==1.0.7 - # via maggma (setup.py) -numpy==1.24.4 - # via maggma (setup.py) -orjson==3.9.15 - 
# via maggma (setup.py) -packaging==23.2 - # via mongomock -paramiko==3.3.1 - # via sshtunnel -pkgutil-resolve-name==1.3.10 - # via jsonschema -pycparser==2.21 - # via cffi -pydantic==2.5.2 - # via - # fastapi - # maggma (setup.py) - # pydantic-settings -pydantic-core==2.14.5 - # via pydantic -pydantic-settings==2.1.0 - # via maggma (setup.py) -pydash==7.0.6 - # via maggma (setup.py) -pymongo==4.6.3 - # via - # maggma (setup.py) - # mongogrant -pynacl==1.5.0 - # via paramiko -python-dateutil==2.8.2 - # via - # botocore - # maggma (setup.py) -python-dotenv==1.0.0 - # via pydantic-settings -pyzmq==25.1.2 - # via maggma (setup.py) -referencing==0.32.0 - # via - # jsonschema - # jsonschema-specifications -requests==2.31.0 - # via mongogrant -rpds-py==0.13.2 - # via - # jsonschema - # referencing -ruamel-yaml==0.17.40 - # via maggma (setup.py) -ruamel-yaml-clib==0.2.8 - # via ruamel-yaml -s3transfer==0.8.2 - # via boto3 -sentinels==1.0.0 - # via mongomock -six==1.16.0 - # via python-dateutil -sniffio==1.3.0 - # via anyio -sshtunnel==0.4.0 - # via maggma (setup.py) -starlette==0.36.2 - # via fastapi -tqdm==4.66.3 - # via maggma (setup.py) -typing-extensions==4.9.0 - # via - # aioitertools - # annotated-types - # fastapi - # pydantic - # pydantic-core - # pydash - # starlette - # uvicorn -urllib3==1.26.18 - # via - # botocore - # requests -uvicorn==0.24.0.post1 - # via maggma (setup.py) -werkzeug==3.0.3 - # via flask -zipp==3.17.0 - # via - # importlib-metadata - # importlib-resources - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/requirements/macos-latest_py3.8_extras.txt b/requirements/macos-latest_py3.8_extras.txt deleted file mode 100644 index 044216192..000000000 --- a/requirements/macos-latest_py3.8_extras.txt +++ /dev/null @@ -1,487 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --all-extras 
--output-file=requirements/macos-latest_py3.8_extras.txt -# -aioitertools==0.11.0 - # via maggma (setup.py) -annotated-types==0.6.0 - # via pydantic -anyio==3.7.1 - # via - # fastapi - # httpx - # starlette -appnope==0.1.3 - # via ipython -asttokens==2.4.1 - # via stack-data -attrs==23.1.0 - # via - # jsonschema - # referencing -azure-core==1.29.5 - # via - # azure-identity - # azure-storage-blob -azure-identity==1.15.0 - # via maggma (setup.py) -azure-storage-blob==12.19.0 - # via maggma (setup.py) -babel==2.13.1 - # via mkdocs-material -backcall==0.2.0 - # via ipython -bcrypt==4.1.1 - # via paramiko -blinker==1.7.0 - # via flask -boto3==1.33.11 - # via - # maggma (setup.py) - # moto -botocore==1.33.11 - # via - # boto3 - # moto - # s3transfer -certifi==2023.11.17 - # via - # httpcore - # httpx - # requests -cffi==1.16.0 - # via - # cryptography - # pynacl -cfgv==3.4.0 - # via pre-commit -charset-normalizer==3.3.2 - # via requests -click==8.1.7 - # via - # flask - # mkdocs - # mkdocstrings - # mongogrant - # uvicorn -colorama==0.4.6 - # via - # griffe - # mkdocs-material -coverage[toml]==7.3.2 - # via - # coverage - # pytest-cov -cryptography==42.0.4 - # via - # azure-identity - # azure-storage-blob - # moto - # msal - # paramiko - # pyjwt -csscompressor==0.9.5 - # via mkdocs-minify-plugin -decorator==5.1.1 - # via ipython -distlib==0.3.7 - # via virtualenv -dnspython==2.6.1 - # via - # maggma (setup.py) - # pymongo -exceptiongroup==1.2.0 - # via - # anyio - # pytest -execnet==2.0.2 - # via pytest-xdist -executing==2.0.1 - # via stack-data -fastapi==0.104.1 - # via maggma (setup.py) -fastjsonschema==2.19.0 - # via nbformat -filelock==3.13.1 - # via virtualenv -flask==3.0.0 - # via mongogrant -ghp-import==2.1.0 - # via mkdocs -griffe==0.38.1 - # via mkdocstrings-python -h11==0.14.0 - # via - # httpcore - # uvicorn -htmlmin2==0.1.13 - # via mkdocs-minify-plugin -httpcore==1.0.2 - # via httpx -httpx==0.25.2 - # via starlette -hvac==2.0.0 - # via maggma (setup.py) 
-identify==2.5.33 - # via pre-commit -idna==3.7 - # via - # anyio - # httpx - # requests -importlib-metadata==7.0.0 - # via - # flask - # markdown - # mkdocs - # mkdocstrings - # textual -importlib-resources==6.1.1 - # via - # jsonschema - # jsonschema-specifications -iniconfig==2.0.0 - # via pytest -ipython==8.12.3 - # via maggma (setup.py) -isodate==0.6.1 - # via azure-storage-blob -itsdangerous==2.1.2 - # via - # flask - # starlette -jedi==0.19.1 - # via ipython -jinja2==3.1.4 - # via - # flask - # maggma (setup.py) - # memray - # mkdocs - # mkdocs-material - # mkdocstrings - # moto - # starlette -jmespath==1.0.1 - # via - # boto3 - # botocore -jsmin==3.0.1 - # via mkdocs-minify-plugin -jsonschema==4.20.0 - # via - # maggma (setup.py) - # nbformat -jsonschema-specifications==2023.11.2 - # via jsonschema -jupyter-core==5.5.0 - # via nbformat -linkify-it-py==2.0.2 - # via markdown-it-py -markdown==3.5.1 - # via - # mkdocs - # mkdocs-autorefs - # mkdocs-material - # mkdocstrings - # pymdown-extensions -markdown-it-py[linkify,plugins]==3.0.0 - # via - # mdit-py-plugins - # rich - # textual -markupsafe==2.1.3 - # via - # jinja2 - # mkdocs - # mkdocstrings - # werkzeug -matplotlib-inline==0.1.6 - # via ipython -mdit-py-plugins==0.4.0 - # via markdown-it-py -mdurl==0.1.2 - # via markdown-it-py -memray==1.11.0 - # via maggma (setup.py) -mergedeep==1.3.4 - # via mkdocs -mkdocs==1.5.3 - # via - # maggma (setup.py) - # mkdocs-autorefs - # mkdocs-material - # mkdocs-minify-plugin - # mkdocstrings -mkdocs-autorefs==0.5.0 - # via mkdocstrings -mkdocs-material==9.5.1 - # via maggma (setup.py) -mkdocs-material-extensions==1.3.1 - # via mkdocs-material -mkdocs-minify-plugin==0.7.1 - # via maggma (setup.py) -mkdocstrings[python]==0.24.0 - # via - # maggma (setup.py) - # mkdocstrings - # mkdocstrings-python -mkdocstrings-python==1.7.5 - # via mkdocstrings -mongogrant==0.3.3 - # via maggma (setup.py) -mongomock==4.1.2 - # via maggma (setup.py) -monty==2023.9.25 - # via maggma 
(setup.py) -montydb==2.5.2 - # via maggma (setup.py) -moto==4.2.11 - # via maggma (setup.py) -msal==1.26.0 - # via - # azure-identity - # msal-extensions -msal-extensions==1.1.0 - # via azure-identity -msgpack==1.0.7 - # via maggma (setup.py) -nbformat==5.9.2 - # via maggma (setup.py) -nodeenv==1.8.0 - # via pre-commit -numpy==1.24.4 - # via maggma (setup.py) -orjson==3.9.15 - # via maggma (setup.py) -packaging==23.2 - # via - # mkdocs - # mongomock - # msal-extensions - # pytest -paginate==0.5.6 - # via mkdocs-material -paramiko==3.3.1 - # via sshtunnel -parso==0.8.3 - # via jedi -pathspec==0.12.1 - # via mkdocs -pexpect==4.9.0 - # via ipython -pickleshare==0.7.5 - # via ipython -pkgutil-resolve-name==1.3.10 - # via jsonschema -platformdirs==4.1.0 - # via - # jupyter-core - # mkdocs - # mkdocstrings - # virtualenv -pluggy==1.3.0 - # via pytest -portalocker==2.8.2 - # via msal-extensions -pre-commit==3.5.0 - # via maggma (setup.py) -prompt-toolkit==3.0.41 - # via ipython -ptyprocess==0.7.0 - # via pexpect -pure-eval==0.2.2 - # via stack-data -pycparser==2.21 - # via cffi -pydantic==2.5.2 - # via - # fastapi - # maggma (setup.py) - # pydantic-settings -pydantic-core==2.14.5 - # via pydantic -pydantic-settings==2.1.0 - # via maggma (setup.py) -pydash==7.0.6 - # via maggma (setup.py) -pygments==2.17.2 - # via - # ipython - # mkdocs-material - # rich -pyjwt[crypto]==2.8.0 - # via - # msal - # pyjwt -pymdown-extensions==10.5 - # via - # mkdocs-material - # mkdocstrings -pymongo==4.6.3 - # via - # maggma (setup.py) - # mongogrant -pynacl==1.5.0 - # via paramiko -pytest==7.4.3 - # via - # maggma (setup.py) - # pytest-asyncio - # pytest-cov - # pytest-mock - # pytest-xdist -pytest-asyncio==0.23.2 - # via maggma (setup.py) -pytest-cov==4.1.0 - # via maggma (setup.py) -pytest-mock==3.12.0 - # via maggma (setup.py) -pytest-xdist==3.5.0 - # via maggma (setup.py) -python-dateutil==2.8.2 - # via - # botocore - # ghp-import - # maggma (setup.py) - # moto -python-dotenv==1.0.0 - # 
via pydantic-settings -python-multipart==0.0.7 - # via starlette -pytz==2023.3.post1 - # via babel -pyyaml==6.0.1 - # via - # mkdocs - # pre-commit - # pymdown-extensions - # pyyaml-env-tag - # starlette -pyyaml-env-tag==0.1 - # via mkdocs -pyzmq==25.1.2 - # via maggma (setup.py) -referencing==0.32.0 - # via - # jsonschema - # jsonschema-specifications -regex==2023.10.3 - # via - # maggma (setup.py) - # mkdocs-material -requests==2.31.0 - # via - # azure-core - # hvac - # mkdocs-material - # mongogrant - # moto - # msal - # responses -responses==0.21.0 - # via - # maggma (setup.py) - # moto -rich==13.7.0 - # via - # memray - # textual -rpds-py==0.13.2 - # via - # jsonschema - # referencing -ruamel-yaml==0.17.40 - # via maggma (setup.py) -ruamel-yaml-clib==0.2.8 - # via ruamel-yaml -ruff==0.1.7 - # via maggma (setup.py) -s3transfer==0.8.2 - # via boto3 -sentinels==1.0.0 - # via mongomock -six==1.16.0 - # via - # asttokens - # azure-core - # isodate - # python-dateutil -sniffio==1.3.0 - # via - # anyio - # httpx -sshtunnel==0.4.0 - # via maggma (setup.py) -stack-data==0.6.3 - # via ipython -starlette[full]==0.27.0 - # via - # fastapi - # maggma (setup.py) -textual==0.44.1 - # via memray -tomli==2.0.1 - # via - # coverage - # pytest -tqdm==4.66.3 - # via maggma (setup.py) -traitlets==5.14.0 - # via - # ipython - # jupyter-core - # matplotlib-inline - # nbformat -types-python-dateutil==2.8.19.14 - # via maggma (setup.py) -types-pyyaml==6.0.12.12 - # via maggma (setup.py) -types-setuptools==69.0.0.0 - # via maggma (setup.py) -typing-extensions==4.9.0 - # via - # aioitertools - # annotated-types - # azure-core - # azure-storage-blob - # fastapi - # ipython - # mkdocstrings - # pydantic - # pydantic-core - # pydash - # rich - # starlette - # textual - # uvicorn -uc-micro-py==1.0.2 - # via linkify-it-py -urllib3==1.26.18 - # via - # botocore - # requests - # responses -uvicorn==0.24.0.post1 - # via maggma (setup.py) -virtualenv==20.25.0 - # via pre-commit -watchdog==3.0.0 
- # via mkdocs -wcwidth==0.2.12 - # via prompt-toolkit -werkzeug==3.0.3 - # via - # flask - # moto -xmltodict==0.13.0 - # via moto -zipp==3.17.0 - # via - # importlib-metadata - # importlib-resources - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/requirements/ubuntu-latest_py3.8.txt b/requirements/ubuntu-latest_py3.8.txt deleted file mode 100644 index cf5d2dc4e..000000000 --- a/requirements/ubuntu-latest_py3.8.txt +++ /dev/null @@ -1,179 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --output-file=requirements/ubuntu-latest_py3.8.txt -# -aioitertools==0.11.0 - # via maggma (setup.py) -annotated-types==0.6.0 - # via pydantic -anyio==3.7.1 - # via - # fastapi - # starlette -attrs==23.1.0 - # via - # jsonschema - # referencing -bcrypt==4.1.1 - # via paramiko -blinker==1.7.0 - # via flask -boto3==1.33.11 - # via maggma (setup.py) -botocore==1.33.11 - # via - # boto3 - # s3transfer -certifi==2023.11.17 - # via requests -cffi==1.16.0 - # via - # cryptography - # pynacl -charset-normalizer==3.3.2 - # via requests -click==8.1.7 - # via - # flask - # mongogrant - # uvicorn -cryptography==42.0.4 - # via paramiko -dnspython==2.6.1 - # via - # maggma (setup.py) - # pymongo -exceptiongroup==1.2.0 - # via anyio -fastapi==0.104.1 - # via maggma (setup.py) -flask==3.0.0 - # via mongogrant -h11==0.14.0 - # via uvicorn -idna==3.7 - # via - # anyio - # requests -importlib-metadata==7.0.0 - # via flask -importlib-resources==6.1.1 - # via - # jsonschema - # jsonschema-specifications -itsdangerous==2.1.2 - # via flask -jinja2==3.1.4 - # via flask -jmespath==1.0.1 - # via - # boto3 - # botocore -jsonschema==4.20.0 - # via maggma (setup.py) -jsonschema-specifications==2023.11.2 - # via jsonschema -markupsafe==2.1.3 - # via - # jinja2 - # werkzeug -mongogrant==0.3.3 - # via maggma (setup.py) -mongomock==4.1.2 - # via maggma (setup.py) -monty==2023.9.25 - # via 
maggma (setup.py) -msgpack==1.0.7 - # via maggma (setup.py) -numpy==1.24.4 - # via maggma (setup.py) -orjson==3.9.15 - # via maggma (setup.py) -packaging==23.2 - # via mongomock -paramiko==3.3.1 - # via sshtunnel -pkgutil-resolve-name==1.3.10 - # via jsonschema -pycparser==2.21 - # via cffi -pydantic==2.5.2 - # via - # fastapi - # maggma (setup.py) - # pydantic-settings -pydantic-core==2.14.5 - # via pydantic -pydantic-settings==2.1.0 - # via maggma (setup.py) -pydash==7.0.6 - # via maggma (setup.py) -pymongo==4.6.3 - # via - # maggma (setup.py) - # mongogrant -pynacl==1.5.0 - # via paramiko -python-dateutil==2.8.2 - # via - # botocore - # maggma (setup.py) -python-dotenv==1.0.0 - # via pydantic-settings -pyzmq==25.1.2 - # via maggma (setup.py) -referencing==0.32.0 - # via - # jsonschema - # jsonschema-specifications -requests==2.31.0 - # via mongogrant -rpds-py==0.13.2 - # via - # jsonschema - # referencing -ruamel-yaml==0.17.40 - # via maggma (setup.py) -ruamel-yaml-clib==0.2.8 - # via ruamel-yaml -s3transfer==0.8.2 - # via boto3 -sentinels==1.0.0 - # via mongomock -six==1.16.0 - # via python-dateutil -sniffio==1.3.0 - # via anyio -sshtunnel==0.4.0 - # via maggma (setup.py) -starlette==0.36.2 - # via fastapi -tqdm==4.66.3 - # via maggma (setup.py) -typing-extensions==4.9.0 - # via - # aioitertools - # annotated-types - # fastapi - # pydantic - # pydantic-core - # pydash - # starlette - # uvicorn -urllib3==1.26.18 - # via - # botocore - # requests -uvicorn==0.24.0.post1 - # via maggma (setup.py) -werkzeug==3.0.3 - # via flask -zipp==3.17.0 - # via - # importlib-metadata - # importlib-resources - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/requirements/ubuntu-latest_py3.8_extras.txt b/requirements/ubuntu-latest_py3.8_extras.txt deleted file mode 100644 index 44767b94b..000000000 --- a/requirements/ubuntu-latest_py3.8_extras.txt +++ /dev/null @@ -1,485 +0,0 @@ -# -# This file is autogenerated by 
pip-compile with Python 3.8 -# by the following command: -# -# pip-compile --all-extras --output-file=requirements/ubuntu-latest_py3.8_extras.txt -# -aioitertools==0.11.0 - # via maggma (setup.py) -annotated-types==0.6.0 - # via pydantic -anyio==3.7.1 - # via - # fastapi - # httpx - # starlette -asttokens==2.4.1 - # via stack-data -attrs==23.1.0 - # via - # jsonschema - # referencing -azure-core==1.29.5 - # via - # azure-identity - # azure-storage-blob -azure-identity==1.15.0 - # via maggma (setup.py) -azure-storage-blob==12.19.0 - # via maggma (setup.py) -babel==2.13.1 - # via mkdocs-material -backcall==0.2.0 - # via ipython -bcrypt==4.1.1 - # via paramiko -blinker==1.7.0 - # via flask -boto3==1.33.11 - # via - # maggma (setup.py) - # moto -botocore==1.33.11 - # via - # boto3 - # moto - # s3transfer -certifi==2023.11.17 - # via - # httpcore - # httpx - # requests -cffi==1.16.0 - # via - # cryptography - # pynacl -cfgv==3.4.0 - # via pre-commit -charset-normalizer==3.3.2 - # via requests -click==8.1.7 - # via - # flask - # mkdocs - # mkdocstrings - # mongogrant - # uvicorn -colorama==0.4.6 - # via - # griffe - # mkdocs-material -coverage[toml]==7.3.2 - # via - # coverage - # pytest-cov -cryptography==42.0.4 - # via - # azure-identity - # azure-storage-blob - # moto - # msal - # paramiko - # pyjwt -csscompressor==0.9.5 - # via mkdocs-minify-plugin -decorator==5.1.1 - # via ipython -distlib==0.3.7 - # via virtualenv -dnspython==2.6.1 - # via - # maggma (setup.py) - # pymongo -exceptiongroup==1.2.0 - # via - # anyio - # pytest -execnet==2.0.2 - # via pytest-xdist -executing==2.0.1 - # via stack-data -fastapi==0.104.1 - # via maggma (setup.py) -fastjsonschema==2.19.0 - # via nbformat -filelock==3.13.1 - # via virtualenv -flask==3.0.0 - # via mongogrant -ghp-import==2.1.0 - # via mkdocs -griffe==0.38.1 - # via mkdocstrings-python -h11==0.14.0 - # via - # httpcore - # uvicorn -htmlmin2==0.1.13 - # via mkdocs-minify-plugin -httpcore==1.0.2 - # via httpx -httpx==0.25.2 - # 
via starlette -hvac==2.0.0 - # via maggma (setup.py) -identify==2.5.33 - # via pre-commit -idna==3.7 - # via - # anyio - # httpx - # requests -importlib-metadata==7.0.0 - # via - # flask - # markdown - # mkdocs - # mkdocstrings - # textual -importlib-resources==6.1.1 - # via - # jsonschema - # jsonschema-specifications -iniconfig==2.0.0 - # via pytest -ipython==8.12.3 - # via maggma (setup.py) -isodate==0.6.1 - # via azure-storage-blob -itsdangerous==2.1.2 - # via - # flask - # starlette -jedi==0.19.1 - # via ipython -jinja2==3.1.4 - # via - # flask - # maggma (setup.py) - # memray - # mkdocs - # mkdocs-material - # mkdocstrings - # moto - # starlette -jmespath==1.0.1 - # via - # boto3 - # botocore -jsmin==3.0.1 - # via mkdocs-minify-plugin -jsonschema==4.20.0 - # via - # maggma (setup.py) - # nbformat -jsonschema-specifications==2023.11.2 - # via jsonschema -jupyter-core==5.5.0 - # via nbformat -linkify-it-py==2.0.2 - # via markdown-it-py -markdown==3.5.1 - # via - # mkdocs - # mkdocs-autorefs - # mkdocs-material - # mkdocstrings - # pymdown-extensions -markdown-it-py[linkify,plugins]==3.0.0 - # via - # mdit-py-plugins - # rich - # textual -markupsafe==2.1.3 - # via - # jinja2 - # mkdocs - # mkdocstrings - # werkzeug -matplotlib-inline==0.1.6 - # via ipython -mdit-py-plugins==0.4.0 - # via markdown-it-py -mdurl==0.1.2 - # via markdown-it-py -memray==1.11.0 - # via maggma (setup.py) -mergedeep==1.3.4 - # via mkdocs -mkdocs==1.5.3 - # via - # maggma (setup.py) - # mkdocs-autorefs - # mkdocs-material - # mkdocs-minify-plugin - # mkdocstrings -mkdocs-autorefs==0.5.0 - # via mkdocstrings -mkdocs-material==9.5.1 - # via maggma (setup.py) -mkdocs-material-extensions==1.3.1 - # via mkdocs-material -mkdocs-minify-plugin==0.7.1 - # via maggma (setup.py) -mkdocstrings[python]==0.24.0 - # via - # maggma (setup.py) - # mkdocstrings - # mkdocstrings-python -mkdocstrings-python==1.7.5 - # via mkdocstrings -mongogrant==0.3.3 - # via maggma (setup.py) -mongomock==4.1.2 - # via 
maggma (setup.py) -monty==2023.9.25 - # via maggma (setup.py) -montydb==2.5.2 - # via maggma (setup.py) -moto==4.2.11 - # via maggma (setup.py) -msal==1.26.0 - # via - # azure-identity - # msal-extensions -msal-extensions==1.1.0 - # via azure-identity -msgpack==1.0.7 - # via maggma (setup.py) -nbformat==5.9.2 - # via maggma (setup.py) -nodeenv==1.8.0 - # via pre-commit -numpy==1.24.4 - # via maggma (setup.py) -orjson==3.9.15 - # via maggma (setup.py) -packaging==23.2 - # via - # mkdocs - # mongomock - # msal-extensions - # pytest -paginate==0.5.6 - # via mkdocs-material -paramiko==3.3.1 - # via sshtunnel -parso==0.8.3 - # via jedi -pathspec==0.12.1 - # via mkdocs -pexpect==4.9.0 - # via ipython -pickleshare==0.7.5 - # via ipython -pkgutil-resolve-name==1.3.10 - # via jsonschema -platformdirs==4.1.0 - # via - # jupyter-core - # mkdocs - # mkdocstrings - # virtualenv -pluggy==1.3.0 - # via pytest -portalocker==2.8.2 - # via msal-extensions -pre-commit==3.5.0 - # via maggma (setup.py) -prompt-toolkit==3.0.41 - # via ipython -ptyprocess==0.7.0 - # via pexpect -pure-eval==0.2.2 - # via stack-data -pycparser==2.21 - # via cffi -pydantic==2.5.2 - # via - # fastapi - # maggma (setup.py) - # pydantic-settings -pydantic-core==2.14.5 - # via pydantic -pydantic-settings==2.1.0 - # via maggma (setup.py) -pydash==7.0.6 - # via maggma (setup.py) -pygments==2.17.2 - # via - # ipython - # mkdocs-material - # rich -pyjwt[crypto]==2.8.0 - # via - # msal - # pyjwt -pymdown-extensions==10.5 - # via - # mkdocs-material - # mkdocstrings -pymongo==4.6.3 - # via - # maggma (setup.py) - # mongogrant -pynacl==1.5.0 - # via paramiko -pytest==7.4.3 - # via - # maggma (setup.py) - # pytest-asyncio - # pytest-cov - # pytest-mock - # pytest-xdist -pytest-asyncio==0.23.2 - # via maggma (setup.py) -pytest-cov==4.1.0 - # via maggma (setup.py) -pytest-mock==3.12.0 - # via maggma (setup.py) -pytest-xdist==3.5.0 - # via maggma (setup.py) -python-dateutil==2.8.2 - # via - # botocore - # ghp-import - # 
maggma (setup.py) - # moto -python-dotenv==1.0.0 - # via pydantic-settings -python-multipart==0.0.7 - # via starlette -pytz==2023.3.post1 - # via babel -pyyaml==6.0.1 - # via - # mkdocs - # pre-commit - # pymdown-extensions - # pyyaml-env-tag - # starlette -pyyaml-env-tag==0.1 - # via mkdocs -pyzmq==25.1.2 - # via maggma (setup.py) -referencing==0.32.0 - # via - # jsonschema - # jsonschema-specifications -regex==2023.10.3 - # via - # maggma (setup.py) - # mkdocs-material -requests==2.31.0 - # via - # azure-core - # hvac - # mkdocs-material - # mongogrant - # moto - # msal - # responses -responses==0.21.0 - # via - # maggma (setup.py) - # moto -rich==13.7.0 - # via - # memray - # textual -rpds-py==0.13.2 - # via - # jsonschema - # referencing -ruamel-yaml==0.17.40 - # via maggma (setup.py) -ruamel-yaml-clib==0.2.8 - # via ruamel-yaml -ruff==0.1.7 - # via maggma (setup.py) -s3transfer==0.8.2 - # via boto3 -sentinels==1.0.0 - # via mongomock -six==1.16.0 - # via - # asttokens - # azure-core - # isodate - # python-dateutil -sniffio==1.3.0 - # via - # anyio - # httpx -sshtunnel==0.4.0 - # via maggma (setup.py) -stack-data==0.6.3 - # via ipython -starlette[full]==0.27.0 - # via - # fastapi - # maggma (setup.py) -textual==0.44.1 - # via memray -tomli==2.0.1 - # via - # coverage - # pytest -tqdm==4.66.3 - # via maggma (setup.py) -traitlets==5.14.0 - # via - # ipython - # jupyter-core - # matplotlib-inline - # nbformat -types-python-dateutil==2.8.19.14 - # via maggma (setup.py) -types-pyyaml==6.0.12.12 - # via maggma (setup.py) -types-setuptools==69.0.0.0 - # via maggma (setup.py) -typing-extensions==4.9.0 - # via - # aioitertools - # annotated-types - # azure-core - # azure-storage-blob - # fastapi - # ipython - # mkdocstrings - # pydantic - # pydantic-core - # pydash - # rich - # starlette - # textual - # uvicorn -uc-micro-py==1.0.2 - # via linkify-it-py -urllib3==1.26.18 - # via - # botocore - # requests - # responses -uvicorn==0.24.0.post1 - # via maggma (setup.py) 
-virtualenv==20.25.0 - # via pre-commit -watchdog==3.0.0 - # via mkdocs -wcwidth==0.2.12 - # via prompt-toolkit -werkzeug==3.0.3 - # via - # flask - # moto -xmltodict==0.13.0 - # via moto -zipp==3.17.0 - # via - # importlib-metadata - # importlib-resources - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/setup.py b/setup.py index 07edb877c..c0bcd2300 100644 --- a/setup.py +++ b/setup.py @@ -78,10 +78,10 @@ }, classifiers=[ "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Development Status :: 5 - Production/Stable", "Intended Audience :: Science/Research", "Intended Audience :: System Administrators", From 45688345da11304e2d42156f84ef0ea259d6a124 Mon Sep 17 00:00:00 2001 From: Ryan Kingsbury Date: Tue, 14 May 2024 07:11:06 -0400 Subject: [PATCH 2/6] update docs --- docs/index.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/index.md b/docs/index.md index 7cd350a03..e26f48462 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,12 +1,12 @@ # Maggma -[![Static Badge](https://img.shields.io/badge/documentation-blue?logo=github)](https://materialsproject.github.io/maggma) [![testing](https://github.com/materialsproject/maggma/workflows/testing/badge.svg)](https://github.com/materialsproject/maggma/actions?query=workflow%3Atesting) [![codecov](https://codecov.io/gh/materialsproject/maggma/branch/main/graph/badge.svg)](https://codecov.io/gh/materialsproject/maggma) [![python](https://img.shields.io/badge/Python-3.8+-blue.svg?logo=python&logoColor=white)]() +[![Static Badge](https://img.shields.io/badge/documentation-blue?logo=github)](https://materialsproject.github.io/maggma) 
[![testing](https://github.com/materialsproject/maggma/workflows/testing/badge.svg)](https://github.com/materialsproject/maggma/actions?query=workflow%3Atesting) [![codecov](https://codecov.io/gh/materialsproject/maggma/branch/main/graph/badge.svg)](https://codecov.io/gh/materialsproject/maggma) [![python](https://img.shields.io/badge/Python-3.9+-blue.svg?logo=python&logoColor=white)]() ## What is Maggma Maggma is a framework to build data pipelines from files on disk all the way to a REST API in scientific environments. Maggma has been developed by the Materials Project (MP) team at Lawrence Berkeley National Laboratory. -Maggma is written in [Python](http://docs.python-guide.org/en/latest/) and supports Python 3.8+. +Maggma is written in [Python](http://docs.python-guide.org/en/latest/) and supports Python 3.9+. ## Installation from PyPI From b97080abe905071f8fdb7cb4f0553aa7d8bc456d Mon Sep 17 00:00:00 2001 From: Ryan Kingsbury Date: Tue, 14 May 2024 07:27:49 -0400 Subject: [PATCH 3/6] add py312 requirements --- requirements/ubuntu-latest_py3.12.txt | 161 +++++++ requirements/ubuntu-latest_py3.12_extras.txt | 451 +++++++++++++++++++ 2 files changed, 612 insertions(+) create mode 100644 requirements/ubuntu-latest_py3.12.txt create mode 100644 requirements/ubuntu-latest_py3.12_extras.txt diff --git a/requirements/ubuntu-latest_py3.12.txt b/requirements/ubuntu-latest_py3.12.txt new file mode 100644 index 000000000..728e6c17b --- /dev/null +++ b/requirements/ubuntu-latest_py3.12.txt @@ -0,0 +1,161 @@ +# +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# pip-compile --output-file=requirements/ubuntu-latest_py3.12.txt +# +aioitertools==0.11.0 + # via maggma (setup.py) +annotated-types==0.6.0 + # via pydantic +anyio==3.7.1 + # via + # fastapi + # starlette +attrs==23.1.0 + # via + # jsonschema + # referencing +bcrypt==4.1.1 + # via paramiko +blinker==1.7.0 + # via flask +boto3==1.33.11 + # via maggma (setup.py) 
+botocore==1.33.11 + # via + # boto3 + # s3transfer +certifi==2023.11.17 + # via requests +cffi==1.16.0 + # via + # cryptography + # pynacl +charset-normalizer==3.3.2 + # via requests +click==8.1.7 + # via + # flask + # mongogrant + # uvicorn +cryptography==42.0.4 + # via paramiko +dnspython==2.6.1 + # via + # maggma (setup.py) + # pymongo +fastapi==0.104.1 + # via maggma (setup.py) +flask==3.0.0 + # via mongogrant +h11==0.14.0 + # via uvicorn +idna==3.7 + # via + # anyio + # requests +itsdangerous==2.1.2 + # via flask +jinja2==3.1.4 + # via flask +jmespath==1.0.1 + # via + # boto3 + # botocore +jsonschema==4.20.0 + # via maggma (setup.py) +jsonschema-specifications==2023.11.2 + # via jsonschema +markupsafe==2.1.3 + # via + # jinja2 + # werkzeug +mongogrant==0.3.3 + # via maggma (setup.py) +mongomock==4.1.2 + # via maggma (setup.py) +monty==2023.11.3 + # via maggma (setup.py) +msgpack==1.0.7 + # via maggma (setup.py) +numpy==1.26.2 + # via maggma (setup.py) +orjson==3.9.15 + # via maggma (setup.py) +packaging==23.2 + # via mongomock +paramiko==3.3.1 + # via sshtunnel +pycparser==2.21 + # via cffi +pydantic==2.5.2 + # via + # fastapi + # maggma (setup.py) + # pydantic-settings +pydantic-core==2.14.5 + # via pydantic +pydantic-settings==2.1.0 + # via maggma (setup.py) +pydash==7.0.6 + # via maggma (setup.py) +pymongo==4.6.3 + # via + # maggma (setup.py) + # mongogrant +pynacl==1.5.0 + # via paramiko +python-dateutil==2.8.2 + # via + # botocore + # maggma (setup.py) +python-dotenv==1.0.0 + # via pydantic-settings +pyzmq==25.1.2 + # via maggma (setup.py) +referencing==0.32.0 + # via + # jsonschema + # jsonschema-specifications +requests==2.31.0 + # via mongogrant +rpds-py==0.13.2 + # via + # jsonschema + # referencing +ruamel-yaml==0.17.40 + # via maggma (setup.py) +ruamel-yaml-clib==0.2.8 + # via ruamel-yaml +s3transfer==0.8.2 + # via boto3 +sentinels==1.0.0 + # via mongomock +six==1.16.0 + # via python-dateutil +sniffio==1.3.0 + # via anyio +sshtunnel==0.4.0 + # via 
maggma (setup.py) +starlette==0.36.2 + # via fastapi +tqdm==4.66.3 + # via maggma (setup.py) +typing-extensions==4.9.0 + # via + # fastapi + # pydantic + # pydantic-core + # pydash +urllib3==2.0.7 + # via + # botocore + # requests +uvicorn==0.24.0.post1 + # via maggma (setup.py) +werkzeug==3.0.3 + # via flask + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/ubuntu-latest_py3.12_extras.txt b/requirements/ubuntu-latest_py3.12_extras.txt new file mode 100644 index 000000000..6fb811b6e --- /dev/null +++ b/requirements/ubuntu-latest_py3.12_extras.txt @@ -0,0 +1,451 @@ +# +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# pip-compile --all-extras --output-file=requirements/ubuntu-latest_py3.12_extras.txt +# +aioitertools==0.11.0 + # via maggma (setup.py) +annotated-types==0.6.0 + # via pydantic +anyio==3.7.1 + # via + # fastapi + # httpx + # starlette +asttokens==2.4.1 + # via stack-data +attrs==23.1.0 + # via + # jsonschema + # referencing +azure-core==1.29.5 + # via + # azure-identity + # azure-storage-blob +azure-identity==1.15.0 + # via maggma (setup.py) +azure-storage-blob==12.19.0 + # via maggma (setup.py) +babel==2.13.1 + # via mkdocs-material +bcrypt==4.1.1 + # via paramiko +blinker==1.7.0 + # via flask +boto3==1.33.11 + # via + # maggma (setup.py) + # moto +botocore==1.33.11 + # via + # boto3 + # moto + # s3transfer +certifi==2023.11.17 + # via + # httpcore + # httpx + # requests +cffi==1.16.0 + # via + # cryptography + # pynacl +cfgv==3.4.0 + # via pre-commit +charset-normalizer==3.3.2 + # via requests +click==8.1.7 + # via + # flask + # mkdocs + # mkdocstrings + # mongogrant + # uvicorn +colorama==0.4.6 + # via + # griffe + # mkdocs-material +coverage[toml]==7.3.2 + # via + # coverage + # pytest-cov +cryptography==42.0.4 + # via + # azure-identity + # azure-storage-blob + # moto + # msal + # paramiko + # pyjwt +csscompressor==0.9.5 + # via 
mkdocs-minify-plugin +decorator==5.1.1 + # via ipython +distlib==0.3.7 + # via virtualenv +dnspython==2.6.1 + # via + # maggma (setup.py) + # pymongo +execnet==2.0.2 + # via pytest-xdist +executing==2.0.1 + # via stack-data +fastapi==0.104.1 + # via maggma (setup.py) +fastjsonschema==2.19.0 + # via nbformat +filelock==3.13.1 + # via virtualenv +flask==3.0.0 + # via mongogrant +ghp-import==2.1.0 + # via mkdocs +griffe==0.38.1 + # via mkdocstrings-python +h11==0.14.0 + # via + # httpcore + # uvicorn +htmlmin2==0.1.13 + # via mkdocs-minify-plugin +httpcore==1.0.2 + # via httpx +httpx==0.25.2 + # via starlette +hvac==2.0.0 + # via maggma (setup.py) +identify==2.5.33 + # via pre-commit +idna==3.7 + # via + # anyio + # httpx + # requests +importlib-metadata==7.0.0 + # via textual +iniconfig==2.0.0 + # via pytest +ipython==8.18.1 + # via maggma (setup.py) +isodate==0.6.1 + # via azure-storage-blob +itsdangerous==2.1.2 + # via + # flask + # starlette +jedi==0.19.1 + # via ipython +jinja2==3.1.4 + # via + # flask + # maggma (setup.py) + # memray + # mkdocs + # mkdocs-material + # mkdocstrings + # moto + # starlette +jmespath==1.0.1 + # via + # boto3 + # botocore +jsmin==3.0.1 + # via mkdocs-minify-plugin +jsonschema==4.20.0 + # via + # maggma (setup.py) + # nbformat +jsonschema-specifications==2023.11.2 + # via jsonschema +jupyter-core==5.5.0 + # via nbformat +linkify-it-py==2.0.2 + # via markdown-it-py +markdown==3.5.1 + # via + # mkdocs + # mkdocs-autorefs + # mkdocs-material + # mkdocstrings + # pymdown-extensions +markdown-it-py[linkify,plugins]==3.0.0 + # via + # mdit-py-plugins + # rich + # textual +markupsafe==2.1.3 + # via + # jinja2 + # mkdocs + # mkdocstrings + # werkzeug +matplotlib-inline==0.1.6 + # via ipython +mdit-py-plugins==0.4.0 + # via markdown-it-py +mdurl==0.1.2 + # via markdown-it-py +memray==1.11.0 + # via maggma (setup.py) +mergedeep==1.3.4 + # via mkdocs +mkdocs==1.5.3 + # via + # maggma (setup.py) + # mkdocs-autorefs + # mkdocs-material + # 
mkdocs-minify-plugin + # mkdocstrings +mkdocs-autorefs==0.5.0 + # via mkdocstrings +mkdocs-material==9.5.1 + # via maggma (setup.py) +mkdocs-material-extensions==1.3.1 + # via mkdocs-material +mkdocs-minify-plugin==0.7.1 + # via maggma (setup.py) +mkdocstrings[python]==0.24.0 + # via + # maggma (setup.py) + # mkdocstrings + # mkdocstrings-python +mkdocstrings-python==1.7.5 + # via mkdocstrings +mongogrant==0.3.3 + # via maggma (setup.py) +mongomock==4.1.2 + # via maggma (setup.py) +monty==2023.11.3 + # via maggma (setup.py) +montydb==2.5.2 + # via maggma (setup.py) +moto==4.2.11 + # via maggma (setup.py) +msal==1.26.0 + # via + # azure-identity + # msal-extensions +msal-extensions==1.1.0 + # via azure-identity +msgpack==1.0.7 + # via maggma (setup.py) +nbformat==5.9.2 + # via maggma (setup.py) +nodeenv==1.8.0 + # via pre-commit +numpy==1.26.2 + # via maggma (setup.py) +orjson==3.9.15 + # via maggma (setup.py) +packaging==23.2 + # via + # mkdocs + # mongomock + # msal-extensions + # pytest +paginate==0.5.6 + # via mkdocs-material +paramiko==3.3.1 + # via sshtunnel +parso==0.8.3 + # via jedi +pathspec==0.12.1 + # via mkdocs +pexpect==4.9.0 + # via ipython +platformdirs==4.1.0 + # via + # jupyter-core + # mkdocs + # mkdocstrings + # virtualenv +pluggy==1.3.0 + # via pytest +portalocker==2.8.2 + # via msal-extensions +pre-commit==3.6.0 + # via maggma (setup.py) +prompt-toolkit==3.0.41 + # via ipython +ptyprocess==0.7.0 + # via pexpect +pure-eval==0.2.2 + # via stack-data +pycparser==2.21 + # via cffi +pydantic==2.5.2 + # via + # fastapi + # maggma (setup.py) + # pydantic-settings +pydantic-core==2.14.5 + # via pydantic +pydantic-settings==2.1.0 + # via maggma (setup.py) +pydash==7.0.6 + # via maggma (setup.py) +pygments==2.17.2 + # via + # ipython + # mkdocs-material + # rich +pyjwt[crypto]==2.8.0 + # via + # msal + # pyjwt +pymdown-extensions==10.5 + # via + # mkdocs-material + # mkdocstrings +pymongo==4.6.3 + # via + # maggma (setup.py) + # mongogrant +pynacl==1.5.0 
+ # via paramiko +pytest==7.4.3 + # via + # maggma (setup.py) + # pytest-asyncio + # pytest-cov + # pytest-mock + # pytest-xdist +pytest-asyncio==0.23.2 + # via maggma (setup.py) +pytest-cov==4.1.0 + # via maggma (setup.py) +pytest-mock==3.12.0 + # via maggma (setup.py) +pytest-xdist==3.5.0 + # via maggma (setup.py) +python-dateutil==2.8.2 + # via + # botocore + # ghp-import + # maggma (setup.py) + # moto +python-dotenv==1.0.0 + # via pydantic-settings +python-multipart==0.0.7 + # via starlette +pyyaml==6.0.1 + # via + # mkdocs + # pre-commit + # pymdown-extensions + # pyyaml-env-tag + # starlette +pyyaml-env-tag==0.1 + # via mkdocs +pyzmq==25.1.2 + # via maggma (setup.py) +referencing==0.32.0 + # via + # jsonschema + # jsonschema-specifications +regex==2023.10.3 + # via + # maggma (setup.py) + # mkdocs-material +requests==2.31.0 + # via + # azure-core + # hvac + # mkdocs-material + # mongogrant + # moto + # msal + # responses +responses==0.21.0 + # via + # maggma (setup.py) + # moto +rich==13.7.0 + # via + # memray + # textual +rpds-py==0.13.2 + # via + # jsonschema + # referencing +ruamel-yaml==0.17.40 + # via maggma (setup.py) +ruamel-yaml-clib==0.2.8 + # via ruamel-yaml +ruff==0.1.7 + # via maggma (setup.py) +s3transfer==0.8.2 + # via boto3 +sentinels==1.0.0 + # via mongomock +six==1.16.0 + # via + # asttokens + # azure-core + # isodate + # python-dateutil +sniffio==1.3.0 + # via + # anyio + # httpx +sshtunnel==0.4.0 + # via maggma (setup.py) +stack-data==0.6.3 + # via ipython +starlette[full]==0.27.0 + # via + # fastapi + # maggma (setup.py) +textual==0.44.1 + # via memray +tqdm==4.66.3 + # via maggma (setup.py) +traitlets==5.14.0 + # via + # ipython + # jupyter-core + # matplotlib-inline + # nbformat +types-python-dateutil==2.8.19.14 + # via maggma (setup.py) +types-pyyaml==6.0.12.12 + # via maggma (setup.py) +types-setuptools==69.0.0.0 + # via maggma (setup.py) +typing-extensions==4.9.0 + # via + # azure-core + # azure-storage-blob + # fastapi + # pydantic + 
# pydantic-core + # pydash + # textual +uc-micro-py==1.0.2 + # via linkify-it-py +urllib3==2.0.7 + # via + # botocore + # requests + # responses +uvicorn==0.24.0.post1 + # via maggma (setup.py) +virtualenv==20.25.0 + # via pre-commit +watchdog==3.0.0 + # via mkdocs +wcwidth==0.2.12 + # via prompt-toolkit +werkzeug==3.0.3 + # via + # flask + # moto +xmltodict==0.13.0 + # via moto +zipp==3.17.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools From 42617f825651583cee5a122f2d4239e900baacef Mon Sep 17 00:00:00 2001 From: Ryan Date: Tue, 14 May 2024 12:08:35 -0400 Subject: [PATCH 4/6] revert adding python 3.12 --- .github/workflows/testing.yml | 2 +- .github/workflows/upgrade-dependencies.yml | 2 +- requirements/ubuntu-latest_py3.12.txt | 161 ------- requirements/ubuntu-latest_py3.12_extras.txt | 451 ------------------- setup.py | 1 - 5 files changed, 2 insertions(+), 615 deletions(-) delete mode 100644 requirements/ubuntu-latest_py3.12.txt delete mode 100644 requirements/ubuntu-latest_py3.12_extras.txt diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index c83a38927..bdbf86193 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -51,7 +51,7 @@ jobs: max-parallel: 6 matrix: os: [ubuntu-latest] - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11"] runs-on: ${{ matrix.os }} diff --git a/.github/workflows/upgrade-dependencies.yml b/.github/workflows/upgrade-dependencies.yml index 78d95db29..362a0ce49 100644 --- a/.github/workflows/upgrade-dependencies.yml +++ b/.github/workflows/upgrade-dependencies.yml @@ -16,7 +16,7 @@ jobs: matrix: os: ['ubuntu-latest', 'macos-latest'] package: ["."] - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11"] steps: - uses: actions/checkout@v4 with: diff --git a/requirements/ubuntu-latest_py3.12.txt b/requirements/ubuntu-latest_py3.12.txt 
deleted file mode 100644 index 728e6c17b..000000000 --- a/requirements/ubuntu-latest_py3.12.txt +++ /dev/null @@ -1,161 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --output-file=requirements/ubuntu-latest_py3.11.txt -# -aioitertools==0.11.0 - # via maggma (setup.py) -annotated-types==0.6.0 - # via pydantic -anyio==3.7.1 - # via - # fastapi - # starlette -attrs==23.1.0 - # via - # jsonschema - # referencing -bcrypt==4.1.1 - # via paramiko -blinker==1.7.0 - # via flask -boto3==1.33.11 - # via maggma (setup.py) -botocore==1.33.11 - # via - # boto3 - # s3transfer -certifi==2023.11.17 - # via requests -cffi==1.16.0 - # via - # cryptography - # pynacl -charset-normalizer==3.3.2 - # via requests -click==8.1.7 - # via - # flask - # mongogrant - # uvicorn -cryptography==42.0.4 - # via paramiko -dnspython==2.6.1 - # via - # maggma (setup.py) - # pymongo -fastapi==0.104.1 - # via maggma (setup.py) -flask==3.0.0 - # via mongogrant -h11==0.14.0 - # via uvicorn -idna==3.7 - # via - # anyio - # requests -itsdangerous==2.1.2 - # via flask -jinja2==3.1.4 - # via flask -jmespath==1.0.1 - # via - # boto3 - # botocore -jsonschema==4.20.0 - # via maggma (setup.py) -jsonschema-specifications==2023.11.2 - # via jsonschema -markupsafe==2.1.3 - # via - # jinja2 - # werkzeug -mongogrant==0.3.3 - # via maggma (setup.py) -mongomock==4.1.2 - # via maggma (setup.py) -monty==2023.11.3 - # via maggma (setup.py) -msgpack==1.0.7 - # via maggma (setup.py) -numpy==1.26.2 - # via maggma (setup.py) -orjson==3.9.15 - # via maggma (setup.py) -packaging==23.2 - # via mongomock -paramiko==3.3.1 - # via sshtunnel -pycparser==2.21 - # via cffi -pydantic==2.5.2 - # via - # fastapi - # maggma (setup.py) - # pydantic-settings -pydantic-core==2.14.5 - # via pydantic -pydantic-settings==2.1.0 - # via maggma (setup.py) -pydash==7.0.6 - # via maggma (setup.py) -pymongo==4.6.3 - # via - # maggma (setup.py) - # mongogrant -pynacl==1.5.0 - # 
via paramiko -python-dateutil==2.8.2 - # via - # botocore - # maggma (setup.py) -python-dotenv==1.0.0 - # via pydantic-settings -pyzmq==25.1.2 - # via maggma (setup.py) -referencing==0.32.0 - # via - # jsonschema - # jsonschema-specifications -requests==2.31.0 - # via mongogrant -rpds-py==0.13.2 - # via - # jsonschema - # referencing -ruamel-yaml==0.17.40 - # via maggma (setup.py) -ruamel-yaml-clib==0.2.8 - # via ruamel-yaml -s3transfer==0.8.2 - # via boto3 -sentinels==1.0.0 - # via mongomock -six==1.16.0 - # via python-dateutil -sniffio==1.3.0 - # via anyio -sshtunnel==0.4.0 - # via maggma (setup.py) -starlette==0.36.2 - # via fastapi -tqdm==4.66.3 - # via maggma (setup.py) -typing-extensions==4.9.0 - # via - # fastapi - # pydantic - # pydantic-core - # pydash -urllib3==2.0.7 - # via - # botocore - # requests -uvicorn==0.24.0.post1 - # via maggma (setup.py) -werkzeug==3.0.3 - # via flask - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/requirements/ubuntu-latest_py3.12_extras.txt b/requirements/ubuntu-latest_py3.12_extras.txt deleted file mode 100644 index 6fb811b6e..000000000 --- a/requirements/ubuntu-latest_py3.12_extras.txt +++ /dev/null @@ -1,451 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --all-extras --output-file=requirements/ubuntu-latest_py3.11_extras.txt -# -aioitertools==0.11.0 - # via maggma (setup.py) -annotated-types==0.6.0 - # via pydantic -anyio==3.7.1 - # via - # fastapi - # httpx - # starlette -asttokens==2.4.1 - # via stack-data -attrs==23.1.0 - # via - # jsonschema - # referencing -azure-core==1.29.5 - # via - # azure-identity - # azure-storage-blob -azure-identity==1.15.0 - # via maggma (setup.py) -azure-storage-blob==12.19.0 - # via maggma (setup.py) -babel==2.13.1 - # via mkdocs-material -bcrypt==4.1.1 - # via paramiko -blinker==1.7.0 - # via flask -boto3==1.33.11 - # via - # maggma (setup.py) - # moto 
-botocore==1.33.11 - # via - # boto3 - # moto - # s3transfer -certifi==2023.11.17 - # via - # httpcore - # httpx - # requests -cffi==1.16.0 - # via - # cryptography - # pynacl -cfgv==3.4.0 - # via pre-commit -charset-normalizer==3.3.2 - # via requests -click==8.1.7 - # via - # flask - # mkdocs - # mkdocstrings - # mongogrant - # uvicorn -colorama==0.4.6 - # via - # griffe - # mkdocs-material -coverage[toml]==7.3.2 - # via - # coverage - # pytest-cov -cryptography==42.0.4 - # via - # azure-identity - # azure-storage-blob - # moto - # msal - # paramiko - # pyjwt -csscompressor==0.9.5 - # via mkdocs-minify-plugin -decorator==5.1.1 - # via ipython -distlib==0.3.7 - # via virtualenv -dnspython==2.6.1 - # via - # maggma (setup.py) - # pymongo -execnet==2.0.2 - # via pytest-xdist -executing==2.0.1 - # via stack-data -fastapi==0.104.1 - # via maggma (setup.py) -fastjsonschema==2.19.0 - # via nbformat -filelock==3.13.1 - # via virtualenv -flask==3.0.0 - # via mongogrant -ghp-import==2.1.0 - # via mkdocs -griffe==0.38.1 - # via mkdocstrings-python -h11==0.14.0 - # via - # httpcore - # uvicorn -htmlmin2==0.1.13 - # via mkdocs-minify-plugin -httpcore==1.0.2 - # via httpx -httpx==0.25.2 - # via starlette -hvac==2.0.0 - # via maggma (setup.py) -identify==2.5.33 - # via pre-commit -idna==3.7 - # via - # anyio - # httpx - # requests -importlib-metadata==7.0.0 - # via textual -iniconfig==2.0.0 - # via pytest -ipython==8.18.1 - # via maggma (setup.py) -isodate==0.6.1 - # via azure-storage-blob -itsdangerous==2.1.2 - # via - # flask - # starlette -jedi==0.19.1 - # via ipython -jinja2==3.1.4 - # via - # flask - # maggma (setup.py) - # memray - # mkdocs - # mkdocs-material - # mkdocstrings - # moto - # starlette -jmespath==1.0.1 - # via - # boto3 - # botocore -jsmin==3.0.1 - # via mkdocs-minify-plugin -jsonschema==4.20.0 - # via - # maggma (setup.py) - # nbformat -jsonschema-specifications==2023.11.2 - # via jsonschema -jupyter-core==5.5.0 - # via nbformat -linkify-it-py==2.0.2 - # via 
markdown-it-py -markdown==3.5.1 - # via - # mkdocs - # mkdocs-autorefs - # mkdocs-material - # mkdocstrings - # pymdown-extensions -markdown-it-py[linkify,plugins]==3.0.0 - # via - # mdit-py-plugins - # rich - # textual -markupsafe==2.1.3 - # via - # jinja2 - # mkdocs - # mkdocstrings - # werkzeug -matplotlib-inline==0.1.6 - # via ipython -mdit-py-plugins==0.4.0 - # via markdown-it-py -mdurl==0.1.2 - # via markdown-it-py -memray==1.11.0 - # via maggma (setup.py) -mergedeep==1.3.4 - # via mkdocs -mkdocs==1.5.3 - # via - # maggma (setup.py) - # mkdocs-autorefs - # mkdocs-material - # mkdocs-minify-plugin - # mkdocstrings -mkdocs-autorefs==0.5.0 - # via mkdocstrings -mkdocs-material==9.5.1 - # via maggma (setup.py) -mkdocs-material-extensions==1.3.1 - # via mkdocs-material -mkdocs-minify-plugin==0.7.1 - # via maggma (setup.py) -mkdocstrings[python]==0.24.0 - # via - # maggma (setup.py) - # mkdocstrings - # mkdocstrings-python -mkdocstrings-python==1.7.5 - # via mkdocstrings -mongogrant==0.3.3 - # via maggma (setup.py) -mongomock==4.1.2 - # via maggma (setup.py) -monty==2023.11.3 - # via maggma (setup.py) -montydb==2.5.2 - # via maggma (setup.py) -moto==4.2.11 - # via maggma (setup.py) -msal==1.26.0 - # via - # azure-identity - # msal-extensions -msal-extensions==1.1.0 - # via azure-identity -msgpack==1.0.7 - # via maggma (setup.py) -nbformat==5.9.2 - # via maggma (setup.py) -nodeenv==1.8.0 - # via pre-commit -numpy==1.26.2 - # via maggma (setup.py) -orjson==3.9.15 - # via maggma (setup.py) -packaging==23.2 - # via - # mkdocs - # mongomock - # msal-extensions - # pytest -paginate==0.5.6 - # via mkdocs-material -paramiko==3.3.1 - # via sshtunnel -parso==0.8.3 - # via jedi -pathspec==0.12.1 - # via mkdocs -pexpect==4.9.0 - # via ipython -platformdirs==4.1.0 - # via - # jupyter-core - # mkdocs - # mkdocstrings - # virtualenv -pluggy==1.3.0 - # via pytest -portalocker==2.8.2 - # via msal-extensions -pre-commit==3.6.0 - # via maggma (setup.py) -prompt-toolkit==3.0.41 - # 
via ipython -ptyprocess==0.7.0 - # via pexpect -pure-eval==0.2.2 - # via stack-data -pycparser==2.21 - # via cffi -pydantic==2.5.2 - # via - # fastapi - # maggma (setup.py) - # pydantic-settings -pydantic-core==2.14.5 - # via pydantic -pydantic-settings==2.1.0 - # via maggma (setup.py) -pydash==7.0.6 - # via maggma (setup.py) -pygments==2.17.2 - # via - # ipython - # mkdocs-material - # rich -pyjwt[crypto]==2.8.0 - # via - # msal - # pyjwt -pymdown-extensions==10.5 - # via - # mkdocs-material - # mkdocstrings -pymongo==4.6.3 - # via - # maggma (setup.py) - # mongogrant -pynacl==1.5.0 - # via paramiko -pytest==7.4.3 - # via - # maggma (setup.py) - # pytest-asyncio - # pytest-cov - # pytest-mock - # pytest-xdist -pytest-asyncio==0.23.2 - # via maggma (setup.py) -pytest-cov==4.1.0 - # via maggma (setup.py) -pytest-mock==3.12.0 - # via maggma (setup.py) -pytest-xdist==3.5.0 - # via maggma (setup.py) -python-dateutil==2.8.2 - # via - # botocore - # ghp-import - # maggma (setup.py) - # moto -python-dotenv==1.0.0 - # via pydantic-settings -python-multipart==0.0.7 - # via starlette -pyyaml==6.0.1 - # via - # mkdocs - # pre-commit - # pymdown-extensions - # pyyaml-env-tag - # starlette -pyyaml-env-tag==0.1 - # via mkdocs -pyzmq==25.1.2 - # via maggma (setup.py) -referencing==0.32.0 - # via - # jsonschema - # jsonschema-specifications -regex==2023.10.3 - # via - # maggma (setup.py) - # mkdocs-material -requests==2.31.0 - # via - # azure-core - # hvac - # mkdocs-material - # mongogrant - # moto - # msal - # responses -responses==0.21.0 - # via - # maggma (setup.py) - # moto -rich==13.7.0 - # via - # memray - # textual -rpds-py==0.13.2 - # via - # jsonschema - # referencing -ruamel-yaml==0.17.40 - # via maggma (setup.py) -ruamel-yaml-clib==0.2.8 - # via ruamel-yaml -ruff==0.1.7 - # via maggma (setup.py) -s3transfer==0.8.2 - # via boto3 -sentinels==1.0.0 - # via mongomock -six==1.16.0 - # via - # asttokens - # azure-core - # isodate - # python-dateutil -sniffio==1.3.0 - # via - 
# anyio - # httpx -sshtunnel==0.4.0 - # via maggma (setup.py) -stack-data==0.6.3 - # via ipython -starlette[full]==0.27.0 - # via - # fastapi - # maggma (setup.py) -textual==0.44.1 - # via memray -tqdm==4.66.3 - # via maggma (setup.py) -traitlets==5.14.0 - # via - # ipython - # jupyter-core - # matplotlib-inline - # nbformat -types-python-dateutil==2.8.19.14 - # via maggma (setup.py) -types-pyyaml==6.0.12.12 - # via maggma (setup.py) -types-setuptools==69.0.0.0 - # via maggma (setup.py) -typing-extensions==4.9.0 - # via - # azure-core - # azure-storage-blob - # fastapi - # pydantic - # pydantic-core - # pydash - # textual -uc-micro-py==1.0.2 - # via linkify-it-py -urllib3==2.0.7 - # via - # botocore - # requests - # responses -uvicorn==0.24.0.post1 - # via maggma (setup.py) -virtualenv==20.25.0 - # via pre-commit -watchdog==3.0.0 - # via mkdocs -wcwidth==0.2.12 - # via prompt-toolkit -werkzeug==3.0.3 - # via - # flask - # moto -xmltodict==0.13.0 - # via moto -zipp==3.17.0 - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/setup.py b/setup.py index c0bcd2300..16cb8946a 100644 --- a/setup.py +++ b/setup.py @@ -81,7 +81,6 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", "Development Status :: 5 - Production/Stable", "Intended Audience :: Science/Research", "Intended Audience :: System Administrators", From a9a416f60caed89751c1438280f5f9fd74a6149a Mon Sep 17 00:00:00 2001 From: Ryan Date: Tue, 14 May 2024 12:28:33 -0400 Subject: [PATCH 5/6] pre-commit auto fixes --- src/maggma/api/API.py | 8 +-- src/maggma/api/models.py | 6 +- src/maggma/api/query_operator/core.py | 5 +- src/maggma/api/query_operator/dynamic.py | 20 +++--- src/maggma/api/query_operator/pagination.py | 4 +- src/maggma/api/query_operator/sorting.py | 4 +- .../api/query_operator/sparse_fields.py | 8 +-- 
src/maggma/api/resource/aggregation.py | 10 +-- src/maggma/api/resource/core.py | 7 +- src/maggma/api/resource/post_resource.py | 16 ++--- src/maggma/api/resource/read_resource.py | 14 ++-- src/maggma/api/resource/s3_url.py | 4 +- src/maggma/api/resource/submission.py | 18 ++--- src/maggma/api/resource/utils.py | 6 +- src/maggma/api/utils.py | 26 ++++--- src/maggma/builders/group_builder.py | 21 +++--- src/maggma/builders/map_builder.py | 13 ++-- src/maggma/builders/projection_builder.py | 26 +++---- src/maggma/cli/__init__.py | 2 +- src/maggma/cli/distributed.py | 3 +- src/maggma/cli/multiprocessing.py | 4 +- src/maggma/cli/rabbitmq.py | 4 +- src/maggma/cli/settings.py | 2 +- src/maggma/cli/source_loader.py | 3 +- src/maggma/core/builder.py | 11 +-- src/maggma/core/store.py | 39 ++++++----- src/maggma/core/validator.py | 5 +- src/maggma/stores/advanced_stores.py | 57 +++++++-------- src/maggma/stores/aws.py | 43 ++++++------ src/maggma/stores/azure.py | 41 +++++------ src/maggma/stores/compound_stores.py | 63 +++++++++-------- src/maggma/stores/file_store.py | 35 +++++----- src/maggma/stores/gridfs.py | 43 ++++++------ src/maggma/stores/mongolike.py | 70 ++++++++++--------- src/maggma/stores/open_data.py | 39 ++++++----- src/maggma/stores/shared_stores.py | 69 +++++++++--------- src/maggma/stores/ssh_tunnel.py | 6 +- src/maggma/utils.py | 13 ++-- src/maggma/validators.py | 9 ++- tests/api/test_api.py | 4 +- tests/builders/test_copy_builder.py | 2 +- tests/builders/test_group_builder.py | 3 +- 42 files changed, 395 insertions(+), 391 deletions(-) diff --git a/src/maggma/api/API.py b/src/maggma/api/API.py index ae09b6d16..fb7b0cdc9 100644 --- a/src/maggma/api/API.py +++ b/src/maggma/api/API.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Dict, List, Optional +from typing import Optional import uvicorn from fastapi import FastAPI @@ -18,13 +18,13 @@ class API(MSONable): def __init__( self, - resources: Dict[str, List[Resource]], + resources: 
dict[str, list[Resource]], title: str = "Generic API", version: str = "v0.0.0", debug: bool = False, - heartbeat_meta: Optional[Dict] = None, + heartbeat_meta: Optional[dict] = None, description: Optional[str] = None, - tags_meta: Optional[List[Dict]] = None, + tags_meta: Optional[list[dict]] = None, ): """ Args: diff --git a/src/maggma/api/models.py b/src/maggma/api/models.py index c58cee633..0f3c0d413 100644 --- a/src/maggma/api/models.py +++ b/src/maggma/api/models.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Generic, List, Optional, TypeVar +from typing import Generic, Optional, TypeVar from pydantic import BaseModel, Field, validator @@ -51,8 +51,8 @@ class Response(BaseModel, Generic[DataT]): A Generic API Response. """ - data: Optional[List[DataT]] = Field(None, description="List of returned data") - errors: Optional[List[Error]] = Field(None, description="Any errors on processing this query") + data: Optional[list[DataT]] = Field(None, description="List of returned data") + errors: Optional[list[Error]] = Field(None, description="Any errors on processing this query") meta: Optional[Meta] = Field(None, description="Extra information for the query") @validator("errors", always=True) diff --git a/src/maggma/api/query_operator/core.py b/src/maggma/api/query_operator/core.py index 127d622bd..633ee5342 100644 --- a/src/maggma/api/query_operator/core.py +++ b/src/maggma/api/query_operator/core.py @@ -1,5 +1,4 @@ from abc import ABCMeta, abstractmethod -from typing import Dict, List from monty.json import MSONable @@ -18,7 +17,7 @@ def query(self) -> STORE_PARAMS: The query function that does the work for this query operator. """ - def meta(self) -> Dict: + def meta(self) -> dict: """ Returns meta data to return with the Response. 
@@ -28,7 +27,7 @@ def meta(self) -> Dict: """ return {} - def post_process(self, docs: List[Dict], query: Dict) -> List[Dict]: + def post_process(self, docs: list[dict], query: dict) -> list[dict]: """ An optional post-processing function for the data. diff --git a/src/maggma/api/query_operator/dynamic.py b/src/maggma/api/query_operator/dynamic.py index 2715038ed..5d4b9b8f9 100644 --- a/src/maggma/api/query_operator/dynamic.py +++ b/src/maggma/api/query_operator/dynamic.py @@ -1,6 +1,6 @@ import inspect from abc import abstractmethod -from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union +from typing import Any, Callable, Optional, Union from fastapi.params import Query from monty.json import MontyDecoder @@ -17,15 +17,15 @@ class DynamicQueryOperator(QueryOperator): def __init__( self, - model: Type[BaseModel], - fields: Optional[List[str]] = None, - excluded_fields: Optional[List[str]] = None, + model: type[BaseModel], + fields: Optional[list[str]] = None, + excluded_fields: Optional[list[str]] = None, ): self.model = model self.fields = fields self.excluded_fields = excluded_fields - all_fields: Dict[str, FieldInfo] = model.model_fields + all_fields: dict[str, FieldInfo] = model.model_fields param_fields = fields or list(set(all_fields.keys()) - set(excluded_fields or [])) # Convert the fields into operator tuples @@ -60,7 +60,7 @@ def query(**kwargs) -> STORE_PARAMS: return {"criteria": final_crit} # building the signatures for FastAPI Swagger UI - signatures: List = [ + signatures: list = [ inspect.Parameter( op[0], inspect.Parameter.POSITIONAL_OR_KEYWORD, @@ -78,7 +78,7 @@ def query(self): "Stub query function for abstract class." 
@abstractmethod - def field_to_operator(self, name: str, field: FieldInfo) -> List[Tuple[str, Any, Query, Callable[..., Dict]]]: + def field_to_operator(self, name: str, field: FieldInfo) -> list[tuple[str, Any, Query, Callable[..., dict]]]: """ Converts a PyDantic FieldInfo into a Tuple with the - query param name, @@ -95,7 +95,7 @@ def from_dict(cls, d): decoder = MontyDecoder() return cls(**{k: decoder.process_decoded(v) for k, v in d.items()}) - def as_dict(self) -> Dict: + def as_dict(self) -> dict: """ Special as_dict implemented to convert pydantic models into strings. """ @@ -107,7 +107,7 @@ def as_dict(self) -> Dict: class NumericQuery(DynamicQueryOperator): "Query Operator to enable searching on numeric fields." - def field_to_operator(self, name: str, field: FieldInfo) -> List[Tuple[str, Any, Query, Callable[..., Dict]]]: + def field_to_operator(self, name: str, field: FieldInfo) -> list[tuple[str, Any, Query, Callable[..., dict]]]: """ Converts a PyDantic FieldInfo into a Tuple with the query_param name, @@ -192,7 +192,7 @@ def field_to_operator(self, name: str, field: FieldInfo) -> List[Tuple[str, Any, class StringQueryOperator(DynamicQueryOperator): "Query Operator to enable searching on numeric fields." - def field_to_operator(self, name: str, field: FieldInfo) -> List[Tuple[str, Any, Query, Callable[..., Dict]]]: + def field_to_operator(self, name: str, field: FieldInfo) -> list[tuple[str, Any, Query, Callable[..., dict]]]: """ Converts a PyDantic FieldInfo into a Tuple with the query_param name, diff --git a/src/maggma/api/query_operator/pagination.py b/src/maggma/api/query_operator/pagination.py index 742e930c6..79aa1f319 100644 --- a/src/maggma/api/query_operator/pagination.py +++ b/src/maggma/api/query_operator/pagination.py @@ -1,5 +1,3 @@ -from typing import Dict - from fastapi import HTTPException, Query from maggma.api.query_operator import QueryOperator @@ -82,7 +80,7 @@ def query( def query(self): "Stub query function for abstract class." 
- def meta(self) -> Dict: + def meta(self) -> dict: """ Metadata for the pagination params. """ diff --git a/src/maggma/api/query_operator/sorting.py b/src/maggma/api/query_operator/sorting.py index 3d47179f0..d308e4aae 100644 --- a/src/maggma/api/query_operator/sorting.py +++ b/src/maggma/api/query_operator/sorting.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Optional from fastapi import Query from fastapi.exceptions import HTTPException @@ -10,7 +10,7 @@ class SortQuery(QueryOperator): """Method to generate the sorting portion of a query.""" - def __init__(self, fields: Optional[List[str]] = None, max_num: Optional[int] = None): + def __init__(self, fields: Optional[list[str]] = None, max_num: Optional[int] = None): """Sort query configuration. Args: diff --git a/src/maggma/api/query_operator/sparse_fields.py b/src/maggma/api/query_operator/sparse_fields.py index 3b33363b2..f992c8630 100644 --- a/src/maggma/api/query_operator/sparse_fields.py +++ b/src/maggma/api/query_operator/sparse_fields.py @@ -1,4 +1,4 @@ -from typing import Dict, List, Optional, Type +from typing import Optional from fastapi import Query from pydantic import BaseModel @@ -9,7 +9,7 @@ class SparseFieldsQuery(QueryOperator): - def __init__(self, model: Type[BaseModel], default_fields: Optional[List[str]] = None): + def __init__(self, model: type[BaseModel], default_fields: Optional[list[str]] = None): """ Args: model: PyDantic Model that represents the underlying data source @@ -46,13 +46,13 @@ def query( def query(self): "Stub query function for abstract class." - def meta(self) -> Dict: + def meta(self) -> dict: """ Returns metadata for the Sparse field set. """ return {"default_fields": self.default_fields} - def as_dict(self) -> Dict: + def as_dict(self) -> dict: """ Special as_dict implemented to convert pydantic models into strings. 
""" diff --git a/src/maggma/api/resource/aggregation.py b/src/maggma/api/resource/aggregation.py index b7e7848d1..a3bcf3ec6 100644 --- a/src/maggma/api/resource/aggregation.py +++ b/src/maggma/api/resource/aggregation.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, List, Optional, Type +from typing import Any, Optional import orjson from fastapi import HTTPException, Request, Response @@ -23,10 +23,10 @@ class AggregationResource(Resource): def __init__( self, store: Store, - model: Type[BaseModel], + model: type[BaseModel], pipeline_query_operator: QueryOperator, timeout: Optional[int] = None, - tags: Optional[List[str]] = None, + tags: Optional[list[str]] = None, include_in_schema: Optional[bool] = True, sub_path: Optional[str] = "/", header_processor: Optional[HeaderProcessor] = None, @@ -66,11 +66,11 @@ def prepare_endpoint(self): def build_dynamic_model_search(self): model_name = self.model.__name__ - def search(**queries: Dict[str, STORE_PARAMS]) -> Dict: + def search(**queries: dict[str, STORE_PARAMS]) -> dict: request: Request = queries.pop("request") # type: ignore queries.pop("temp_response") # type: ignore - query: Dict[Any, Any] = merge_queries(list(queries.values())) # type: ignore + query: dict[Any, Any] = merge_queries(list(queries.values())) # type: ignore self.store.connect() diff --git a/src/maggma/api/resource/core.py b/src/maggma/api/resource/core.py index 23e358b92..bb183c8dd 100644 --- a/src/maggma/api/resource/core.py +++ b/src/maggma/api/resource/core.py @@ -1,6 +1,5 @@ import logging from abc import ABCMeta, abstractmethod -from typing import Dict, Type from fastapi import APIRouter, FastAPI, Request, Response from monty.json import MontyDecoder, MSONable @@ -18,7 +17,7 @@ class Resource(MSONable, metaclass=ABCMeta): def __init__( self, - model: Type[BaseModel], + model: type[BaseModel], ): """ Args: @@ -68,7 +67,7 @@ def run(self): # pragma: no cover app.include_router(self.router, prefix="") uvicorn.run(app) - def as_dict(self) -> Dict: 
+ def as_dict(self) -> dict: """ Special as_dict implemented to convert pydantic models into strings. """ @@ -78,7 +77,7 @@ def as_dict(self) -> Dict: return d @classmethod - def from_dict(cls, d: Dict): + def from_dict(cls, d: dict): if isinstance(d["model"], str): d["model"] = dynamic_import(d["model"]) d = {k: MontyDecoder().process_decoded(v) for k, v in d.items()} diff --git a/src/maggma/api/resource/post_resource.py b/src/maggma/api/resource/post_resource.py index deab50455..b60b56e3d 100644 --- a/src/maggma/api/resource/post_resource.py +++ b/src/maggma/api/resource/post_resource.py @@ -1,5 +1,5 @@ from inspect import signature -from typing import Any, Dict, List, Optional, Type +from typing import Any, Optional from fastapi import HTTPException, Request from pydantic import BaseModel @@ -23,11 +23,11 @@ class PostOnlyResource(Resource): def __init__( self, store: Store, - model: Type[BaseModel], - tags: Optional[List[str]] = None, - query_operators: Optional[List[QueryOperator]] = None, - key_fields: Optional[List[str]] = None, - query: Optional[Dict] = None, + model: type[BaseModel], + tags: Optional[list[str]] = None, + query_operators: Optional[list[QueryOperator]] = None, + key_fields: Optional[list[str]] = None, + query: Optional[dict] = None, timeout: Optional[int] = None, include_in_schema: Optional[bool] = True, sub_path: Optional[str] = "/", @@ -81,7 +81,7 @@ def prepare_endpoint(self): def build_dynamic_model_search(self): model_name = self.model.__name__ - def search(**queries: Dict[str, STORE_PARAMS]) -> Dict: + def search(**queries: dict[str, STORE_PARAMS]) -> dict: request: Request = queries.pop("request") # type: ignore queries.pop("temp_response") # type: ignore @@ -96,7 +96,7 @@ def search(**queries: Dict[str, STORE_PARAMS]) -> Dict: detail="Request contains query parameters which cannot be used: {}".format(", ".join(overlap)), ) - query: Dict[Any, Any] = merge_queries(list(queries.values())) # type: ignore + query: dict[Any, Any] = 
merge_queries(list(queries.values())) # type: ignore query["criteria"].update(self.query) self.store.connect() diff --git a/src/maggma/api/resource/read_resource.py b/src/maggma/api/resource/read_resource.py index 2d602f49e..a256e1191 100644 --- a/src/maggma/api/resource/read_resource.py +++ b/src/maggma/api/resource/read_resource.py @@ -1,5 +1,5 @@ from inspect import signature -from typing import Any, Dict, List, Optional, Type, Union +from typing import Any, Optional, Union import orjson from fastapi import Depends, HTTPException, Path, Request, Response @@ -27,10 +27,10 @@ class ReadOnlyResource(Resource): def __init__( self, store: Store, - model: Type[BaseModel], - tags: Optional[List[str]] = None, - query_operators: Optional[List[QueryOperator]] = None, - key_fields: Optional[List[str]] = None, + model: type[BaseModel], + tags: Optional[list[str]] = None, + query_operators: Optional[list[QueryOperator]] = None, + key_fields: Optional[list[str]] = None, hint_scheme: Optional[HintScheme] = None, header_processor: Optional[HeaderProcessor] = None, timeout: Optional[int] = None, @@ -193,7 +193,7 @@ def get_by_key( def build_dynamic_model_search(self): model_name = self.model.__name__ - def search(**queries: Dict[str, STORE_PARAMS]) -> Union[Dict, Response]: + def search(**queries: dict[str, STORE_PARAMS]) -> Union[dict, Response]: request: Request = queries.pop("request") # type: ignore temp_response: Response = queries.pop("temp_response") # type: ignore @@ -216,7 +216,7 @@ def search(**queries: Dict[str, STORE_PARAMS]) -> Union[Dict, Response]: detail="Request contains query parameters which cannot be used: {}".format(", ".join(overlap)), ) - query: Dict[Any, Any] = merge_queries(list(queries.values())) # type: ignore + query: dict[Any, Any] = merge_queries(list(queries.values())) # type: ignore if self.hint_scheme is not None: # pragma: no cover hints = self.hint_scheme.generate_hints(query) diff --git a/src/maggma/api/resource/s3_url.py 
b/src/maggma/api/resource/s3_url.py index b12d8779a..a6a7b342a 100644 --- a/src/maggma/api/resource/s3_url.py +++ b/src/maggma/api/resource/s3_url.py @@ -1,5 +1,5 @@ from datetime import datetime, timedelta -from typing import List, Optional +from typing import Optional import orjson from botocore.exceptions import ClientError @@ -22,7 +22,7 @@ def __init__( self, store: S3Store, url_lifetime: int, - tags: Optional[List[str]] = None, + tags: Optional[list[str]] = None, header_processor: Optional[HeaderProcessor] = None, disable_validation: bool = False, include_in_schema: Optional[bool] = True, diff --git a/src/maggma/api/resource/submission.py b/src/maggma/api/resource/submission.py index ef1a9905b..0a7700ed0 100644 --- a/src/maggma/api/resource/submission.py +++ b/src/maggma/api/resource/submission.py @@ -1,7 +1,7 @@ from datetime import datetime from enum import Enum from inspect import signature -from typing import Any, List, Optional, Type +from typing import Any, Optional from uuid import uuid4 from fastapi import HTTPException, Path, Request @@ -27,14 +27,14 @@ class SubmissionResource(Resource): def __init__( self, store: Store, - model: Type[BaseModel], - post_query_operators: List[QueryOperator], - get_query_operators: List[QueryOperator], - patch_query_operators: Optional[List[QueryOperator]] = None, - tags: Optional[List[str]] = None, + model: type[BaseModel], + post_query_operators: list[QueryOperator], + get_query_operators: list[QueryOperator], + patch_query_operators: Optional[list[QueryOperator]] = None, + tags: Optional[list[str]] = None, timeout: Optional[int] = None, include_in_schema: Optional[bool] = True, - duplicate_fields_check: Optional[List[str]] = None, + duplicate_fields_check: Optional[list[str]] = None, enable_default_search: Optional[bool] = True, state_enum: Optional[Enum] = None, default_state: Optional[Any] = None, @@ -97,12 +97,12 @@ def __init__( if state_enum is not None: new_fields["state"] = ( - List[state_enum], # type: 
ignore + list[state_enum], # type: ignore Field(..., description="List of data status descriptions"), ) new_fields["updated"] = ( - List[datetime], + list[datetime], Field(..., description="List of status update datetimes"), ) diff --git a/src/maggma/api/resource/utils.py b/src/maggma/api/resource/utils.py index 51ca9388b..9ff21e652 100644 --- a/src/maggma/api/resource/utils.py +++ b/src/maggma/api/resource/utils.py @@ -1,4 +1,4 @@ -from typing import Callable, Dict, List +from typing import Callable from fastapi import Depends, Request, Response @@ -8,8 +8,8 @@ def attach_query_ops( - function: Callable[[List[STORE_PARAMS]], Dict], query_ops: List[QueryOperator] -) -> Callable[[List[STORE_PARAMS]], Dict]: + function: Callable[[list[STORE_PARAMS]], dict], query_ops: list[QueryOperator] +) -> Callable[[list[STORE_PARAMS]], dict]: """ Attach query operators to API compliant function The function has to take a list of STORE_PARAMs as the only argument. diff --git a/src/maggma/api/utils.py b/src/maggma/api/utils.py index 6dcefed04..41b333811 100644 --- a/src/maggma/api/utils.py +++ b/src/maggma/api/utils.py @@ -3,10 +3,8 @@ from typing import ( Any, Callable, - Dict, - List, Optional, - Type, + Union, get_args, # pragma: no cover ) @@ -15,12 +13,12 @@ from pydantic import BaseModel from pydantic._internal._utils import lenient_issubclass from pydantic.fields import FieldInfo -from typing_extensions import Literal, Union +from typing_extensions import Literal from maggma.utils import get_flat_models_from_model QUERY_PARAMS = ["criteria", "properties", "skip", "limit"] -STORE_PARAMS = Dict[ +STORE_PARAMS = dict[ Literal[ "criteria", "properties", @@ -37,9 +35,9 @@ ] -def merge_queries(queries: List[STORE_PARAMS]) -> STORE_PARAMS: +def merge_queries(queries: list[STORE_PARAMS]) -> STORE_PARAMS: criteria: STORE_PARAMS = {} - properties: List[str] = [] + properties: list[str] = [] for sub_query in queries: if "criteria" in sub_query: @@ -56,7 +54,7 @@ def 
merge_queries(queries: List[STORE_PARAMS]) -> STORE_PARAMS: } -def attach_signature(function: Callable, defaults: Dict, annotations: Dict): +def attach_signature(function: Callable, defaults: dict, annotations: dict): """ Attaches signature for defaults and annotations for parameters to function. @@ -70,19 +68,19 @@ def attach_signature(function: Callable, defaults: Dict, annotations: Dict): inspect.Parameter( param, inspect.Parameter.POSITIONAL_OR_KEYWORD, - default=defaults.get(param, None), - annotation=annotations.get(param, None), + default=defaults.get(param), + annotation=annotations.get(param), ) for param in annotations - if param not in defaults.keys() + if param not in defaults ] optional_params = [ inspect.Parameter( param, inspect.Parameter.POSITIONAL_OR_KEYWORD, - default=defaults.get(param, None), - annotation=annotations.get(param, None), + default=defaults.get(param), + annotation=annotations.get(param), ) for param in defaults ] @@ -140,7 +138,7 @@ def api_sanitize( return pydantic_model -def allow_msonable_dict(monty_cls: Type[MSONable]): +def allow_msonable_dict(monty_cls: type[MSONable]): """ Patch Monty to allow for dict values for MSONable. 
""" diff --git a/src/maggma/builders/group_builder.py b/src/maggma/builders/group_builder.py index 3f6df7ca6..2ad7bacb9 100644 --- a/src/maggma/builders/group_builder.py +++ b/src/maggma/builders/group_builder.py @@ -3,10 +3,11 @@ """ import traceback from abc import ABCMeta, abstractmethod +from collections.abc import Iterable, Iterator from datetime import datetime from math import ceil from time import time -from typing import Dict, Iterable, Iterator, List, Optional, Set, Tuple +from typing import Optional from pydash import get @@ -28,9 +29,9 @@ def __init__( self, source: Store, target: Store, - grouping_keys: List[str], - query: Optional[Dict] = None, - projection: Optional[List] = None, + grouping_keys: list[str], + query: Optional[dict] = None, + projection: Optional[list] = None, timeout: int = 0, store_process_time: bool = True, retry_failed: bool = False, @@ -89,7 +90,7 @@ def ensure_indexes(self): "for each of source and target." ) - def prechunk(self, number_splits: int) -> Iterator[Dict]: + def prechunk(self, number_splits: int) -> Iterator[dict]: """ Generic prechunk for group builder to perform domain-decomposition by the grouping keys. @@ -120,7 +121,7 @@ def get_items(self): docs = list(self.source.query(criteria=dict(zip(self.grouping_keys, group)), properties=projection)) yield docs - def process_item(self, item: List[Dict]) -> Dict[Tuple, Dict]: # type: ignore + def process_item(self, item: list[dict]) -> dict[tuple, dict]: # type: ignore keys = [d[self.source.key] for d in item] self.logger.debug(f"Processing: {keys}") @@ -152,7 +153,7 @@ def process_item(self, item: List[Dict]) -> Dict[Tuple, Dict]: # type: ignore return processed - def update_targets(self, items: List[Dict]): + def update_targets(self, items: list[dict]): """ Generic update targets for Group Builder. 
""" @@ -165,7 +166,7 @@ def update_targets(self, items: List[Dict]): target.update(items) @abstractmethod - def unary_function(self, items: List[Dict]) -> Dict: + def unary_function(self, items: list[dict]) -> dict: """ Processing function for GroupBuilder. @@ -212,14 +213,14 @@ def get_ids_to_process(self) -> Iterable: self.logger.info(f"Found {len(new_ids)} updated IDs to process") return list(new_ids | unprocessed_ids) - def get_groups_from_keys(self, keys) -> Set[Tuple]: + def get_groups_from_keys(self, keys) -> set[tuple]: """ Get the groups by grouping_keys for these documents. """ grouping_keys = self.grouping_keys - groups: Set[Tuple] = set() + groups: set[tuple] = set() for chunked_keys in grouper(keys, self.chunk_size): docs = list( diff --git a/src/maggma/builders/map_builder.py b/src/maggma/builders/map_builder.py index 15392d3ea..75f0f60b2 100644 --- a/src/maggma/builders/map_builder.py +++ b/src/maggma/builders/map_builder.py @@ -3,10 +3,11 @@ """ import traceback from abc import ABCMeta, abstractmethod +from collections.abc import Iterator from datetime import datetime from math import ceil from time import time -from typing import Dict, Iterator, List, Optional +from typing import Optional from maggma.core import Builder, Store from maggma.utils import Timeout, grouper @@ -26,8 +27,8 @@ def __init__( self, source: Store, target: Store, - query: Optional[Dict] = None, - projection: Optional[List] = None, + query: Optional[dict] = None, + projection: Optional[list] = None, delete_orphans: bool = False, timeout: int = 0, store_process_time: bool = True, @@ -84,7 +85,7 @@ def ensure_indexes(self): "for each of source and target." ) - def prechunk(self, number_splits: int) -> Iterator[Dict]: + def prechunk(self, number_splits: int) -> Iterator[dict]: """ Generic prechunk for map builder to perform domain-decomposition by the key field. 
@@ -133,7 +134,7 @@ def get_items(self): ): yield doc - def process_item(self, item: Dict): + def process_item(self, item: dict): """ Generic process items to process a dictionary using a map function. @@ -171,7 +172,7 @@ def process_item(self, item: Dict): out.update(processed) return out - def update_targets(self, items: List[Dict]): + def update_targets(self, items: list[dict]): """ Generic update targets for Map Builder. """ diff --git a/src/maggma/builders/projection_builder.py b/src/maggma/builders/projection_builder.py index ec0dc845c..bf2989811 100644 --- a/src/maggma/builders/projection_builder.py +++ b/src/maggma/builders/projection_builder.py @@ -1,7 +1,8 @@ +from collections.abc import Iterable from copy import deepcopy from datetime import datetime from itertools import chain -from typing import Dict, Iterable, List, Optional, Union +from typing import Optional, Union from pydash import get @@ -27,10 +28,10 @@ class Projection_Builder(Builder): def __init__( self, - source_stores: List[Store], + source_stores: list[Store], target_store: Store, - fields_to_project: Union[List[Union[List, Dict]], None] = None, - query_by_key: Optional[List] = None, + fields_to_project: Union[list[Union[list, dict]], None] = None, + query_by_key: Optional[list] = None, **kwargs, ): """ @@ -73,7 +74,7 @@ def __init__( raise TypeError("Input fields_to_project must be a list. E.g. 
[['str1','str2'],{'A':'str1','B':str2'}]") # interpret fields_to_project to create projection_mapping attribute - projection_mapping: List[Dict] # PEP 484 Type Hinting + projection_mapping: list[dict] # PEP 484 Type Hinting if fields_to_project is None: projection_mapping = [{}] * len(source_stores) else: @@ -147,7 +148,7 @@ def get_items(self) -> Iterable: # project all fields from store if corresponding element # in projection_mapping is an empty dict, # else only project the specified fields - properties: Union[List, None] + properties: Union[list, None] if projection == {}: # all fields are projected properties = None self.logger.debug(f"For store {store.collection_name} getting all properties") @@ -178,14 +179,13 @@ def get_items(self) -> Iterable: unsorted_items_to_process.append(item) self.logger.debug( - "Example fields of one output item from {} store sent to process_items: {}".format( - store.collection_name, item.keys() - ) + f"Example fields of one output item from {store.collection_name} store sent to " + f"process_items: {item.keys()}" ) yield unsorted_items_to_process - def process_item(self, items: Union[List, Iterable]) -> List[Dict]: + def process_item(self, items: Union[list, Iterable]) -> list[dict]: """ Takes a chunk of items belonging to a subset of key values and groups them by key value.
Combines items for each @@ -203,14 +203,14 @@ def process_item(self, items: Union[List, Iterable]) -> List[Dict]: items_sorted_by_key = {} # type: Dict for i in items: key_value = i[key] - if key_value not in items_sorted_by_key.keys(): + if key_value not in items_sorted_by_key: items_sorted_by_key[key_value] = [] items_sorted_by_key[key_value].append(i) items_for_target = [] for k, i_sorted in items_sorted_by_key.items(): self.logger.debug(f"Combined items for {key}: {k}") - target_doc: Dict = {} + target_doc: dict = {} for i in i_sorted: target_doc.update(i) # last modification is adding key value avoid overwriting @@ -220,7 +220,7 @@ def process_item(self, items: Union[List, Iterable]) -> List[Dict]: return items_for_target - def update_targets(self, items: List): + def update_targets(self, items: list): """ Adds a last_updated field to items and then adds them to the target store. diff --git a/src/maggma/cli/__init__.py b/src/maggma/cli/__init__.py index ef87e2b3e..a30c1a093 100644 --- a/src/maggma/cli/__init__.py +++ b/src/maggma/cli/__init__.py @@ -226,4 +226,4 @@ def run( if memray_file: import subprocess - subprocess.run(["memray", "flamegraph", memray_file], shell=False) + subprocess.run(["memray", "flamegraph", memray_file], shell=False, check=False) diff --git a/src/maggma/cli/distributed.py b/src/maggma/cli/distributed.py index e1aa0f07c..e93462c2b 100644 --- a/src/maggma/cli/distributed.py +++ b/src/maggma/cli/distributed.py @@ -7,7 +7,6 @@ from logging import getLogger from random import randint from time import perf_counter -from typing import List import numpy as np import zmq @@ -28,7 +27,7 @@ def find_port(): return sock.getsockname()[1] -def manager(url: str, port: int, builders: List[Builder], num_chunks: int, num_workers: int): +def manager(url: str, port: int, builders: list[Builder], num_chunks: int, num_workers: int): """ Really simple manager for distributed processing that uses a builder prechunk to modify the builder and send out modified 
builders for each worker to run. diff --git a/src/maggma/cli/multiprocessing.py b/src/maggma/cli/multiprocessing.py index 6f2a407c5..42a4e4dfb 100644 --- a/src/maggma/cli/multiprocessing.py +++ b/src/maggma/cli/multiprocessing.py @@ -5,7 +5,7 @@ from concurrent.futures import ProcessPoolExecutor from logging import getLogger from types import GeneratorType -from typing import Any, Callable, Dict, Optional +from typing import Any, Callable, Optional from aioitertools import enumerate from tqdm.auto import tqdm @@ -150,7 +150,7 @@ async def multi( num_processes, no_bars=False, heartbeat_func: Optional[Callable[..., Any]] = None, - heartbeat_func_kwargs: Optional[Dict[Any, Any]] = None, + heartbeat_func_kwargs: Optional[dict[Any, Any]] = None, ): builder.connect() cursor = builder.get_items() diff --git a/src/maggma/cli/rabbitmq.py b/src/maggma/cli/rabbitmq.py index b6b756b82..36bd6e1c7 100644 --- a/src/maggma/cli/rabbitmq.py +++ b/src/maggma/cli/rabbitmq.py @@ -7,7 +7,7 @@ from logging import getLogger from random import randint from time import perf_counter -from typing import List, Literal +from typing import Literal import numpy as np from monty.json import jsanitize @@ -34,7 +34,7 @@ def find_port(): def manager( url: str, - builders: List[Builder], + builders: list[Builder], num_chunks: int, num_workers: int, queue_prefix: str, diff --git a/src/maggma/cli/settings.py b/src/maggma/cli/settings.py index af7657838..22a18cf92 100644 --- a/src/maggma/cli/settings.py +++ b/src/maggma/cli/settings.py @@ -1,9 +1,9 @@ import platform import tempfile +from typing import Optional from pydantic import Field from pydantic_settings import BaseSettings -from typing_extensions import Optional tempdir = "/tmp" if platform.system() == "Darwin" else tempfile.gettempdir() diff --git a/src/maggma/cli/source_loader.py b/src/maggma/cli/source_loader.py index 83be487a5..30bfc880a 100644 --- a/src/maggma/cli/source_loader.py +++ b/src/maggma/cli/source_loader.py @@ -4,7 +4,6 @@ from 
importlib.abc import Loader, MetaPathFinder from importlib.machinery import ModuleSpec, SourceFileLoader from pathlib import Path -from typing import List from maggma.core import Builder @@ -113,7 +112,7 @@ def spec_from_source(file_path: str) -> ModuleSpec: return spec -def load_builder_from_source(file_path: str) -> List[Builder]: +def load_builder_from_source(file_path: str) -> list[Builder]: """ Loads Maggma Builders from a Python source file. """ diff --git a/src/maggma/core/builder.py b/src/maggma/core/builder.py index 9017d92df..25f81f96a 100644 --- a/src/maggma/core/builder.py +++ b/src/maggma/core/builder.py @@ -4,7 +4,8 @@ import logging from abc import ABCMeta, abstractmethod -from typing import Any, Dict, Iterable, List, Union +from collections.abc import Iterable +from typing import Any, Union from monty.json import MontyDecoder, MSONable @@ -25,8 +26,8 @@ class Builder(MSONable, metaclass=ABCMeta): def __init__( self, - sources: Union[List[Store], Store], - targets: Union[List[Store], Store], + sources: Union[list[Store], Store], + targets: Union[list[Store], Store], chunk_size: int = 1000, ): """ @@ -51,7 +52,7 @@ def connect(self): for s in self.sources + self.targets: s.connect() - def prechunk(self, number_splits: int) -> Iterable[Dict]: + def prechunk(self, number_splits: int) -> Iterable[dict]: """ Part of a domain-decomposition paradigm to allow the builder to operate on multiple nodes by dividing up the IO as well as the compute @@ -93,7 +94,7 @@ def process_item(self, item: Any) -> Any: return item @abstractmethod - def update_targets(self, items: List): + def update_targets(self, items: list): """ Takes a list of items from process item and updates the targets with them. Can also perform other book keeping in the process such as storing gridfs oids, etc. 
diff --git a/src/maggma/core/store.py b/src/maggma/core/store.py index b98a20f7f..98c983fa2 100644 --- a/src/maggma/core/store.py +++ b/src/maggma/core/store.py @@ -4,9 +4,10 @@ import logging from abc import ABCMeta, abstractmethod, abstractproperty +from collections.abc import Iterator from datetime import datetime from enum import Enum -from typing import Callable, Dict, Iterator, List, Optional, Tuple, Union +from typing import Callable, Optional, Union from monty.dev import deprecated from monty.json import MontyDecoder, MSONable @@ -54,7 +55,7 @@ def __init__( self.key = key self.last_updated_field = last_updated_field self.last_updated_type = last_updated_type - self._lu_func: Tuple[Callable, Callable] = ( + self._lu_func: tuple[Callable, Callable] = ( LU_KEY_ISOFORMAT if DateTimeFormat(last_updated_type) == DateTimeFormat.IsoFormat else (identity, identity) ) self.validator = validator @@ -89,7 +90,7 @@ def close(self): """ @abstractmethod - def count(self, criteria: Optional[Dict] = None) -> int: + def count(self, criteria: Optional[dict] = None) -> int: """ Counts the number of documents matching the query criteria. @@ -100,12 +101,12 @@ def count(self, criteria: Optional[Dict] = None) -> int: @abstractmethod def query( self, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Dict]: + ) -> Iterator[dict]: """ Queries the Store for a set of documents. @@ -119,7 +120,7 @@ def query( """ @abstractmethod - def update(self, docs: Union[List[Dict], Dict], key: Union[List, str, None] = None): + def update(self, docs: Union[list[dict], dict], key: Union[list, str, None] = None): """ Update documents into the Store. 
@@ -147,13 +148,13 @@ def ensure_index(self, key: str, unique: bool = False) -> bool: @abstractmethod def groupby( self, - keys: Union[List[str], str], - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + keys: Union[list[str], str], + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Tuple[Dict, List[Dict]]]: + ) -> Iterator[tuple[dict, list[dict]]]: """ Simple grouping function that will group documents by keys. @@ -172,7 +173,7 @@ def groupby( """ @abstractmethod - def remove_docs(self, criteria: Dict): + def remove_docs(self, criteria: dict): """ Remove docs matching the query dictionary. @@ -182,9 +183,9 @@ def remove_docs(self, criteria: Dict): def query_one( self, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, ): """ Queries the Store for a single document. @@ -197,7 +198,7 @@ def query_one( """ return next(self.query(criteria=criteria, properties=properties, sort=sort), None) - def distinct(self, field: str, criteria: Optional[Dict] = None, all_exist: bool = False) -> List: + def distinct(self, field: str, criteria: Optional[dict] = None, all_exist: bool = False) -> list: """ Get all distinct values for a field. @@ -236,7 +237,7 @@ def last_updated(self) -> datetime: return self._lu_func[0](get(doc, self.last_updated_field)) - def newer_in(self, target: "Store", criteria: Optional[Dict] = None, exhaustive: bool = False) -> List[str]: + def newer_in(self, target: "Store", criteria: Optional[dict] = None, exhaustive: bool = False) -> list[str]: """ Returns the keys of documents that are newer in the target Store than this Store. 
diff --git a/src/maggma/core/validator.py b/src/maggma/core/validator.py index f323658af..ee4796472 100644 --- a/src/maggma/core/validator.py +++ b/src/maggma/core/validator.py @@ -5,7 +5,6 @@ """ from abc import ABCMeta, abstractmethod -from typing import Dict, List from monty.json import MSONable @@ -18,7 +17,7 @@ class Validator(MSONable, metaclass=ABCMeta): """ @abstractmethod - def is_valid(self, doc: Dict) -> bool: + def is_valid(self, doc: dict) -> bool: """ Determines if the document is valid. @@ -27,7 +26,7 @@ def is_valid(self, doc: Dict) -> bool: """ @abstractmethod - def validation_errors(self, doc: Dict) -> List[str]: + def validation_errors(self, doc: dict) -> list[str]: """ If document is not valid, provides a list of strings to display for why validation has failed. diff --git a/src/maggma/stores/advanced_stores.py b/src/maggma/stores/advanced_stores.py index 138aa06a3..c0fc8b808 100644 --- a/src/maggma/stores/advanced_stores.py +++ b/src/maggma/stores/advanced_stores.py @@ -3,7 +3,8 @@ """ import json import os -from typing import Dict, Iterator, List, Optional, Tuple, Union +from collections.abc import Iterator +from typing import Optional, Union from mongogrant import Client from mongogrant.client import check @@ -191,7 +192,7 @@ class AliasingStore(Store): Special Store that aliases for the primary accessors. """ - def __init__(self, store: Store, aliases: Dict, **kwargs): + def __init__(self, store: Store, aliases: dict, **kwargs): """ Args: store: the store to wrap around @@ -219,7 +220,7 @@ def name(self) -> str: """ return self.store.name - def count(self, criteria: Optional[Dict] = None) -> int: + def count(self, criteria: Optional[dict] = None) -> int: """ Counts the number of documents matching the query criteria. 
@@ -232,12 +233,12 @@ def count(self, criteria: Optional[Dict] = None) -> int: def query( self, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Dict]: + ) -> Iterator[dict]: """ Queries the Store for a set of documents. @@ -262,7 +263,7 @@ def query( substitute(d, self.aliases) yield d - def distinct(self, field: str, criteria: Optional[Dict] = None, all_exist: bool = False) -> List: + def distinct(self, field: str, criteria: Optional[dict] = None, all_exist: bool = False) -> list: """ Get all distinct values for a field. @@ -278,13 +279,13 @@ def distinct(self, field: str, criteria: Optional[Dict] = None, all_exist: bool def groupby( self, - keys: Union[List[str], str], - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + keys: Union[list[str], str], + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Tuple[Dict, List[Dict]]]: + ) -> Iterator[tuple[dict, list[dict]]]: """ Simple grouping function that will group documents by keys. @@ -319,7 +320,7 @@ def groupby( return self.store.groupby(keys=keys, properties=properties, criteria=criteria, skip=skip, limit=limit) - def update(self, docs: Union[List[Dict], Dict], key: Union[List, str, None] = None): + def update(self, docs: Union[list[dict], dict], key: Union[list, str, None] = None): """ Update documents into the Store. 
@@ -340,7 +341,7 @@ def update(self, docs: Union[List[Dict], Dict], key: Union[List, str, None] = No self.store.update(docs, key=key) - def remove_docs(self, criteria: Dict): + def remove_docs(self, criteria: dict): """ Remove docs matching the query dictionary. @@ -411,7 +412,7 @@ def name(self) -> str: return f"Sandbox[{self.store.name}][{self.sandbox}]" @property - def sbx_criteria(self) -> Dict: + def sbx_criteria(self) -> dict: """ Returns: the sandbox criteria dict used to filter the source store. @@ -420,7 +421,7 @@ def sbx_criteria(self) -> Dict: return {"sbxn": self.sandbox} return {"$or": [{"sbxn": {"$in": [self.sandbox]}}, {"sbxn": {"$exists": False}}]} - def count(self, criteria: Optional[Dict] = None) -> int: + def count(self, criteria: Optional[dict] = None) -> int: """ Counts the number of documents matching the query criteria. @@ -432,12 +433,12 @@ def count(self, criteria: Optional[Dict] = None) -> int: def query( self, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Dict]: + ) -> Iterator[dict]: """ Queries the Store for a set of documents. @@ -454,13 +455,13 @@ def query( def groupby( self, - keys: Union[List[str], str], - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + keys: Union[list[str], str], + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Tuple[Dict, List[Dict]]]: + ) -> Iterator[tuple[dict, list[dict]]]: """ Simple grouping function that will group documents by keys. 
@@ -481,7 +482,7 @@ def groupby( return self.store.groupby(keys=keys, properties=properties, criteria=criteria, skip=skip, limit=limit) - def update(self, docs: Union[List[Dict], Dict], key: Union[List, str, None] = None): + def update(self, docs: Union[list[dict], dict], key: Union[list, str, None] = None): """ Update documents into the Store. @@ -500,7 +501,7 @@ def update(self, docs: Union[List[Dict], Dict], key: Union[List, str, None] = No self.store.update(docs, key=key) - def remove_docs(self, criteria: Dict): + def remove_docs(self, criteria: dict): """ Remove docs matching the query dictionary. diff --git a/src/maggma/stores/aws.py b/src/maggma/stores/aws.py index 6059aec68..13f45b76e 100644 --- a/src/maggma/stores/aws.py +++ b/src/maggma/stores/aws.py @@ -3,12 +3,13 @@ import threading import warnings import zlib +from collections.abc import Iterator from concurrent.futures import wait from concurrent.futures.thread import ThreadPoolExecutor from hashlib import sha1 from io import BytesIO from json import dumps -from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, Union +from typing import Any, Callable, Optional, Union import msgpack # type: ignore from monty.msgpack import default as monty_default @@ -47,7 +48,7 @@ def __init__( key: str = "fs_id", store_hash: bool = True, unpack_data: bool = True, - searchable_fields: Optional[List[str]] = None, + searchable_fields: Optional[list[str]] = None, index_store_kwargs: Optional[dict] = None, **kwargs, ): @@ -151,7 +152,7 @@ def _collection(self): # For now returns the index collection since that is what we would "search" on return self.index._collection - def count(self, criteria: Optional[Dict] = None) -> int: + def count(self, criteria: Optional[dict] = None) -> int: """ Counts the number of documents matching the query criteria. 
@@ -162,12 +163,12 @@ def count(self, criteria: Optional[Dict] = None) -> int: def query( self, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Dict]: + ) -> Iterator[dict]: """ Queries the Store for a set of documents. @@ -214,7 +215,7 @@ def query( yield data - def _read_data(self, data: bytes, compress_header: str) -> Dict: + def _read_data(self, data: bytes, compress_header: str) -> dict: """Reads the data and transforms it into a dictionary. Allows for subclasses to apply custom schemes for transforming the data retrieved from S3. @@ -241,7 +242,7 @@ def _unpack(data: bytes, compressed: bool): # Should just return the unpacked object then let the user run process_decoded return msgpack.unpackb(data, raw=False) - def distinct(self, field: str, criteria: Optional[Dict] = None, all_exist: bool = False) -> List: + def distinct(self, field: str, criteria: Optional[dict] = None, all_exist: bool = False) -> list: """ Get all distinct values for a field. @@ -254,13 +255,13 @@ def distinct(self, field: str, criteria: Optional[Dict] = None, all_exist: bool def groupby( self, - keys: Union[List[str], str], - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + keys: Union[list[str], str], + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Tuple[Dict, List[Dict]]]: + ) -> Iterator[tuple[dict, list[dict]]]: """ Simple grouping function that will group documents by keys. 
@@ -300,9 +301,9 @@ def ensure_index(self, key: str, unique: bool = False) -> bool: def update( self, - docs: Union[List[Dict], Dict], - key: Union[List, str, None] = None, - additional_metadata: Union[str, List[str], None] = None, + docs: Union[list[dict], dict], + key: Union[list, str, None] = None, + additional_metadata: Union[str, list[str], None] = None, ): """ Update documents into the Store. @@ -331,7 +332,7 @@ def update( self._write_to_s3_and_index(docs, key + additional_metadata + self.searchable_fields) - def _write_to_s3_and_index(self, docs: List[Dict], search_keys: List[str]): + def _write_to_s3_and_index(self, docs: list[dict], search_keys: list[str]): """Implements updating of the provided documents in S3 and the index. Allows for subclasses to apply custom approaches to parellizing the writing. @@ -416,7 +417,7 @@ def _get_decompression_function(self) -> Callable: """Returns the function to use for decompressing data.""" return zlib.decompress - def write_doc_to_s3(self, doc: Dict, search_keys: List[str]) -> Dict: + def write_doc_to_s3(self, doc: dict, search_keys: list[str]) -> dict: """ Write the data to s3 and return the metadata to be inserted into the index db. @@ -484,7 +485,7 @@ def _sanitize_key(key): # Additionally, MinIO requires lowercase keys return str(key).replace("_", "-").lower() - def remove_docs(self, criteria: Dict, remove_s3_object: bool = False): + def remove_docs(self, criteria: dict, remove_s3_object: bool = False): """ Remove docs matching the query dictionary. @@ -508,7 +509,7 @@ def remove_docs(self, criteria: Dict, remove_s3_object: bool = False): def last_updated(self): return self.index.last_updated - def newer_in(self, target: Store, criteria: Optional[Dict] = None, exhaustive: bool = False) -> List[str]: + def newer_in(self, target: Store, criteria: Optional[dict] = None, exhaustive: bool = False) -> list[str]: """ Returns the keys of documents that are newer in the target Store than this Store. 
diff --git a/src/maggma/stores/azure.py b/src/maggma/stores/azure.py index 8136b6daa..906bd7b14 100644 --- a/src/maggma/stores/azure.py +++ b/src/maggma/stores/azure.py @@ -5,11 +5,12 @@ import threading import warnings import zlib +from collections.abc import Iterator from concurrent.futures import wait from concurrent.futures.thread import ThreadPoolExecutor from hashlib import sha1 from json import dumps -from typing import Dict, Iterator, List, Optional, Tuple, Union +from typing import Optional, Union import msgpack # type: ignore from monty.msgpack import default as monty_default @@ -50,7 +51,7 @@ def __init__( key: str = "fs_id", store_hash: bool = True, unpack_data: bool = True, - searchable_fields: Optional[List[str]] = None, + searchable_fields: Optional[list[str]] = None, key_sanitize_dict: Optional[dict] = None, create_container: bool = False, **kwargs, @@ -169,7 +170,7 @@ def _collection(self): # For now returns the index collection since that is what we would "search" on return self.index._collection - def count(self, criteria: Optional[Dict] = None) -> int: + def count(self, criteria: Optional[dict] = None) -> int: """ Counts the number of documents matching the query criteria. @@ -181,12 +182,12 @@ def count(self, criteria: Optional[Dict] = None) -> int: def query( self, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Dict]: + ) -> Iterator[dict]: """ Queries the Store for a set of documents. 
@@ -239,7 +240,7 @@ def _unpack(data: bytes, compressed: bool): # Should just return the unpacked object then let the user run process_decoded return msgpack.unpackb(data, raw=False) - def distinct(self, field: str, criteria: Optional[Dict] = None, all_exist: bool = False) -> List: + def distinct(self, field: str, criteria: Optional[dict] = None, all_exist: bool = False) -> list: """ Get all distinct values for a field. @@ -252,13 +253,13 @@ def distinct(self, field: str, criteria: Optional[Dict] = None, all_exist: bool def groupby( self, - keys: Union[List[str], str], - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + keys: Union[list[str], str], + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Tuple[Dict, List[Dict]]]: + ) -> Iterator[tuple[dict, list[dict]]]: """ Simple grouping function that will group documents by keys. @@ -299,9 +300,9 @@ def ensure_index(self, key: str, unique: bool = False) -> bool: def update( self, - docs: Union[List[Dict], Dict], - key: Union[List, str, None] = None, - additional_metadata: Union[str, List[str], None] = None, + docs: Union[list[dict], dict], + key: Union[list, str, None] = None, + additional_metadata: Union[str, list[str], None] = None, ): """ Update documents into the Store. @@ -378,7 +379,7 @@ def _get_container(self) -> Optional[ContainerClient]: self._thread_local.container = container return self._thread_local.container - def write_doc_to_blob(self, doc: Dict, search_keys: List[str]): + def write_doc_to_blob(self, doc: dict, search_keys: list[str]): """ Write the data to an Azure blob and return the metadata to be inserted into the index db. 
@@ -449,7 +450,7 @@ def _sanitize_key(self, key): return new_key - def remove_docs(self, criteria: Dict, remove_blob_object: bool = False): + def remove_docs(self, criteria: dict, remove_blob_object: bool = False): """ Remove docs matching the query dictionary. @@ -476,7 +477,7 @@ def remove_docs(self, criteria: Dict, remove_blob_object: bool = False): def last_updated(self): return self.index.last_updated - def newer_in(self, target: Store, criteria: Optional[Dict] = None, exhaustive: bool = False) -> List[str]: + def newer_in(self, target: Store, criteria: Optional[dict] = None, exhaustive: bool = False) -> list[str]: """ Returns the keys of documents that are newer in the target Store than this Store. @@ -519,7 +520,7 @@ def rebuild_index_from_blob_data(self, **kwargs): # TODO maybe it can be avoided to reupload the data, since it is paid self.update(unpacked_data, **kwargs) - def rebuild_metadata_from_index(self, index_query: Optional[Dict] = None): + def rebuild_metadata_from_index(self, index_query: Optional[dict] = None): """ Read data from the index store and populate the metadata of the Azure Blob. Force all of the keys to be lower case to be Minio compatible diff --git a/src/maggma/stores/compound_stores.py b/src/maggma/stores/compound_stores.py index 5a27ca467..1e2ce1bb8 100644 --- a/src/maggma/stores/compound_stores.py +++ b/src/maggma/stores/compound_stores.py @@ -1,7 +1,8 @@ """ Special stores that combine underlying Stores together. 
""" +from collections.abc import Iterator from datetime import datetime from itertools import groupby -from typing import Dict, Iterator, List, Optional, Tuple, Union +from typing import Optional, Union from pydash import set_ from pymongo import MongoClient @@ -19,14 +20,14 @@ class JointStore(Store): def __init__( self, database: str, - collection_names: List[str], + collection_names: list[str], host: str = "localhost", port: int = 27017, username: str = "", password: str = "", main: Optional[str] = None, merge_at_root: bool = False, - mongoclient_kwargs: Optional[Dict] = None, + mongoclient_kwargs: Optional[dict] = None, **kwargs, ): """ @@ -102,7 +103,7 @@ def _collection(self): return self._coll @property - def nonmain_names(self) -> List: + def nonmain_names(self) -> list: """ alll non-main collection names. """ @@ -216,7 +217,7 @@ def _get_pipeline(self, criteria=None, properties=None, skip=0, limit=0): pipeline.append({"$limit": limit}) return pipeline - def count(self, criteria: Optional[Dict] = None) -> int: + def count(self, criteria: Optional[dict] = None) -> int: """ Counts the number of documents matching the query criteria. 
@@ -230,25 +231,25 @@ def count(self, criteria: Optional[Dict] = None) -> int: def query( self, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Dict]: + ) -> Iterator[dict]: pipeline = self._get_pipeline(criteria=criteria, properties=properties, skip=skip, limit=limit) agg = self._collection.aggregate(pipeline) yield from agg def groupby( self, - keys: Union[List[str], str], - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + keys: Union[list[str], str], + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Tuple[Dict, List[Dict]]]: + ) -> Iterator[tuple[dict, list[dict]]]: pipeline = self._get_pipeline(criteria=criteria, properties=properties, skip=skip, limit=limit) if not isinstance(keys, list): keys = [keys] @@ -283,7 +284,7 @@ def query_one(self, criteria=None, properties=None, **kwargs): except StopIteration: return None - def remove_docs(self, criteria: Dict): + def remove_docs(self, criteria: dict): """ Remove docs matching the query dictionary. @@ -316,7 +317,7 @@ def __eq__(self, other: object) -> bool: class ConcatStore(Store): """Store concatting multiple stores.""" - def __init__(self, stores: List[Store], **kwargs): + def __init__(self, stores: list[Store], **kwargs): """ Initialize a ConcatStore that concatenates multiple stores together to appear as one store. 
@@ -371,7 +372,7 @@ def last_updated(self) -> datetime: lus.append(lu) return max(lus) - def update(self, docs: Union[List[Dict], Dict], key: Union[List, str, None] = None): + def update(self, docs: Union[list[dict], dict], key: Union[list, str, None] = None): """ Update documents into the Store Not implemented in ConcatStore. @@ -385,7 +386,7 @@ def update(self, docs: Union[List[Dict], Dict], key: Union[List, str, None] = No """ raise NotImplementedError("No update method for ConcatStore") - def distinct(self, field: str, criteria: Optional[Dict] = None, all_exist: bool = False) -> List: + def distinct(self, field: str, criteria: Optional[dict] = None, all_exist: bool = False) -> list: """ Get all distinct values for a field. @@ -412,7 +413,7 @@ def ensure_index(self, key: str, unique: bool = False) -> bool: """ return all(store.ensure_index(key, unique) for store in self.stores) - def count(self, criteria: Optional[Dict] = None) -> int: + def count(self, criteria: Optional[dict] = None) -> int: """ Counts the number of documents matching the query criteria. @@ -425,12 +426,12 @@ def count(self, criteria: Optional[Dict] = None) -> int: def query( self, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Dict]: + ) -> Iterator[dict]: """ Queries across all Store for a set of documents. 
@@ -449,13 +450,13 @@ def query( def groupby( self, - keys: Union[List[str], str], - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + keys: Union[list[str], str], + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Tuple[Dict, List[Dict]]]: + ) -> Iterator[tuple[dict, list[dict]]]: """ Simple grouping function that will group documents by keys. @@ -490,16 +491,16 @@ def groupby( for _key, group in temp_docs: docs.extend(group) - def key_set(d: Dict) -> Tuple: + def key_set(d: dict) -> tuple: "index function based on passed in keys." - return tuple(d.get(k, None) for k in keys) + return tuple(d.get(k) for k in keys) sorted_docs = sorted(docs, key=key_set) for vals, group_iter in groupby(sorted_docs, key=key_set): id_dict = dict(zip(keys, vals)) yield id_dict, list(group_iter) - def remove_docs(self, criteria: Dict): + def remove_docs(self, criteria: dict): """ Remove docs matching the query dictionary. 
diff --git a/src/maggma/stores/file_store.py b/src/maggma/stores/file_store.py index fa1a3296a..54fb6d8a4 100644 --- a/src/maggma/stores/file_store.py +++ b/src/maggma/stores/file_store.py @@ -8,9 +8,10 @@ import os import re import warnings +from collections.abc import Iterator from datetime import datetime, timezone from pathlib import Path -from typing import Callable, Dict, Iterator, List, Optional, Union +from typing import Callable, Optional, Union from monty.io import zopen from pymongo import UpdateOne @@ -49,7 +50,7 @@ class FileStore(MemoryStore): def __init__( self, path: Union[str, Path], - file_filters: Optional[List] = None, + file_filters: Optional[list] = None, max_depth: Optional[int] = None, read_only: bool = True, include_orphans: bool = False, @@ -133,9 +134,9 @@ def name(self) -> str: def add_metadata( self, - metadata: Optional[Dict] = None, - query: Optional[Dict] = None, - auto_data: Optional[Callable[[Dict], Dict]] = None, + metadata: Optional[dict] = None, + query: Optional[dict] = None, + auto_data: Optional[Callable[[dict], dict]] = None, **kwargs, ): """ @@ -180,7 +181,7 @@ def get_metadata_from_filename(d): self.update(updated_docs, key=self.key) - def read(self) -> List[Dict]: + def read(self) -> list[dict]: """ Iterate through all files in the Store folder and populate the Store with dictionaries containing basic information about each file. @@ -216,7 +217,7 @@ def read(self) -> List[Dict]: return file_list - def _create_record_from_file(self, f: Path) -> Dict: + def _create_record_from_file(self, f: Path) -> dict: """ Given the path to a file, return a Dict that constitutes a record of basic information about that file. 
The keys in the returned dict @@ -328,7 +329,7 @@ def connect(self, force_reset: bool = False): if len(requests) > 0: self._collection.bulk_write(requests, ordered=False) - def update(self, docs: Union[List[Dict], Dict], key: Union[List, str, None] = None): + def update(self, docs: Union[list[dict], dict], key: Union[list, str, None] = None): """ Update items in the Store. Only possible if the store is not read only. Any new fields that are added will be written to the JSON file in the root directory @@ -375,14 +376,14 @@ def _filter_data(self, d): def query( # type: ignore self, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, - hint: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, + hint: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, contents_size_limit: Optional[int] = 0, - ) -> Iterator[Dict]: + ) -> Iterator[dict]: """ Queries the Store for a set of documents. @@ -463,9 +464,9 @@ def query( # type: ignore def query_one( self, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, contents_size_limit: Optional[int] = None, ): """ @@ -490,7 +491,7 @@ def query_one( None, ) - def remove_docs(self, criteria: Dict, confirm: bool = False): + def remove_docs(self, criteria: dict, confirm: bool = False): """ Remove items matching the query dictionary. 
diff --git a/src/maggma/stores/gridfs.py b/src/maggma/stores/gridfs.py index 2350dc14d..bb6b9ae0f 100644 --- a/src/maggma/stores/gridfs.py +++ b/src/maggma/stores/gridfs.py @@ -7,8 +7,9 @@ import copy import json import zlib +from collections.abc import Iterator from datetime import datetime -from typing import Any, Dict, Iterator, List, Optional, Tuple, Union +from typing import Any, Optional, Union import gridfs from monty.json import jsanitize @@ -52,9 +53,9 @@ def __init__( password: str = "", compression: bool = False, ensure_metadata: bool = False, - searchable_fields: Optional[List[str]] = None, + searchable_fields: Optional[list[str]] = None, auth_source: Optional[str] = None, - mongoclient_kwargs: Optional[Dict] = None, + mongoclient_kwargs: Optional[dict] = None, ssh_tunnel: Optional[SSHTunnel] = None, **kwargs, ): @@ -178,7 +179,7 @@ def last_updated(self) -> datetime: return self._files_store.last_updated @classmethod - def transform_criteria(cls, criteria: Dict) -> Dict: + def transform_criteria(cls, criteria: dict) -> dict: """ Allow client to not need to prepend 'metadata.' to query fields. @@ -194,7 +195,7 @@ def transform_criteria(cls, criteria: Dict) -> Dict: return new_criteria - def count(self, criteria: Optional[Dict] = None) -> int: + def count(self, criteria: Optional[dict] = None) -> int: """ Counts the number of documents matching the query criteria. @@ -208,12 +209,12 @@ def count(self, criteria: Optional[Dict] = None) -> int: def query( self, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Dict]: + ) -> Iterator[dict]: """ Queries the GridFS Store for a set of documents. 
Will check to see if data can be returned from @@ -272,7 +273,7 @@ def query( yield data - def distinct(self, field: str, criteria: Optional[Dict] = None, all_exist: bool = False) -> List: + def distinct(self, field: str, criteria: Optional[dict] = None, all_exist: bool = False) -> list: """ Get all distinct values for a field. This function only operates on the metadata in the files collection. @@ -291,13 +292,13 @@ def distinct(self, field: str, criteria: Optional[Dict] = None, all_exist: bool def groupby( self, - keys: Union[List[str], str], - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + keys: Union[list[str], str], + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Tuple[Dict, List[Dict]]]: + ) -> Iterator[tuple[dict, list[dict]]]: """ Simple grouping function that will group documents by keys. Will only work if the keys are included in the files @@ -347,9 +348,9 @@ def ensure_index(self, key: str, unique: Optional[bool] = False) -> bool: def update( self, - docs: Union[List[Dict], Dict], - key: Union[List, str, None] = None, - additional_metadata: Union[str, List[str], None] = None, + docs: Union[list[dict], dict], + key: Union[list, str, None] = None, + additional_metadata: Union[str, list[str], None] = None, ): """ Update documents into the Store. @@ -401,7 +402,7 @@ def update( for fdoc in self._files_collection.find(search_doc, ["_id"]).sort("uploadDate", -1).skip(1): self._collection.delete(fdoc["_id"]) - def remove_docs(self, criteria: Dict): + def remove_docs(self, criteria: dict): """ Remove docs matching the query dictionary. 
@@ -448,8 +449,8 @@ def __init__( database: Optional[str] = None, compression: bool = False, ensure_metadata: bool = False, - searchable_fields: Optional[List[str]] = None, - mongoclient_kwargs: Optional[Dict] = None, + searchable_fields: Optional[list[str]] = None, + mongoclient_kwargs: Optional[dict] = None, **kwargs, ): """ diff --git a/src/maggma/stores/mongolike.py b/src/maggma/stores/mongolike.py index 6d431aa1a..bcbd11c72 100644 --- a/src/maggma/stores/mongolike.py +++ b/src/maggma/stores/mongolike.py @@ -13,9 +13,11 @@ from maggma.stores.ssh_tunnel import SSHTunnel try: - from typing import Any, Callable, Dict, Iterator, List, Literal, Optional, Tuple, Union + from collections.abc import Iterator + from typing import Any, Callable, Literal, Optional, Union except ImportError: - from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, Union + from collections.abc import Iterator + from typing import Any, Callable, Optional, Union from typing_extensions import Literal @@ -55,8 +57,8 @@ def __init__( ssh_tunnel: Optional[SSHTunnel] = None, safe_update: bool = False, auth_source: Optional[str] = None, - mongoclient_kwargs: Optional[Dict] = None, - default_sort: Optional[Dict[str, Union[Sort, int]]] = None, + mongoclient_kwargs: Optional[dict] = None, + default_sort: Optional[dict[str, Union[Sort, int]]] = None, **kwargs, ): """ @@ -168,7 +170,7 @@ def from_launchpad_file(cls, lp_file, collection_name, **kwargs): return cls(**db_creds, **kwargs) - def distinct(self, field: str, criteria: Optional[Dict] = None, all_exist: bool = False) -> List: + def distinct(self, field: str, criteria: Optional[dict] = None, all_exist: bool = False) -> list: """ Get all distinct values for a field. 
@@ -191,13 +193,13 @@ def distinct(self, field: str, criteria: Optional[Dict] = None, all_exist: bool def groupby( self, - keys: Union[List[str], str], - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + keys: Union[list[str], str], + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Tuple[Dict, List[Dict]]]: + ) -> Iterator[tuple[dict, list[dict]]]: """ Simple grouping function that will group documents by keys. @@ -266,8 +268,8 @@ def _collection(self): def count( self, - criteria: Optional[Dict] = None, - hint: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + hint: Optional[dict[str, Union[Sort, int]]] = None, ) -> int: """ Counts the number of documents matching the query criteria. @@ -295,14 +297,14 @@ def count( def query( # type: ignore self, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, - hint: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, + hint: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, **kwargs, - ) -> Iterator[Dict]: + ) -> Iterator[dict]: """ Queries the Store for a set of documents. @@ -368,7 +370,7 @@ def ensure_index(self, key: str, unique: Optional[bool] = False) -> bool: except Exception: return False - def update(self, docs: Union[List[Dict], Dict], key: Union[List, str, None] = None): + def update(self, docs: Union[list[dict], dict], key: Union[list, str, None] = None): """ Update documents into the Store. 
@@ -416,7 +418,7 @@ def update(self, docs: Union[List[Dict], Dict], key: Union[List, str, None] = No else: raise e - def remove_docs(self, criteria: Dict): + def remove_docs(self, criteria: dict): """ Remove docs matching the query dictionary. @@ -457,8 +459,8 @@ def __init__( collection_name: str, database: Optional[str] = None, ssh_tunnel: Optional[SSHTunnel] = None, - mongoclient_kwargs: Optional[Dict] = None, - default_sort: Optional[Dict[str, Union[Sort, int]]] = None, + mongoclient_kwargs: Optional[dict] = None, + default_sort: Optional[dict[str, Union[Sort, int]]] = None, **kwargs, ): """ @@ -555,13 +557,13 @@ def __hash__(self): def groupby( self, - keys: Union[List[str], str], - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + keys: Union[list[str], str], + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Tuple[Dict, List[Dict]]]: + ) -> Iterator[tuple[dict, list[dict]]]: """ Simple grouping function that will group documents by keys. 
@@ -617,7 +619,7 @@ class JSONStore(MemoryStore): def __init__( self, - paths: Union[str, List[str]], + paths: Union[str, list[str]], read_only: bool = True, serialization_option: Optional[int] = None, serialization_default: Optional[Callable[[Any], Any]] = None, @@ -694,7 +696,7 @@ def connect(self, force_reset: bool = False): # create the .json file if it does not exist if not self.read_only and not Path(self.paths[0]).exists(): with zopen(self.paths[0], "w", encoding=self.encoding) as f: - data: List[dict] = [] + data: list[dict] = [] bytesdata = orjson.dumps(data) f.write(bytesdata.decode("utf-8")) @@ -712,7 +714,7 @@ def connect(self, force_reset: bool = False): """ ) - def read_json_file(self, path) -> List: + def read_json_file(self, path) -> list: """ Helper method to read the contents of a JSON file and generate a list of docs. @@ -736,7 +738,7 @@ def read_json_file(self, path) -> List: return objects - def update(self, docs: Union[List[Dict], Dict], key: Union[List, str, None] = None): + def update(self, docs: Union[list[dict], dict], key: Union[list, str, None] = None): """ Update documents into the Store. @@ -753,7 +755,7 @@ def update(self, docs: Union[List[Dict], Dict], key: Union[List, str, None] = No if not self.read_only: self.update_json_file() - def remove_docs(self, criteria: Dict): + def remove_docs(self, criteria: dict): """ Remove docs matching the query dictionary. @@ -891,8 +893,8 @@ def name(self) -> str: def count( self, - criteria: Optional[Dict] = None, - hint: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + hint: Optional[dict[str, Union[Sort, int]]] = None, ) -> int: """ Counts the number of documents matching the query criteria. 
@@ -913,7 +915,7 @@ def count( return self._collection.count_documents(filter=criteria) - def update(self, docs: Union[List[Dict], Dict], key: Union[List, str, None] = None): + def update(self, docs: Union[list[dict], dict], key: Union[list, str, None] = None): """ Update documents into the Store. diff --git a/src/maggma/stores/open_data.py b/src/maggma/stores/open_data.py index 328788d0f..876313c08 100644 --- a/src/maggma/stores/open_data.py +++ b/src/maggma/stores/open_data.py @@ -1,8 +1,9 @@ import gzip import re +from collections.abc import Generator from datetime import datetime from io import BytesIO, StringIO -from typing import Dict, Generator, List, Optional, Tuple, Union +from typing import Optional, Union import jsonlines import numpy as np @@ -60,7 +61,7 @@ def index_data(self): def set_index_data(self, new_index: pd.DataFrame): self._data = new_index - def _verify_criteria(self, criteria: Dict) -> Tuple[str, str, List]: + def _verify_criteria(self, criteria: dict) -> tuple[str, str, list]: query_string, is_in_key, is_in_list = "", None, None if criteria and "query" not in criteria and "is_in" not in criteria: raise AttributeError("Pandas memory store only support query or is_in") @@ -76,12 +77,12 @@ def _verify_criteria(self, criteria: Dict) -> Tuple[str, str, List]: def query( self, - criteria: Optional[Dict] = None, - properties: Union[List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - criteria_fields: Union[List, None] = None, + criteria_fields: Union[list, None] = None, ) -> pd.DataFrame: """ Queries the Store for a set of documents. 
@@ -135,9 +136,9 @@ def _query( index: pd.DataFrame, query_string: str, is_in_key: str, - is_in_list: List, - properties: Union[List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + is_in_list: list, + properties: Union[list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, ) -> pd.DataFrame: @@ -160,7 +161,7 @@ def _query( ret = ret[:limit] return ret - def count(self, criteria: Optional[Dict] = None, criteria_fields: Union[List, None] = None) -> int: + def count(self, criteria: Optional[dict] = None, criteria_fields: Union[list, None] = None) -> int: """ Counts the number of documents matching the query criteria. @@ -174,7 +175,7 @@ def count(self, criteria: Optional[Dict] = None, criteria_fields: Union[List, No return len(self.query(criteria=criteria, criteria_fields=criteria_fields)) def distinct( - self, field: str, criteria: Optional[Dict] = None, criteria_fields: Union[List, None] = None + self, field: str, criteria: Optional[dict] = None, criteria_fields: Union[list, None] = None ) -> pd.Series: """ Get all distinct values for a field. @@ -209,9 +210,9 @@ def last_updated(self) -> datetime: def newer_in( self, target: "PandasMemoryStore", - criteria: Optional[Dict] = None, + criteria: Optional[dict] = None, exhaustive: bool = False, - criteria_fields: Union[List, None] = None, + criteria_fields: Union[list, None] = None, ) -> pd.Series: """ Returns the keys of documents that are newer in the target @@ -463,10 +464,10 @@ class OpenDataStore(S3IndexStore): def __init__( self, index: S3IndexStore = None, # set _index to this and create property - searchable_fields: Optional[List[str]] = None, + searchable_fields: Optional[list[str]] = None, object_file_extension: str = ".jsonl.gz", access_as_public_bucket: bool = False, - object_grouping: Optional[List[str]] = None, + object_grouping: Optional[list[str]] = None, **kwargs, ): """Initializes an OpenDataStore. 
@@ -541,12 +542,12 @@ def update( def query( self, - criteria: Optional[Dict] = None, - properties: Union[List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - criteria_fields: Union[List, None] = None, + criteria_fields: Union[list, None] = None, ) -> pd.DataFrame: """ Queries the Store for a set of documents. diff --git a/src/maggma/stores/shared_stores.py b/src/maggma/stores/shared_stores.py index 11babc5bc..18ff2238c 100644 --- a/src/maggma/stores/shared_stores.py +++ b/src/maggma/stores/shared_stores.py @@ -1,7 +1,8 @@ +from collections.abc import Iterator from functools import partial from multiprocessing.managers import BaseManager from threading import Lock -from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, Union +from typing import Any, Callable, Optional, Union from monty.json import MontyDecoder @@ -87,7 +88,7 @@ def close(self): """ self.multistore.close(self.store) - def count(self, criteria: Optional[Dict] = None) -> int: + def count(self, criteria: Optional[dict] = None) -> int: """ Counts the number of documents matching the query criteria. @@ -98,12 +99,12 @@ def count(self, criteria: Optional[Dict] = None) -> int: def query( self, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, - ) -> Iterator[Dict]: + ) -> Iterator[dict]: """ Queries the Store for a set of documents. 
@@ -124,7 +125,7 @@ def query( limit=limit, ) - def update(self, docs: Union[List[Dict], Dict], key: Union[List, str, None] = None, **kwargs): + def update(self, docs: Union[list[dict], dict], key: Union[list, str, None] = None, **kwargs): """ Update documents into the Store. @@ -152,14 +153,14 @@ def ensure_index(self, key: str, unique: bool = False, **kwargs) -> bool: def groupby( self, - keys: Union[List[str], str], - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + keys: Union[list[str], str], + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, **kwargs, - ) -> Iterator[Tuple[Dict, List[Dict]]]: + ) -> Iterator[tuple[dict, list[dict]]]: """ Simple grouping function that will group documents by keys. @@ -180,7 +181,7 @@ def groupby( self.store, keys=keys, criteria=criteria, properties=properties, sort=sort, skip=skip, limit=limit, **kwargs ) - def remove_docs(self, criteria: Dict, **kwargs): + def remove_docs(self, criteria: dict, **kwargs): """ Remove docs matching the query dictionary. @@ -191,9 +192,9 @@ def remove_docs(self, criteria: Dict, **kwargs): def query_one( self, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, **kwargs, ): """ @@ -207,7 +208,7 @@ def query_one( """ return self.multistore.query_one(self.store, criteria=criteria, properties=properties, sort=sort, **kwargs) - def distinct(self, field: str, criteria: Optional[Dict] = None, all_exist: bool = False, **kwargs) -> List: + def distinct(self, field: str, criteria: Optional[dict] = None, all_exist: bool = False, **kwargs) -> list: """ Get all distinct values for a field. 
@@ -392,7 +393,7 @@ def close_all(self): for store in self._stores: store.close() - def count(self, store: Store, criteria: Optional[Dict] = None, **kwargs) -> int: + def count(self, store: Store, criteria: Optional[dict] = None, **kwargs) -> int: """ Counts the number of documents matching the query criteria. @@ -405,13 +406,13 @@ def count(self, store: Store, criteria: Optional[Dict] = None, **kwargs) -> int: def query( self, store: Store, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, **kwargs, - ) -> List[Dict]: + ) -> list[dict]: """ Queries the Store for a set of documents. @@ -431,7 +432,7 @@ def query( ) ) - def update(self, store: Store, docs: Union[List[Dict], Dict], key: Union[List, str, None] = None, **kwargs): + def update(self, store: Store, docs: Union[list[dict], dict], key: Union[list, str, None] = None, **kwargs): """ Update documents into the Store. @@ -462,14 +463,14 @@ def ensure_index(self, store: Store, key: str, unique: bool = False, **kwargs) - def groupby( self, store: Store, - keys: Union[List[str], str], - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + keys: Union[list[str], str], + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, skip: int = 0, limit: int = 0, **kwargs, - ) -> Iterator[Tuple[Dict, List[Dict]]]: + ) -> Iterator[tuple[dict, list[dict]]]: """ Simple grouping function that will group documents by keys. 
@@ -491,7 +492,7 @@ def groupby( keys=keys, criteria=criteria, properties=properties, sort=sort, skip=skip, limit=limit, **kwargs ) - def remove_docs(self, store: Store, criteria: Dict, **kwargs): + def remove_docs(self, store: Store, criteria: dict, **kwargs): """ Remove docs matching the query dictionary. @@ -504,9 +505,9 @@ def remove_docs(self, store: Store, criteria: Dict, **kwargs): def query_one( self, store: Store, - criteria: Optional[Dict] = None, - properties: Union[Dict, List, None] = None, - sort: Optional[Dict[str, Union[Sort, int]]] = None, + criteria: Optional[dict] = None, + properties: Union[dict, list, None] = None, + sort: Optional[dict[str, Union[Sort, int]]] = None, **kwargs, ): """ @@ -525,8 +526,8 @@ def query_one( ) def distinct( - self, store: Store, field: str, criteria: Optional[Dict] = None, all_exist: bool = False, **kwargs - ) -> List: + self, store: Store, field: str, criteria: Optional[dict] = None, all_exist: bool = False, **kwargs + ) -> list: """ Get all distinct values for a field. 
diff --git a/src/maggma/stores/ssh_tunnel.py b/src/maggma/stores/ssh_tunnel.py index 1732d9843..510837e79 100644 --- a/src/maggma/stores/ssh_tunnel.py +++ b/src/maggma/stores/ssh_tunnel.py @@ -1,5 +1,5 @@ from socket import socket -from typing import Dict, Optional, Tuple +from typing import Optional from monty.json import MSONable from sshtunnel import SSHTunnelForwarder @@ -8,7 +8,7 @@ class SSHTunnel(MSONable): """SSH tunnel to remote server.""" - __TUNNELS: Dict[str, SSHTunnelForwarder] = {} + __TUNNELS: dict[str, SSHTunnelForwarder] = {} def __init__( self, @@ -81,7 +81,7 @@ def stop(self): self.tunnel.stop() @property - def local_address(self) -> Tuple[str, int]: + def local_address(self) -> tuple[str, int]: return self.tunnel.local_bind_address diff --git a/src/maggma/utils.py b/src/maggma/utils.py index 49786b3c0..c4fe278e9 100644 --- a/src/maggma/utils.py +++ b/src/maggma/utils.py @@ -5,9 +5,10 @@ import logging import signal import uuid +from collections.abc import Iterable from datetime import datetime, timedelta from importlib import import_module -from typing import Dict, Iterable, Optional, Set, Union +from typing import Optional, Union from bson.json_util import ObjectId from dateutil import parser @@ -103,7 +104,7 @@ def to_dt(s: Union[datetime, str]) -> datetime: LU_KEY_ISOFORMAT = (to_dt, to_isoformat_ceil_ms) -def recursive_update(d: Dict, u: Dict): +def recursive_update(d: dict, u: dict): """ Recursive updates d with values from u. @@ -135,7 +136,7 @@ def grouper(iterable: Iterable, n: int) -> Iterable: return iter(lambda: list(itertools.islice(iterable, n)), []) -def lazy_substitute(d: Dict, aliases: Dict): +def lazy_substitute(d: dict, aliases: dict): """ Simple top level substitute that doesn't dive into mongo like strings. 
""" @@ -145,7 +146,7 @@ def lazy_substitute(d: Dict, aliases: Dict): del d[key] -def substitute(d: Dict, aliases: Dict): +def substitute(d: dict, aliases: dict): """ Substitutes keys in dictionary Accepts multilevel mongo like keys. @@ -156,7 +157,7 @@ def substitute(d: Dict, aliases: Dict): unset(d, key) -def unset(d: Dict, key: str): +def unset(d: dict, key: str): """ Unsets a key. """ @@ -265,7 +266,7 @@ def emit(self, record): self.reporting_store.update(maggma_record, key="_id") -def get_flat_models_from_model(model: BaseModel, known_models: Optional[Set[BaseModel]] = None): +def get_flat_models_from_model(model: BaseModel, known_models: Optional[set[BaseModel]] = None): """Get all sub-models from a pydantic model. Args: diff --git a/src/maggma/validators.py b/src/maggma/validators.py index 09affcf44..7228ba0e7 100644 --- a/src/maggma/validators.py +++ b/src/maggma/validators.py @@ -4,7 +4,6 @@ that Store. """ -from typing import Dict, List from jsonschema import ValidationError, validate from jsonschema.validators import validator_for @@ -23,7 +22,7 @@ class JSONSchemaValidator(Validator): JSON schema. See the tests for an example of this. """ - def __init__(self, schema: Dict, strict: bool = False): + def __init__(self, schema: dict, strict: bool = False): """ Args: strict: Informs Store how to treat Validator: if @@ -45,7 +44,7 @@ def strict(self) -> bool: return self._strict @property - def schema(self) -> Dict: + def schema(self) -> dict: """ Defines a JSON schema for your document, which is used by the default `validate_doc()` method. 
@@ -58,7 +57,7 @@ def schema(self) -> Dict: """ return self._schema - def is_valid(self, doc: Dict) -> bool: + def is_valid(self, doc: dict) -> bool: """ Returns True or False if validator initialized with strict=False, or returns True or raises ValidationError @@ -75,7 +74,7 @@ def is_valid(self, doc: Dict) -> bool: raise return False - def validation_errors(self, doc: Dict) -> List[str]: + def validation_errors(self, doc: dict) -> list[str]: """ If document is not valid, provides a list of strings to display for why validation has failed. diff --git a/tests/api/test_api.py b/tests/api/test_api.py index c92e39483..cc0256afa 100644 --- a/tests/api/test_api.py +++ b/tests/api/test_api.py @@ -1,7 +1,7 @@ import json from enum import Enum from random import choice, randint -from typing import Any, Tuple +from typing import Any from urllib.parse import urlencode import pytest @@ -74,7 +74,7 @@ def test_msonable(owner_store, pet_store): assert k in api_dict -def search_helper(payload, base: str = "/?", debug=True) -> Tuple[Response, Any]: +def search_helper(payload, base: str = "/?", debug=True) -> tuple[Response, Any]: """ Helper function to directly query search endpoints diff --git a/tests/builders/test_copy_builder.py b/tests/builders/test_copy_builder.py index d8c0bfde1..6a0993844 100644 --- a/tests/builders/test_copy_builder.py +++ b/tests/builders/test_copy_builder.py @@ -40,7 +40,7 @@ def old_docs(now): @pytest.fixture() def new_docs(now): toc = now + timedelta(seconds=1) - return [{"lu": toc, "k": k, "v": "new"} for k in range(0, 10)] + return [{"lu": toc, "k": k, "v": "new"} for k in range(10)] @pytest.fixture() diff --git a/tests/builders/test_group_builder.py b/tests/builders/test_group_builder.py index 808c9b36a..fe9d63ca0 100644 --- a/tests/builders/test_group_builder.py +++ b/tests/builders/test_group_builder.py @@ -3,7 +3,6 @@ """ from datetime import datetime from random import randint -from typing import Dict, List import pytest @@ -41,7 +40,7 @@ def 
target(): class DummyGrouper(GroupBuilder): - def unary_function(self, items: List[Dict]) -> Dict: + def unary_function(self, items: list[dict]) -> dict: """ Processing function for GroupBuilder From 1fc0e33d9cb08add3d19ec4272e0030f61970bbd Mon Sep 17 00:00:00 2001 From: Ryan Date: Tue, 14 May 2024 12:30:07 -0400 Subject: [PATCH 6/6] update imports --- src/maggma/api/utils.py | 2 +- src/maggma/stores/mongolike.py | 17 ++++------------- 2 files changed, 5 insertions(+), 14 deletions(-) diff --git a/src/maggma/api/utils.py b/src/maggma/api/utils.py index 41b333811..18df8d12f 100644 --- a/src/maggma/api/utils.py +++ b/src/maggma/api/utils.py @@ -3,6 +3,7 @@ from typing import ( Any, Callable, + Literal, Optional, Union, get_args, # pragma: no cover @@ -13,7 +14,6 @@ from pydantic import BaseModel from pydantic._internal._utils import lenient_issubclass from pydantic.fields import FieldInfo -from typing_extensions import Literal from maggma.utils import get_flat_models_from_model diff --git a/src/maggma/stores/mongolike.py b/src/maggma/stores/mongolike.py index bcbd11c72..83d4098fa 100644 --- a/src/maggma/stores/mongolike.py +++ b/src/maggma/stores/mongolike.py @@ -5,21 +5,10 @@ """ import warnings +from collections.abc import Iterator from itertools import chain, groupby from pathlib import Path - -from ruamel.yaml import YAML - -from maggma.stores.ssh_tunnel import SSHTunnel - -try: - from collections.abc import Iterator - from typing import Any, Callable, Literal, Optional, Union -except ImportError: - from collections.abc import Iterator - from typing import Any, Callable, Optional, Union - - from typing_extensions import Literal +from typing import Any, Callable, Literal, Optional, Union import bson import mongomock @@ -31,8 +20,10 @@ from pydash import get, has, set_ from pymongo import MongoClient, ReplaceOne, uri_parser from pymongo.errors import ConfigurationError, DocumentTooLarge, OperationFailure +from ruamel.yaml import YAML from maggma.core import Sort, 
Store, StoreError +from maggma.stores.ssh_tunnel import SSHTunnel from maggma.utils import confirm_field_index, to_dt try: