From a67df2a879e76f58d5aeab12afe21d4350b89f02 Mon Sep 17 00:00:00 2001 From: Will Date: Mon, 14 Jun 2021 16:41:53 -0500 Subject: [PATCH 1/2] feat(elasticsearch): elasticsearch implementation drop-in replace redis with ES --- .github/workflows/ci.yml | 4 - .secrets.baseline | 4 +- README.md | 27 +- docker-compose.yml | 33 +- poetry.lock | 506 +++++++----------- pyproject.toml | 4 +- src/mds/agg_mds/datastore/__init__.py | 38 +- .../agg_mds/datastore/elasticsearch_dao.py | 234 ++++++++ src/mds/agg_mds/datastore/redis_cache.py | 119 ---- src/mds/agg_mds/query.py | 15 +- src/mds/config.py | 3 +- src/mds/main.py | 6 +- src/mds/populate.py | 97 ++-- tests/conftest.py | 79 +-- tests/test_agg_mds_datastore.py | 48 +- tests/test_agg_mds_elasticsearch_dao.py | 248 +++++++++ tests/test_agg_mds_query.py | 276 +++++----- tests/test_agg_mds_redis_cache.py | 179 ------- tests/test_populate.py | 83 +-- 19 files changed, 979 insertions(+), 1024 deletions(-) create mode 100644 src/mds/agg_mds/datastore/elasticsearch_dao.py delete mode 100644 src/mds/agg_mds/datastore/redis_cache.py create mode 100644 tests/test_agg_mds_elasticsearch_dao.py delete mode 100644 tests/test_agg_mds_redis_cache.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a5b8a7bc..1d9b6607 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,10 +16,6 @@ jobs: ports: - 5432:5432 options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - redis: - image: redislabs/rejson:latest - ports: - - 6379:6379 steps: - uses: actions/checkout@v1 - name: Set up Python 3.7 diff --git a/.secrets.baseline b/.secrets.baseline index d360ae63..b5ea8211 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -3,7 +3,7 @@ "files": "poetry.lock", "lines": null }, - "generated_at": "2021-05-13T15:22:19Z", + "generated_at": "2021-06-22T16:17:18Z", "plugins_used": [ { "name": "AWSKeyDetector" @@ -70,7 +70,7 @@ { "hashed_secret": 
"6eae3a5b062c6d0d79f070c26e6d62486b40cb46", "is_verified": false, - "line_number": 60, + "line_number": 50, "type": "Secret Keyword" } ] diff --git a/README.md b/README.md index 0074efc0..5c90f663 100644 --- a/README.md +++ b/README.md @@ -19,17 +19,7 @@ The aggregated MDS is a service which caches metadata from commons metadata serv The aggregate metadata APIs and migrations are disabled by default unless `USE_AGG_MDS=true` is specified. -The aggregate cache is built using Redis and the [RedisJson](http://redisjson.io) module. To quickly populate it you can run the following: - -```bash -docker run -p 6379:6379 --name redis-redisjson redislabs/rejson:latest -``` - -and then - -```bash -python src/mds/populate.py --config configs/brh_config.json -``` +The aggregate cache is built using Elasticsearch. See the `docker-compose.yaml` file (specifically the `aggregate_migration` service) for details regarding how aggregate data is populated. ## Installation @@ -54,14 +44,13 @@ Create a file `.env` in the root directory of the checkout: (uncomment to override the default) ```python -# DB_HOST = "..." # default: localhost -# DB_PORT = ... # default: 5432 -# DB_USER = "..." # default: current user -# DB_PASSWORD = "..." # default: empty -# DB_DATABASE = "..." # default: current user -# USE_AGG_MDS = "..." # default: false -# REDIS_DB_HOST = "..." # default: localhost -# REDIS_DB_PORT = "..." # default: 6379 +# DB_HOST = "..." # default: localhost +# DB_PORT = ... # default: 5432 +# DB_USER = "..." # default: current user +# DB_PASSWORD = "..." # default: empty +# DB_DATABASE = "..." # default: current user +# USE_AGG_MDS = "..." # default: false +# GEN3_ES_ENDPOINT = "..." 
# default: empty ``` Run database schema migration: diff --git a/docker-compose.yml b/docker-compose.yml index c90ad0f0..d2c7aa2e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -8,12 +8,12 @@ services: - .:/src depends_on: - db_migration - - redis_migration + - aggregate_migration environment: - DB_HOST=db - DB_USER=metadata_user - USE_AGG_MDS=true - - REDIS_DB_HOST=redis + - GEN3_ES_ENDPOINT=http://esproxy-service:9200 command: /env/bin/uvicorn --host 0.0.0.0 --port 80 mds.asgi:app --reload db_migration: build: . @@ -26,15 +26,14 @@ services: - DB_HOST=db - DB_USER=metadata_user command: /env/bin/alembic upgrade head - redis_migration: + aggregate_migration: build: . image: mds volumes: - .:/src environment: - USE_AGG_MDS=true - - REDIS_DB_HOST=redis - command: /env/bin/python /src/src/mds/populate.py --config /src/configs/brh_config.json --hostname redis + command: /env/bin/python /src/src/mds/populate.py --config /src/configs/brh_config.json --hostname esproxy-service --port 9200 db: image: postgres environment: @@ -43,7 +42,25 @@ services: volumes: - ./postgres-data:/var/lib/postgresql/data - ./postgres-init:/docker-entrypoint-initdb.d:ro - redis: - image: redislabs/rejson:latest + esproxy-service: + image: docker.elastic.co/elasticsearch/elasticsearch-oss:6.8.12 + container_name: esproxy-service + environment: + - cluster.name=elasticsearch-cluster + - bootstrap.memory_lock=false + - "ES_JAVA_OPTS=-Xms1g -Xmx1g" + entrypoint: + - /bin/bash + # mmapfs requires systemctl update - see https://www.elastic.co/guide/en/elasticsearch/reference/current/index-modules-store.html#mmapfs + command: + - -c + - "echo -e 'cluster.name: docker-cluster\nhttp.host: 0.0.0.0\nindex.store.type: niofs' > /usr/share/elasticsearch/config/elasticsearch.yml && /usr/local/bin/docker-entrypoint.sh eswrapper" + ulimits: + memlock: + soft: -1 + hard: -1 + nofile: + soft: 65536 + hard: 65536 ports: - - "6379:6379" + - 9200:9200 diff --git a/poetry.lock b/poetry.lock index 
43ddcc53..36a108a6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,18 +1,6 @@ -[[package]] -name = "aioredis" -version = "1.3.1" -description = "asyncio (PEP 3156) Redis support" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -async-timeout = "*" -hiredis = "*" - [[package]] name = "alembic" -version = "1.6.0" +version = "1.6.5" description = "A database migration tool for SQLAlchemy." category = "main" optional = false @@ -24,17 +12,9 @@ python-dateutil = "*" python-editor = ">=0.3" SQLAlchemy = ">=1.3.0" -[[package]] -name = "async-timeout" -version = "3.0.1" -description = "Timeout context manager for asyncio programs" -category = "main" -optional = false -python-versions = ">=3.5.3" - [[package]] name = "asyncpg" -version = "0.22.0" +version = "0.23.0" description = "An asyncio PostgreSQL driver" category = "main" optional = false @@ -66,21 +46,21 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "attrs" -version = "20.3.0" +version = "21.2.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"] -docs = ["furo", "sphinx", "zope.interface"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest 
(>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] [[package]] name = "authlib" -version = "0.15.3" +version = "0.15.4" description = "The ultimate Python library in building OAuth and OpenID Connect servers." category = "main" optional = false @@ -152,7 +132,7 @@ python-versions = "*" [[package]] name = "certifi" -version = "2020.12.5" +version = "2021.5.30" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -225,7 +205,7 @@ test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pret [[package]] name = "dataclasses-json" -version = "0.5.3" +version = "0.5.4" description = "Easily serialize dataclasses to and from JSON" category = "main" optional = false @@ -241,21 +221,19 @@ typing-inspect = ">=0.4.0" dev = ["pytest (>=6.2.3)", "ipython", "mypy (>=0.710)", "hypothesis", "portray", "flake8", "simplejson"] [[package]] -name = "fakeredis" -version = "1.5.0" -description = "Fake implementation of redis API for testing purposes." 
-category = "dev" +name = "elasticsearch" +version = "6.8.2" +description = "Python client for Elasticsearch" +category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, <4" [package.dependencies] -redis = "<3.6.0" -six = ">=1.12" -sortedcontainers = "*" +urllib3 = ">=1.21.1" [package.extras] -aioredis = ["aioredis"] -lua = ["lupa"] +develop = ["requests (>=2.0.0,<3.0.0)", "nose", "coverage", "mock", "pyyaml", "nosexcover", "numpy", "pandas", "sphinx (<1.7)", "sphinx-rtd-theme"] +requests = ["requests (>=2.4.0,<3.0.0)"] [[package]] name = "fastapi" @@ -277,7 +255,7 @@ test = ["pytest (>=4.0.0)", "pytest-cov", "mypy", "black", "isort", "requests", [[package]] name = "gen3authz" -version = "1.0.5" +version = "1.1.0" description = "Gen3 authz client" category = "main" optional = false @@ -311,15 +289,15 @@ quart = ["gino-quart (>=0.1.0,<0.2.0)"] [[package]] name = "gino-starlette" -version = "0.1.1" +version = "0.1.2" description = "An extension for GINO to integrate with Starlette" category = "main" optional = false python-versions = ">=3.6,<4.0" [package.dependencies] -gino = ">=1.0.0rc2,<2.0.0" -starlette = ">=0.13.0,<0.14.0" +gino = ">=1.0.0,<2.0.0" +starlette = ">=0.13.0,<0.15.0" [[package]] name = "gunicorn" @@ -355,14 +333,6 @@ python-versions = "*" hpack = ">=3.0,<4" hyperframe = ">=5.2.0,<6" -[[package]] -name = "hiredis" -version = "2.0.0" -description = "Python wrapper for hiredis" -category = "main" -optional = false -python-versions = ">=3.6" - [[package]] name = "hpack" version = "3.0.0" @@ -468,24 +438,24 @@ lingua = ["lingua"] [[package]] name = "markupsafe" -version = "1.1.1" +version = "2.0.1" description = "Safely add untrusted strings to HTML/XML markup." 
category = "main" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +python-versions = ">=3.6" [[package]] name = "marshmallow" -version = "3.11.1" +version = "3.12.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." category = "main" optional = false python-versions = ">=3.5" [package.extras] -dev = ["pytest", "pytz", "simplejson", "mypy (==0.812)", "flake8 (==3.9.0)", "flake8-bugbear (==21.3.2)", "pre-commit (>=2.4,<3.0)", "tox"] -docs = ["sphinx (==3.4.3)", "sphinx-issues (==1.2.0)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.2)"] -lint = ["mypy (==0.812)", "flake8 (==3.9.0)", "flake8-bugbear (==21.3.2)", "pre-commit (>=2.4,<3.0)"] +dev = ["pytest", "pytz", "simplejson", "mypy (==0.812)", "flake8 (==3.9.2)", "flake8-bugbear (==21.4.3)", "pre-commit (>=2.4,<3.0)", "tox"] +docs = ["sphinx (==4.0.0)", "sphinx-issues (==1.2.0)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.4)"] +lint = ["mypy (==0.812)", "flake8 (==3.9.2)", "flake8-bugbear (==21.4.3)", "pre-commit (>=2.4,<3.0)"] tests = ["pytest", "pytz", "simplejson"] [[package]] @@ -501,7 +471,7 @@ marshmallow = ">=2.0.0" [[package]] name = "more-itertools" -version = "8.7.0" +version = "8.8.0" description = "More routines for operating on iterables, beyond itertools" category = "main" optional = false @@ -550,11 +520,11 @@ dev = ["pre-commit", "tox"] [[package]] name = "psycopg2-binary" -version = "2.8.6" +version = "2.9.1" description = "psycopg2 - Python-PostgreSQL Database Adapter" category = "main" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +python-versions = ">=3.6" [[package]] name = "py" @@ -574,7 +544,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.8.1" +version = "1.8.2" description = "Data validation and settings management using python 3.6 type hinting" 
category = "main" optional = false @@ -650,7 +620,7 @@ testing = ["coverage", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" -version = "2.11.1" +version = "2.12.1" description = "Pytest plugin for measuring coverage." category = "dev" optional = false @@ -659,9 +629,10 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.dependencies] coverage = ">=5.2.1" pytest = ">=4.6" +toml = "*" [package.extras] -testing = ["fields", "hunter", "process-tests (==2.0.2)", "six", "pytest-xdist", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] [[package]] name = "python-dateutil" @@ -690,29 +661,6 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -[[package]] -name = "redis" -version = "3.5.3" -description = "Python client for Redis key-value store" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.extras] -hiredis = ["hiredis (>=0.1.3)"] - -[[package]] -name = "rejson" -version = "0.5.4" -description = "ReJSON Python Client" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -redis = ">=3.0.0" -six = ">=1.10" - [[package]] name = "requests" version = "2.25.1" @@ -745,7 +693,7 @@ httpx = ">=0.12,<0.13" [[package]] name = "rfc3986" -version = "1.4.0" +version = "1.5.0" description = "Validating URI References per RFC 3986" category = "main" optional = false @@ -756,7 +704,7 @@ idna2008 = ["idna"] [[package]] name = "six" -version = "1.15.0" +version = "1.16.0" description = "Python 2 and 3 compatibility utilities" category = "main" optional = false @@ -770,14 +718,6 @@ category = "main" optional = false python-versions = ">=3.5" -[[package]] -name = "sortedcontainers" -version = "2.3.0" -description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -category = "dev" -optional = false -python-versions = "*" - 
[[package]] name = "sqlalchemy" version = "1.3.24" @@ -817,6 +757,14 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + [[package]] name = "typing-extensions" version = "3.10.0.0" @@ -827,7 +775,7 @@ python-versions = "*" [[package]] name = "typing-inspect" -version = "0.6.0" +version = "0.7.1" description = "Runtime inspection utilities for typing module." category = "main" optional = false @@ -839,16 +787,16 @@ typing-extensions = ">=3.7.4" [[package]] name = "urllib3" -version = "1.26.4" +version = "1.26.5" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] +brotli = ["brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] -brotli = ["brotlipy (>=0.6.0)"] [[package]] name = "uvicorn" @@ -920,37 +868,29 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pyt [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "de704c7ed4dc646582728610511f8edebafb6698c84109c2c2d6a1e88a0645cb" +content-hash = "363adf38d6b966f74883f4f3b41f3d9c6b4eeae882204bbc964c23a39197261b" [metadata.files] -aioredis = [ - {file = "aioredis-1.3.1-py3-none-any.whl", hash = "sha256:b61808d7e97b7cd5a92ed574937a079c9387fdadd22bfbfa7ad2fd319ecc26e3"}, - {file = "aioredis-1.3.1.tar.gz", hash = "sha256:15f8af30b044c771aee6787e5ec24694c048184c7b9e54c3b60c750a4b93273a"}, -] alembic = [ - {file = "alembic-1.6.0-py2.py3-none-any.whl", hash = "sha256:d7f6d4dc6abed18e1591932a85349a7d621298ef0daa40021609cdca54a6047c"}, - {file = "alembic-1.6.0.tar.gz", hash = 
"sha256:3ff4f90d23dd283d7822d78ffbc07cb256344ae1d60500b933378bc13407efcc"}, -] -async-timeout = [ - {file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"}, - {file = "async_timeout-3.0.1-py3-none-any.whl", hash = "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"}, + {file = "alembic-1.6.5-py2.py3-none-any.whl", hash = "sha256:e78be5b919f5bb184e3e0e2dd1ca986f2362e29a2bc933c446fe89f39dbe4e9c"}, + {file = "alembic-1.6.5.tar.gz", hash = "sha256:a21fedebb3fb8f6bbbba51a11114f08c78709377051384c9c5ead5705ee93a51"}, ] asyncpg = [ - {file = "asyncpg-0.22.0-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:ccd75cfb4710c7e8debc19516e2e1d4c9863cce3f7a45a3822980d04b16f4fdd"}, - {file = "asyncpg-0.22.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:3af9a8511569983481b5cf94db17b7cbecd06b5398aac9c82e4acb69bb1f4090"}, - {file = "asyncpg-0.22.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:d1cb6e5b58a4e017335f2a1886e153a32bd213ffa9f7129ee5aced2a7210fa3c"}, - {file = "asyncpg-0.22.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0f4604a88386d68c46bf7b50c201a9718515b0d2df6d5e9ce024d78ed0f7189c"}, - {file = "asyncpg-0.22.0-cp36-cp36m-win_amd64.whl", hash = "sha256:b37efafbbec505287bd1499a88f4b59ff2b470709a1d8f7e4db198d3e2c5a2c4"}, - {file = "asyncpg-0.22.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:1d3efdec14f3fbcc665b77619f8b420564f98b89632a21694be2101dafa6bcf2"}, - {file = "asyncpg-0.22.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f1df7cfd12ef484210717e7827cc2d4d550b16a1b4dd4566c93914c7a2259352"}, - {file = "asyncpg-0.22.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f514b13bc54bde65db6cd1d0832ae27f21093e3cb66f741e078fab77768971c"}, - {file = "asyncpg-0.22.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:82e23ba5b37c0c7ee96f290a95cbf9815b2d29b302e8b9c4af1de9b7759fd27b"}, - {file = "asyncpg-0.22.0-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:062e4ff80e68fe56066c44a8c51989a98785904bf86f49058a242a5887be6ce3"}, - {file = "asyncpg-0.22.0-cp38-cp38-win_amd64.whl", hash = "sha256:e7a67fb0244e4a5b3baaa40092d0efd642da032b5e891d75947dab993b47d925"}, - {file = "asyncpg-0.22.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:1bbe5e829de506c743cbd5240b3722e487c53669a5f1e159abcc3b92a64a985e"}, - {file = "asyncpg-0.22.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:2cb730241dfe650b9626eae00490cca4cfeb00871ed8b8f389f3a4507b328683"}, - {file = "asyncpg-0.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:2e3875c82ae609b21e562e6befdc35e52c4290e49d03e7529275d59a0595ca97"}, - {file = "asyncpg-0.22.0.tar.gz", hash = "sha256:348ad471d9bdd77f0609a00c860142f47c81c9123f4064d13d65c8569415d802"}, + {file = "asyncpg-0.23.0-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:f86378bbfbec7334af03bad4d5fd432149286665ecc8bfbcb7135da56b15d34b"}, + {file = "asyncpg-0.23.0-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:255839c8c52ebd72d6d0159564d7eb8f70fcf6cc9ce7cdc7e98328fd3279bf52"}, + {file = "asyncpg-0.23.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:11102ac2febbc208427f39e4555537ecf188bd70ef7b285fc92c6c16b748b4c6"}, + {file = "asyncpg-0.23.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d82d94badd34c8adbc5c85b85085317444cd9e062fc8b956221b34ba4c823b56"}, + {file = "asyncpg-0.23.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a88654ede00596a7bdaa08066ff0505aed491f790621dcdb478066c7ddfd1a3d"}, + {file = "asyncpg-0.23.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a2031df7573c80186339039cc2c4e684648fea5eaa9537c24f18c509bda2cd3f"}, + {file = "asyncpg-0.23.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2710b5740cbd572e0fddc20986a44707f05d3f84e29fab72abe87fb8c2fc6885"}, + {file = "asyncpg-0.23.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b784138e69752aaa905b60c5a07a891445706824358fe1440d47113db72c8946"}, + {file = 
"asyncpg-0.23.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:a19429d480a387346ae74b38da20e8da004337f14e5066f4bd6a10a8bbe74d3c"}, + {file = "asyncpg-0.23.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:43c44d323c3bd6514fbe6a892ccfdc551259bd92e98dd34ad1a52bad8c7974f3"}, + {file = "asyncpg-0.23.0-cp38-cp38-win_amd64.whl", hash = "sha256:df84f3e93cd08cb31a252510a2e7be4bb15e6dff8a06d91f94c057a305d5d55d"}, + {file = "asyncpg-0.23.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:98bef539326408da0c2ed0714432e4c79e345820697914318013588ff235b581"}, + {file = "asyncpg-0.23.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bd6e1f3db9889b5d987b6a1cab49c5b5070756290f3420a4c7a63d942d73ab69"}, + {file = "asyncpg-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:ceedd46f569f5efb8b4def3d1dd6a0d85e1a44722608d68aa1d2d0f8693c1bff"}, + {file = "asyncpg-0.23.0.tar.gz", hash = "sha256:812dafa4c9e264d430adcc0f5899f0dc5413155a605088af696f952d72d36b5e"}, ] asynctest = [ {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, @@ -961,12 +901,12 @@ atomicwrites = [ {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ - {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, - {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, + {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, + {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, ] authlib = [ - {file = "Authlib-0.15.3-py2.py3-none-any.whl", hash = "sha256:0f6af3a38d37dd77361808dd3f2e258b647668dac6d2cefcefc4c4ebc3c7d2b2"}, - {file = "Authlib-0.15.3.tar.gz", hash = 
"sha256:7dde11ba45db51e97169c261362fab3193073100b7387e60c159db1eec470bbc"}, + {file = "Authlib-0.15.4-py2.py3-none-any.whl", hash = "sha256:d9fe5edb59801b16583faa86f88d798d99d952979b9616d5c735b9170b41ae2c"}, + {file = "Authlib-0.15.4.tar.gz", hash = "sha256:37df3a2554bc6fe0da3cc6848c44fac2ae40634a7f8fc72543947f4330b26464"}, ] authutils = [ {file = "authutils-5.0.5-py3-none-any.whl", hash = "sha256:91e81838b8ba419d5fd92550747f8f60a1f5e91fee4146109e35728bea140034"}, @@ -988,8 +928,8 @@ cdislogging = [ {file = "cdislogging-1.0.0.tar.gz", hash = "sha256:a1cc2e48d5fc26d4b354b80c6497f1f1136f3e3e4f1d1855de8980ccf497fa0a"}, ] certifi = [ - {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, - {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, + {file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"}, + {file = "certifi-2021.5.30.tar.gz", hash = "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"}, ] cffi = [ {file = "cffi-1.14.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991"}, @@ -1111,28 +1051,28 @@ cryptography = [ {file = "cryptography-3.4.7.tar.gz", hash = "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713"}, ] dataclasses-json = [ - {file = "dataclasses-json-0.5.3.tar.gz", hash = "sha256:fe17da934cfc4ec792ebe7e9a303434ecf4f5f8d8a7705acfbbe7ccbd34bf1ae"}, - {file = "dataclasses_json-0.5.3-py3-none-any.whl", hash = "sha256:740e7b564d72ddaa0f66406b4ecb799447afda2799c1c425a4a76151bfcfda50"}, + {file = "dataclasses-json-0.5.4.tar.gz", hash = "sha256:6c3976816fd3cdd8db3be2b516b64fc083acd46ac22c680d3dc24cb1d6ae3367"}, + {file = "dataclasses_json-0.5.4-py3-none-any.whl", hash = 
"sha256:0b25143f621d0122a2de123c156a5f6909c28d0fdd8c2e1ca2a6e4042130ad32"}, ] -fakeredis = [ - {file = "fakeredis-1.5.0-py3-none-any.whl", hash = "sha256:e0416e4941cecd3089b0d901e60c8dc3c944f6384f5e29e2261c0d3c5fa99669"}, - {file = "fakeredis-1.5.0.tar.gz", hash = "sha256:1ac0cef767c37f51718874a33afb5413e69d132988cb6a80c6e6dbeddf8c7623"}, +elasticsearch = [ + {file = "elasticsearch-6.8.2-py2.py3-none-any.whl", hash = "sha256:1aedf00b73f5d1e77cb4df70fec58f2efb664be4ce2686374239aa6c0373c65c"}, + {file = "elasticsearch-6.8.2.tar.gz", hash = "sha256:c3a560bb83e4981b5a5c82080d2ceb99686d33692ef53365656129478aa5ddb2"}, ] fastapi = [ {file = "fastapi-0.54.2-py3-none-any.whl", hash = "sha256:c8651f8316956240c2ffe5bc05c334c8359a3887e642720a9b23319c51e82907"}, {file = "fastapi-0.54.2.tar.gz", hash = "sha256:fff1b4a7fdf4812abb4507fb7aa30ef4206a0435839626ebe3b2871ec9aa367f"}, ] gen3authz = [ - {file = "gen3authz-1.0.5-py3-none-any.whl", hash = "sha256:f610bfd1166361dc2d5bd41035cbb6e9997b064c0885cf43e05232366d4a0866"}, - {file = "gen3authz-1.0.5.tar.gz", hash = "sha256:ff160385114e008484f468bf053bb0f3898441e610ec0f37dbbec63e419dcae2"}, + {file = "gen3authz-1.1.0-py3-none-any.whl", hash = "sha256:7b3f7f09455687a6afd0721c76392f696196e9eeb0c2478d1f68a0d3144e6170"}, + {file = "gen3authz-1.1.0.tar.gz", hash = "sha256:58e470e29e8492648dec2f6a4a001fb90c1fae0f65f6e1e18fed827916df9c30"}, ] gino = [ {file = "gino-1.0.1-py3-none-any.whl", hash = "sha256:56df57cfdefbaf897a7c4897c265a0e91a8cca80716fb64f7d3cf6d501fdfb3d"}, {file = "gino-1.0.1.tar.gz", hash = "sha256:fe4189e82fe9d20c4a5f03fc775fb91c168061c5176b4c95623caeef22316150"}, ] gino-starlette = [ - {file = "gino-starlette-0.1.1.tar.gz", hash = "sha256:a1afe419b34146449a502a5483085a60a75a46639534fff50510172b47c930fb"}, - {file = "gino_starlette-0.1.1-py3-none-any.whl", hash = "sha256:de6ec87168097a52668359c842e9e3be4d339423c7805c615377975a1a19cb6c"}, + {file = "gino-starlette-0.1.2.tar.gz", hash = 
"sha256:868200bdd205bd4c2ce0856861daf2b2b6cf6c9de7010fbf9a2ca36871d6b8dc"}, + {file = "gino_starlette-0.1.2-py3-none-any.whl", hash = "sha256:5f874610a50b306331fc52e6937b3febf490470ce791142f791a41f938c7c89a"}, ] gunicorn = [ {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, @@ -1146,49 +1086,6 @@ h2 = [ {file = "h2-3.2.0-py2.py3-none-any.whl", hash = "sha256:61e0f6601fa709f35cdb730863b4e5ec7ad449792add80d1410d4174ed139af5"}, {file = "h2-3.2.0.tar.gz", hash = "sha256:875f41ebd6f2c44781259005b157faed1a5031df3ae5aa7bcb4628a6c0782f14"}, ] -hiredis = [ - {file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"}, - {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26"}, - {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea"}, - {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99"}, - {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05"}, - {file = "hiredis-2.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a"}, - {file = "hiredis-2.0.0-cp36-cp36m-win32.whl", hash = "sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63"}, - {file = "hiredis-2.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6"}, - {file = "hiredis-2.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485"}, - {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_i686.whl", hash = 
"sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a"}, - {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc"}, - {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579"}, - {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e"}, - {file = "hiredis-2.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79"}, - {file = "hiredis-2.0.0-cp37-cp37m-win32.whl", hash = "sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc"}, - {file = "hiredis-2.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a"}, - {file = "hiredis-2.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb"}, - {file = "hiredis-2.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5"}, - {file = "hiredis-2.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298"}, - {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d"}, - {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db"}, - {file = "hiredis-2.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048"}, - {file = "hiredis-2.0.0-cp38-cp38-win32.whl", hash = "sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426"}, - {file = "hiredis-2.0.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581"}, - {file = "hiredis-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5"}, - {file = "hiredis-2.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e"}, - {file = "hiredis-2.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce"}, - {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443"}, - {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0"}, - {file = "hiredis-2.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e"}, - {file = "hiredis-2.0.0-cp39-cp39-win32.whl", hash = "sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d"}, - {file = "hiredis-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9"}, - {file = "hiredis-2.0.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54"}, - {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27"}, - {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d"}, - {file = "hiredis-2.0.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:07bbf9bdcb82239f319b1f09e8ef4bdfaec50ed7d7ea51a56438f39193271163"}, - {file = "hiredis-2.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a"}, - {file = 
"hiredis-2.0.0-pp37-pypy37_pp73-manylinux1_x86_64.whl", hash = "sha256:1233e303645f468e399ec906b6b48ab7cd8391aae2d08daadbb5cad6ace4bd87"}, - {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:cb2126603091902767d96bcb74093bd8b14982f41809f85c9b96e519c7e1dc41"}, - {file = "hiredis-2.0.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:f52010e0a44e3d8530437e7da38d11fb822acfb0d5b12e9cd5ba655509937ca0"}, - {file = "hiredis-2.0.0.tar.gz", hash = "sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a"}, -] hpack = [ {file = "hpack-3.0.0-py2.py3-none-any.whl", hash = "sha256:0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89"}, {file = "hpack-3.0.0.tar.gz", hash = "sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2"}, @@ -1238,70 +1135,52 @@ mako = [ {file = "Mako-1.1.4.tar.gz", hash = "sha256:17831f0b7087c313c0ffae2bcbbd3c1d5ba9eeac9c38f2eb7b50e8c99fe9d5ab"}, ] markupsafe = [ - {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, - {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"}, - {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183"}, - {file = "MarkupSafe-1.1.1-cp27-cp27m-win32.whl", hash = "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b"}, - {file = "MarkupSafe-1.1.1-cp27-cp27m-win_amd64.whl", hash = "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e"}, - {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f"}, - {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1"}, - {file = 
"MarkupSafe-1.1.1-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5"}, - {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1"}, - {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735"}, - {file = "MarkupSafe-1.1.1-cp34-cp34m-win32.whl", hash = "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21"}, - {file = "MarkupSafe-1.1.1-cp34-cp34m-win_amd64.whl", hash = "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235"}, - {file = "MarkupSafe-1.1.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b"}, - {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f"}, - {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905"}, - {file = "MarkupSafe-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1"}, - {file = "MarkupSafe-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e"}, - {file = 
"MarkupSafe-1.1.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-win32.whl", hash = "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"}, - {file = "MarkupSafe-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c"}, - {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb"}, - {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014"}, - {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850"}, - {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85"}, - {file = 
"MarkupSafe-1.1.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621"}, - {file = "MarkupSafe-1.1.1-cp39-cp39-win32.whl", hash = "sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39"}, - {file = "MarkupSafe-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8"}, - {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, + {file = 
"MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, + {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, ] marshmallow = [ - {file = "marshmallow-3.11.1-py2.py3-none-any.whl", hash = "sha256:0dd42891a5ef288217ed6410917f3c6048f585f8692075a0052c24f9bfff9dfd"}, - {file = "marshmallow-3.11.1.tar.gz", hash = "sha256:16e99cb7f630c0ef4d7d364ed0109ac194268dde123966076ab3dafb9ae3906b"}, + {file = "marshmallow-3.12.1-py2.py3-none-any.whl", hash = "sha256:b45cde981d1835145257b4a3c5cb7b80786dcf5f50dd2990749a50c16cb48e01"}, + {file = "marshmallow-3.12.1.tar.gz", hash = 
"sha256:8050475b70470cc58f4441ee92375db611792ba39ca1ad41d39cad193ea9e040"}, ] marshmallow-enum = [ {file = "marshmallow-enum-1.5.1.tar.gz", hash = "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58"}, {file = "marshmallow_enum-1.5.1-py2.py3-none-any.whl", hash = "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072"}, ] more-itertools = [ - {file = "more-itertools-8.7.0.tar.gz", hash = "sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713"}, - {file = "more_itertools-8.7.0-py3-none-any.whl", hash = "sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced"}, + {file = "more-itertools-8.8.0.tar.gz", hash = "sha256:83f0308e05477c68f56ea3a888172c78ed5d5b3c282addb67508e7ba6c8f813a"}, + {file = "more_itertools-8.8.0-py3-none-any.whl", hash = "sha256:2cf89ec599962f2ddc4d568a05defc40e0a587fbc10d5989713638864c36be4d"}, ] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, @@ -1320,41 +1199,35 @@ pluggy = [ {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, ] psycopg2-binary = [ - {file = "psycopg2-binary-2.8.6.tar.gz", hash = "sha256:11b9c0ebce097180129e422379b824ae21c8f2a6596b159c7659e2e5a00e1aa0"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:d14b140a4439d816e3b1229a4a525df917d6ea22a0771a2a78332273fd9528a4"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1fabed9ea2acc4efe4671b92c669a213db744d2af8a9fc5d69a8e9bc14b7a9db"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:f5ab93a2cb2d8338b1674be43b442a7f544a0971da062a5da774ed40587f18f5"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27m-win32.whl", hash = 
"sha256:b4afc542c0ac0db720cf516dd20c0846f71c248d2b3d21013aa0d4ef9c71ca25"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27m-win_amd64.whl", hash = "sha256:e74a55f6bad0e7d3968399deb50f61f4db1926acf4a6d83beaaa7df986f48b1c"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:0deac2af1a587ae12836aa07970f5cb91964f05a7c6cdb69d8425ff4c15d4e2c"}, - {file = "psycopg2_binary-2.8.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ad20d2eb875aaa1ea6d0f2916949f5c08a19c74d05b16ce6ebf6d24f2c9f75d1"}, - {file = "psycopg2_binary-2.8.6-cp34-cp34m-win32.whl", hash = "sha256:950bc22bb56ee6ff142a2cb9ee980b571dd0912b0334aa3fe0fe3788d860bea2"}, - {file = "psycopg2_binary-2.8.6-cp34-cp34m-win_amd64.whl", hash = "sha256:b8a3715b3c4e604bcc94c90a825cd7f5635417453b253499664f784fc4da0152"}, - {file = "psycopg2_binary-2.8.6-cp35-cp35m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:d1b4ab59e02d9008efe10ceabd0b31e79519da6fb67f7d8e8977118832d0f449"}, - {file = "psycopg2_binary-2.8.6-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:ac0c682111fbf404525dfc0f18a8b5f11be52657d4f96e9fcb75daf4f3984859"}, - {file = "psycopg2_binary-2.8.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7d92a09b788cbb1aec325af5fcba9fed7203897bbd9269d5691bb1e3bce29550"}, - {file = "psycopg2_binary-2.8.6-cp35-cp35m-win32.whl", hash = "sha256:aaa4213c862f0ef00022751161df35804127b78adf4a2755b9f991a507e425fd"}, - {file = "psycopg2_binary-2.8.6-cp35-cp35m-win_amd64.whl", hash = "sha256:c2507d796fca339c8fb03216364cca68d87e037c1f774977c8fc377627d01c71"}, - {file = "psycopg2_binary-2.8.6-cp36-cp36m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:ee69dad2c7155756ad114c02db06002f4cded41132cc51378e57aad79cc8e4f4"}, - {file = "psycopg2_binary-2.8.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e82aba2188b9ba309fd8e271702bd0d0fc9148ae3150532bbb474f4590039ffb"}, - {file = 
"psycopg2_binary-2.8.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d5227b229005a696cc67676e24c214740efd90b148de5733419ac9aaba3773da"}, - {file = "psycopg2_binary-2.8.6-cp36-cp36m-win32.whl", hash = "sha256:a0eb43a07386c3f1f1ebb4dc7aafb13f67188eab896e7397aa1ee95a9c884eb2"}, - {file = "psycopg2_binary-2.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:e1f57aa70d3f7cc6947fd88636a481638263ba04a742b4a37dd25c373e41491a"}, - {file = "psycopg2_binary-2.8.6-cp37-cp37m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:833709a5c66ca52f1d21d41865a637223b368c0ee76ea54ca5bad6f2526c7679"}, - {file = "psycopg2_binary-2.8.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ba28584e6bca48c59eecbf7efb1576ca214b47f05194646b081717fa628dfddf"}, - {file = "psycopg2_binary-2.8.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6a32f3a4cb2f6e1a0b15215f448e8ce2da192fd4ff35084d80d5e39da683e79b"}, - {file = "psycopg2_binary-2.8.6-cp37-cp37m-win32.whl", hash = "sha256:0e4dc3d5996760104746e6cfcdb519d9d2cd27c738296525d5867ea695774e67"}, - {file = "psycopg2_binary-2.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:cec7e622ebc545dbb4564e483dd20e4e404da17ae07e06f3e780b2dacd5cee66"}, - {file = "psycopg2_binary-2.8.6-cp38-cp38-macosx_10_9_x86_64.macosx_10_9_intel.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:ba381aec3a5dc29634f20692349d73f2d21f17653bda1decf0b52b11d694541f"}, - {file = "psycopg2_binary-2.8.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a0c50db33c32594305b0ef9abc0cb7db13de7621d2cadf8392a1d9b3c437ef77"}, - {file = "psycopg2_binary-2.8.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2dac98e85565d5688e8ab7bdea5446674a83a3945a8f416ad0110018d1501b94"}, - {file = "psycopg2_binary-2.8.6-cp38-cp38-win32.whl", hash = "sha256:bd1be66dde2b82f80afb9459fc618216753f67109b859a361cf7def5c7968729"}, - {file = "psycopg2_binary-2.8.6-cp38-cp38-win_amd64.whl", hash = 
"sha256:8cd0fb36c7412996859cb4606a35969dd01f4ea34d9812a141cd920c3b18be77"}, - {file = "psycopg2_binary-2.8.6-cp39-cp39-macosx_10_9_x86_64.macosx_10_9_intel.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:89705f45ce07b2dfa806ee84439ec67c5d9a0ef20154e0e475e2b2ed392a5b83"}, - {file = "psycopg2_binary-2.8.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:42ec1035841b389e8cc3692277a0bd81cdfe0b65d575a2c8862cec7a80e62e52"}, - {file = "psycopg2_binary-2.8.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7312e931b90fe14f925729cde58022f5d034241918a5c4f9797cac62f6b3a9dd"}, - {file = "psycopg2_binary-2.8.6-cp39-cp39-win32.whl", hash = "sha256:6422f2ff0919fd720195f64ffd8f924c1395d30f9a495f31e2392c2efafb5056"}, - {file = "psycopg2_binary-2.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:15978a1fbd225583dd8cdaf37e67ccc278b5abecb4caf6b2d6b8e2b948e953f6"}, + {file = "psycopg2-binary-2.9.1.tar.gz", hash = "sha256:b0221ca5a9837e040ebf61f48899926b5783668b7807419e4adae8175a31f773"}, + {file = "psycopg2_binary-2.9.1-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:c250a7ec489b652c892e4f0a5d122cc14c3780f9f643e1a326754aedf82d9a76"}, + {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aef9aee84ec78af51107181d02fe8773b100b01c5dfde351184ad9223eab3698"}, + {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123c3fb684e9abfc47218d3784c7b4c47c8587951ea4dd5bc38b6636ac57f616"}, + {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = "sha256:995fc41ebda5a7a663a254a1dcac52638c3e847f48307b5416ee373da15075d7"}, + {file = "psycopg2_binary-2.9.1-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = "sha256:fbb42a541b1093385a2d8c7eec94d26d30437d0e77c1d25dae1dcc46741a385e"}, + {file = "psycopg2_binary-2.9.1-cp36-cp36m-win32.whl", hash = 
"sha256:20f1ab44d8c352074e2d7ca67dc00843067788791be373e67a0911998787ce7d"}, + {file = "psycopg2_binary-2.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f6fac64a38f6768e7bc7b035b9e10d8a538a9fadce06b983fb3e6fa55ac5f5ce"}, + {file = "psycopg2_binary-2.9.1-cp37-cp37m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:1e3a362790edc0a365385b1ac4cc0acc429a0c0d662d829a50b6ce743ae61b5a"}, + {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f8559617b1fcf59a9aedba2c9838b5b6aa211ffedecabca412b92a1ff75aac1a"}, + {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a36c7eb6152ba5467fb264d73844877be8b0847874d4822b7cf2d3c0cb8cdcb0"}, + {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:2f62c207d1740b0bde5c4e949f857b044818f734a3d57f1d0d0edc65050532ed"}, + {file = "psycopg2_binary-2.9.1-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:cfc523edecddaef56f6740d7de1ce24a2fdf94fd5e704091856a201872e37f9f"}, + {file = "psycopg2_binary-2.9.1-cp37-cp37m-win32.whl", hash = "sha256:1e85b74cbbb3056e3656f1cc4781294df03383127a8114cbc6531e8b8367bf1e"}, + {file = "psycopg2_binary-2.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1473c0215b0613dd938db54a653f68251a45a78b05f6fc21af4326f40e8360a2"}, + {file = "psycopg2_binary-2.9.1-cp38-cp38-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:35c4310f8febe41f442d3c65066ca93cccefd75013df3d8c736c5b93ec288140"}, + {file = "psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c13d72ed6af7fd2c8acbd95661cf9477f94e381fce0792c04981a8283b52917"}, + {file = "psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14db1752acdd2187d99cb2ca0a1a6dfe57fc65c3281e0f20e597aac8d2a5bd90"}, + {file = 
"psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:aed4a9a7e3221b3e252c39d0bf794c438dc5453bc2963e8befe9d4cd324dff72"}, + {file = "psycopg2_binary-2.9.1-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:da113b70f6ec40e7d81b43d1b139b9db6a05727ab8be1ee559f3a69854a69d34"}, + {file = "psycopg2_binary-2.9.1-cp38-cp38-win32.whl", hash = "sha256:4235f9d5ddcab0b8dbd723dca56ea2922b485ea00e1dafacf33b0c7e840b3d32"}, + {file = "psycopg2_binary-2.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:988b47ac70d204aed01589ed342303da7c4d84b56c2f4c4b8b00deda123372bf"}, + {file = "psycopg2_binary-2.9.1-cp39-cp39-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:7360647ea04db2e7dff1648d1da825c8cf68dc5fbd80b8fb5b3ee9f068dcd21a"}, + {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca86db5b561b894f9e5f115d6a159fff2a2570a652e07889d8a383b5fae66eb4"}, + {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ced67f1e34e1a450cdb48eb53ca73b60aa0af21c46b9b35ac3e581cf9f00e31"}, + {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:0f2e04bd2a2ab54fa44ee67fe2d002bb90cee1c0f1cc0ebc3148af7b02034cbd"}, + {file = "psycopg2_binary-2.9.1-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:3242b9619de955ab44581a03a64bdd7d5e470cc4183e8fcadd85ab9d3756ce7a"}, + {file = "psycopg2_binary-2.9.1-cp39-cp39-win32.whl", hash = "sha256:0b7dae87f0b729922e06f85f667de7bf16455d411971b2043bbd9577af9d1975"}, + {file = "psycopg2_binary-2.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:b4d7679a08fea64573c969f6994a2631908bb2c0e69a7235648642f3d2e39a68"}, ] py = [ {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, @@ -1365,28 +1238,28 @@ pycparser = [ {file = "pycparser-2.20.tar.gz", hash = 
"sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, ] pydantic = [ - {file = "pydantic-1.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0c40162796fc8d0aa744875b60e4dc36834db9f2a25dbf9ba9664b1915a23850"}, - {file = "pydantic-1.8.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:fff29fe54ec419338c522b908154a2efabeee4f483e48990f87e189661f31ce3"}, - {file = "pydantic-1.8.1-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:fbfb608febde1afd4743c6822c19060a8dbdd3eb30f98e36061ba4973308059e"}, - {file = "pydantic-1.8.1-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:eb8ccf12295113ce0de38f80b25f736d62f0a8d87c6b88aca645f168f9c78771"}, - {file = "pydantic-1.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:20d42f1be7c7acc352b3d09b0cf505a9fab9deb93125061b376fbe1f06a5459f"}, - {file = "pydantic-1.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dde4ca368e82791de97c2ec019681ffb437728090c0ff0c3852708cf923e0c7d"}, - {file = "pydantic-1.8.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:3bbd023c981cbe26e6e21c8d2ce78485f85c2e77f7bab5ec15b7d2a1f491918f"}, - {file = "pydantic-1.8.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:830ef1a148012b640186bf4d9789a206c56071ff38f2460a32ae67ca21880eb8"}, - {file = "pydantic-1.8.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:fb77f7a7e111db1832ae3f8f44203691e15b1fa7e5a1cb9691d4e2659aee41c4"}, - {file = "pydantic-1.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3bcb9d7e1f9849a6bdbd027aabb3a06414abd6068cb3b21c49427956cce5038a"}, - {file = "pydantic-1.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2287ebff0018eec3cc69b1d09d4b7cebf277726fa1bd96b45806283c1d808683"}, - {file = "pydantic-1.8.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:4bbc47cf7925c86a345d03b07086696ed916c7663cb76aa409edaa54546e53e2"}, - {file = "pydantic-1.8.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:6388ef4ef1435364c8cc9a8192238aed030595e873d8462447ccef2e17387125"}, - {file = 
"pydantic-1.8.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:dd4888b300769ecec194ca8f2699415f5f7760365ddbe243d4fd6581485fa5f0"}, - {file = "pydantic-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:8fbb677e4e89c8ab3d450df7b1d9caed23f254072e8597c33279460eeae59b99"}, - {file = "pydantic-1.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2f2736d9a996b976cfdfe52455ad27462308c9d3d0ae21a2aa8b4cd1a78f47b9"}, - {file = "pydantic-1.8.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3114d74329873af0a0e8004627f5389f3bb27f956b965ddd3e355fe984a1789c"}, - {file = "pydantic-1.8.1-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:258576f2d997ee4573469633592e8b99aa13bda182fcc28e875f866016c8e07e"}, - {file = "pydantic-1.8.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:c17a0b35c854049e67c68b48d55e026c84f35593c66d69b278b8b49e2484346f"}, - {file = "pydantic-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:e8bc082afef97c5fd3903d05c6f7bb3a6af9fc18631b4cc9fedeb4720efb0c58"}, - {file = "pydantic-1.8.1-py3-none-any.whl", hash = "sha256:e3f8790c47ac42549dc8b045a67b0ca371c7f66e73040d0197ce6172b385e520"}, - {file = "pydantic-1.8.1.tar.gz", hash = "sha256:26cf3cb2e68ec6c0cfcb6293e69fb3450c5fd1ace87f46b64f678b0d29eac4c3"}, + {file = "pydantic-1.8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:05ddfd37c1720c392f4e0d43c484217b7521558302e7069ce8d318438d297739"}, + {file = "pydantic-1.8.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a7c6002203fe2c5a1b5cbb141bb85060cbff88c2d78eccbc72d97eb7022c43e4"}, + {file = "pydantic-1.8.2-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:589eb6cd6361e8ac341db97602eb7f354551482368a37f4fd086c0733548308e"}, + {file = "pydantic-1.8.2-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:10e5622224245941efc193ad1d159887872776df7a8fd592ed746aa25d071840"}, + {file = "pydantic-1.8.2-cp36-cp36m-win_amd64.whl", hash = "sha256:99a9fc39470010c45c161a1dc584997f1feb13f689ecf645f59bb4ba623e586b"}, + {file = 
"pydantic-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a83db7205f60c6a86f2c44a61791d993dff4b73135df1973ecd9eed5ea0bda20"}, + {file = "pydantic-1.8.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:41b542c0b3c42dc17da70554bc6f38cbc30d7066d2c2815a94499b5684582ecb"}, + {file = "pydantic-1.8.2-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:ea5cb40a3b23b3265f6325727ddfc45141b08ed665458be8c6285e7b85bd73a1"}, + {file = "pydantic-1.8.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:18b5ea242dd3e62dbf89b2b0ec9ba6c7b5abaf6af85b95a97b00279f65845a23"}, + {file = "pydantic-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:234a6c19f1c14e25e362cb05c68afb7f183eb931dd3cd4605eafff055ebbf287"}, + {file = "pydantic-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:021ea0e4133e8c824775a0cfe098677acf6fa5a3cbf9206a376eed3fc09302cd"}, + {file = "pydantic-1.8.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e710876437bc07bd414ff453ac8ec63d219e7690128d925c6e82889d674bb505"}, + {file = "pydantic-1.8.2-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:ac8eed4ca3bd3aadc58a13c2aa93cd8a884bcf21cb019f8cfecaae3b6ce3746e"}, + {file = "pydantic-1.8.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4a03cbbe743e9c7247ceae6f0d8898f7a64bb65800a45cbdc52d65e370570820"}, + {file = "pydantic-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:8621559dcf5afacf0069ed194278f35c255dc1a1385c28b32dd6c110fd6531b3"}, + {file = "pydantic-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8b223557f9510cf0bfd8b01316bf6dd281cf41826607eada99662f5e4963f316"}, + {file = "pydantic-1.8.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:244ad78eeb388a43b0c927e74d3af78008e944074b7d0f4f696ddd5b2af43c62"}, + {file = "pydantic-1.8.2-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:05ef5246a7ffd2ce12a619cbb29f3307b7c4509307b1b49f456657b43529dc6f"}, + {file = "pydantic-1.8.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:54cd5121383f4a461ff7644c7ca20c0419d58052db70d8791eacbbe31528916b"}, + {file = 
"pydantic-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:4be75bebf676a5f0f87937c6ddb061fa39cbea067240d98e298508c1bda6f3f3"}, + {file = "pydantic-1.8.2-py3-none-any.whl", hash = "sha256:fec866a0b59f372b7e776f2d7308511784dace622e0992a0b59ea3ccee0ae833"}, + {file = "pydantic-1.8.2.tar.gz", hash = "sha256:26464e57ccaafe72b7ad156fdaa4e9b9ef051f69e175dbbb463283000c05ab7b"}, ] pyjwt = [ {file = "PyJWT-1.7.1-py2.py3-none-any.whl", hash = "sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e"}, @@ -1405,8 +1278,8 @@ pytest-asyncio = [ {file = "pytest_asyncio-0.15.1-py3-none-any.whl", hash = "sha256:3042bcdf1c5d978f6b74d96a151c4cfb9dcece65006198389ccd7e6c60eb1eea"}, ] pytest-cov = [ - {file = "pytest-cov-2.11.1.tar.gz", hash = "sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7"}, - {file = "pytest_cov-2.11.1-py2.py3-none-any.whl", hash = "sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da"}, + {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, + {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, ] python-dateutil = [ {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, @@ -1450,13 +1323,6 @@ pyyaml = [ {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] -redis = [ - {file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"}, - {file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"}, -] -rejson = [ - {file = "rejson-0.5.4.tar.gz", hash = 
"sha256:0fd0933412c36f7928d6072efc7d795368342344fc19698341604414cfbc2e6e"}, -] requests = [ {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, @@ -1466,21 +1332,17 @@ respx = [ {file = "respx-0.10.1.tar.gz", hash = "sha256:190d1fb5bddaf6fcc1319a3cdfbd682c77d7167017b3283cbe79b8fb74927135"}, ] rfc3986 = [ - {file = "rfc3986-1.4.0-py2.py3-none-any.whl", hash = "sha256:af9147e9aceda37c91a05f4deb128d4b4b49d6b199775fd2d2927768abdc8f50"}, - {file = "rfc3986-1.4.0.tar.gz", hash = "sha256:112398da31a3344dc25dbf477d8df6cb34f9278a94fee2625d89e4514be8bb9d"}, + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, ] six = [ - {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, - {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] sniffio = [ {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, ] -sortedcontainers = [ - {file = "sortedcontainers-2.3.0-py2.py3-none-any.whl", hash = "sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f"}, - {file = "sortedcontainers-2.3.0.tar.gz", hash = 
"sha256:59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1"}, -] sqlalchemy = [ {file = "SQLAlchemy-1.3.24-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:87a2725ad7d41cd7376373c15fd8bf674e9c33ca56d0b8036add2d634dba372e"}, {file = "SQLAlchemy-1.3.24-cp27-cp27m-win32.whl", hash = "sha256:f597a243b8550a3a0b15122b14e49d8a7e622ba1c9d29776af741f1845478d79"}, @@ -1524,19 +1386,23 @@ starlette = [ stringcase = [ {file = "stringcase-1.2.0.tar.gz", hash = "sha256:48a06980661908efe8d9d34eab2b6c13aefa2163b3ced26972902e3bdfd87008"}, ] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] typing-extensions = [ {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, ] typing-inspect = [ - {file = "typing_inspect-0.6.0-py2-none-any.whl", hash = "sha256:de08f50a22955ddec353876df7b2545994d6df08a2f45d54ac8c05e530372ca0"}, - {file = "typing_inspect-0.6.0-py3-none-any.whl", hash = "sha256:3b98390df4d999a28cf5b35d8b333425af5da2ece8a4ea9e98f71e7591347b4f"}, - {file = "typing_inspect-0.6.0.tar.gz", hash = "sha256:8f1b1dd25908dbfd81d3bebc218011531e7ab614ba6e5bf7826d887c834afab7"}, + {file = "typing_inspect-0.7.1-py2-none-any.whl", hash = "sha256:b1f56c0783ef0f25fb064a01be6e5407e54cf4a4bf4f3ba3fe51e0bd6dcea9e5"}, + {file = "typing_inspect-0.7.1-py3-none-any.whl", hash = "sha256:3cd7d4563e997719a710a3bfe7ffb544c6b72069b6812a02e9b414a8fa3aaa6b"}, + {file = "typing_inspect-0.7.1.tar.gz", hash = 
"sha256:047d4097d9b17f46531bf6f014356111a1b6fb821a24fe7ac909853ca2a782aa"}, ] urllib3 = [ - {file = "urllib3-1.26.4-py2.py3-none-any.whl", hash = "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df"}, - {file = "urllib3-1.26.4.tar.gz", hash = "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937"}, + {file = "urllib3-1.26.5-py2.py3-none-any.whl", hash = "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c"}, + {file = "urllib3-1.26.5.tar.gz", hash = "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098"}, ] uvicorn = [ {file = "uvicorn-0.11.8-py3-none-any.whl", hash = "sha256:4b70ddb4c1946e39db9f3082d53e323dfd50634b95fd83625d778729ef1730ef"}, diff --git a/pyproject.toml b/pyproject.toml index 0f32f504..617afeb4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,11 +20,10 @@ authutils = "^5.0.4" cdislogging = "^1.0" click = "==7.1.*,>=7.1.2" pyyaml = "==5.4.*,>=5.4.1" -rejson = "==0.5.*,>=0.5.4" setuptools = "==52.0.*,>=52.0.0" dataclasses-json = "==0.5.*,>=0.5.2" -aioredis = ">=1.3.1" pytest-asyncio = "^0.15.1" +elasticsearch = "<7.0" [tool.poetry.dev-dependencies] pytest = "^5.3" @@ -32,7 +31,6 @@ pytest-cov = "^2.8" pyyaml = "^5.3.1" requests = "^2.22" respx = "*" -fakeredis = "^1.5.0" nest-asyncio = "^1.5.1" [tool.poetry.plugins."mds.modules"] diff --git a/src/mds/agg_mds/datastore/__init__.py b/src/mds/agg_mds/datastore/__init__.py index 96df87ec..493f663f 100644 --- a/src/mds/agg_mds/datastore/__init__.py +++ b/src/mds/agg_mds/datastore/__init__.py @@ -1,45 +1,57 @@ -from .redis_cache import redis_cache as redis_client +import mds.agg_mds.datastore.elasticsearch_dao as client + +""" +This abstraction may seem pointless, but works towards a few goals. This adds +a separation between the API and the data source, which helps mock any +datasource in tests.
It also helps ensure the contract around the API without +it needing to know anything about the underlying persistence, and makes it +simpler to swap out and test new backends in the future. +""" async def init(hostname, port): - await redis_client.init_cache(hostname, port) + await client.init(hostname, port) async def drop_all(): - await redis_client.json_sets("commons", []) + await client.drop_all() async def close(): - await redis_client.close() + await client.close() async def get_status(): - return await redis_client.get_status() + return await client.get_status() async def update_metadata(*args): - await redis_client.update_metadata(*args) + await client.update_metadata(*args) async def get_commons_metadata(*args): - return await redis_client.get_commons_metadata(*args) + return await client.get_commons_metadata(*args) async def get_all_named_commons_metadata(*args): - return await redis_client.get_all_named_commons_metadata(*args) + return await client.get_all_named_commons_metadata(*args) -async def get_commons_metadata_guid(*args): - return await redis_client.get_commons_metadata_guid(*args) +async def get_by_guid(*args): + return await client.get_by_guid(*args) async def get_commons_attribute(*args): - return await redis_client.get_commons_attribute(*args) + return await client.get_commons_attribute(*args) async def get_commons(): - return await redis_client.get_commons() + return await client.get_commons() async def get_all_metadata(*args): - return await redis_client.get_all_metadata(*args) + return await client.get_all_metadata(*args) + + +async def get_aggregations(*args): + return await client.get_aggregations(*args) diff --git a/src/mds/agg_mds/datastore/elasticsearch_dao.py b/src/mds/agg_mds/datastore/elasticsearch_dao.py new file mode 100644 index 00000000..0c5edcf1 --- /dev/null +++ b/src/mds/agg_mds/datastore/elasticsearch_dao.py @@ -0,0 +1,234 @@ +from elasticsearch import Elasticsearch +from typing import List, Dict +from typing import Any 
+import json +from mds import logger + + +agg_mds_index = "commons-index" +agg_mds_type = "commons" + + +agg_mds_info_index = "commons-info-index" +agg_mds_info_type = "commons-info" + + +mapping = { + "mappings": { + "commons": { + "properties": { + "__manifest": { + "type": "nested", + }, + "tags": { + "type": "nested", + }, + } + } + } +} + +elastic_search_client = None + + +async def init(hostname: str = "0.0.0.0", port: int = 9200): + global elastic_search_client + elastic_search_client = Elasticsearch( + [hostname], + scheme="http", + port=port, + ) + + +async def drop_all(): + res = elastic_search_client.indices.delete(index="_all", ignore=[400, 404]) + logger.debug(f"deleted all indexes: {res}") + res = elastic_search_client.indices.create(index=agg_mds_index, body=mapping) + logger.debug(f"created index {agg_mds_index}: {res}") + res = elastic_search_client.indices.create( + index=agg_mds_info_index, + ) + logger.debug(f"created index {agg_mds_info_index}: {res}") + + +def normalize_string_or_object(doc, key): + if key in doc and isinstance(doc[key], str): + manifest = doc[key] + doc[key] = None if manifest == "" else json.loads(manifest) + + +async def update_metadata( + name: str, + data: List[Dict], + guid_arr: List[str], + tags: Dict[str, List[str]], + info: Dict[str, str], +): + elastic_search_client.index( + index=agg_mds_info_index, + doc_type=agg_mds_info_type, + id=name, + body=info, + ) + + for doc in data: + key = list(doc.keys())[0] + # Flatten out this structure + doc = doc[key]["gen3_discovery"] + + normalize_string_or_object(doc, "__manifest") + # TODO WFH Why do we have this redundant field? Which commons has this?
+ normalize_string_or_object(doc, "_file_manifest") + normalize_string_or_object(doc, "advSearchFilters") + elastic_search_client.index( + index=agg_mds_index, doc_type=agg_mds_type, id=key, body=doc + ) + + +async def get_status(): + return elastic_search_client.cluster.health() + + +async def close(): + pass + + +async def get_commons(): + try: + res = elastic_search_client.search( + index=agg_mds_index, + body={ + "size": 0, + "aggs": {"commons_names": {"terms": {"field": "commons_name.keyword"}}}, + }, + ) + return { + "commons": [ + x["key"] for x in res["aggregations"]["commons_names"]["buckets"] + ] + } + except Exception as error: + logger.error(error) + return [] + + +async def get_all_metadata(limit, offset): + try: + res = elastic_search_client.search( + index=agg_mds_index, + body={"size": limit, "from": offset, "query": {"match_all": {}}}, + ) + byCommons = {} + for record in res["hits"]["hits"]: + id = record["_id"] + normalized = record["_source"] + commons_name = normalized["commons_name"] + if commons_name not in byCommons: + byCommons[commons_name] = [] + byCommons[commons_name].append( + { + id: { + "gen3_discovery": normalized, + } + } + ) + return byCommons + except Exception as error: + logger.error(error) + return {} + + +async def get_all_named_commons_metadata(name): + try: + return elastic_search_client.search( + index=agg_mds_index, + body={"query": {"match": {"commons_name.keyword": name}}}, + ) + except Exception as error: + logger.error(error) + return {} + + +async def metadata_tags(name): + try: + return elastic_search_client.search( + index=agg_mds_index, + body={ + "size": 0, + "aggs": { + "tags": { + "nested": {"path": "tags"}, + "aggs": { + "categories": { + "terms": {"field": "tags.category.keyword"}, + "aggs": { + "name": { + "terms": { + "field": "tags.name.keyword", + } + } + }, + } + }, + } + }, + }, + ) + except Exception as error: + logger.error(error) + return [] + + +async def get_commons_attribute(name, what): + try: + 
data = elastic_search_client.search( + index=agg_mds_info_index, + body={ + "query": { + "terms": { + "_id": [name], + } + } + }, + ) + return data["hits"]["hits"][0]["_source"] + except Exception as error: + logger.error(error) + return None + + +async def get_aggregations(name): + try: + res = elastic_search_client.search( + index=agg_mds_index, + body={ + "size": 0, + "query": { + "constant_score": { + "filter": { + "match": {"commons_name": name}, + } + } + }, + "aggs": {"_subjects_count": {"sum": {"field": "_subjects_count"}}}, + }, + ) + return { + "_subjects_count": res["aggregations"]["_subjects_count"]["value"], + } + except Exception as error: + logger.error(error) + return [] + + +async def get_by_guid(guid): + try: + data = elastic_search_client.get( + index=agg_mds_index, + doc_type=agg_mds_type, + id=guid, + ) + return data["_source"] + except Exception as error: + logger.error(error) + return None diff --git a/src/mds/agg_mds/datastore/redis_cache.py b/src/mds/agg_mds/datastore/redis_cache.py deleted file mode 100644 index d6a28f70..00000000 --- a/src/mds/agg_mds/datastore/redis_cache.py +++ /dev/null @@ -1,119 +0,0 @@ -from typing import Optional, List, Dict, Set -import json -from typing import Any -from aioredis import Redis, create_redis_pool -from datetime import datetime - - -class RedisCache: - def __init__(self): - self.redis_cache: Optional[Redis] = None - - async def init_cache(self, hostname: str = "0.0.0.0", port: int = 6379): - self.redis_cache = await create_redis_pool( - f"redis://{hostname}:{port}/0?encoding=utf-8" - ) - - async def keys(self, pattern): - return await self.redis_cache.keys(pattern) - - async def set(self, key, value): - return await self.redis_cache.set(key, value) - - async def get(self, key): - return await self.redis_cache.get(key) - - async def json_sets(self, key: str, value: Any, path: str = "."): - return await self.redis_cache.execute("JSON.SET", key, path, json.dumps(value)) - - async def json_get(self, key: 
str, path: str = "."): - resp = await self.redis_cache.execute("JSON.GET", key, path) - if not resp: - return None - return json.loads(resp) - - async def json_arr_appends(self, key: str, value: Any): - await self.redis_cache.execute("JSON.ARRAPPEND", key, ".", json.dumps(value)) - - async def json_arr_index(self, key: str, guid: str): - await self.redis_cache.execute("JSON.ARRINDEX", key, ".guids", f'"{guid}"') - - async def close(self): - self.redis_cache.close() - await self.redis_cache.wait_closed() - - # Higher level functions - - async def update_metadata( - self, - name: str, - data: List[Dict], - guid_arr: List[str], - tags: Dict[str, List[str]], - info: Dict[str, str], - aggregations: Dict[str, Dict[str, str]], - ): - await self.json_sets(f"{name}", {}) - await self.json_sets(name, data, ".metadata") - await self.json_sets(name, guid_arr, ".guids") - await self.json_sets(name, tags, ".tags") - await self.json_sets(name, info, ".info") - await self.json_sets(name, aggregations, ".aggregations") - await self.set_status(name, len(data), "none") - await self.json_arr_appends("commons", name) - - async def set_status(self, name: str, err: str, count: int): - await self.json_sets( - f"{name}.status", - { - "last_update": datetime.now().strftime("%Y%m%d%H%M%S"), - "error": err, - "count": count, - }, - ) - - async def get_status(self): - commons = await self.json_get("commons") - results = {} - for name in commons: - results.update({name: await self.json_get(f"{name}.status")}) - return results - - async def get_commons_metadata(self, name: str, limit: int, offset: int): - resp = await self.json_get(name, ".metadata") - if resp is None: - return None - return resp[offset : offset + limit] - - async def get_all_named_commons_metadata(self, name: str): - return await self.json_get(name, ".metadata") - - async def get_commons_metadata_guid(self, name: str, guid: str): - resp = await self.json_get(name, f".metadata") - if resp is None: - return None - idx = await 
self.json_arr_index(name, guid) - if idx is None: - return None - return resp[idx] - - async def get_commons_attribute(self, name: str, what: str): - return await self.json_get(name, what) - - async def get_commons(self): - resp = await self.json_get("commons") - if resp is None: - return None - return {"commons": resp} - - async def get_all_metadata(self, limit: int, offset: int): - commons = await self.get_commons() - results = {} - if commons is None: - return {} - for name in commons["commons"]: - results[name] = await self.get_commons_metadata(name, limit, offset) - return results - - -redis_cache = RedisCache() diff --git a/src/mds/agg_mds/query.py b/src/mds/agg_mds/query.py index e776593d..f3478094 100644 --- a/src/mds/agg_mds/query.py +++ b/src/mds/agg_mds/query.py @@ -1,7 +1,8 @@ from fastapi import HTTPException, Query, APIRouter, Request from starlette.status import HTTP_404_NOT_FOUND -from mds.agg_mds import datastore from mds import config +from mds.agg_mds import datastore + mod = APIRouter() @@ -23,6 +24,8 @@ async def metadata( ), offset: int = Query(0, description="Return results at this given offset."), ): + # TODO WFH How to properly return this? We think grouping by MDS is probably + # not ideal in reality. We already have commons_name in the results. 
""" Returns all metadata from all registered commons in the form: { @@ -85,7 +88,7 @@ async def metadata_info(name: str): @mod.get("/aggregate/metadata/{name}/aggregations") async def metadata_aggregations(name: str): - res = await datastore.get_commons_attribute(name, "aggregations") + res = await datastore.get_aggregations(name) if res: return res else: @@ -95,17 +98,17 @@ async def metadata_aggregations(name: str): ) -@mod.get("/aggregate/metadata/{name}/guid/{guid}:path") -async def metadata_name_guid(name: str, guid: str): +@mod.get("/aggregate/metadata/guid/{guid}") +async def metadata_name_guid(guid: str): """Get the metadata of the GUID in the named commons.""" - res = await datastore.get_commons_metadata_guid(name, guid) + res = await datastore.get_by_guid(guid) if res: return res else: raise HTTPException( HTTP_404_NOT_FOUND, { - "message": f"no common/guid exists with the given: {name}/{guid}", + "message": f"no entry exists with the given guid: {guid}", "code": 404, }, ) diff --git a/src/mds/config.py b/src/mds/config.py index 18a43373..cd6f90a8 100644 --- a/src/mds/config.py +++ b/src/mds/config.py @@ -27,8 +27,7 @@ def __init__(self, value): DB_USER = config("DB_USER", default=None) DB_PASSWORD = config("DB_PASSWORD", cast=Secret, default=None) DB_DATABASE = config("DB_DATABASE", default=None) -REDIS_HOST = config("REDIS_DB_HOST", default="localhost") -REDIS_PORT = config("REDIS_DB_PORT", cast=int, default=6379) +ES_ENDPOINT = config("GEN3_ES_ENDPOINT", default="http://localhost:9200") if TESTING: DB_DATABASE = "test_" + (DB_DATABASE or "metadata") diff --git a/src/mds/main.py b/src/mds/main.py index 5e3b685a..5389e60b 100644 --- a/src/mds/main.py +++ b/src/mds/main.py @@ -4,6 +4,7 @@ import pkg_resources from fastapi import FastAPI, APIRouter import httpx +from urllib.parse import urlparse from mds.agg_mds import datastore as aggregate_datastore @@ -41,8 +42,9 @@ async def shutdown_event(): async def startup_event(): if config.USE_AGG_MDS: 
logger.info("Creating aggregate datastore.") + url_parts = urlparse(config.ES_ENDPOINT) await aggregate_datastore.init( - hostname=config.REDIS_HOST, port=config.REDIS_PORT + hostname=url_parts.hostname, port=url_parts.port ) return app @@ -115,7 +117,5 @@ async def get_status(): * count: number of entries :return: """ - # return await redis_cache.get_status() - now = await db.scalar("SELECT now()") return dict(status="OK", timestamp=now) diff --git a/src/mds/populate.py b/src/mds/populate.py index 841854f6..ff9689f4 100644 --- a/src/mds/populate.py +++ b/src/mds/populate.py @@ -20,36 +20,14 @@ def parse_args(argv: List[str]) -> Namespace: parser = argparse.ArgumentParser() parser.add_argument("--config", help="config file to use", type=str, required=True) parser.add_argument( - "--hostname", help="hostname of redis server", type=str, default="localhost" + "--hostname", help="hostname of server", type=str, default="localhost" ) - parser.add_argument("--port", help="port of redis server", type=int, default=6379) + parser.add_argument("--port", help="port of server", type=int, default=6379) known_args, unknown_args = parser.parse_known_args(argv) return known_args -async def main(commons_config: Commons, hostname: str, port: int) -> None: - """ - Given a config structure, pull all metadata from each one in the config and cache into the following - structure: - { - "commons_name" : { - "metadata" : [ array of metadata entries ], - "field_mapping" : { dictionary of field_name to column_name }, - "guids: [ array of guids, used to index into the metadata array ], - "tags": { 'category' : [ values ] }, - "commons_url" : "url of commons portal" - }, - "..." 
: { - } - """ - - if not config.USE_AGG_MDS: - print("aggregate MDS disabled") - exit(1) - - await datastore.init(hostname, port) - await datastore.drop_all() - +async def insert_data(commons_config): for name, common in commons_config.commons.items(): results = pull_mds(common.mds_url) mds_arr = [{k: v} for k, v in results.items()] @@ -61,15 +39,22 @@ async def main(commons_config: Commons, hostname: str, port: int) -> None: mds_arr = await filter_entries(common, mds_arr) tags = {} - aggregations = { - x: {"count": 0, "missing": 0, "sum": 0, "notANumber": 0} - for x in commons_config.aggregation - } + # inject common_name field into each entry for x in mds_arr: key = next(iter(x.keys())) entry = next(iter(x.values())) + def normalize(entry: Dict[Any, Any]): + for column, field in common.columns_to_fields.items(): + if field == column: + continue + if column in entry["gen3_discovery"]: + entry["gen3_discovery"][field] = entry["gen3_discovery"][column] + return entry + + entry = normalize(entry) + # add the common field and url to the entry entry[common.study_data_field]["commons_name"] = name @@ -79,27 +64,7 @@ async def main(commons_config: Commons, hostname: str, port: int) -> None: tags[t["category"]] = set() tags[t["category"]].add(t["name"]) - # build aggregation counts on column fields - for aggName in commons_config.aggregation: - if ( - aggName not in common.columns_to_fields.keys() - or common.columns_to_fields[aggName] - not in entry[common.study_data_field] - ): - aggregations[aggName]["missing"] += 1 - else: - aggregations[aggName]["count"] += 1 - try: - value = int( - entry[common.study_data_field][ - common.columns_to_fields[aggName] - ] - ) - aggregations[aggName]["sum"] += value - except ValueError: - aggregations[aggName]["notANumber"] += 1 - - # process tags set to list + # process tags set to list for k, v in tags.items(): tags[k] = list(tags[k]) @@ -117,11 +82,35 @@ def normalize(entry: Dict[Any, Any]): data = [normalize(x) for x in mds_arr] - # 
build index of keys. which is used to compute the index into the .metadata array - # Admittedly a hack but will be faster than using json path, until the release of RedisJson v1.2 keys = list(results.keys()) info = {"commons_url": common.commons_url} - await datastore.update_metadata(name, mds_arr, keys, tags, info, aggregations) + res = await datastore.update_metadata(name, mds_arr, keys, tags, info) + + +async def main(commons_config: Commons, hostname: str, port: int) -> None: + """ + Given a config structure, pull all metadata from each one in the config and cache into the following + structure: + { + "commons_name" : { + "metadata" : [ array of metadata entries ], + "field_mapping" : { dictionary of field_name to column_name }, + "guids: [ array of guids, used to index into the metadata array ], + "tags": { 'category' : [ values ] }, + "commons_url" : "url of commons portal" + }, + "..." : { + } + """ + + if not config.USE_AGG_MDS: + print("aggregate MDS disabled") + exit(1) + + await datastore.init(hostname, port) + await datastore.drop_all() + + await insert_data(commons_config) res = await datastore.get_status() print(res) @@ -156,7 +145,7 @@ async def filter_entries( if __name__ == "__main__": """ - Runs a redis "populate" procedure. Assumes Redis is already running. + Runs a "populate" procedure. Assumes the datastore is ready. 
""" args: Namespace = parse_args(sys.argv) commons = parse_config_from_file(Path(args.config)) diff --git a/tests/conftest.py b/tests/conftest.py index 890322b6..96c803c7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -37,84 +37,7 @@ def setup_test_database(): @pytest.fixture() -def mock_aggregate_datastore(): - store = { - "commons": {}, - "info": {}, - "aggregations": {}, - "tags": {}, - } - - async def mock_init(hostname, port): - pass - - async def mock_drop_all(): - pass - - async def mock_get_status(): - pass - - async def mock_close(): - pass - - async def mock_get_all_metadata(limit, offset): - return store["commons"] - - async def mock_get_all_named_commons_metadata(name): - return store["commons"].get(name, {}) - - async def mock_update_metadata(name, data, guid_arr, tags, info, aggregations): - store["commons"][name] = data - store["info"][name] = info - store["aggregations"][name] = aggregations - store["tags"][name] = tags - - async def mock_get_commons(): - keys = list(store["commons"].keys()) - return None if len(keys) == 0 else {"commons": keys} - - async def mock_get_commons_attribute(name, type): - return store[type].get(name, {}) - - async def mock_get_commons_metadata_guid(name, guid): - studies = store["commons"].get(name, []) - return next((x for x in studies if list(x.keys())[0] == guid), None) - - patches = [] - patches.append(patch.object(datastore, "init", mock_init)) - patches.append(patch.object(datastore, "drop_all", mock_drop_all)) - patches.append(patch.object(datastore, "get_status", mock_get_status)) - patches.append(patch.object(datastore, "close", mock_close)) - patches.append(patch.object(datastore, "get_all_metadata", mock_get_all_metadata)) - patches.append( - patch.object( - datastore, - "get_all_named_commons_metadata", - mock_get_all_named_commons_metadata, - ) - ) - patches.append(patch.object(datastore, "update_metadata", mock_update_metadata)) - patches.append(patch.object(datastore, "get_commons", 
mock_get_commons)) - patches.append( - patch.object(datastore, "get_commons_attribute", mock_get_commons_attribute) - ) - patches.append( - patch.object( - datastore, "get_commons_metadata_guid", mock_get_commons_metadata_guid - ) - ) - - for patched_function in patches: - patched_function.start() - - yield datastore - - for patched_function in patches: - patched_function.stop() - - -@pytest.fixture() -def client(mock_aggregate_datastore): +def client(): from mds import config from mds.main import get_app diff --git a/tests/test_agg_mds_datastore.py b/tests/test_agg_mds_datastore.py index 885cd725..215a9b97 100644 --- a/tests/test_agg_mds_datastore.py +++ b/tests/test_agg_mds_datastore.py @@ -10,76 +10,76 @@ @pytest.mark.asyncio async def test_init(): - with patch("mds.agg_mds.datastore.redis_client", AsyncMock()) as mock_redis_cache: + with patch("mds.agg_mds.datastore.client", AsyncMock()) as mock_client: await datastore.init("host", 9999) - mock_redis_cache.init_cache.assert_called_with("host", 9999) + mock_client.init.assert_called_with("host", 9999) @pytest.mark.asyncio async def test_drop_all(): - with patch("mds.agg_mds.datastore.redis_client", AsyncMock()) as mock_redis_cache: + with patch("mds.agg_mds.datastore.client", AsyncMock()) as mock_client: await datastore.drop_all() - mock_redis_cache.json_sets.assert_called_with("commons", []) + mock_client.drop_all.assert_called_with() @pytest.mark.asyncio async def test_close(): - with patch("mds.agg_mds.datastore.redis_client", AsyncMock()) as mock_redis_cache: + with patch("mds.agg_mds.datastore.client", AsyncMock()) as mock_client: await datastore.close() - mock_redis_cache.close.assert_called_with() + mock_client.close.assert_called_with() @pytest.mark.asyncio async def test_get_status(): - with patch("mds.agg_mds.datastore.redis_client", AsyncMock()) as mock_redis_cache: + with patch("mds.agg_mds.datastore.client", AsyncMock()) as mock_client: await datastore.get_status() - 
mock_redis_cache.get_status.assert_called_with() + mock_client.get_status.assert_called_with() @pytest.mark.asyncio async def test_update_metadata(): - with patch("mds.agg_mds.datastore.redis_client", AsyncMock()) as mock_redis_cache: + with patch("mds.agg_mds.datastore.client", AsyncMock()) as mock_client: await datastore.update_metadata() - mock_redis_cache.update_metadata.assert_called_with() + mock_client.update_metadata.assert_called_with() @pytest.mark.asyncio async def test_get_commons_metadata(): - with patch("mds.agg_mds.datastore.redis_client", AsyncMock()) as mock_redis_cache: + with patch("mds.agg_mds.datastore.client", AsyncMock()) as mock_client: await datastore.get_commons_metadata() - mock_redis_cache.get_commons_metadata.assert_called_with() + mock_client.get_commons_metadata.assert_called_with() @pytest.mark.asyncio async def test_get_all_named_commons_metadata(): - with patch("mds.agg_mds.datastore.redis_client", AsyncMock()) as mock_redis_cache: + with patch("mds.agg_mds.datastore.client", AsyncMock()) as mock_client: await datastore.get_all_named_commons_metadata() - mock_redis_cache.get_all_named_commons_metadata.assert_called_with() + mock_client.get_all_named_commons_metadata.assert_called_with() @pytest.mark.asyncio -async def test_get_commons_metadata_guid(): - with patch("mds.agg_mds.datastore.redis_client", AsyncMock()) as mock_redis_cache: - await datastore.get_commons_metadata_guid() - mock_redis_cache.get_commons_metadata_guid.assert_called_with() +async def test_get_by_guid(): + with patch("mds.agg_mds.datastore.client", AsyncMock()) as mock_client: + await datastore.get_by_guid("123") + mock_client.get_by_guid.assert_called_with("123") @pytest.mark.asyncio async def test_get_commons_attribute(): - with patch("mds.agg_mds.datastore.redis_client", AsyncMock()) as mock_redis_cache: + with patch("mds.agg_mds.datastore.client", AsyncMock()) as mock_client: await datastore.get_commons_attribute() - 
mock_redis_cache.get_commons_attribute.assert_called_with() + mock_client.get_commons_attribute.assert_called_with() @pytest.mark.asyncio async def test_get_commons(): - with patch("mds.agg_mds.datastore.redis_client", AsyncMock()) as mock_redis_cache: + with patch("mds.agg_mds.datastore.client", AsyncMock()) as mock_client: await datastore.get_commons() - mock_redis_cache.get_commons.assert_called_with() + mock_client.get_commons.assert_called_with() @pytest.mark.asyncio async def test_get_all_metadata(): - with patch("mds.agg_mds.datastore.redis_client", AsyncMock()) as mock_redis_cache: + with patch("mds.agg_mds.datastore.client", AsyncMock()) as mock_client: await datastore.get_all_metadata() - mock_redis_cache.get_all_metadata.assert_called_with() + mock_client.get_all_metadata.assert_called_with() diff --git a/tests/test_agg_mds_elasticsearch_dao.py b/tests/test_agg_mds_elasticsearch_dao.py new file mode 100644 index 00000000..96bdc357 --- /dev/null +++ b/tests/test_agg_mds_elasticsearch_dao.py @@ -0,0 +1,248 @@ +import json +from unittest.mock import patch, call, MagicMock +from conftest import AsyncMock +import pytest +import mds +from mds.agg_mds.datastore import elasticsearch_dao +from mds.agg_mds.datastore.elasticsearch_dao import mapping +import nest_asyncio + + +@pytest.mark.asyncio +async def test_init(): + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.Elasticsearch", MagicMock() + ) as mock_client: + await elasticsearch_dao.init("myhost") + mock_client.assert_called_with(["myhost"], port=9200, scheme="http") + + +@pytest.mark.asyncio +async def test_drop_all(): + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client.indices", + MagicMock(), + ) as mock_indices: + await elasticsearch_dao.drop_all() + mock_indices.delete.assert_called_with(index="_all", ignore=[400, 404]) + mock_indices.create.assert_has_calls( + [ + call(body=mapping, index="commons-index"), + call(index="commons-info-index"), + ], + any_order=True, + ) 
+ + +@pytest.mark.asyncio +async def test_update_metadata(): + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client.index", + MagicMock(), + ) as mock_index: + await elasticsearch_dao.update_metadata( + "my_commons", + [ + { + "my_id": { + "gen3_discovery": { + "one": "one", + } + } + } + ], + [], + {}, + {}, + ) + mock_index.assert_has_calls( + [ + call( + body={}, + doc_type="commons-info", + id="my_commons", + index="commons-info-index", + ), + call( + body={"one": "one"}, + doc_type="commons", + id="my_id", + index="commons-index", + ), + ], + ) + + +@pytest.mark.asyncio +async def test_get_status(): + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client.cluster", + MagicMock(), + ) as mock_cluster: + await elasticsearch_dao.get_status() + + mock_cluster.health.assert_called_with() + + +@pytest.mark.asyncio +async def test_get_commons(): + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client.search", + MagicMock(), + ) as mock_search: + await elasticsearch_dao.get_commons() + mock_search.assert_called_with( + index="commons-index", + body={ + "size": 0, + "aggs": {"commons_names": {"terms": {"field": "commons_name.keyword"}}}, + }, + ) + + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client.search", + MagicMock(side_effect=Exception("some error")), + ) as mock_search: + assert await elasticsearch_dao.get_commons() == [] + + +@pytest.mark.asyncio +async def test_get_all_metadata(): + response = { + "hits": {"hits": [{"_id": 1, "_source": {"commons_name": "my-commons"}}]} + } + + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client.search", + MagicMock(return_value=response), + ) as mock_search: + await elasticsearch_dao.get_all_metadata(5, 9) + mock_search.assert_called_with( + index="commons-index", + body={"size": 5, "from": 9, "query": {"match_all": {}}}, + ) + + with patch( + 
"mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client.search", + MagicMock(side_effect=Exception("some error")), + ) as mock_search: + assert await elasticsearch_dao.get_all_metadata(5, 9) == {} + + +@pytest.mark.asyncio +async def test_get_all_named_commons_metadata(): + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client", MagicMock() + ) as mock_client: + await elasticsearch_dao.get_all_named_commons_metadata("my-commons") + mock_client.search.assert_called_with( + index="commons-index", + body={"query": {"match": {"commons_name.keyword": "my-commons"}}}, + ) + + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client.search", + MagicMock(side_effect=Exception("some error")), + ) as mock_search: + assert ( + await elasticsearch_dao.get_all_named_commons_metadata("my-commons") == {} + ) + + +@pytest.mark.asyncio +async def test_metadata_tags(): + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client", MagicMock() + ) as mock_client: + await elasticsearch_dao.metadata_tags("my-commons") + mock_client.search.assert_called_with( + index="commons-index", + body={ + "size": 0, + "aggs": { + "tags": { + "nested": {"path": "tags"}, + "aggs": { + "categories": { + "terms": {"field": "tags.category.keyword"}, + "aggs": { + "name": {"terms": {"field": "tags.name.keyword"}} + }, + } + }, + } + }, + }, + ) + + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client.search", + MagicMock(side_effect=Exception("some error")), + ) as mock_search: + assert await elasticsearch_dao.metadata_tags("my-commons") == [] + + +@pytest.mark.asyncio +async def test_get_commons_attribute(): + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client", MagicMock() + ) as mock_client: + await elasticsearch_dao.get_commons_attribute("my-commons", "attribute") + mock_client.search.assert_called_with( + index="commons-info-index", + body={"query": {"terms": {"_id": 
["my-commons"]}}}, + ) + + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client.search", + MagicMock(side_effect=Exception("some error")), + ) as mock_search: + assert ( + await elasticsearch_dao.get_commons_attribute("my-commons", "attribute") + == None + ) + + +@pytest.mark.asyncio +async def test_get_aggregations(): + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client", MagicMock() + ) as mock_client: + await elasticsearch_dao.get_aggregations("my-commons") + mock_client.search.assert_called_with( + index="commons-index", + body={ + "size": 0, + "query": { + "constant_score": { + "filter": {"match": {"commons_name": "my-commons"}} + } + }, + "aggs": {"_subjects_count": {"sum": {"field": "_subjects_count"}}}, + }, + ) + + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client.search", + MagicMock(side_effect=Exception("some error")), + ) as mock_search: + assert await elasticsearch_dao.get_aggregations("my-commons") == [] + + +@pytest.mark.asyncio +async def test_get_by_guid(): + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client", MagicMock() + ) as mock_client: + await elasticsearch_dao.get_by_guid("my-commons") + mock_client.get.assert_called_with( + index="commons-index", + doc_type="commons", + id="my-commons", + ) + + with patch( + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client.get", + MagicMock(side_effect=Exception("some error")), + ) as mock_get: + assert await elasticsearch_dao.get_by_guid("my-commons") == None diff --git a/tests/test_agg_mds_query.py b/tests/test_agg_mds_query.py index 92350e3a..51b7c381 100644 --- a/tests/test_agg_mds_query.py +++ b/tests/test_agg_mds_query.py @@ -2,6 +2,8 @@ import pytest import nest_asyncio from mds.agg_mds import datastore +from unittest.mock import patch +from conftest import AsyncMock # https://github.com/encode/starlette/issues/440 @@ -10,64 +12,36 @@ @pytest.mark.asyncio async def 
test_aggregate_commons(client): - resp = client.get("/aggregate/commons") - assert resp.status_code == 200 - assert resp.json() == None - - await datastore.update_metadata( - "commons1", - [], - [], - None, - None, - None, - ) - await datastore.update_metadata( - "commons2", - [], - [], - None, - None, - None, - ) - resp = client.get("/aggregate/commons") - assert resp.status_code == 200 - assert resp.json() == {"commons": ["commons1", "commons2"]} + with patch.object( + datastore, "get_commons", AsyncMock(return_value={}) + ) as datastore_mock: + resp = client.get("/aggregate/commons") + assert resp.status_code == 200 + assert resp.json() == {} + datastore.get_commons.assert_called_with() + + with patch.object( + datastore, + "get_commons", + AsyncMock(return_value={"commons": ["commons1", "commons2"]}), + ) as datastore_mock: + resp = client.get("/aggregate/commons") + assert resp.status_code == 200 + assert resp.json() == {"commons": ["commons1", "commons2"]} + datastore.get_commons.assert_called_with() @pytest.mark.asyncio async def test_aggregate_metadata(client): - resp = client.get("/aggregate/metadata") - assert resp.status_code == 200 - assert resp.json() == {} - - await datastore.update_metadata( - "commons1", - [ - { - "study1": {}, - } - ], - ["study1"], - None, - None, - None, - ) - await datastore.update_metadata( - "commons2", - [ - { - "study2": {}, - } - ], - ["study2"], - None, - None, - None, - ) - resp = client.get("/aggregate/metadata") - assert resp.status_code == 200 - assert resp.json() == { + with patch.object( + datastore, "get_all_metadata", AsyncMock(return_value=[]) + ) as datastore_mock: + resp = client.get("/aggregate/metadata") + assert resp.status_code == 200 + assert resp.json() == [] + datastore.get_all_metadata.assert_called_with(20, 0) + + mock_data = { "commons1": [ { "study1": {}, @@ -80,129 +54,125 @@ async def test_aggregate_metadata(client): ], } + with patch.object( + datastore, "get_all_metadata", 
AsyncMock(return_value=mock_data) + ) as datastore_mock: + resp = client.get("/aggregate/metadata") + assert resp.status_code == 200 + assert resp.json() == mock_data + datastore.get_all_metadata.assert_called_with(20, 0) + @pytest.mark.asyncio async def test_aggregate_metadata_name(client): - resp = client.get("/aggregate/metadata/commons1") - assert resp.status_code == 404 - assert resp.json() == { - "detail": { - "code": 404, - "message": "no common exists with the given: commons1", + with patch.object( + datastore, "get_all_named_commons_metadata", AsyncMock(return_value=None) + ) as datastore_mock: + resp = client.get("/aggregate/metadata/commons1") + assert resp.status_code == 404 + assert resp.json() == { + "detail": { + "code": 404, + "message": "no common exists with the given: commons1", + } } - } + datastore.get_all_named_commons_metadata.assert_called_with("commons1") - await datastore.update_metadata( - "commons1", - [ - { - "study1": {}, - } - ], - ["study1"], - None, - None, - None, - ) - resp = client.get("/aggregate/metadata/commons1") - assert resp.status_code == 200 - assert resp.json() == [{"study1": {}}] + with patch.object( + datastore, + "get_all_named_commons_metadata", + AsyncMock(return_value=[{"study1": {}}]), + ) as datastore_mock: + resp = client.get("/aggregate/metadata/commons1") + assert resp.status_code == 200 + assert resp.json() == [{"study1": {}}] + datastore.get_all_named_commons_metadata.assert_called_with("commons1") @pytest.mark.asyncio async def test_aggregate_metadata_tags(client): - resp = client.get("/aggregate/metadata/commons1/tags") - assert resp.status_code == 404 - assert resp.json() == { - "detail": { - "code": 404, - "message": "no common exists with the given: commons1", + with patch.object( + datastore, "get_commons_attribute", AsyncMock(return_value=None) + ) as datastore_mock: + resp = client.get("/aggregate/metadata/commons1/tags") + assert resp.status_code == 404 + assert resp.json() == { + "detail": { + 
"code": 404, + "message": "no common exists with the given: commons1", + } } - } - await datastore.update_metadata( - "commons1", - [ - { - "study1": {}, - } - ], - ["study1"], - ["mytag1"], - None, - None, - ) - resp = client.get("/aggregate/metadata/commons1/tags") - assert resp.status_code == 200 - assert resp.json() == ["mytag1"] + with patch.object( + datastore, "get_commons_attribute", AsyncMock(return_value=["mytag1"]) + ) as datastore_mock: + resp = client.get("/aggregate/metadata/commons1/tags") + assert resp.status_code == 200 + assert resp.json() == ["mytag1"] + datastore.get_commons_attribute.assert_called_with("commons1", "tags") @pytest.mark.asyncio async def test_aggregate_metadata_info(client): - resp = client.get("/aggregate/metadata/commons1/info") - assert resp.status_code == 404 - assert resp.json() == { - "detail": { - "code": 404, - "message": "no common exists with the given: commons1", + with patch.object( + datastore, "get_commons_attribute", AsyncMock(return_value=None) + ) as datastore_mock: + resp = client.get("/aggregate/metadata/commons1/info") + assert resp.status_code == 404 + assert resp.json() == { + "detail": { + "code": 404, + "message": "no common exists with the given: commons1", + } } - } + datastore.get_commons_attribute.assert_called_with("commons1", "info") - await datastore.update_metadata( - "commons1", - [ - { - "study1": {}, - } - ], - ["guid1"], - None, - {"commons_url": "http://commons"}, - None, - ) - resp = client.get("/aggregate/metadata/commons1/info") - assert resp.status_code == 200 - assert resp.json() == {"commons_url": "http://commons"} + with patch.object( + datastore, + "get_commons_attribute", + AsyncMock(return_value={"commons_url": "http://commons"}), + ) as datastore_mock: + resp = client.get("/aggregate/metadata/commons1/info") + assert resp.status_code == 200 + assert resp.json() == {"commons_url": "http://commons"} + datastore.get_commons_attribute.assert_called_with("commons1", "info") 
@pytest.mark.asyncio async def test_metadata_aggregations(client): - resp = client.get("/aggregate/metadata/commons1/aggregations") - assert resp.status_code == 404 - assert resp.json() == { - "detail": { - "code": 404, - "message": "no common exists with the given: commons1", + with patch.object( + datastore, "get_aggregations", AsyncMock(return_value=None) + ) as datastore_mock: + resp = client.get("/aggregate/metadata/commons1/aggregations") + assert resp.status_code == 404 + assert resp.json() == { + "detail": { + "code": 404, + "message": "no common exists with the given: commons1", + } } - } + datastore.get_aggregations.assert_called_with("commons1") @pytest.mark.asyncio async def test_aggregate_metadata_name_guid(client): - resp = client.get("/aggregate/metadata/commons1/guid/study2:path") - assert resp.status_code == 404 - assert resp.json() == { - "detail": { - "code": 404, - "message": "no common/guid exists with the given: commons1/study2", + with patch.object( + datastore, "get_by_guid", AsyncMock(return_value=None) + ) as datastore_mock: + resp = client.get("/aggregate/metadata/guid/123") + assert resp.status_code == 404 + assert resp.json() == { + "detail": { + "code": 404, + "message": "no entry exists with the given guid: 123", + } } - } - - await datastore.update_metadata( - "commons1", - [ - { - "study1": {}, - }, - { - "study2": {}, - }, - ], - ["study1", "study2"], - None, - {"commons_url": "http://commons"}, - None, - ) - resp = client.get("/aggregate/metadata/commons1/guid/study2:path") - assert resp.status_code == 200 - assert resp.json() == {"study2": {}} + datastore.get_by_guid.assert_called_with("123") + + with patch.object( + datastore, "get_by_guid", AsyncMock(return_value={"study2": {}}) + ) as datastore_mock: + resp = client.get("/aggregate/metadata/guid/123") + assert resp.status_code == 200 + assert resp.json() == {"study2": {}} + datastore.get_by_guid.assert_called_with("123") diff --git a/tests/test_agg_mds_redis_cache.py 
b/tests/test_agg_mds_redis_cache.py deleted file mode 100644 index 3826145a..00000000 --- a/tests/test_agg_mds_redis_cache.py +++ /dev/null @@ -1,179 +0,0 @@ -import json -from unittest.mock import patch, call, MagicMock -from conftest import AsyncMock -import pytest -import mds -from mds.agg_mds.datastore.redis_cache import RedisCache -import mds.agg_mds.datastore.redis_cache -import nest_asyncio -import fakeredis.aioredis -import aioredis -from datetime import datetime - - -@pytest.mark.asyncio -async def test_init_cache(): - cache = RedisCache() - - async def mock_pool(address): - return f"mock:result:{address}" - - with patch.object( - mds.agg_mds.datastore.redis_cache, "create_redis_pool", mock_pool - ): - await cache.init_cache() - assert cache.redis_cache == "mock:result:redis://0.0.0.0:6379/0?encoding=utf-8" - - -@pytest.mark.asyncio -async def test_keys(): - cache = RedisCache() - cache.redis_cache = await fakeredis.aioredis.create_redis_pool() - - await cache.redis_cache.set("commons1", "some data") - keys = await cache.keys("commons1") - assert keys == [b"commons1"] - - -@pytest.mark.asyncio -async def test_json_get(): - cache = RedisCache() - cache.redis_cache = await fakeredis.aioredis.create_redis_pool() - - with patch.object(cache.redis_cache, "execute", AsyncMock(return_value=None)): - resp = await cache.json_get("some_key") - assert resp == None - - with patch.object( - cache.redis_cache, "execute", AsyncMock(return_value=json.dumps({})) - ): - resp = await cache.json_get("some_key") - assert resp == {} - - -@pytest.mark.asyncio -async def test_get_status(): - cache = RedisCache() - - mock_data = ["status2", "status1", ["commons1", "commons2"]] - - async def mock_json_get(arg1, arg2=None): - return mock_data.pop() - - patch.object(cache, "json_get", mock_json_get).start() - - assert await cache.get_status() == { - "commons1": "status1", - "commons2": "status2", - } - - -@pytest.mark.asyncio -async def test_close(): - cache = RedisCache() - 
cache.redis_cache = await fakeredis.aioredis.create_redis_pool() - - patch.object(cache.redis_cache, "close", MagicMock()).start() - patch.object(cache.redis_cache, "wait_closed", AsyncMock()).start() - - await cache.close() - - cache.redis_cache.close.assert_called_with() - cache.redis_cache.wait_closed.assert_called_with() - - -@pytest.mark.asyncio -async def test_update_metadata(): - cache = RedisCache() - cache.json_sets = AsyncMock() - cache.json_arr_appends = AsyncMock() - - now = datetime.now() - with patch("mds.agg_mds.datastore.redis_cache.datetime") as mock_date: - mock_date.now = MagicMock(return_value=now) - await cache.update_metadata("commons1", [], [], {}, {}, {}) - - cache.json_sets.assert_has_calls( - [ - call("commons1", {}), - call("commons1", [], ".metadata"), - call("commons1", [], ".guids"), - call("commons1", {}, ".tags"), - call("commons1", {}, ".info"), - call("commons1", {}, ".aggregations"), - call( - "commons1.status", - { - "last_update": now.strftime("%Y%m%d%H%M%S"), - "error": 0, - "count": "none", - }, - ), - ] - ) - cache.json_arr_appends.assert_called_with("commons", "commons1") - - -@pytest.mark.asyncio -async def test_get_commons_metadata(): - cache = RedisCache() - - with patch.object(cache, "json_get", AsyncMock(return_value=None)): - assert await cache.get_commons_metadata("commons1", 3, 2) == None - - with patch.object( - cache, "json_get", AsyncMock(return_value=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) - ): - assert await cache.get_commons_metadata("commons1", 3, 2) == [3, 4, 5] - - -@pytest.mark.asyncio -async def test_get_commons_metadata_guid(): - cache = RedisCache() - - patch.object(cache, "json_get", AsyncMock(return_value=[])).start() - patch.object(cache, "json_arr_index", AsyncMock(return_value=None)).start() - - assert await cache.get_commons_metadata_guid("commons1", "guid1") == None - - patch.object( - cache, "json_get", AsyncMock(return_value=["commons0", "commons1", "commons2"]) - ).start() - patch.object(cache, 
"json_arr_index", AsyncMock(return_value=1)).start() - - assert await cache.get_commons_metadata_guid("commons1", "guid1") == "commons1" - - -@pytest.mark.asyncio -async def test_get_commons_attribute(): - cache = RedisCache() - cache.json_get = AsyncMock() - - await cache.get_commons_attribute("something", "other") - - cache.json_get.assert_called_with("something", "other") - - -@pytest.mark.asyncio -async def test_get_all_metadata(): - cache = RedisCache() - - patch.object(cache, "json_get", AsyncMock(return_value=None)).start() - - assert await cache.get_all_metadata(2, 4) == {} - - mock_data = [ - [None, "recordX2", "recordY2", "recordZ2", None], - [None, None, None, "recordX1", "recordY1", "recordZ1", None], - ["commons1", "commons2"], - ] - - async def mock_json_get(arg1, arg2=None): - return mock_data.pop() - - patch.object(cache, "json_get", mock_json_get).start() - - assert await cache.get_all_metadata(2, 4) == { - "commons1": ["recordY1", "recordZ1"], - "commons2": [None], - } diff --git a/tests/test_populate.py b/tests/test_populate.py index 377c9231..afc4b8f2 100644 --- a/tests/test_populate.py +++ b/tests/test_populate.py @@ -1,9 +1,10 @@ import pytest from argparse import Namespace -from mds.populate import parse_args, main, filter_entries +from mds.populate import parse_args, main, insert_data, filter_entries from mds.agg_mds.commons import MDSInstance, Commons import respx -from unittest.mock import patch +from unittest.mock import patch, MagicMock +from conftest import AsyncMock @pytest.mark.asyncio @@ -27,41 +28,7 @@ async def test_parse_args(): @pytest.mark.asyncio -async def test_main(mock_aggregate_datastore): - def mock_pull_mds(url): - return { - "thing": { - "gen3_discovery": { - "commons_name": "my_commons", - "tags": [{"category": "tag_category", "name": "tag_name"}], - "_subjects_count": 30, - } - } - } - - with patch("mds.populate.pull_mds", mock_pull_mds): - await main( - Commons( - { - "my_commons": MDSInstance( - "http://mds", - 
"http://commons", - { - "short_name": "name", - "full_name": "full_name", - "_subjects_count": "_subjects_count", - "study_id": "study_id", - "_unique_id": "_unique_id", - "study_description": "study_description", - }, - ), - }, - ["_subjects_count"], - ), - "", - 0, - ) - +async def test_main(): with patch("mds.config.USE_AGG_MDS", False): with pytest.raises(SystemExit) as pytest_wrapped_e: await main(None, "", 0) @@ -69,6 +36,48 @@ def mock_pull_mds(url): assert pytest_wrapped_e.value.code == 1 +@pytest.mark.asyncio +async def test_insert_data(): + patch( + "mds.populate.pull_mds", + MagicMock( + return_value={ + "study1": { + "gen3_discovery": { + "tags": [{"category": "tag_category", "name": "tag_name"}], + }, + }, + "study2": { + "gen3_discovery": { + "tags": [{"category": "tag_category", "name": "tag_name"}], + }, + }, + } + ), + ).start() + patch("mds.agg_mds.datastore.update_metadata", AsyncMock(return_value=None)).start() + + await insert_data( + Commons( + { + "my_commons": MDSInstance( + "http://mds", + "http://commons", + { + "short_name": "name", + "full_name": "full_name", + "_subjects_count": "_subjects_count", + "study_id": "study_id", + "_unique_id": "_unique_id", + "study_description": "study_description", + }, + ), + }, + ["_subjects_count"], + ), + ) + + @pytest.mark.asyncio async def test_filter_entries(): resp = await filter_entries( From 1bf9d80d8ed8fc3a47c2c365e9ae4aac64835e33 Mon Sep 17 00:00:00 2001 From: Will Date: Wed, 30 Jun 2021 09:49:50 -0500 Subject: [PATCH 2/2] fix(cleanup): pep8 standards, better /_status --- src/mds/agg_mds/datastore/__init__.py | 7 +++ .../agg_mds/datastore/elasticsearch_dao.py | 55 ++++++++++--------- src/mds/main.py | 9 ++- tests/test_agg_mds_elasticsearch_dao.py | 10 ++-- tests/test_main.py | 29 ++++++++++ 5 files changed, 79 insertions(+), 31 deletions(-) create mode 100644 tests/test_main.py diff --git a/src/mds/agg_mds/datastore/__init__.py b/src/mds/agg_mds/datastore/__init__.py index 493f663f..a049d1e2 
100644 --- a/src/mds/agg_mds/datastore/__init__.py +++ b/src/mds/agg_mds/datastore/__init__.py @@ -22,6 +22,9 @@ async def close(): async def get_status(): + """ + Returns "OK" or raises an error indicating the status of the datastore: + """ return await client.get_status() @@ -55,3 +58,7 @@ async def get_all_metadata(*args): async def get_aggregations(*args): return await client.get_aggregations(*args) + + +async def search(*args): + return await client.search(*args) diff --git a/src/mds/agg_mds/datastore/elasticsearch_dao.py b/src/mds/agg_mds/datastore/elasticsearch_dao.py index 0c5edcf1..ad6d7426 100644 --- a/src/mds/agg_mds/datastore/elasticsearch_dao.py +++ b/src/mds/agg_mds/datastore/elasticsearch_dao.py @@ -5,15 +5,19 @@ from mds import logger -agg_mds_index = "commons-index" -agg_mds_type = "commons" +# TODO WFH Why do we have both __manifest and _file_manifest? +FIELDS_TO_NORMALIZE = ["__manifest", "_file_manifest", "advSearchFilters"] -agg_mds_info_index = "commons-info-index" -agg_mds_info_type = "commons-info" +AGG_MDS_INDEX = "commons-index" +AGG_MDS_TYPE = "commons" -mapping = { +AGG_MDS_INFO_INDEX = "commons-info-index" +AGG_MDS_INFO_TYPE = "commons-info" + + +MAPPING = { "mappings": { "commons": { "properties": { @@ -43,12 +47,12 @@ async def init(hostname: str = "0.0.0.0", port: int = 9200): async def drop_all(): res = elastic_search_client.indices.delete(index="_all", ignore=[400, 404]) logger.debug(f"deleted all indexes: {res}") - res = elastic_search_client.indices.create(index=agg_mds_index, body=mapping) - logger.debug(f"created index {agg_mds_index}: {res}") + res = elastic_search_client.indices.create(index=AGG_MDS_INDEX, body=MAPPING) + logger.debug(f"created index {AGG_MDS_INDEX}: {res}") res = elastic_search_client.indices.create( - index=agg_mds_info_index, + index=AGG_MDS_INFO_INDEX, ) - logger.debug(f"created index {agg_mds_info_index}: {res}") + logger.debug(f"created index {AGG_MDS_INFO_INDEX}: {res}") def 
normalize_string_or_object(doc, key): @@ -65,8 +69,8 @@ async def update_metadata( info: Dict[str, str], ): elastic_search_client.index( - index=agg_mds_info_index, - doc_type=agg_mds_info_type, + index=AGG_MDS_INFO_INDEX, + doc_type=AGG_MDS_INFO_TYPE, id=name, body=info, ) @@ -76,17 +80,18 @@ async def update_metadata( # Flatten out this structure doc = doc[key]["gen3_discovery"] - normalize_string_or_object(doc, "__manifest") - # TODO WFH Why do we have this redundant field? Which commons has this? - normalize_string_or_object(doc, "_file_manifest") - normalize_string_or_object(doc, "advSearchFilters") + for field in FIELDS_TO_NORMALIZE: + normalize_string_or_object(doc, field) + elastic_search_client.index( - index=agg_mds_index, doc_type=agg_mds_type, id=key, body=doc + index=AGG_MDS_INDEX, doc_type=AGG_MDS_TYPE, id=key, body=doc ) async def get_status(): - return elastic_search_client.cluster.health() + if not elastic_search_client.ping(): + raise ValueError("Connection failed") + return "OK" async def close(): @@ -96,7 +101,7 @@ async def close(): async def get_commons(): try: res = elastic_search_client.search( - index=agg_mds_index, + index=AGG_MDS_INDEX, body={ "size": 0, "aggs": {"commons_names": {"terms": {"field": "commons_name.keyword"}}}, @@ -115,7 +120,7 @@ async def get_commons(): async def get_all_metadata(limit, offset): try: res = elastic_search_client.search( - index=agg_mds_index, + index=AGG_MDS_INDEX, body={"size": limit, "from": offset, "query": {"match_all": {}}}, ) byCommons = {} @@ -141,7 +146,7 @@ async def get_all_metadata(limit, offset): async def get_all_named_commons_metadata(name): try: return elastic_search_client.search( - index=agg_mds_index, + index=AGG_MDS_INDEX, body={"query": {"match": {"commons_name.keyword": name}}}, ) except Exception as error: @@ -152,7 +157,7 @@ async def get_all_named_commons_metadata(name): async def metadata_tags(name): try: return elastic_search_client.search( - index=agg_mds_index, + 
index=AGG_MDS_INDEX, body={ "size": 0, "aggs": { @@ -182,7 +187,7 @@ async def metadata_tags(name): async def get_commons_attribute(name, what): try: data = elastic_search_client.search( - index=agg_mds_info_index, + index=AGG_MDS_INFO_INDEX, body={ "query": { "terms": { @@ -200,7 +205,7 @@ async def get_commons_attribute(name, what): async def get_aggregations(name): try: res = elastic_search_client.search( - index=agg_mds_index, + index=AGG_MDS_INDEX, body={ "size": 0, "query": { @@ -224,8 +229,8 @@ async def get_aggregations(name): async def get_by_guid(guid): try: data = elastic_search_client.get( - index=agg_mds_index, - doc_type=agg_mds_type, + index=AGG_MDS_INDEX, + doc_type=AGG_MDS_TYPE, id=guid, ) return data["_source"] diff --git a/src/mds/main.py b/src/mds/main.py index 5389e60b..0a8f8847 100644 --- a/src/mds/main.py +++ b/src/mds/main.py @@ -111,11 +111,18 @@ def get_version(): @router.get("/_status") async def get_status(): """ - Returns the status of all the cached commons. 
There are two fields per common: + Returns the status of the MDS: * error: if there was no error this will be "none" * last_update: timestamp of the last data pull from the commons * count: number of entries :return: """ now = await db.scalar("SELECT now()") + + try: + await aggregate_datastore.get_status() + except Exception as error: + logger.error("error with aggregate datastore connection: %s", error) + return dict(error="aggregate datastore offline") + return dict(status="OK", timestamp=now) diff --git a/tests/test_agg_mds_elasticsearch_dao.py b/tests/test_agg_mds_elasticsearch_dao.py index 96bdc357..f774cad7 100644 --- a/tests/test_agg_mds_elasticsearch_dao.py +++ b/tests/test_agg_mds_elasticsearch_dao.py @@ -4,7 +4,7 @@ import pytest import mds from mds.agg_mds.datastore import elasticsearch_dao -from mds.agg_mds.datastore.elasticsearch_dao import mapping +from mds.agg_mds.datastore.elasticsearch_dao import MAPPING import nest_asyncio @@ -27,7 +27,7 @@ async def test_drop_all(): mock_indices.delete.assert_called_with(index="_all", ignore=[400, 404]) mock_indices.create.assert_has_calls( [ - call(body=mapping, index="commons-index"), + call(body=MAPPING, index="commons-index"), call(index="commons-info-index"), ], any_order=True, @@ -76,12 +76,12 @@ async def test_update_metadata(): @pytest.mark.asyncio async def test_get_status(): with patch( - "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client.cluster", + "mds.agg_mds.datastore.elasticsearch_dao.elastic_search_client", MagicMock(), - ) as mock_cluster: + ) as mock_client: await elasticsearch_dao.get_status() - mock_cluster.health.assert_called_with() + mock_client.ping.assert_called_with() @pytest.mark.asyncio diff --git a/tests/test_main.py b/tests/test_main.py new file mode 100644 index 00000000..8d517b1f --- /dev/null +++ b/tests/test_main.py @@ -0,0 +1,29 @@ +import gino +import pytest +from unittest.mock import patch +from conftest import AsyncMock + + +def test_status_success(client): + 
patch( + "mds.main.aggregate_datastore.get_status", AsyncMock(return_value="some status") + ).start() + patch("mds.main.db.scalar", AsyncMock(return_value="some time")).start() + + resp = client.get("/_status") + resp.raise_for_status() + assert resp.status_code == 200 + assert resp.json() == {"status": "OK", "timestamp": "some time"} + + +def test_status_aggregate_error(client): + patch( + "mds.main.aggregate_datastore.get_status", + AsyncMock(side_effect=Exception("some error")), + ).start() + patch("mds.main.db.scalar", AsyncMock(return_value="some time")).start() + + resp = client.get("/_status") + resp.raise_for_status() + assert resp.status_code == 200 + assert resp.json() == {"error": "aggregate datastore offline"}