From 931463c61f2a55fcd4a94e0c0987526e82686601 Mon Sep 17 00:00:00 2001 From: Kevin James Date: Thu, 14 Sep 2023 20:30:48 +0100 Subject: [PATCH] docs: fixup docs build pdoc -> sphinx, and fix various build issues. --- .circleci/config.yml | 53 ++-- .gitignore | 1 + README.rst | 4 +- auth/README.rst | 36 +-- auth/gcloud/aio/auth/__init__.py | 86 ++++-- auth/poetry.lock | 2 +- auth/poetry.rest.lock | 2 +- auth/pyproject.rest.toml | 6 +- auth/pyproject.toml | 6 +- bigquery/README.rst | 10 +- bigquery/gcloud/aio/bigquery/__init__.py | 25 +- bigquery/poetry.lock | 2 +- bigquery/poetry.rest.lock | 2 +- bigquery/pyproject.rest.toml | 6 +- bigquery/pyproject.toml | 6 +- conf.py | 30 ++ datastore/README.rst | 10 +- datastore/gcloud/aio/datastore/__init__.py | 294 +++++++++---------- datastore/poetry.lock | 2 +- datastore/poetry.rest.lock | 2 +- datastore/pyproject.rest.toml | 6 +- datastore/pyproject.toml | 6 +- docs/aio.md | 89 ------ docs/gcloud.md | 9 - docs/rest.md | 45 --- index.rst | 26 ++ kms/README.rst | 10 +- kms/gcloud/aio/kms/__init__.py | 53 ++-- kms/poetry.lock | 2 +- kms/poetry.rest.lock | 2 +- kms/pyproject.rest.toml | 6 +- kms/pyproject.toml | 6 +- pubsub/README.rst | 10 +- pubsub/gcloud/aio/pubsub/__init__.py | 321 +++++++++++---------- pubsub/poetry.lock | 2 +- pubsub/poetry.rest.lock | 2 +- pubsub/pyproject.rest.toml | 6 +- pubsub/pyproject.toml | 6 +- storage/README.rst | 10 +- storage/gcloud/aio/storage/__init__.py | 219 +++++++------- storage/gcloud/aio/storage/blob.py | 1 + storage/gcloud/aio/storage/storage.py | 43 +-- storage/poetry.lock | 2 +- storage/poetry.rest.lock | 2 +- storage/pyproject.rest.toml | 6 +- storage/pyproject.toml | 6 +- taskqueue/README.rst | 10 +- taskqueue/gcloud/aio/taskqueue/__init__.py | 29 +- taskqueue/poetry.lock | 2 +- taskqueue/poetry.rest.lock | 2 +- taskqueue/pyproject.rest.toml | 6 +- taskqueue/pyproject.toml | 6 +- 52 files changed, 747 insertions(+), 789 deletions(-) create mode 100644 conf.py delete mode 100644 docs/aio.md delete mode 100644 docs/gcloud.md delete mode 100644 docs/rest.md create mode 100644 index.rst diff --git a/.circleci/config.yml b/.circleci/config.yml index 688742584..e632e2501 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -98,37 +98,22 @@ jobs: docs: executor: python311 steps: - - run: pip install pdoc + - run: pip install sphinx sphinx-autoapi sphinx-sizzle-theme - checkout - - attach_workspace: - at: rest - - run: - name: install all sub-projects (aio) - command: | - pip install -e ./auth - pip install -e ./bigquery - pip install -e ./datastore - pip install -e ./kms - pip install -e ./pubsub - pip install -e ./storage - pip install -e ./taskqueue - - run: - name: install all sub-projects (rest) - command: | - cd rest/ - pip install -e ./auth - pip install -e ./bigquery - pip install -e ./datastore - pip install -e ./kms - pip install -e ./pubsub - pip install -e ./storage - pip install -e ./taskqueue - - run: mkdir -p ./build - - run: pdoc -o ./build/docs gcloud + # TODO: generate gcloud-rest-* docs + # - attach_workspace: + # at: rest + # TODO: add -W after fixing build issues + - run: sphinx-build -b html --keep-going . 
build/docs/
+      # https://github.blog/2009-12-29-bypassing-jekyll-on-github-pages/
+      # Uploading /root/project/build/docs/.nojekyll: SKIPPED, file is empty
+      - run: echo " " > build/docs/.nojekyll
       - persist_to_workspace:
           root: ./build
           paths:
             - ./docs
+      - store_artifacts:
+          path: ./build/docs/
 
   pages:
     docker:
@@ -137,16 +122,16 @@ jobs:
       - checkout
       - attach_workspace:
           at: build
-      - run: npm install -g --silent gh-pages@2.0.1
-      - run:
-          name: configure git creds
-          command: |
-            git config user.email "voiceai-eng+ci@dialpad.com"
-            git config user.name "Vi Eng (CI)"
+      - run: npm install -g --silent gh-pages@6.0.0
       - add_ssh_keys:
          fingerprints:
            - "f6:b5:5d:10:ed:5d:cd:e0:83:28:dd:39:8c:f8:0b:c3"
-      - run: gh-pages --message "[skip ci] updated docs" --dist build/docs
+      # https://github.com/tschaub/gh-pages/issues/354
+      - run: mkdir ./tmp-gh-pages
+      - run:
+          command: gh-pages -u "Vi Eng CI <voiceai-eng+ci@dialpad.com>" -m "[skip ci] updated docs" -td build/docs/
+          environment:
+            CACHE_DIR: ./tmp-gh-pages
 
   github:
     docker:
@@ -268,8 +253,6 @@ workflows:
             only: /master/
           tags:
             ignore: /.*/
-        requires:
-          - build-rest
       - pages:
          filters:
            branches:
diff --git a/.gitignore b/.gitignore
index 5f48834ba..165385cf4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,6 +5,7 @@
 **/__pycache__/*
 **/build/*
 **/dist/*
+**/docs/*
 **/venv/*
 *.pyc
 */.coverage
diff --git a/README.rst b/README.rst
index 3529793e8..5f69f839c 100644
--- a/README.rst
+++ b/README.rst
@@ -1,5 +1,5 @@
-(Asyncio OR Threadsafe) Google Cloud Client Library for Python
-==============================================================
+(Asyncio OR Threadsafe) Google Cloud Client Libraries for Python
+================================================================
 
 This repository contains a shared codebase for two projects: ``gcloud-aio-*``
 and ``gcloud-rest-*``. Both of them are HTTP implementations of the Google
diff --git a/auth/README.rst b/auth/README.rst
index 5076d6b26..2c0f40061 100644
--- a/auth/README.rst
+++ b/auth/README.rst
@@ -12,7 +12,7 @@ against Google Cloud. The other ``gcloud-aio-*`` package components accept a
 these components or define one for each. Each component corresponds to a given
 Google Cloud service and each service requires various "`scopes`_".
 
-|pypi| |pythons-aio| |pythons-rest|
+|pypi| |pythons|
 
 Installation
 ------------
@@ -26,32 +26,6 @@ Usage
 
 See `our docs`_.
 
-CLI
-~~~
-
-This project can also be used to help you manually authenticate to test GCP
-routes, eg. we can list our project's uptime checks with a tool such as
-``curl``:
-
-.. code-block:: console
-
-    # using default application credentials
-    curl \
-      -H "Authorization: Bearer $(python3 -c 'from gcloud.rest.auth import Token; print(Token().get())')" \
-      "https://monitoring.googleapis.com/v3/projects/PROJECT_ID/uptimeCheckConfigs"
-
-    # using a service account (make sure to provide a scope!)
- export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service.json - curl \ - -H "Authorization: Bearer $(python3 -c 'from gcloud.rest.auth import Token; print(Token(scopes=["'"https://www.googleapis.com/auth/cloud-platform"'"]).get())')" \ - "https://monitoring.googleapis.com/v3/projects/PROJECT_ID/uptimeCheckConfigs" - - # using legacy account credentials - export GOOGLE_APPLICATION_CREDENTIALS=~/.config/gcloud/legacy_credentials/EMAIL@DOMAIN.TLD/adc.json - curl \ - -H "Authorization: Bearer $(python3 -c 'from gcloud.rest.auth import Token; print(Token().get())')" \ - "https://monitoring.googleapis.com/v3/projects/PROJECT_ID/uptimeCheckConfigs" - Contributing ------------ @@ -65,10 +39,6 @@ Please see our `contributing guide`_. :alt: Latest PyPI Version (gcloud-aio-auth) :target: https://pypi.org/project/gcloud-aio-auth/ -.. |pythons-aio| image:: https://img.shields.io/pypi/pyversions/gcloud-aio-auth.svg?style=flat-square&label=python (aio) - :alt: Python Version Support (gcloud-aio-auth) +.. |pythons| image:: https://img.shields.io/pypi/pyversions/gcloud-aio-auth.svg?style=flat-square&label=python + :alt: Python Version Support :target: https://pypi.org/project/gcloud-aio-auth/ - -.. |pythons-rest| image:: https://img.shields.io/pypi/pyversions/gcloud-rest-auth.svg?style=flat-square&label=python (rest) - :alt: Python Version Support (gcloud-rest-auth) - :target: https://pypi.org/project/gcloud-rest-auth/ diff --git a/auth/gcloud/aio/auth/__init__.py b/auth/gcloud/aio/auth/__init__.py index ee4ec2411..8beb9d216 100644 --- a/auth/gcloud/aio/auth/__init__.py +++ b/auth/gcloud/aio/auth/__init__.py @@ -1,45 +1,75 @@ +# pylint: disable=line-too-long """ This library implements various methods for working with the Google IAM / auth APIs. This includes authenticating for the purpose of using other Google APIs, managing service accounts and public keys, URL-signing blobs, etc. -## Installation +Installation +------------ -```console -$ pip install --upgrade gcloud-aio-auth -``` +.. code-block:: console -## Usage + $ pip install --upgrade gcloud-aio-auth -```python -from gcloud.aio.auth import IamClient -from gcloud.aio.auth import Token +Usage +----- +.. code-block:: python -client = IamClient() -pubkeys = await client.list_public_keys() + from gcloud.aio.auth import IamClient + from gcloud.aio.auth import Token -token = Token() -print(await token.get()) -``` -Additionally, the `Token` constructor accepts the following optional arguments: + client = IamClient() + pubkeys = await client.list_public_keys() -* `service_file`: path to a [service account][service-account] authorized user - file, or any other application credentials. Alternatively, you can pass a - file-like object, like an `io.StringIO` instance, in case your credentials - are not stored in a file but in memory. If omitted, will attempt to find one - on your path or fallback to generating a token from GCE metadata. -* `session`: an `aiohttp.ClientSession` instance to be used for all requests. - If omitted, a default session will be created. If you use the default - session, you may be interested in using `Token()` as a context manager - (`async with Token(..) as token:`) or explicitly calling the `Token.close()` - method to ensure the session is cleaned up appropriately. -* `scopes`: an optional list of GCP `scopes`_ for which to generate our token. - Only valid (and required!) for [service account][service-account] - authentication. 
+    token = Token()
+    print(await token.get())
 
-[service-account]: https://console.cloud.google.com/iam-admin/serviceaccounts
+Additionally, the ``Token`` constructor accepts the following optional
+arguments:
+
+* ``service_file``: path to a `service account`_ authorized user file, or any
+  other application credentials. Alternatively, you can pass a file-like
+  object, like an ``io.StringIO`` instance, in case your credentials are not
+  stored in a file but in memory. If omitted, will attempt to find one on your
+  path or fall back to generating a token from GCE metadata.
+* ``session``: an ``aiohttp.ClientSession`` instance to be used for all
+  requests. If omitted, a default session will be created. If you use the
+  default session, you may be interested in using ``Token()`` as a context
+  manager (``async with Token(..) as token:``) or explicitly calling the
+  ``Token.close()`` method to ensure the session is cleaned up appropriately.
+* ``scopes``: an optional list of GCP `scopes`_ for which to generate our
+  token. Only valid (and required!) for `service account`_ authentication.
+
+CLI
+---
+
+This project can also be used to help you manually authenticate to test GCP
+routes, eg. we can list our project's uptime checks with a tool such as
+``curl``:
+
+.. code-block:: console
+
+    # using default application credentials
+    curl \
+      -H "Authorization: Bearer $(python3 -c 'from gcloud.rest.auth import Token; print(Token().get())')" \
+      "https://monitoring.googleapis.com/v3/projects/PROJECT_ID/uptimeCheckConfigs"
+
+    # using a service account (make sure to provide a scope!)
+    export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service.json
+    curl \
+      -H "Authorization: Bearer $(python3 -c 'from gcloud.rest.auth import Token; print(Token(scopes=["'"https://www.googleapis.com/auth/cloud-platform"'"]).get())')" \
+      "https://monitoring.googleapis.com/v3/projects/PROJECT_ID/uptimeCheckConfigs"
+
+    # using legacy account credentials
+    export GOOGLE_APPLICATION_CREDENTIALS=~/.config/gcloud/legacy_credentials/EMAIL@DOMAIN.TLD/adc.json
+    curl \
+      -H "Authorization: Bearer $(python3 -c 'from gcloud.rest.auth import Token; print(Token().get())')" \
+      "https://monitoring.googleapis.com/v3/projects/PROJECT_ID/uptimeCheckConfigs"
+
+.. _service account: https://console.cloud.google.com/iam-admin/serviceaccounts
+.. 
_scopes: https://developers.google.com/identity/protocols/oauth2/scopes """ import importlib.metadata diff --git a/auth/poetry.lock b/auth/poetry.lock index c469be59a..f6160a3fd 100644 --- a/auth/poetry.lock +++ b/auth/poetry.lock @@ -790,4 +790,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">= 3.8, < 4.0" -content-hash = "dce1e7c3c0deacc4e59aa4c48819e72ed8874419a7d4e06589dc3aaf8253955e" +content-hash = "145e73f66e544ca6cac36ac0b1ff7341d8298160c322f6718941a30dec8e1cf9" diff --git a/auth/poetry.rest.lock b/auth/poetry.rest.lock index ad96b908d..3d692e4ec 100644 --- a/auth/poetry.rest.lock +++ b/auth/poetry.rest.lock @@ -430,4 +430,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = ">= 3.8, < 4.0" -content-hash = "3908fc8aa6dc4fb7a815f828ec6cdbc3cf91eac8d03fdbce4eb54be36142a79f" +content-hash = "5ad630524b29b6e5c67bf12f8e1b64476c9a1d41dd2a46f1849df62fc90bfc1b" diff --git a/auth/pyproject.rest.toml b/auth/pyproject.rest.toml index c222c2a81..2efd33829 100644 --- a/auth/pyproject.rest.toml +++ b/auth/pyproject.rest.toml @@ -29,11 +29,15 @@ cryptography = ">= 2.0.0, < 44.0.0" # pin max to < (major + 3) pyjwt = ">= 1.5.3, < 3.0.0" requests = ">= 2.2.1, < 3.0.0" -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] pytest = ">= 4.0.0, < 8.0.0" # pytest-asyncio = ">= 0.16.0, < 0.22.0" pytest-mock = ">= 2.0.0, < 4.0.0" +[[tool.poetry.source]] +name = "pypi" +priority = "primary" + [tool.pytest.ini_options] # addopts = "-Werror" # TODO: fixme # asyncio_mode = "auto" diff --git a/auth/pyproject.toml b/auth/pyproject.toml index e4ec110f2..f91456d91 100644 --- a/auth/pyproject.toml +++ b/auth/pyproject.toml @@ -29,11 +29,15 @@ cryptography = ">= 2.0.0, < 44.0.0" # pin max to < (major + 3) pyjwt = ">= 1.5.3, < 3.0.0" # requests = ">= 2.2.1, < 3.0.0" -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] pytest = ">= 4.0.0, < 8.0.0" pytest-asyncio = ">= 0.16.0, < 0.22.0" pytest-mock = ">= 2.0.0, < 4.0.0" +[[tool.poetry.source]] +name = "pypi" +priority = "primary" + [tool.pytest.ini_options] # addopts = "-Werror" # TODO: fixme asyncio_mode = "auto" diff --git a/bigquery/README.rst b/bigquery/README.rst index 07c8ba9b4..8f54bd540 100644 --- a/bigquery/README.rst +++ b/bigquery/README.rst @@ -4,7 +4,7 @@ This is a shared codebase for ``gcloud-aio-bigquery`` and ``gcloud-rest-bigquery`` -|pypi| |pythons-aio| |pythons-rest| +|pypi| |pythons| Installation ------------ @@ -30,10 +30,6 @@ Please see our `contributing guide`_. :alt: Latest PyPI Version (gcloud-aio-bigquery) :target: https://pypi.org/project/gcloud-aio-bigquery/ -.. |pythons-aio| image:: https://img.shields.io/pypi/pyversions/gcloud-aio-bigquery.svg?style=flat-square&label=python (aio) - :alt: Python Version Support (gcloud-aio-bigquery) +.. |pythons| image:: https://img.shields.io/pypi/pyversions/gcloud-aio-bigquery.svg?style=flat-square&label=python + :alt: Python Version Support :target: https://pypi.org/project/gcloud-aio-bigquery/ - -.. 
|pythons-rest| image:: https://img.shields.io/pypi/pyversions/gcloud-rest-bigquery.svg?style=flat-square&label=python (rest) - :alt: Python Version Support (gcloud-rest-bigquery) - :target: https://pypi.org/project/gcloud-rest-bigquery/ diff --git a/bigquery/gcloud/aio/bigquery/__init__.py b/bigquery/gcloud/aio/bigquery/__init__.py index 4b5f0e198..dc16647f4 100644 --- a/bigquery/gcloud/aio/bigquery/__init__.py +++ b/bigquery/gcloud/aio/bigquery/__init__.py @@ -1,26 +1,29 @@ +# pylint: disable=line-too-long """ This library implements various methods for working with the Google Bigquery APIs. -## Installation +Installation +------------ -```console -$ pip install --upgrade gcloud-aio-bigquery -``` +.. code-block:: console -## Usage + $ pip install --upgrade gcloud-aio-bigquery + +Usage +----- We're still working on documentation -- for now, you can use the -[smoke test][smoke-test] as an example. +`smoke test`_ as an example. -## Emulators +Emulators +--------- -For testing purposes, you may want to use `gcloud-aio-bigquery` along with a -local emulator. Setting the `$BIGQUERY_EMULATOR_HOST` environment variable to +For testing purposes, you may want to use ``gcloud-aio-bigquery`` along with a +local emulator. Setting the ``$BIGQUERY_EMULATOR_HOST`` environment variable to the address of your emulator should be enough to do the trick. -[smoke-test]: -https://github.com/talkiq/gcloud-aio/blob/master/bigquery/tests/integration/smoke_test.py +.. _smoke test: https://github.com/talkiq/gcloud-aio/blob/master/bigquery/tests/integration/smoke_test.py """ import importlib.metadata diff --git a/bigquery/poetry.lock b/bigquery/poetry.lock index a069f51c3..91547dea2 100644 --- a/bigquery/poetry.lock +++ b/bigquery/poetry.lock @@ -895,4 +895,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">= 3.8, < 4.0" -content-hash = "263eec839508900f688a901bc16153587b2a7ca7033ae436fa81f8f68953866b" +content-hash = "a8bae8076e7c2a8859d3b98a5e5b5a25ad70b6e52adae5dfb3d455e482a28362" diff --git a/bigquery/poetry.rest.lock b/bigquery/poetry.rest.lock index bfc7658dd..d4d1d8967 100644 --- a/bigquery/poetry.rest.lock +++ b/bigquery/poetry.rest.lock @@ -523,4 +523,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = ">= 3.8, < 4.0" -content-hash = "f4f44427b5768ade8fbd8964f300bcc624e7719e7757375c1ef69573d9b86ae5" +content-hash = "eba89a98dd69a344559640ff2b66aece533375ecad913154b2220b6c487fc309" diff --git a/bigquery/pyproject.rest.toml b/bigquery/pyproject.rest.toml index 4c8436a2e..d631da8e7 100644 --- a/bigquery/pyproject.rest.toml +++ b/bigquery/pyproject.rest.toml @@ -23,7 +23,7 @@ classifiers = [ python = ">= 3.8, < 4.0" gcloud-rest-auth = ">= 3.1.0, < 6.0.0" -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] gcloud-rest-auth = { path = "../auth" } gcloud-rest-datastore = { path = "../datastore" } gcloud-rest-storage = { path = "../storage" } @@ -31,6 +31,10 @@ pytest = ">= 4.0.0, < 8.0.0" # pytest-asyncio = ">= 0.16.0, < 0.22.0" pytest-mock = ">= 2.0.0, < 4.0.0" +[[tool.poetry.source]] +name = "pypi" +priority = "primary" + [tool.pytest.ini_options] # addopts = "-Werror" # TODO: fixme # asyncio_mode = "auto" diff --git a/bigquery/pyproject.toml b/bigquery/pyproject.toml index 9ce524cdb..4ad1d172b 100644 --- a/bigquery/pyproject.toml +++ b/bigquery/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ python = ">= 3.8, < 4.0" gcloud-aio-auth = ">= 3.1.0, < 6.0.0" -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] 
gcloud-aio-auth = { path = "../auth" } gcloud-aio-datastore = { path = "../datastore" } gcloud-aio-storage = { path = "../storage" } @@ -31,6 +31,10 @@ pytest = ">= 4.0.0, < 8.0.0" pytest-asyncio = ">= 0.16.0, < 0.22.0" pytest-mock = ">= 2.0.0, < 4.0.0" +[[tool.poetry.source]] +name = "pypi" +priority = "primary" + [tool.pytest.ini_options] # addopts = "-Werror" # TODO: fixme asyncio_mode = "auto" diff --git a/conf.py b/conf.py new file mode 100644 index 000000000..3ca695a5e --- /dev/null +++ b/conf.py @@ -0,0 +1,30 @@ +project = 'gcloud-aio' +author = 'TalkIQ' +project_copyright = '2017, TalkIQ' + +autoapi_add_toctree_entry = False +autoapi_dirs = [ + 'auth', + 'bigquery', + 'datastore', + 'kms', + 'pubsub', + 'storage', + 'taskqueue', +] +autoapi_ignore = [ + '*/tests/*', +] + +autodoc_typehints = 'description' + +exclude_patterns = ['README.rst', '*/README.rst'] + +extensions = [ + 'autoapi.extension', + 'sphinx.ext.autodoc', + 'sphinx.ext.napoleon', + 'sphinx.ext.viewcode', +] + +html_theme = 'sizzle' diff --git a/datastore/README.rst b/datastore/README.rst index b35247558..cf2ab19aa 100644 --- a/datastore/README.rst +++ b/datastore/README.rst @@ -4,7 +4,7 @@ This is a shared codebase for ``gcloud-aio-datastore`` and ``gcloud-rest-datastore`` -|pypi| |pythons-aio| |pythons-rest| +|pypi| |pythons| Installation ------------ @@ -30,10 +30,6 @@ Please see our `contributing guide`_. :alt: Latest PyPI Version (gcloud-aio-datastore) :target: https://pypi.org/project/gcloud-aio-datastore/ -.. |pythons-aio| image:: https://img.shields.io/pypi/pyversions/gcloud-aio-datastore.svg?style=flat-square&label=python (aio) - :alt: Python Version Support (gcloud-aio-datastore) +.. |pythons| image:: https://img.shields.io/pypi/pyversions/gcloud-aio-datastore.svg?style=flat-square&label=python + :alt: Python Version Support :target: https://pypi.org/project/gcloud-aio-datastore/ - -.. |pythons-rest| image:: https://img.shields.io/pypi/pyversions/gcloud-rest-datastore.svg?style=flat-square&label=python (rest) - :alt: Python Version Support (gcloud-rest-datastore) - :target: https://pypi.org/project/gcloud-rest-datastore/ diff --git a/datastore/gcloud/aio/datastore/__init__.py b/datastore/gcloud/aio/datastore/__init__.py index 64696f0fd..14cf87ba6 100644 --- a/datastore/gcloud/aio/datastore/__init__.py +++ b/datastore/gcloud/aio/datastore/__init__.py @@ -2,178 +2,182 @@ This library implements various methods for working with the Google Datastore APIs. -## Installation +Installation +------------ -```console -$ pip install --upgrade gcloud-aio-datastore -``` +.. 
code-block:: console -## Usage + $ pip install --upgrade gcloud-aio-datastore + +Usage +----- We're still working on documentation; for now, this should help get you started: -```python -from gcloud.aio.datastore import Datastore -from gcloud.aio.datastore import Direction -from gcloud.aio.datastore import Filter -from gcloud.aio.datastore import GQLQuery -from gcloud.aio.datastore import Key -from gcloud.aio.datastore import PathElement -from gcloud.aio.datastore import PropertyFilter -from gcloud.aio.datastore import PropertyFilterOperator -from gcloud.aio.datastore import PropertyOrder -from gcloud.aio.datastore import Query -from gcloud.aio.datastore import Value - -ds = Datastore('my-gcloud-project', '/path/to/creds.json') -key1 = Key('my-gcloud-project', [PathElement('Kind', 'entityname')]) -key2 = Key('my-gcloud-project', [PathElement('Kind', 'entityname2')]) - -# batched lookups -entities = await ds.lookup([key1, key2]) - -# convenience functions for any datastore mutations -await ds.insert(key1, {'a_boolean': True, 'meaning_of_life': 41}) -await ds.update(key1, {'a_boolean': True, 'meaning_of_life': 42}) -await ds.upsert(key1, {'animal': 'aardvark'}) -await ds.delete(key1) - -# or build your own mutation sequences with full transaction support -transaction = await ds.beginTransaction() -try: - mutations = [ - ds.make_mutation(Operation.INSERT, key1, - properties={'animal': 'sloth'}), - ds.make_mutation(Operation.UPSERT, key1, - properties={'animal': 'aardvark'}), - ds.make_mutation(Operation.INSERT, key2, - properties={'animal': 'aardvark'}), - ] - await ds.commit(transaction, mutations=mutations) -except Exception: - await ds.rollback(transaction) - -# support for partial keys -partial_key = Key('my-gcloud-project', [PathElement('Kind')]) -# and ID allocation or reservation -allocated_keys = await ds.allocateIds([partial_key]) -await ds.reserveIds(allocated_keys) - -# query support -property_filter = PropertyFilter(prop='answer', - operator=PropertyFilterOperator.EQUAL, - value=Value(42)) -property_order = PropertyOrder(prop='length', - direction=Direction.DESCENDING) -query = Query(kind='the_meaning_of_life', - query_filter=Filter(property_filter), - order=property_order) -results = await ds.runQuery(query, session=s) - -# alternatively, query support using GQL -gql_query = GQLQuery('SELECT * FROM meaning_of_life WHERE answer = @answer', - named_bindings={'answer': 42}) -results = await ds.runQuery(gql_query, session=s) - -# close the HTTP session -# Note that other options include: -# * providing your own session: `Datastore(.., session=session)` -# * using a context manager: `async with Datastore(..) as ds:` -await ds.close() -``` - -## Custom Subclasses - -`gcloud-aio-datastore` provides class interfaces mirroring all official Google -API types, ie. `Key` and `PathElement`, `Entity` and `EntityResult`, -`QueryResultBatch`, and `Value`. These types will be returned from arbitrary -Datastore operations, for example `Datastore.allocateIds(...)` will return a -list of `Key` entities. +.. 
code-block:: python
+
+    from gcloud.aio.datastore import Datastore
+    from gcloud.aio.datastore import Direction
+    from gcloud.aio.datastore import Filter
+    from gcloud.aio.datastore import GQLQuery
+    from gcloud.aio.datastore import Key
+    from gcloud.aio.datastore import Operation
+    from gcloud.aio.datastore import PathElement
+    from gcloud.aio.datastore import PropertyFilter
+    from gcloud.aio.datastore import PropertyFilterOperator
+    from gcloud.aio.datastore import PropertyOrder
+    from gcloud.aio.datastore import Query
+    from gcloud.aio.datastore import Value
+
+    ds = Datastore('my-gcloud-project', '/path/to/creds.json')
+    key1 = Key('my-gcloud-project', [PathElement('Kind', 'entityname')])
+    key2 = Key('my-gcloud-project', [PathElement('Kind', 'entityname2')])
+
+    # batched lookups
+    entities = await ds.lookup([key1, key2])
+
+    # convenience functions for any datastore mutations
+    await ds.insert(key1, {'a_boolean': True, 'meaning_of_life': 41})
+    await ds.update(key1, {'a_boolean': True, 'meaning_of_life': 42})
+    await ds.upsert(key1, {'animal': 'aardvark'})
+    await ds.delete(key1)
+
+    # or build your own mutation sequences with full transaction support
+    transaction = await ds.beginTransaction()
+    try:
+        mutations = [
+            ds.make_mutation(Operation.INSERT, key1,
+                             properties={'animal': 'sloth'}),
+            ds.make_mutation(Operation.UPSERT, key1,
+                             properties={'animal': 'aardvark'}),
+            ds.make_mutation(Operation.INSERT, key2,
+                             properties={'animal': 'aardvark'}),
+        ]
+        await ds.commit(transaction, mutations=mutations)
+    except Exception:
+        await ds.rollback(transaction)
+
+    # support for partial keys
+    partial_key = Key('my-gcloud-project', [PathElement('Kind')])
+    # and ID allocation or reservation
+    allocated_keys = await ds.allocateIds([partial_key])
+    await ds.reserveIds(allocated_keys)
+
+    # query support
+    property_filter = PropertyFilter(prop='answer',
+                                     operator=PropertyFilterOperator.EQUAL,
+                                     value=Value(42))
+    property_order = PropertyOrder(prop='length',
+                                   direction=Direction.DESCENDING)
+    query = Query(kind='the_meaning_of_life',
+                  query_filter=Filter(property_filter),
+                  order=property_order)
+    results = await ds.runQuery(query)
+
+    # alternatively, query support using GQL
+    gql_query = GQLQuery(
+        'SELECT * FROM meaning_of_life WHERE answer = @answer',
+        named_bindings={'answer': 42})
+    results = await ds.runQuery(gql_query)
+
+    # close the HTTP session
+    # Note that other options include:
+    # * providing your own session: `Datastore(.., session=session)`
+    # * using a context manager: `async with Datastore(..) as ds:`
+    await ds.close()
+
+Custom Subclasses
+-----------------
+
+``gcloud-aio-datastore`` provides class interfaces mirroring all official
+Google API types, ie. ``Key`` and ``PathElement``, ``Entity`` and
+``EntityResult``, ``QueryResultBatch``, and ``Value``. These types will be
+returned from arbitrary Datastore operations, for example
+``Datastore.allocateIds(...)`` will return a list of ``Key`` entities.
 
 For advanced usage, all of these datatypes may be overloaded. A common
 use-case may be to deserialize entities into more specific classes. For
 example, given a custom entity class such as:
 
-```python
-class MyEntityKind(gcloud.aio.datastore.Entity):
-    def __init__(self, key, properties = None) -> None:
-        self.key = key
-        self.is_an_aardvark = (properties or {}).get('aardvark', False)
+.. 
code-block:: python
+
+    class MyEntityKind(gcloud.aio.datastore.Entity):
+        def __init__(self, key, properties = None) -> None:
+            self.key = key
+            self.is_an_aardvark = (properties or {}).get('aardvark', False)
 
-    def __repr__(self):
-        return "I'm an aardvark!" if self.is_an_aardvark else "Sorry, nope"
-```
+        def __repr__(self):
+            return "I'm an aardvark!" if self.is_an_aardvark else "Sorry, nope"
 
-We can then configure `gcloud-aio-datastore` to serialize/deserialize from this
-custom entity class with:
+We can then configure ``gcloud-aio-datastore`` to serialize/deserialize from
+this custom entity class with:
 
-```python
-class MyCustomDatastore(gcloud.aio.datastore.Datastore):
-    entity_result_kind.entity_kind = MyEntityKind
-```
+.. code-block:: python
+
+    class MyCustomDatastore(gcloud.aio.datastore.Datastore):
+        entity_result_kind.entity_kind = MyEntityKind
 
 The full list of classes which may be overridden in this way is:
 
-```python
-class MyVeryCustomDatastore(gcloud.aio.datastore.Datastore):
-    datastore_operation_kind = DatastoreOperation
-    entity_result_kind = EntityResult
-    entity_result_kind.entity_kind = Entity
-    entity_result_kind.entity_kind.key_kind = Key
-    key_kind = Key
-    key_kind.path_element_kind = PathElement
-    mutation_result_kind = MutationResult
-    mutation_result_kind.key_kind = Key
-    query_result_batch_kind = QueryResultBatch
-    query_result_batch_kind.entity_result_kind = EntityResult
-    value_kind = Value
-    value_kind.key_kind = Key
-
-class MyVeryCustomQuery(gcloud.aio.datastore.Query):
-    value_kind = Value
-
-class MyVeryCustomGQLQuery(gcloud.aio.datastore.GQLQuery):
-    value_kind = Value
-```
-
-You can then drop-in the `MyVeryCustomDatastore` class anywhere where you
-previously used `Datastore` and do the same for `Query` and `GQLQuery`.
+.. code-block:: python
+
+    class MyVeryCustomDatastore(gcloud.aio.datastore.Datastore):
+        datastore_operation_kind = DatastoreOperation
+        entity_result_kind = EntityResult
+        entity_result_kind.entity_kind = Entity
+        entity_result_kind.entity_kind.key_kind = Key
+        key_kind = Key
+        key_kind.path_element_kind = PathElement
+        mutation_result_kind = MutationResult
+        mutation_result_kind.key_kind = Key
+        query_result_batch_kind = QueryResultBatch
+        query_result_batch_kind.entity_result_kind = EntityResult
+        value_kind = Value
+        value_kind.key_kind = Key
+
+    class MyVeryCustomQuery(gcloud.aio.datastore.Query):
+        value_kind = Value
+
+    class MyVeryCustomGQLQuery(gcloud.aio.datastore.GQLQuery):
+        value_kind = Value
+
+You can then drop in the ``MyVeryCustomDatastore`` class anywhere you
+previously used ``Datastore`` and do the same for ``Query`` and ``GQLQuery``.
 
 To override any sub-key, you'll need to override any parents which use it. For
-example, if you want to use a custom Key kind and be able to use queries with
-it, you will need to implement your own `Value`, `Query`, and `GQLQuery`
-classes and wire them up to the rest of the custom classes:
+example, if you want to use a custom ``Key`` kind and be able to use queries
+with it, you will need to implement your own ``Value``, ``Query``, and
+``GQLQuery`` classes and wire them up to the rest of the custom classes:
+
+.. 
code-block:: python -```python -class MyKey(gcloud.aio.datastore.Key): - pass + class MyKey(gcloud.aio.datastore.Key): + pass -class MyValue(gcloud.aio.datastore.Value): - key_kind = MyKey + class MyValue(gcloud.aio.datastore.Value): + key_kind = MyKey -class MyEntity(gcloud.aio.datastore.Entity): - key_kind = MyKey - value_kind = MyValue + class MyEntity(gcloud.aio.datastore.Entity): + key_kind = MyKey + value_kind = MyValue -class MyEntityResult(gcloud.aio.datastore.EntityResult): - entity_kind = MyEntity + class MyEntityResult(gcloud.aio.datastore.EntityResult): + entity_kind = MyEntity -class MyQueryResultBatch(gcloud.aio.datastore.QueryResultBatch): - entity_result_kind = MyEntityResult + class MyQueryResultBatch(gcloud.aio.datastore.QueryResultBatch): + entity_result_kind = MyEntityResult -class MyDatastore(gcloud.aio.datastore.Datastore): - key_kind = MyKey - entity_result_kind = MyEntityResult - query_result_batch = MyQueryResultBatch - value_kind = MyValue + class MyDatastore(gcloud.aio.datastore.Datastore): + key_kind = MyKey + entity_result_kind = MyEntityResult + query_result_batch = MyQueryResultBatch + value_kind = MyValue -class MyQuery(gcloud.aio.datastore.Query): - value_kind = MyValue + class MyQuery(gcloud.aio.datastore.Query): + value_kind = MyValue -class MyGQLQuery(gcloud.aio.datastore.GQLQuery): - value_kind = MyValue -``` + class MyGQLQuery(gcloud.aio.datastore.GQLQuery): + value_kind = MyValue """ import importlib.metadata diff --git a/datastore/poetry.lock b/datastore/poetry.lock index 3a4770f6c..e00e453a7 100644 --- a/datastore/poetry.lock +++ b/datastore/poetry.lock @@ -879,4 +879,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">= 3.8, < 4.0" -content-hash = "b38b918d4c2f78c7626594007df17d4065448c762aa45c0b24920ec97c2a48ff" +content-hash = "3fa408528604910e47291a39ac4059e28b36f48a3faf81b7738dfb5fe6f81e7a" diff --git a/datastore/poetry.rest.lock b/datastore/poetry.rest.lock index 369753094..8fb749bc6 100644 --- a/datastore/poetry.rest.lock +++ b/datastore/poetry.rest.lock @@ -507,4 +507,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = ">= 3.8, < 4.0" -content-hash = "058a96ad846b47713f7b22f9037f131ac904b6c18efbb0530bc536bedb5a5304" +content-hash = "047ff9dcd474e44b6a4cf53588bd279ca717f040df552d200550235f239aff9b" diff --git a/datastore/pyproject.rest.toml b/datastore/pyproject.rest.toml index 9d371a62f..b2495c0d7 100644 --- a/datastore/pyproject.rest.toml +++ b/datastore/pyproject.rest.toml @@ -23,7 +23,7 @@ classifiers = [ python = ">= 3.8, < 4.0" gcloud-rest-auth = ">= 3.1.0, < 6.0.0" -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] # aiohttp = ">= 3.3.0, < 4.0.0" gcloud-rest-auth = { path = "../auth" } gcloud-rest-storage = { path = "../storage" } @@ -31,6 +31,10 @@ pytest = ">= 4.0.0, < 8.0.0" # pytest-asyncio = ">= 0.16.0, < 0.22.0" pytest-mock = ">= 2.0.0, < 4.0.0" +[[tool.poetry.source]] +name = "pypi" +priority = "primary" + [tool.pytest.ini_options] # addopts = "-Werror" # TODO: fixme # asyncio_mode = "auto" diff --git a/datastore/pyproject.toml b/datastore/pyproject.toml index 268393ed9..9f1ba9841 100644 --- a/datastore/pyproject.toml +++ b/datastore/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ python = ">= 3.8, < 4.0" gcloud-aio-auth = ">= 3.1.0, < 6.0.0" -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] aiohttp = ">= 3.3.0, < 4.0.0" gcloud-aio-auth = { path = "../auth" } gcloud-aio-storage = { path = "../storage" } @@ -31,6 +31,10 @@ pytest = 
">= 4.0.0, < 8.0.0" pytest-asyncio = ">= 0.16.0, < 0.22.0" pytest-mock = ">= 2.0.0, < 4.0.0" +[[tool.poetry.source]] +name = "pypi" +priority = "primary" + [tool.pytest.ini_options] # addopts = "-Werror" # TODO: fixme asyncio_mode = "auto" diff --git a/docs/aio.md b/docs/aio.md deleted file mode 100644 index 8adf9c7b9..000000000 --- a/docs/aio.md +++ /dev/null @@ -1,89 +0,0 @@ -## Emulator Usage - -All of our API clients are integrated to make use of the official Google emulators, where those exist. As a general rule, this means you can set the `$FOO_EMULATOR_HOST` environment variable (where `$FOO` is the service being emulated, such as `PUBSUB_EMULATOR_HOST`) and your `gcloud` client will point to the emulator rather than the live APIs. - -Alternatively, you can provide the `api_root` option to any relevant constructor to have full control over the API being used. Note that while the environment variable expects just the hostname (to support the standard Google emulator usecase), if you assign this value manually via the contructor arg you must include the entire path. - -If you override the API value (either by constructor option or environment variable), tls verification and other such security measures will be disabled as needed. This feature is not intended for production use! - -Note also that this library only supports a single version of the Google APIs at a given time (generally the most recent version). If the API you point to does not conform to the correct version of the spec, we make no promises as to what might happen. - -For example: - -```python -client = gcloud.aio.datastore.Datastore() -assert client._api_root == 'https://datastore.googleapis.com/v1' - -# generally set via `gcloud emulators datastore env-init` -os.environ['DATASTORE_EMULATOR_HOST'] = '127.0.0.1:8432' -client = gcloud.aio.datastore.Datastore() -assert client._api_root == 'http://127.0.0.1:8432/v1' - -client = gcloud.aio.datastore.Datastore(api_root='http://example.com/datastoreapi') -assert client._api_root == 'http://example.com/datastoreapi' -``` - -Note that, in any case, these values will be loaded at the time the class instance is constructed. - -## Session Management - -Since we use `aiohttp` under the hood, ensuring we properly close our `ClientSession` upon shutdown is important to avoid "unclosed connection" errors. - -As such, there are several possible ways to handle session management depending on your use-case. Note that these methods apply to all `gcloud-aio-*` classes. - -**Manually Close the Class** - -If you've created the class manually, you'll have to close it: - -```python -client = gcloud.aio.datastore.Datastore(...) -# use the class -await client.close() -``` - -**Context Manager** - -Alternatively, you can let a context manager handle that for you: - -```python -async with gcloud.aio.datastore.Datastore(...) as client: - # use the class -``` - -**Manage Your Own Session** - -If you need to manage your own session, you'll want to make sure you handle everything: - -```python -async with aiohttp.ClientSession() as session: - client = gcloud.aio.datastore.Datastore(..., session=session) - # use the class - - # DO NOT call `client.close()`, or the `async with ClientSession` will - # attempt to close a second time. -``` - -## Token Management - -By default, you should not need to care about managing a `gcloud.aio.auth.Token` instance. When you initialize a given client library, it will handle creating a token with the correct scopes. 
- -However, in some cases you may find it valuable to share a token across multiple libraries (eg. to include the HTTP calls in a single session or to reduce how many individual refreshes need to happen). In that case, you can pass it in as follows. - -Note that if you are using a service account file, setting explicit scopes is mandatory! As such, you'll need to make sure your token has the correct scopes for all the libraries you plan to use it with. - -```python -scopes = [ - 'https://www.googleapis.com/auth/cloudkms', - 'https://www.googleapis.com/auth/datastore', -] - -async with gcloud.aio.auth.Token(scopes=scopes) as token: - datastore = gcloud.aio.datastore.Datastore(..., token=token) - kms = gcloud.aio.kms.KMS(..., token=token) -``` - -## Compatibility - -Here are notes on compatibility issues. While we cannot offer specific support for issues originating from other projects, we can point toward known resolutions. - -- Google Cloud Functions pins `yarl`; `gcloud-aio-*` indirectly requires `yarl` via `aiohttp` and an unpinned version of `yarl` can cause your cloud functions to stop building. Please pin your requirements as described here: [Google Cloud Function Dependencies](https://cloud.google.com/functions/docs/writing/specifying-dependencies-python). diff --git a/docs/gcloud.md b/docs/gcloud.md deleted file mode 100644 index 3feab8b32..000000000 --- a/docs/gcloud.md +++ /dev/null @@ -1,9 +0,0 @@ -These docs cover two projects: `gcloud-aio-*` and `gcloud-rest-*`. Both of them are HTTP implementations of the Google Cloud client libraries. The former has been built to work with Python 3's asyncio. The later is a threadsafe `requests`-based implementation. - -For supported clients, see the modules in the sidebar. - -## Installation - -```console -$ pip install --upgrade gcloud-{aio,rest}-{client_name} -``` diff --git a/docs/rest.md b/docs/rest.md deleted file mode 100644 index 859005a7e..000000000 --- a/docs/rest.md +++ /dev/null @@ -1,45 +0,0 @@ -## Emulator Usage - -All of our API clients are integrated to make use of the official Google emulators, where those exist. As a general rule, this means you can set the `$FOO_EMULATOR_HOST` environment variable (where `$FOO` is the service being emulated, such as `PUBSUB_EMULATOR_HOST`) and your `gcloud` client will point to the emulator rather than the live APIs. - -Alternatively, you can provide the `api_root` option to any relevant constructor to have full control over the API being used. Note that while the environment variable expects just the hostname (to support the standard Google emulator usecase), if you assign this value manually via the contructor arg you must include the entire path. - -If you override the API value (either by constructor option or environment variable), tls verification and other such security measures will be disabled as needed. This feature is not intended for production use! - -Note also that this library only supports a single version of the Google APIs at a given time (generally the most recent version). If the API you point to does not conform to the correct version of the spec, we make no promises as to what might happen. 
-
-For example:
-
-```python
-client = gcloud.rest.datastore.Datastore()
-assert client._api_root == 'https://datastore.googleapis.com/v1'
-
-# generally set via `gcloud emulators datastore env-init`
-os.environ['DATASTORE_EMULATOR_HOST'] = '127.0.0.1:8432'
-client = gcloud.rest.datastore.Datastore()
-assert client._api_root == 'http://127.0.0.1:8432/v1'
-
-client = gcloud.rest.datastore.Datastore(api_root='http://example.com/datastoreapi')
-assert client._api_root == 'http://example.com/datastoreapi'
-```
-
-Note that, in any case, these values will be loaded at the time the class instance is constructed.
-
-## Token Management
-
-By default, you should not need to care about managing a `gcloud.rest.auth.Token` instance. When you initialize a given client library, it will handle creating a token with the correct scopes.
-
-However, in some cases you may find it valuable to share a token across multiple libraries (eg. to include the HTTP calls in a single session or to reduce how many individual refreshes need to happen). In that case, you can pass it in as follows.
-
-Note that if you are using a service account file, setting explicit scopes is mandatory! As such, you'll need to make sure your token has the correct scopes for all the libraries you plan to use it with.
-
-```python
-scopes = [
-    'https://www.googleapis.com/auth/cloudkms',
-    'https://www.googleapis.com/auth/datastore',
-]
-
-with gcloud.rest.auth.Token(scopes=scopes) as token:
-    datastore = gcloud.rest.datastore.Datastore(..., token=token)
-    kms = gcloud.rest.kms.KMS(..., token=token)
-```
diff --git a/index.rst b/index.rst
new file mode 100644
index 000000000..1dc8b3e43
--- /dev/null
+++ b/index.rst
@@ -0,0 +1,26 @@
+(Asyncio OR Threadsafe) Google Cloud Client Libraries for Python
+================================================================
+
+These docs describe the shared codebase for ``gcloud-aio-*`` and
+``gcloud-rest-*``. Both of them are HTTP implementations of the Google Cloud
+client libraries: the former has been built to work with asyncio (using the
+``aiohttp`` library) and the latter is a threadsafe ``requests``-based
+implementation.
+
+Table of Contents
+-----------------
+
+.. toctree::
+   :maxdepth: 1
+
+   autoapi/auth/index
+   autoapi/bigquery/index
+   autoapi/datastore/index
+   autoapi/kms/index
+   autoapi/pubsub/index
+   autoapi/storage/index
+   autoapi/taskqueue/index
+   .github/CONTRIBUTING.rst
+   .github/RELEASE.rst
+
+* :ref:`modindex`
diff --git a/kms/README.rst b/kms/README.rst
index a8c423816..fa0e4354d 100644
--- a/kms/README.rst
+++ b/kms/README.rst
@@ -3,7 +3,7 @@
 This is a shared codebase for ``gcloud-aio-kms`` and ``gcloud-rest-kms``
 
-|pypi| |pythons-aio| |pythons-rest|
+|pypi| |pythons|
 
 Installation
 ------------
@@ -29,10 +29,6 @@ Please see our `contributing guide`_.
     :alt: Latest PyPI Version (gcloud-aio-kms)
     :target: https://pypi.org/project/gcloud-aio-kms/
 
-.. |pythons-aio| image:: https://img.shields.io/pypi/pyversions/gcloud-aio-kms.svg?style=flat-square&label=python (aio)
-    :alt: Python Version Support (gcloud-aio-kms)
+.. |pythons| image:: https://img.shields.io/pypi/pyversions/gcloud-aio-kms.svg?style=flat-square&label=python
+    :alt: Python Version Support
     :target: https://pypi.org/project/gcloud-aio-kms/
-
-.. 
|pythons-rest| image:: https://img.shields.io/pypi/pyversions/gcloud-rest-kms.svg?style=flat-square&label=python (rest) - :alt: Python Version Support (gcloud-rest-kms) - :target: https://pypi.org/project/gcloud-rest-kms/ diff --git a/kms/gcloud/aio/kms/__init__.py b/kms/gcloud/aio/kms/__init__.py index c07e5ee34..7d98fb448 100644 --- a/kms/gcloud/aio/kms/__init__.py +++ b/kms/gcloud/aio/kms/__init__.py @@ -1,42 +1,45 @@ """ This library implements various methods for working with the Google KMS APIs. -## Installation +Installation +------------ -```console -$ pip install --upgrade gcloud-aio-kms -``` +.. code-block:: console -## Usage + $ pip install --upgrade gcloud-aio-kms + +Usage +----- We're still working on more complete documentation, but roughly you can do: -```python -from gcloud.aio.kms import KMS -from gcloud.aio.kms import decode -from gcloud.aio.kms import encode +.. code-block:: python + + from gcloud.aio.kms import KMS + from gcloud.aio.kms import decode + from gcloud.aio.kms import encode -kms = KMS('my-kms-project', 'my-keyring', 'my-key-name') + kms = KMS('my-kms-project', 'my-keyring', 'my-key-name') -# encrypt -plaintext = b'the-best-animal-is-the-aardvark' -ciphertext = await kms.encrypt(encode(plaintext)) + # encrypt + plaintext = b'the-best-animal-is-the-aardvark' + ciphertext = await kms.encrypt(encode(plaintext)) -# decrypt -assert decode(await kms.decrypt(ciphertext)) == plaintext + # decrypt + assert decode(await kms.decrypt(ciphertext)) == plaintext -# close the HTTP session -# Note that other options include: -# * providing your own session: `KMS(.., session=session)` -# * using a context manager: `async with KMS(..) as kms:` -await kms.close() -``` + # close the HTTP session + # Note that other options include: + # * providing your own session: `KMS(.., session=session)` + # * using a context manager: `async with KMS(..) as kms:` + await kms.close() -## Emulators +Emulators +--------- -For testing purposes, you may want to use `gcloud-aio-kms` along with a local -emulator. Setting the `$KMS_EMULATOR_HOST` environment variable to the address -of your emulator should be enough to do the trick. +For testing purposes, you may want to use ``gcloud-aio-kms`` along with a local +emulator. Setting the ``$KMS_EMULATOR_HOST`` environment variable to the +address of your emulator should be enough to do the trick. 
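+
+As a minimal sketch (the emulator address here is illustrative only; use
+whichever address your emulator actually listens on), that might look like:
+
+.. code-block:: python
+
+    import os
+
+    # the emulator address is read when the client is constructed, so set
+    # the variable before creating your KMS instance
+    os.environ['KMS_EMULATOR_HOST'] = '127.0.0.1:8233'
+
+    kms = KMS('my-kms-project', 'my-keyring', 'my-key-name')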
""" import importlib.metadata diff --git a/kms/poetry.lock b/kms/poetry.lock index e43d9d92c..cd77cc3fa 100644 --- a/kms/poetry.lock +++ b/kms/poetry.lock @@ -775,4 +775,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">= 3.8, < 4.0" -content-hash = "f6f49433e2cf922193197707ab367760361b2c1f4d14b83039a3c5015e637592" +content-hash = "8f22aa9a961f3b850fec2357fa2a098dc40059b512881912ca18747bf37020df" diff --git a/kms/poetry.rest.lock b/kms/poetry.rest.lock index 225b6bc0b..c57210f89 100644 --- a/kms/poetry.rest.lock +++ b/kms/poetry.rest.lock @@ -433,4 +433,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = ">= 3.8, < 4.0" -content-hash = "24a217adf3c1db32aee1b20ac06e6d4641c2279f9c35b0a0f1212fae8fbea51c" +content-hash = "8d1eb0b16cbcf62d8d8198976595efb7eda1069bc5d31d4d32c3867cd438b35d" diff --git a/kms/pyproject.rest.toml b/kms/pyproject.rest.toml index 7ffaf83a6..bba731c2f 100644 --- a/kms/pyproject.rest.toml +++ b/kms/pyproject.rest.toml @@ -23,10 +23,14 @@ classifiers = [ python = ">= 3.8, < 4.0" gcloud-rest-auth = ">= 3.1.0, < 6.0.0" -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] gcloud-rest-auth = { path = "../auth" } pytest = ">= 4.0.0, < 8.0.0" +[[tool.poetry.source]] +name = "pypi" +priority = "primary" + [tool.pytest.ini_options] addopts = "-Werror" diff --git a/kms/pyproject.toml b/kms/pyproject.toml index 453a9be69..cb9f30c4b 100644 --- a/kms/pyproject.toml +++ b/kms/pyproject.toml @@ -23,10 +23,14 @@ classifiers = [ python = ">= 3.8, < 4.0" gcloud-aio-auth = ">= 3.1.0, < 6.0.0" -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] gcloud-aio-auth = { path = "../auth" } pytest = ">= 4.0.0, < 8.0.0" +[[tool.poetry.source]] +name = "pypi" +priority = "primary" + [tool.pytest.ini_options] addopts = "-Werror" diff --git a/pubsub/README.rst b/pubsub/README.rst index 855b37df5..6c03fca48 100644 --- a/pubsub/README.rst +++ b/pubsub/README.rst @@ -4,7 +4,7 @@ This is a shared codebase for ``gcloud-aio-pubsub`` and ``gcloud-rest-pubsub`` -|pypi| |pythons-aio| |pythons-rest| +|pypi| |pythons| Installation ------------ @@ -30,10 +30,6 @@ Please see our `contributing guide`_. :alt: Latest PyPI Version :target: https://pypi.org/project/gcloud-aio-pubsub/ -.. |pythons-aio| image:: https://img.shields.io/pypi/pyversions/gcloud-aio-pubsub.svg?style=flat-square&label=python (aio) - :alt: Python Version Support (gcloud-aio-pubsub) +.. |pythons| image:: https://img.shields.io/pypi/pyversions/gcloud-aio-pubsub.svg?style=flat-square&label=python + :alt: Python Version Support :target: https://pypi.org/project/gcloud-aio-pubsub/ - -.. |pythons-rest| image:: https://img.shields.io/pypi/pyversions/gcloud-rest-pubsub.svg?style=flat-square&label=python (rest) - :alt: Python Version Support (gcloud-rest-pubsub) - :target: https://pypi.org/project/gcloud-rest-pubsub/ diff --git a/pubsub/gcloud/aio/pubsub/__init__.py b/pubsub/gcloud/aio/pubsub/__init__.py index 32801f6e8..4b4d730c8 100644 --- a/pubsub/gcloud/aio/pubsub/__init__.py +++ b/pubsub/gcloud/aio/pubsub/__init__.py @@ -1,95 +1,99 @@ +# pylint: disable=line-too-long """ This library implements various methods for working with the Google Pubsub APIs. -## Installation +Installation +------------ -```console -$ pip install --upgrade gcloud-aio-pubsub -``` +.. 
code-block:: console
 
-## Usage
+    $ pip install --upgrade gcloud-aio-pubsub
 
-### Subscriber
+Usage
+-----
 
-`gcloud-aio-pubsub` provides `SubscriberClient` as an interface to call
+Subscriber
+~~~~~~~~~~
+
+``gcloud-aio-pubsub`` provides ``SubscriberClient`` as an interface to call
 pubsub's HTTP API:
 
-```python
-from gcloud.aio.pubsub import SubscriberClient
-from gcloud.aio.pubsub import SubscriberMessage
-
-client = SubscriberClient()
-# create subscription
-await client.create_subscription(
-    'projects/<project_name>/subscriptions/<subscription_name>',
-    'projects/<project_name>/topics/<topic_name>')
-
-# pull messages
-messages: List[SubscriberMessage] = await client.pull(
-    'projects/<project_name>/subscriptions/<subscription_name>',
-    max_messages=10)
-```
+.. code-block:: python
+
+    from gcloud.aio.pubsub import SubscriberClient
+    from gcloud.aio.pubsub import SubscriberMessage
+
+    client = SubscriberClient()
+    # create subscription
+    await client.create_subscription(
+        'projects/<project_name>/subscriptions/<subscription_name>',
+        'projects/<project_name>/topics/<topic_name>')
+
+    # pull messages
+    messages: List[SubscriberMessage] = await client.pull(
+        'projects/<project_name>/subscriptions/<subscription_name>',
+        max_messages=10)
 
-There's also `gcloud.aio.pubsub.subscribe` helper function you can use to setup
-a pubsub processing pipeline. It is built with `asyncio` and thus only
-available in the `gcloud-aio-pubsub` package. The usage is fairly simple:
+There's also a ``gcloud.aio.pubsub.subscribe`` helper function you can use to
+set up a pubsub processing pipeline. It is built with ``asyncio`` and thus only
+available in the ``gcloud-aio-pubsub`` package. The usage is fairly simple:
 
-```python
-from gcloud.aio.pubsub import SubscriberClient
-from gcloud.aio.pubsub import subscribe
-
-subscriber_client = SubscriberClient()
-
-async def handler(message):
-    return
-
-await subscribe(
-    'projects/<project_name>/subscriptions/<subscription_name>',
-    handler,
-    subscriber_client,
-    num_producers=1,
-    max_messages_per_producer=100,
-    ack_window=0.3,
-    num_tasks_per_consumer=1,
-    enable_nack=True,
-    nack_window=0.3,
-)
-```
+.. code-block:: python
+
+    from gcloud.aio.pubsub import SubscriberClient
+    from gcloud.aio.pubsub import subscribe
+
+    subscriber_client = SubscriberClient()
+
+    async def handler(message):
+        return
+
+    await subscribe(
+        'projects/<project_name>/subscriptions/<subscription_name>',
+        handler,
+        subscriber_client,
+        num_producers=1,
+        max_messages_per_producer=100,
+        ack_window=0.3,
+        num_tasks_per_consumer=1,
+        enable_nack=True,
+        nack_window=0.3,
+    )
 
 While defaults are somewhat sensible, it is highly recommended to performance
-test your application and tweak function parameter to your specific needs.
-Here's a few hints:
+test your application and tweak the function parameters to your specific
+needs. Here are a few hints:
 
-- `handler`: An async function that will be called for each message. It should
-  accept an instance of `SubscriberMessage` as its only argument and return
-  `None` if the message should be acked. An exception raised within the handler
-  will result in the message being left to expire, and thus it will be
-  redelivered according to your subscription's ack deadline.
-- `num_producers`: Number of workers that will be making `pull` requests to
-  pubsub. Please note that a worker will only fetch new batch once the
-  `handler` was called for each message from the previous batch. This means
-  that running only a single worker will most likely make your application IO
-  bound. If you notice this being an issue don't hesitate to bump this
-  parameter.
-- `max_messages_per_producer`: Number of pubsub messages a worker will try to
-  fetch in a single batch. This value is passed to `pull` [endpoint][endpoint]
-  as `maxMessages` parameter. A rule of thumb here is the faster your handler
-  is the bigger this value should be.
-- `ack_window`: Ack requests are handled separately and are done in batches.
-  This parameters specifies how often ack requests will be made. Setting it to
-  `0.0` will effectively disable batching.
-- `num_tasks_per_consumer`: How many `handle` calls a worker can make until it
-  blocks to wait for them to return. If you process messages independently from
-  each other you should be good with the default value of `1`. If you do
-  something fancy (e.g. aggregate messages before processing them), you'll want
-  a higher pool here. You can think of `num_producers * num_tasks_per_consumer`
-  as an upper limit of how many messages can possibly be within your
-  application state at any given moment.
-- `enable_nack`: If enabled messages for which `callback` raised an exception
-  will be explicitly nacked using `modifyAckDeadline` endpoint so they can be
-  retried immediately.
-- `nack_window`: Same as `ack_window` but for nack requests.
+- ``handler``: An async function that will be called for each message. It
+  should accept an instance of ``SubscriberMessage`` as its only argument and
+  return ``None`` if the message should be acked. An exception raised within
+  the handler will result in the message being left to expire, and thus it will
+  be redelivered according to your subscription's ack deadline.
+- ``num_producers``: Number of workers that will be making ``pull`` requests to
+  pubsub. Please note that a worker will only fetch a new batch once the
+  ``handler`` was called for each message from the previous batch. This means
+  that running only a single worker will most likely make your application IO
+  bound. If you notice this being an issue, don't hesitate to bump this
+  parameter.
+- ``max_messages_per_producer``: Number of pubsub messages a worker will try to
+  fetch in a single batch. This value is passed to the ``pull`` `endpoint`_ as
+  the ``maxMessages`` parameter. A rule of thumb here is: the faster your
+  handler is, the bigger this value should be.
+- ``ack_window``: Ack requests are handled separately and are done in batches.
+  This parameter specifies how often ack requests will be made. Setting it to
+  ``0.0`` will effectively disable batching.
+- ``num_tasks_per_consumer``: How many ``handle`` calls a worker can make until
+  it blocks to wait for them to return. If you process messages independently
+  from each other you should be good with the default value of ``1``. If you do
+  something fancy (e.g. aggregate messages before processing them), you'll want
+  a higher pool here. You can think of ``num_producers * num_tasks_per_consumer``
+  as an upper limit of how many messages can possibly be within your
+  application state at any given moment.
+- ``enable_nack``: If enabled, messages for which ``callback`` raised an
+  exception will be explicitly nacked using the ``modifyAckDeadline`` endpoint
+  so they can be retried immediately.
+- ``nack_window``: Same as ``ack_window`` but for nack requests.
 
 Note that this method was built under the assumption that it is the main
 thread of your application. It may work just fine otherwise, but be aware that the
@@ -97,70 +101,74 @@ async def handler(message):
 As it is generally assumed to run in the foreground, it relies on task
 cancellation to shut itself down (ie. caused by process termination). To cancel
-it from a thread, you can send an `asyncio.CancelledError` event via
-`Task.cancel()`:
+it from a thread, you can send an ``asyncio.CancelledError`` event via
+``Task.cancel()``:
 
-```python
-subscribe_task = asyncio.create_task(gcloud.aio.pubsub.subscribe(...))
+.. 
code-block:: python -# snip + subscribe_task = asyncio.create_task(gcloud.aio.pubsub.subscribe(...)) -subscribe_task.cancel() -``` + # snip -### Prometheus Metrics + subscribe_task.cancel() + +Prometheus Metrics +~~~~~~~~~~~~~~~~~~ If you like pull-based metrics like Prometheus you will be pleased to know that the subscriber records Prometheus metrics in the form -`gcloud_aio_pubsub_`, which will have no effect if you don't use +``gcloud_aio_pubsub_``, which will have no effect if you don't use Prometheus to scrape app metrics: -- `subscriber_batch_size` - [histogram] how many messages were pulled from the - subscription in a single batch -- `subscriber_consume` (labels: `outcome = {'succeeded', 'cancelled', 'failed', - 'failfast'}`) - [counter] a consume operation has completed with a given - outcome -- `subscriber_consume_latency_seconds` (labels: `phase = {'receive', - 'queueing', 'runtime'}`) - [histogram] how many seconds taken to receive a +- ``subscriber_batch_size`` - [histogram] how many messages were pulled from + the subscription in a single batch +- ``subscriber_consume`` (labels: ``outcome = {'succeeded', 'cancelled', + 'failed', 'failfast'}``) - [counter] a consume operation has completed with a + given outcome +- ``subscriber_consume_latency_seconds`` (labels: ``phase = {'receive', + 'queueing', 'runtime'}``) - [histogram] how many seconds taken to receive a message, while waiting for processing, or to complete the callback -- `subscriber_batch_status` (labels: `component = {'acker', 'nacker'}, outcome - = {'succeeded', 'failed'}`) - [counter] a batch has succeeded or failed to be - acked or nacked -- `subscriber_messages_processed` (labels: `component = {'acker', 'nacker'}`) - - [counter] the number of messages that were processed, either by being acked - or nacked -- `subscriber_messages_received` - [counter] the number of messages pulled from - pubsub - -### Metrics Agent (Deprecated) - -`subscribe` has also an optional `metrics_client` argument which will be +- ``subscriber_batch_status`` (labels: ``component = {'acker', 'nacker'}, + outcome = {'succeeded', 'failed'}``) - [counter] a batch has succeeded or + failed to be acked or nacked +- ``subscriber_messages_processed`` (labels: ``component = {'acker', + 'nacker'}``) - [counter] the number of messages that were processed, either + by being acked or nacked +- ``subscriber_messages_received`` - [counter] the number of messages pulled + from pubsub + +Metrics Agent (Deprecated) +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``subscribe`` has also an optional ``metrics_client`` argument which will be removed in a future release. You can provide any metrics agent that implements -the same interface as `MetricsAgent` (Datadog client will do ;) ) and get the +the same interface as ``MetricsAgent`` (Datadog client will do ;) ) and get the following metrics: -- `pubsub.producer.batch` - [histogram] actual size of a batch retrieved from +- ``pubsub.producer.batch`` - [histogram] actual size of a batch retrieved from pubsub. -- `pubsub.consumer.failfast` - [increment] a message was dropped due to its +- ``pubsub.consumer.failfast`` - [increment] a message was dropped due to its lease being expired. -- `pubsub.consumer.latency.receive` - [histogram] how many seconds it took for - a message to reach handler after it was published. -- `pubsub.consumer.succeeded` - [increment] `handler` call was successfull. -- `pubsub.consumer.failed` - [increment] `handler` call raised an exception. 
+
+Metrics Agent (Deprecated)
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``subscribe`` also has an optional ``metrics_client`` argument which will be
 removed in a future release. You can provide any metrics agent that implements
-the same interface as `MetricsAgent` (Datadog client will do ;) ) and get the
+the same interface as ``MetricsAgent`` (Datadog client will do ;) ) and get the
 following metrics:
 
-- `pubsub.producer.batch` - [histogram] actual size of a batch retrieved from
+- ``pubsub.producer.batch`` - [histogram] actual size of a batch retrieved from
   pubsub.
-- `pubsub.consumer.failfast` - [increment] a message was dropped due to its
+- ``pubsub.consumer.failfast`` - [increment] a message was dropped due to its
   lease being expired.
-- `pubsub.consumer.latency.receive` - [histogram] how many seconds it took for
-  a message to reach handler after it was published.
-- `pubsub.consumer.succeeded` - [increment] `handler` call was successfull.
-- `pubsub.consumer.failed` - [increment] `handler` call raised an exception.
-- `pubsub.consumer.latency.runtime` - [histogram] `handler` execution time in
-  seconds.
-- `pubsub.acker.batch.failed` - [increment] ack request failed.
-- `pubsub.acker.batch` - [histogram] actual number of messages that was acked
+- ``pubsub.consumer.latency.receive`` - [histogram] how many seconds it took
+  for a message to reach the handler after it was published.
+- ``pubsub.consumer.succeeded`` - [increment] ``handler`` call was successful.
+- ``pubsub.consumer.failed`` - [increment] ``handler`` call raised an
+  exception.
+- ``pubsub.consumer.latency.runtime`` - [histogram] ``handler`` execution time
+  in seconds.
+- ``pubsub.acker.batch.failed`` - [increment] ack request failed.
+- ``pubsub.acker.batch`` - [histogram] actual number of messages that were acked
   in a single request.
 
-## Publisher
+Publisher
+---------
 
-The `PublisherClient` is a dead-simple alternative to the official Google Cloud
-Pub/Sub publisher client. The main design goal was to eliminate all the
+The ``PublisherClient`` is a dead-simple alternative to the official Google
+Cloud Pub/Sub publisher client. The main design goal was to eliminate all the
 additional gRPC overhead implemented by the upstream client.
 
 If migrating between this library and the official one, the main difference is
-this: the `gcloud-{aio,rest}-pubsub` publisher's `.publish()` method
+this: the ``gcloud-{aio,rest}-pubsub`` publisher's ``.publish()`` method
 *immediately* publishes the messages you've provided, rather than maintaining
 our own publishing queue, implementing batching and flow control, etc. If
 you're looking for a full-featured publishing library with all the bells and
@@ -168,66 +176,67 @@ async def handler(message):
 looking to manage your own batching / timeouts / retry / threads / etc, this
 library should be a bit easier to work with.
 
-```python
-from gcloud.aio.pubsub import PubsubMessage
-from gcloud.aio.pubsub import PublisherClient
+.. code-block:: python
+
+    import aiohttp
+
+    from gcloud.aio.pubsub import PubsubMessage
+    from gcloud.aio.pubsub import PublisherClient
 
-async with aiohttp.ClientSession() as session:
-    client = PublisherClient(session=session)
+    async with aiohttp.ClientSession() as session:
+        client = PublisherClient(session=session)
 
-    topic = client.topic_path('my-gcp-project', 'my-topic-name')
+        topic = client.topic_path('my-gcp-project', 'my-topic-name')
 
-    messages = [
-        PubsubMessage(b'payload', attribute='value'),
-        PubsubMessage(b'other payload', other_attribute='whatever',
-                      more_attributes='something else'),
-    ]
-    response = await client.publish(topic, messages)
-    # response == {'messageIds': ['1', '2']}
-```
+        messages = [
+            PubsubMessage(b'payload', attribute='value'),
+            PubsubMessage(b'other payload', other_attribute='whatever',
+                          more_attributes='something else'),
+        ]
+        response = await client.publish(topic, messages)
+        # response == {'messageIds': ['1', '2']}
 
-## Emulators
+Emulators
+---------
 
-For testing purposes, you may want to use `gcloud-aio-pubsub` along with a
-local Pubsub emulator. Setting the `$PUBSUB_EMULATOR_HOST` environment variable
-to the local address of your emulator should be enough to do the trick.
+For testing purposes, you may want to use ``gcloud-aio-pubsub`` along with a
+local Pubsub emulator. Setting the ``$PUBSUB_EMULATOR_HOST`` environment
+variable to the local address of your emulator should be enough to do the
+trick.
 
 For example, using the official Google Pubsub emulator:
 
-```shell
-gcloud beta emulators pubsub start --host-port=0.0.0.0:8681
-export PUBSUB_EMULATOR_HOST='0.0.0.0:8681'
-```
+.. code-block:: console
+
+    gcloud beta emulators pubsub start --host-port=0.0.0.0:8681
+    export PUBSUB_EMULATOR_HOST='0.0.0.0:8681'
 
-Any `gcloud-aio-pubsub` Publisher requests made with that environment variable
-set will query the emulator instead of the official GCS APIs.
+Any ``gcloud-aio-pubsub`` Publisher requests made with that environment
+variable set will query the emulator instead of the official Pubsub API.
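+
+As a quick sanity check, something like the following sketch should work (it
+assumes the emulator above is running; note that the emulator starts out
+empty, so the topic is created first via ``create_topic``):
+
+.. code-block:: python
+
+    import aiohttp
+
+    from gcloud.aio.pubsub import PublisherClient, PubsubMessage
+
+    async with aiohttp.ClientSession() as session:
+        client = PublisherClient(session=session)
+        topic = client.topic_path('my-gcp-project', 'my-topic-name')
+
+        # the emulator has no pre-existing topics, so create it first
+        await client.create_topic(topic)
+        await client.publish(topic, [PubsubMessage(b'hello, emulator')])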
 
 For easier ergonomics, you may be interested in
-[thekevjames/gcloud-pubsub-emulator][emulator-docker].
+`thekevjames/gcloud-pubsub-emulator`_.
 
-## Customization
+Customization
+-------------
 
 This library mostly tries to stay agnostic of potential use-cases; as such, we
 do not implement any sort of retrying or other policies under the assumption
 that we wouldn't get things right for every user's situation. As such, we
 recommend configuring your own policies on an as-needed basis. The
-[backoff][backoff] library can make this quite straightforward! For example,
-you may find it useful to configure something like:
-
-```python
-class SubscriberClientWithBackoff(SubscriberClient):
-    @backoff.on_exception(backoff.expo, aiohttp.ClientResponseError,
-                          max_tries=5, jitter=backoff.full_jitter)
-    async def pull(self, *args: Any, **kwargs: Any):
-        return await super().pull(*args, **kwargs)
-```
-
-[backoff]: https://pypi.org/project/backoff/
-[emulator-docker]:
-https://github.com/TheKevJames/tools/tree/master/docker-gcloud-pubsub-emulator
-[endpoint]:
-https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/pull#request-body
+`backoff`_ library can make this quite straightforward! For example, you may
+find it useful to configure something like:
+
+.. code-block:: python
+
+    from typing import Any
+
+    import aiohttp
+    import backoff
+
+    from gcloud.aio.pubsub import SubscriberClient
+
+    class SubscriberClientWithBackoff(SubscriberClient):
+        @backoff.on_exception(backoff.expo, aiohttp.ClientResponseError,
+                              max_tries=5, jitter=backoff.full_jitter)
+        async def pull(self, *args: Any, **kwargs: Any):
+            return await super().pull(*args, **kwargs)
+
+.. _backoff: https://pypi.org/project/backoff/
+.. _thekevjames/gcloud-pubsub-emulator: https://github.com/TheKevJames/tools/tree/master/docker-gcloud-pubsub-emulator
+.. 
_endpoint: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/pull#request-body """ import importlib.metadata diff --git a/pubsub/poetry.lock b/pubsub/poetry.lock index 260d9f8e8..186349d31 100644 --- a/pubsub/poetry.lock +++ b/pubsub/poetry.lock @@ -824,4 +824,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">= 3.8, < 4.0" -content-hash = "f7b930e11ef6e156eb7a7b198f0d96f64876e92c459469ba2f71dc7a8b8cb41f" +content-hash = "d1628da7c790112b71158c44d89e452b3a2dda4dce4f17a500805ddb645e86ae" diff --git a/pubsub/poetry.rest.lock b/pubsub/poetry.rest.lock index 3a419b0c0..7d7623942 100644 --- a/pubsub/poetry.rest.lock +++ b/pubsub/poetry.rest.lock @@ -450,4 +450,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = ">= 3.8, < 4.0" -content-hash = "83f0edc0591a31d3396c6cfc748e71331a739aff959e4b7a112f3168c7c78a96" +content-hash = "5d38d9ca4da4344942b36c9437fc2a841b5bdc4828a8aee99c86dbeb5ecc28a9" diff --git a/pubsub/pyproject.rest.toml b/pubsub/pyproject.rest.toml index b14a9385f..4545cd62b 100644 --- a/pubsub/pyproject.rest.toml +++ b/pubsub/pyproject.rest.toml @@ -24,13 +24,17 @@ python = ">= 3.8, < 4.0" gcloud-rest-auth = ">= 3.3.0, < 6.0.0" # prometheus-client = ">= 0.13.1, < 1.0.0" -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] # aiohttp = ">= 3.3.0, < 4.0.0" gcloud-rest-auth = { path = "../auth" } pytest = ">= 4.0.0, < 8.0.0" # pytest-asyncio = ">= 0.16.0, < 0.22.0" pytest-mock = ">= 2.0.0, < 4.0.0" +[[tool.poetry.source]] +name = "pypi" +priority = "primary" + [tool.pytest.ini_options] # addopts = "-Werror" # TODO: fixme # asyncio_mode = "auto" diff --git a/pubsub/pyproject.toml b/pubsub/pyproject.toml index a0befafc9..d50d79f99 100644 --- a/pubsub/pyproject.toml +++ b/pubsub/pyproject.toml @@ -24,13 +24,17 @@ python = ">= 3.8, < 4.0" gcloud-aio-auth = ">= 3.3.0, < 6.0.0" prometheus-client = ">= 0.13.1, < 1.0.0" -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] aiohttp = ">= 3.3.0, < 4.0.0" gcloud-aio-auth = { path = "../auth" } pytest = ">= 4.0.0, < 8.0.0" pytest-asyncio = ">= 0.16.0, < 0.22.0" pytest-mock = ">= 2.0.0, < 4.0.0" +[[tool.poetry.source]] +name = "pypi" +priority = "primary" + [tool.pytest.ini_options] # addopts = "-Werror" # TODO: fixme asyncio_mode = "auto" diff --git a/storage/README.rst b/storage/README.rst index adb5e1fc4..a2d49c01f 100644 --- a/storage/README.rst +++ b/storage/README.rst @@ -4,7 +4,7 @@ This is a shared codebase for ``gcloud-aio-storage`` and ``gcloud-rest-storage`` -|pypi| |pythons-aio| |pythons-rest| +|pypi| |pythons| Installation ------------ @@ -30,10 +30,6 @@ Please see our `contributing guide`_. :alt: Latest PyPI Version (gcloud-aio-storage) :target: https://pypi.org/project/gcloud-aio-storage/ -.. |pythons-aio| image:: https://img.shields.io/pypi/pyversions/gcloud-aio-storage.svg?style=flat-square&label=python (aio) - :alt: Python Version Support (gcloud-aio-storage) +.. |pythons| image:: https://img.shields.io/pypi/pyversions/gcloud-aio-storage.svg?style=flat-square&label=python + :alt: Python Version Support :target: https://pypi.org/project/gcloud-aio-storage/ - -.. 
|pythons-rest| image:: https://img.shields.io/pypi/pyversions/gcloud-rest-storage.svg?style=flat-square&label=python (rest)
-   :alt: Python Version Support (gcloud-rest-storage)
-   :target: https://pypi.org/project/gcloud-rest-storage/
diff --git a/storage/gcloud/aio/storage/__init__.py b/storage/gcloud/aio/storage/__init__.py
index 28707d25f..b8187805a 100644
--- a/storage/gcloud/aio/storage/__init__.py
+++ b/storage/gcloud/aio/storage/__init__.py
@@ -1,150 +1,155 @@
+# pylint: disable=line-too-long
 """
 This library implements various methods for working with the Google Storage
 APIs.
 
-## Installation
+Installation
+------------
 
-```console
-$ pip install --upgrade gcloud-aio-storage
-```
+.. code-block:: console
 
-## Usage
+    $ pip install --upgrade gcloud-aio-storage
+
+Usage
+-----
 
 To upload a file, you might do something like the following:
 
-```python
-import aiofiles
-import aiohttp
-from gcloud.aio.storage import Storage
+.. code-block:: python
 
+    import aiofiles
+    import aiohttp
+    from gcloud.aio.storage import Storage
 
-async with aiohttp.ClientSession() as session:
-    client = Storage(session=session)
-    async with aiofiles.open('/path/to/my/file', mode="r") as f:
-        output = await f.read()
-        status = await client.upload(
-            'my-bucket-name',
-            'path/to/gcs/folder',
-            output,
-        )
-        print(status)
-```
+    async with aiohttp.ClientSession() as session:
+        client = Storage(session=session)
+
+        async with aiofiles.open('/path/to/my/file', mode="r") as f:
+            output = await f.read()
+            status = await client.upload(
+                'my-bucket-name',
+                'path/to/gcs/folder',
+                output,
+            )
+            print(status)
 
 Note that there are multiple ways to accomplish the above, ie. by making use
-of the `Bucket` and `Blob` convenience classes if that better fits your
+of the ``Bucket`` and ``Blob`` convenience classes if that better fits your
 use-case.
 
 Of course, the major benefit of using an async library is being able to
-parallelize operations like this. Since `gcloud-aio-storage` is fully
+parallelize operations like this. Since ``gcloud-aio-storage`` is fully
 asyncio-compatible, you can use any of the builtin asyncio methods to perform
 more complicated operations:
 
-```python
-my_files = {
-    '/local/path/to/file.1': 'path/in/gcs.1',
-    '/local/path/to/file.2': 'path/in/gcs.2',
-    '/local/path/to/file.3': 'different/gcs/path/filename.3',
-}
-
-async with Storage() as client:
-    # Prepare all our upload data
-    uploads = []
-    for local_name, gcs_name in my_files.items():
-        async with aiofiles.open(local_name, mode="r") as f:
-            contents = await f.read()
-        uploads.append((gcs_name, contents))
-
-    # Simultaneously upload all files
-    await asyncio.gather(
-        *[
-            client.upload('my-bucket-name', path, file_)
-            for path, file_ in uploads
-        ]
-    )
-```
-
-You can also refer to the [smoke test][smoke-test] for more info and examples.
-
-Note that you can also let `gcloud-aio-storage` do its own session management,
-so long as you give us a hint when to close that session:
-
-```python
-async with Storage() as client:
-    # closes the client.session on leaving the context manager
-
-# OR
-
-client = Storage()
-# do stuff
-await client.close()  # close the session explicitly
-```
-
-## File Encodings
-
-In some cases, `aiohttp` needs to transform the objects returned from GCS into
-strings, eg. for debug logging and other such issues. The built-in `await
-response.text()` operation relies on [chardet][chardet] for guessing the
-character encoding in any cases where it can not be determined based on the
-file metadata.
+.. code-block:: python
+
+    import asyncio
+
+    my_files = {
+        '/local/path/to/file.1': 'path/in/gcs.1',
+        '/local/path/to/file.2': 'path/in/gcs.2',
+        '/local/path/to/file.3': 'different/gcs/path/filename.3',
+    }
+
+    async with Storage() as client:
+        # Prepare all our upload data
+        uploads = []
+        for local_name, gcs_name in my_files.items():
+            async with aiofiles.open(local_name, mode="r") as f:
+                contents = await f.read()
+            uploads.append((gcs_name, contents))
+
+        # Simultaneously upload all files
+        await asyncio.gather(
+            *[
+                client.upload('my-bucket-name', path, file_)
+                for path, file_ in uploads
+            ]
+        )
+
+You can also refer to the `smoke test`_ for more info and examples.
+
+Note that you can also let ``gcloud-aio-storage`` do its own session
+management, so long as you give us a hint when to close that session:
+
+.. code-block:: python
+
+    async with Storage() as client:
+        # closes the client.session on leaving the context manager
+
+    # OR
+
+    client = Storage()
+    # do stuff
+    await client.close()  # close the session explicitly
+
+File Encodings
+--------------
+
+In some cases, ``aiohttp`` needs to transform the objects returned from GCS
+into strings, eg. for debug logging and other such issues. The built-in ``await
+response.text()`` operation relies on `chardet`_ for guessing the character
+encoding in any cases where it can not be determined based on the file
+metadata.
 
 Unfortunately, this operation can be extremely slow, especially in cases where
 you might be working with particularly large files. If you notice odd latency
 issues when reading your results, you may want to set your character encoding
-more explicitly within GCS, eg. by ensuring you set the `contentType` of the
-relevant objects to something suffixed with `; charset=utf-8`. For example, in
-the case of `contentType='application/x-netcdf'` files exhibiting latency, you
-could instead set `contentType='application/x-netcdf; charset=utf-8`. See
-[Issue #172][issue-172] for more info!
+more explicitly within GCS, eg. by ensuring you set the ``contentType`` of the
+relevant objects to something suffixed with ``; charset=utf-8``. For example,
+in the case of ``contentType='application/x-netcdf'`` files exhibiting latency,
+you could instead set ``contentType='application/x-netcdf; charset=utf-8'``.
+See `Issue #172`_ for more info!
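+
+If you control the uploads, one way to do that is at upload time -- a sketch
+using the ``content_type`` argument of ``upload`` (swap in whichever MIME type
+your objects actually use; ``file_data`` stands in for your file contents):
+
+.. code-block:: python
+
+    async with Storage() as client:
+        await client.upload(
+            'my-bucket-name',
+            'path/to/gcs/file.nc',
+            file_data,  # the file contents, as in the upload example above
+            # the explicit charset avoids chardet-based guessing on reads
+            content_type='application/x-netcdf; charset=utf-8',
+        )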
 
-## Emulators
+Emulators
+---------
 
-For testing purposes, you may want to use `gcloud-aio-storage` along with a
-local GCS emulator. Setting the `$STORAGE_EMULATOR_HOST` environment variable
+For testing purposes, you may want to use ``gcloud-aio-storage`` along with a
+local GCS emulator. Setting the ``$STORAGE_EMULATOR_HOST`` environment variable
 to the address of your emulator should be enough to do the trick.
 
-For example, using [fsouza/fake-gcs-server][fake-gcs-server], you can do:
+For example, using `fsouza/fake-gcs-server`_, you can do:
+
+.. code-block:: console
 
-```shell
-docker run -d -p 4443:4443 -v $PWD/my-sample-data:/data fsouza/fake-gcs-server
-export STORAGE_EMULATOR_HOST='http://0.0.0.0:4443'
-```
+    docker run -d -p 4443:4443 -v $PWD/my-sample-data:/data fsouza/fake-gcs-server
+    export STORAGE_EMULATOR_HOST='http://0.0.0.0:4443'
 
-Any `gcloud-aio-storage` requests made with that environment variable set will
-query `fake-gcs-server` instead of the official GCS API.
+Any ``gcloud-aio-storage`` requests made with that environment variable set
+will query ``fake-gcs-server`` instead of the official GCS API.
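+
+Once that is up, the client works exactly as it does against the real API. A
+minimal sketch (assuming the emulator was seeded with a bucket named
+``my-bucket-name``, ie. a subdirectory of the mounted ``my-sample-data``
+folder):
+
+.. code-block:: python
+
+    from gcloud.aio.storage import Storage
+
+    # $STORAGE_EMULATOR_HOST is read from the environment at request time,
+    # so no extra client configuration is needed here
+    async with Storage() as client:
+        print(await client.list_objects('my-bucket-name'))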
Note that some emulation systems require disabling SSL -- if you're using a
custom http session, you may need to disable SSL verification.
 
-## Customization
+Customization
+-------------
 
 This library mostly tries to stay agnostic of potential use-cases; as such, we
 do not implement any sort of retrying or other policies under the assumption
 that we wouldn't get things right for every user's situation. As such, we
 recommend configuring your own policies on an as-needed basis. The
-[backoff][backoff] library can make this quite straightforward! For example,
-you may find it useful to configure something like:
-
-```python
-class StorageWithBackoff(gcloud.aio.storage.Storage):
-    @backoff.on_exception(backoff.expo, aiohttp.ClientResponseError,
-                          max_tries=5, jitter=backoff.full_jitter)
-    async def copy(self, *args: Any, **kwargs: Any):
-        return await super().copy(*args, **kwargs)
-
-    @backoff.on_exception(backoff.expo, aiohttp.ClientResponseError,
-                          max_tries=10, jitter=backoff.full_jitter)
-    async def download(self, *args: Any, **kwargs: Any):
-        return await super().download(*args, **kwargs)
-```
-
-[backoff]: https://pypi.org/project/backoff/
-[chardet]: https://pypi.org/project/chardet/
-[fake-gcs-server]: https://github.com/fsouza/fake-gcs-server
-[issue-172]: https://github.com/talkiq/gcloud-aio/issues/172
-[smoke-test]:
-https://github.com/talkiq/gcloud-aio/blob/master/storage/tests/integration/smoke_test.py
+`backoff`_ library can make this quite straightforward! For example, you may
+find it useful to configure something like:
+
+.. code-block:: python
+
+    from typing import Any
+
+    import aiohttp
+    import backoff
+
+    import gcloud.aio.storage
+
+    class StorageWithBackoff(gcloud.aio.storage.Storage):
+        @backoff.on_exception(backoff.expo, aiohttp.ClientResponseError,
+                              max_tries=5, jitter=backoff.full_jitter)
+        async def copy(self, *args: Any, **kwargs: Any):
+            return await super().copy(*args, **kwargs)
+
+        @backoff.on_exception(backoff.expo, aiohttp.ClientResponseError,
+                              max_tries=10, jitter=backoff.full_jitter)
+        async def download(self, *args: Any, **kwargs: Any):
+            return await super().download(*args, **kwargs)
+
+.. _Issue #172: https://github.com/talkiq/gcloud-aio/issues/172
+.. _backoff: https://pypi.org/project/backoff/
+.. _chardet: https://pypi.org/project/chardet/
+.. _fsouza/fake-gcs-server: https://github.com/fsouza/fake-gcs-server
+.. _smoke test: https://github.com/talkiq/gcloud-aio/blob/master/storage/tests/integration/smoke_test.py
 """
 import importlib.metadata
diff --git a/storage/gcloud/aio/storage/blob.py b/storage/gcloud/aio/storage/blob.py
index e38192b21..b20530da6 100644
--- a/storage/gcloud/aio/storage/blob.py
+++ b/storage/gcloud/aio/storage/blob.py
@@ -61,6 +61,7 @@ class PemKind(enum.Enum):
     if marker_id == 1:
         # "key" matched the zeroth provided marker arg, eg. PKCS8_MARKER
     """
+
     INVALID = -1
     PKCS1 = 0
     PKCS8 = 1
diff --git a/storage/gcloud/aio/storage/storage.py b/storage/gcloud/aio/storage/storage.py
index cac587623..6fa439336 100644
--- a/storage/gcloud/aio/storage/storage.py
+++ b/storage/gcloud/aio/storage/storage.py
@@ -117,7 +117,8 @@ class UploadType(enum.Enum):
 
 
 class StreamResponse:
-    """This class provides an abstraction between the slightly different
+    """
+    This class provides an abstraction between the slightly different
     recommended streaming implementations between requests and aiohttp.
     """
 
@@ -223,20 +224,21 @@ async def copy(
         session: Optional[Session] = None,
     ) -> Dict[str, Any]:
         """
-        When files are too large, multiple calls to `rewriteTo` are made. 
We - refer to the same copy job by using the `rewriteToken` from the - previous return payload in subsequent `rewriteTo` calls. + When files are too large, multiple calls to ``rewriteTo`` are made. We + refer to the same copy job by using the ``rewriteToken`` from the + previous return payload in subsequent ``rewriteTo`` calls. - Using the `rewriteTo` GCS API is preferred in part because it is able - to make multiple calls to fully copy an object whereas the `copyTo` GCS - API only calls `rewriteTo` once under the hood, and thus may fail if - files are large. + Using the ``rewriteTo`` GCS API is preferred in part because it is able + to make multiple calls to fully copy an object whereas the ``copyTo`` + GCS API only calls ``rewriteTo`` once under the hood, and thus may fail + if files are large. In the rare case you need to resume a copy operation, include the - `rewriteToken` in the `params` dictionary. Once you begin a multi-part - copy operation, you then have 1 week to complete the copy job. + ``rewriteToken`` in the ``params`` dictionary. Once you begin a + multi-part copy operation, you then have 1 week to complete the copy + job. - https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite + See https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite """ # pylint: disable=too-many-locals if not new_name: @@ -361,18 +363,17 @@ async def download_stream( timeout: int = DEFAULT_TIMEOUT, session: Optional[Session] = None, ) -> StreamResponse: - """Download a GCS object in a buffered stream. + """ + Download a GCS object in a buffered stream. Args: - bucket (str): The bucket from which to download. - object_name (str): The object within the bucket to download. - headers (Optional[Dict[str, Any]], optional): Custom header values - for the request, such as range. Defaults to None. - timeout (int, optional): Timeout, in seconds, for the request. Note - that with this function, this is the time to the beginning of - the response data (TTFB). Defaults to 10. - session (Optional[Session], optional): A specific session to - (re)use. Defaults to None. + bucket: The bucket from which to download. + object_name: The object within the bucket to download. + headers: Custom header values for the request, such as range. + timeout: Timeout, in seconds, for the request. Note that with this + function, this is the time to the beginning of the response + data (TTFB). + session: A specific session to (re)use. 
Returns: StreamResponse: A object encapsulating the stream, similar to diff --git a/storage/poetry.lock b/storage/poetry.lock index 6fb232b9a..72fe590c3 100644 --- a/storage/poetry.lock +++ b/storage/poetry.lock @@ -860,4 +860,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">= 3.8, < 4.0" -content-hash = "bc73e2085aac7d3f6ee165722aa83c5ba6a7cdbccc11d504e792f5558aca99c2" +content-hash = "04a94bfcb07156eafbf1c8d8c7f3467b457fdb6fd0e8c895c12e3ce8cd875453" diff --git a/storage/poetry.rest.lock b/storage/poetry.rest.lock index 5dc3db659..dfcd9231f 100644 --- a/storage/poetry.rest.lock +++ b/storage/poetry.rest.lock @@ -489,4 +489,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = ">= 3.8, < 4.0" -content-hash = "bbb4eb63646f2d00a3e990e5e92c255aad9d73ac3bd190420ff86cf3609657d3" +content-hash = "00703de945fe62f62a817bb5493c490053480e41bec8a1221aceb5a853a7213b" diff --git a/storage/pyproject.rest.toml b/storage/pyproject.rest.toml index c0d5a05ee..2fd019676 100644 --- a/storage/pyproject.rest.toml +++ b/storage/pyproject.rest.toml @@ -26,12 +26,16 @@ gcloud-rest-auth = ">= 3.6.0, < 6.0.0" pyasn1-modules = ">= 0.2.1, < 0.4.0" rsa = ">= 3.1.4, < 5.0.0" -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] gcloud-rest-auth = { path = "../auth" } pytest = ">= 4.0.0, < 8.0.0" # pytest-asyncio = ">= 0.16.0, < 0.22.0" pytest-mock = ">= 2.0.0, < 4.0.0" +[[tool.poetry.source]] +name = "pypi" +priority = "primary" + [tool.pytest.ini_options] # addopts = "-Werror" # TODO: fixme # asyncio_mode = "auto" diff --git a/storage/pyproject.toml b/storage/pyproject.toml index 99238063e..60c57a8d8 100644 --- a/storage/pyproject.toml +++ b/storage/pyproject.toml @@ -26,12 +26,16 @@ gcloud-aio-auth = ">= 3.6.0, < 6.0.0" pyasn1-modules = ">= 0.2.1, < 0.4.0" rsa = ">= 3.1.4, < 5.0.0" -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] gcloud-aio-auth = { path = "../auth" } pytest = ">= 4.0.0, < 8.0.0" pytest-asyncio = ">= 0.16.0, < 0.22.0" pytest-mock = ">= 2.0.0, < 4.0.0" +[[tool.poetry.source]] +name = "pypi" +priority = "primary" + [tool.pytest.ini_options] # addopts = "-Werror" # TODO: fixme asyncio_mode = "auto" diff --git a/taskqueue/README.rst b/taskqueue/README.rst index 7c93bb037..1db24882c 100644 --- a/taskqueue/README.rst +++ b/taskqueue/README.rst @@ -1,7 +1,7 @@ (Asyncio OR Threadsafe) Python Client for Google Cloud Task Queue ================================================================= -|pypi| |pythons-aio| |pythons-rest| +|pypi| |pythons| Installation ------------ @@ -27,10 +27,6 @@ Please see our `contributing guide`_. :alt: Latest PyPI Version (gcloud-aio-taskqueue) :target: https://pypi.org/project/gcloud-aio-taskqueue/ -.. |pythons-aio| image:: https://img.shields.io/pypi/pyversions/gcloud-aio-taskqueue.svg?style=flat-square&label=python (aio) - :alt: Python Version Support (gcloud-aio-taskqueue) +.. |pythons| image:: https://img.shields.io/pypi/pyversions/gcloud-aio-taskqueue.svg?style=flat-square&label=python + :alt: Python Version Support :target: https://pypi.org/project/gcloud-aio-taskqueue/ - -.. 
|pythons-rest| image:: https://img.shields.io/pypi/pyversions/gcloud-rest-taskqueue.svg?style=flat-square&label=python (rest) - :alt: Python Version Support (gcloud-rest-taskqueue) - :target: https://pypi.org/project/gcloud-rest-taskqueue/ diff --git a/taskqueue/gcloud/aio/taskqueue/__init__.py b/taskqueue/gcloud/aio/taskqueue/__init__.py index 2a3efa732..0bd7a19ce 100644 --- a/taskqueue/gcloud/aio/taskqueue/__init__.py +++ b/taskqueue/gcloud/aio/taskqueue/__init__.py @@ -1,26 +1,29 @@ +# pylint: disable=line-too-long """ This library implements various methods for working with the Google Taskqueue APIs. -## Installation +Installation +------------ -```console -$ pip install --upgrade gcloud-aio-taskqueue -``` +.. code-block:: console -## Usage + $ pip install --upgrade gcloud-aio-taskqueue -We're still working on documentation -- for now, you can use the -[smoke tests][smoke-tests] as an example. +Usage +----- -## Emulators +We're still working on documentation -- for now, you can use the `smoke tests`_ +as an example. -For testing purposes, you may want to use `gcloud-aio-taskqueue` along with a -local emulator. Setting the `$CLOUDTASKS_EMULATOR_HOST` environment variable to -the address of your emulator should be enough to do the trick. +Emulators +--------- -[smoke-tests]: -https://github.com/talkiq/gcloud-aio/tree/master/taskqueue/tests/integration +For testing purposes, you may want to use ``gcloud-aio-taskqueue`` along with a +local emulator. Setting the ``$CLOUDTASKS_EMULATOR_HOST`` environment variable +to the address of your emulator should be enough to do the trick. + +.. _smoke tests: https://github.com/talkiq/gcloud-aio/tree/master/taskqueue/tests/integration """ import importlib.metadata diff --git a/taskqueue/poetry.lock b/taskqueue/poetry.lock index 7d4166721..bb00c9346 100644 --- a/taskqueue/poetry.lock +++ b/taskqueue/poetry.lock @@ -810,4 +810,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">= 3.8, < 4.0" -content-hash = "5ccf6ea4bfc61924930c3a05c2db400da4f69c6ce1704ccfe9125a69453b0c89" +content-hash = "106e8ec8de470c27acdfafac646f3e81535f61ea654f92184724d9e031d6a12d" diff --git a/taskqueue/poetry.rest.lock b/taskqueue/poetry.rest.lock index 7c02dc8e1..9a18c4411 100644 --- a/taskqueue/poetry.rest.lock +++ b/taskqueue/poetry.rest.lock @@ -450,4 +450,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = ">= 3.8, < 4.0" -content-hash = "804496eecc046a9e16129279c09cf029ce82e8977c5699f198f1019c927c67eb" +content-hash = "5f0e3e602032ab95c0a37761376c25bc90d7c0cbb39d98d33cbc2c3d6a4af637" diff --git a/taskqueue/pyproject.rest.toml b/taskqueue/pyproject.rest.toml index cf25ac228..5d708ff6c 100644 --- a/taskqueue/pyproject.rest.toml +++ b/taskqueue/pyproject.rest.toml @@ -23,13 +23,17 @@ classifiers = [ python = ">= 3.8, < 4.0" gcloud-rest-auth = ">= 3.1.0, < 6.0.0" -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] # aiohttp = ">= 3.3.0, < 4.0.0" gcloud-rest-auth = { path = "../auth" } pytest = ">= 4.0.0, < 8.0.0" # pytest-asyncio = ">= 0.16.0, < 0.22.0" pytest-mock = ">= 2.0.0, < 4.0.0" +[[tool.poetry.source]] +name = "pypi" +priority = "primary" + [tool.pytest.ini_options] addopts = "-Werror" # asyncio_mode = "auto" diff --git a/taskqueue/pyproject.toml b/taskqueue/pyproject.toml index b2359981a..f47145504 100644 --- a/taskqueue/pyproject.toml +++ b/taskqueue/pyproject.toml @@ -23,13 +23,17 @@ classifiers = [ python = ">= 3.8, < 4.0" gcloud-aio-auth = ">= 3.1.0, < 6.0.0" 
-[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] aiohttp = ">= 3.3.0, < 4.0.0" gcloud-aio-auth = { path = "../auth" } pytest = ">= 4.0.0, < 8.0.0" pytest-asyncio = ">= 0.16.0, < 0.22.0" pytest-mock = ">= 2.0.0, < 4.0.0" +[[tool.poetry.source]] +name = "pypi" +priority = "primary" + [tool.pytest.ini_options] addopts = "-Werror" asyncio_mode = "auto"