diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 9ee60f7e..b8dcb4a4 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,16 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
docker:
- image: gcr.io/repo-automation-bots/owlbot-python:latest
- digest: sha256:aea14a583128771ae8aefa364e1652f3c56070168ef31beb203534222d842b8b
+ image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
+ digest: sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7
diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml
index 3caf68d4..c8b40cc7 100644
--- a/.github/.OwlBot.yaml
+++ b/.github/.OwlBot.yaml
@@ -13,7 +13,7 @@
# limitations under the License.
docker:
- image: gcr.io/repo-automation-bots/owlbot-python:latest
+ image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
begin-after-commit-hash: 7af2cb8b2b725641ac0d07e2f256d453682802e6
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 557e39e1..1b023b72 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -3,9 +3,10 @@
#
# For syntax help see:
# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
+# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json.
-# The @googleapis/yoshi-python is the default owner for changes in this repo
-* @googleapis/yoshi-python @googleapis/actools-python
+# @googleapis/yoshi-python @googleapis/actools-python are the default owners for changes in this repo
+* @googleapis/yoshi-python @googleapis/actools-python
-# The python-samples-reviewers team is the default owner for samples changes
-/samples/ @googleapis/python-samples-owners
+# @googleapis/python-samples-reviewers @googleapis/actools-python are the default owners for samples changes
+/samples/ @googleapis/python-samples-reviewers @googleapis/actools-python
diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml
new file mode 100644
index 00000000..311ebbb8
--- /dev/null
+++ b/.github/auto-approve.yml
@@ -0,0 +1,3 @@
+# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve
+processes:
+ - "OwlBotTemplateChanges"
diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml
new file mode 100644
index 00000000..41bff0b5
--- /dev/null
+++ b/.github/auto-label.yaml
@@ -0,0 +1,15 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+requestsize:
+ enabled: true
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
index 4a559eaf..6d2c2a0e 100644
--- a/.github/sync-repo-settings.yaml
+++ b/.github/sync-repo-settings.yaml
@@ -1,12 +1,28 @@
-rebaseMergeAllowed: true
-squashMergeAllowed: true
-mergeCommitAllowed: false
+# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings
+# Rules for main branch protection
branchProtectionRules:
-- pattern: master
- isAdminEnforced: true
- requiredApprovingReviewCount: 1
+# Identifies the protection rule pattern. Name of the branch to be protected.
+# Defaults to `main`
+- pattern: main
requiresCodeOwnerReviews: true
requiresStrictStatusChecks: true
+ requiredStatusCheckContexts:
+ - 'cla/google'
+ # No Kokoro: the following are Github actions
+ - 'lint'
+ - 'mypy'
+ - 'unit_grpc_gcp-3.7'
+ - 'unit_grpc_gcp-3.8'
+ - 'unit_grpc_gcp-3.9'
+ - 'unit_grpc_gcp-3.10'
+ - 'unit-3.7'
+ - 'unit-3.8'
+ - 'unit-3.9'
+ - 'unit-3.10'
+ - 'unit_wo_grpc-3.10'
+ - 'cover'
+ - 'docs'
+ - 'docfx'
permissionRules:
- team: actools-python
permission: admin
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
new file mode 100644
index 00000000..6dd32fec
--- /dev/null
+++ b/.github/workflows/docs.yml
@@ -0,0 +1,38 @@
+on:
+ pull_request:
+ branches:
+ - v1
+name: docs
+jobs:
+ docs:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: "3.10"
+ - name: Install nox
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install nox
+ - name: Run docs
+ run: |
+ nox -s docs
+ docfx:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: "3.10"
+ - name: Install nox
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install nox
+ - name: Run docfx
+ run: |
+ nox -s docfx
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 00000000..cabd0e5b
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,25 @@
+on:
+ pull_request:
+ branches:
+ - v1
+name: lint
+jobs:
+ lint:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: "3.7"
+ - name: Install nox
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install nox
+ - name: Run lint
+ run: |
+ nox -s lint
+ - name: Run lint_setup_py
+ run: |
+ nox -s lint_setup_py
diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml
new file mode 100644
index 00000000..d9c98dba
--- /dev/null
+++ b/.github/workflows/mypy.yml
@@ -0,0 +1,22 @@
+on:
+ pull_request:
+ branches:
+ - v1
+name: mypy
+jobs:
+ mypy:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: "3.7"
+ - name: Install nox
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install nox
+ - name: Run mypy
+ run: |
+ nox -s mypy
diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml
new file mode 100644
index 00000000..bdce7eb7
--- /dev/null
+++ b/.github/workflows/unittest.yml
@@ -0,0 +1,73 @@
+name: "Unit tests"
+
+on:
+ pull_request:
+ branches:
+ - v1
+
+jobs:
+ run-unittests:
+ name: unit${{ matrix.option }}-${{ matrix.python }}
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ option: ["", "_grpc_gcp", "_wo_grpc"]
+ python:
+ - "3.7"
+ - "3.8"
+ - "3.9"
+ - "3.10"
+ exclude:
+      - option: "_wo_grpc"
+        python: "3.7"
+      - option: "_wo_grpc"
+        python: "3.8"
+      - option: "_wo_grpc"
+        python: "3.9"
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.python }}
+ - name: Install nox
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install nox
+ - name: Run unit tests
+ env:
+        COVERAGE_FILE: .coverage${{ matrix.option }}-${{ matrix.python }}
+ run: |
+ nox -s unit${{ matrix.option }}-${{ matrix.python }}
+ - name: Upload coverage results
+ uses: actions/upload-artifact@v3
+ with:
+ name: coverage-artifacts
+ path: .coverage${{ matrix.option }}-${{ matrix.python }}
+
+ report-coverage:
+ name: cover
+ runs-on: ubuntu-latest
+ needs:
+ - run-unittests
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: "3.10"
+ - name: Install coverage
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install coverage
+ - name: Download coverage results
+ uses: actions/download-artifact@v3
+ with:
+ name: coverage-artifacts
+ path: .coverage-results/
+ - name: Report coverage results
+ run: |
+ coverage combine .coverage-results/.coverage*
+ coverage report --show-missing --fail-under=100
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 8b9fd8a1..0394c8aa 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -41,7 +41,7 @@ python3 -m pip install --upgrade --quiet nox
python3 -m nox --version
# If this is a continuous build, send the test log to the FlakyBot.
-# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
cleanup() {
chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
diff --git a/.kokoro/continuous/prerelease-deps.cfg b/.kokoro/continuous/prerelease-deps.cfg
new file mode 100644
index 00000000..3595fb43
--- /dev/null
+++ b/.kokoro/continuous/prerelease-deps.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "prerelease_deps"
+}
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
index 4e1b1fb8..238b87b9 100644
--- a/.kokoro/docker/docs/Dockerfile
+++ b/.kokoro/docker/docs/Dockerfile
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from ubuntu:20.04
+from ubuntu:22.04
ENV DEBIAN_FRONTEND noninteractive
@@ -60,8 +60,24 @@ RUN apt-get update \
&& rm -rf /var/lib/apt/lists/* \
&& rm -f /var/cache/apt/archives/*.deb
+###################### Install python 3.8.11
+
+# Download python 3.8.11
+RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz
+
+# Extract files
+RUN tar -xvf Python-3.8.11.tgz
+
+# Install python 3.8.11
+RUN ./Python-3.8.11/configure --enable-optimizations
+RUN make altinstall
+
+###################### Install pip
RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
- && python3.8 /tmp/get-pip.py \
+ && python3 /tmp/get-pip.py \
&& rm /tmp/get-pip.py
+# Test pip
+RUN python3 -m pip
+
CMD ["python3.8"]
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index 4847856f..48e89855 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -30,7 +30,9 @@ env_vars: {
env_vars: {
key: "V2_STAGING_BUCKET"
- value: "docs-staging-v2"
+ # Push non-cloud library docs to `docs-staging-v2-staging` instead of the
+ # Cloud RAD bucket `docs-staging-v2`
+ value: "docs-staging-v2-staging"
}
# It will upload the docker image after successful builds.
diff --git a/.kokoro/presubmit/prerelease-deps.cfg b/.kokoro/presubmit/prerelease-deps.cfg
new file mode 100644
index 00000000..3595fb43
--- /dev/null
+++ b/.kokoro/presubmit/prerelease-deps.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "prerelease_deps"
+}
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
index 8acb14e8..1c4d6237 100755
--- a/.kokoro/publish-docs.sh
+++ b/.kokoro/publish-docs.sh
@@ -21,14 +21,12 @@ export PYTHONUNBUFFERED=1
export PATH="${HOME}/.local/bin:${PATH}"
# Install nox
-python3 -m pip install --user --upgrade --quiet nox
+python3 -m pip install --require-hashes -r .kokoro/requirements.txt
python3 -m nox --version
# build docs
nox -s docs
-python3 -m pip install --user gcp-docuploader
-
# create metadata
python3 -m docuploader create-metadata \
--name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
index a00f93ec..697f7e6d 100755
--- a/.kokoro/release.sh
+++ b/.kokoro/release.sh
@@ -16,12 +16,9 @@
set -eo pipefail
# Start the releasetool reporter
-python3 -m pip install gcp-releasetool
+python3 -m pip install --require-hashes -r github/python-api-core/.kokoro/requirements.txt
python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script
-# Ensure that we have the latest versions of Twine, Wheel, and Setuptools.
-python3 -m pip install --upgrade twine wheel setuptools
-
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in
new file mode 100644
index 00000000..7718391a
--- /dev/null
+++ b/.kokoro/requirements.in
@@ -0,0 +1,8 @@
+gcp-docuploader
+gcp-releasetool
+importlib-metadata
+typing-extensions
+twine
+wheel
+setuptools
+nox
\ No newline at end of file
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt
new file mode 100644
index 00000000..385f2d4d
--- /dev/null
+++ b/.kokoro/requirements.txt
@@ -0,0 +1,472 @@
+#
+# This file is autogenerated by pip-compile with python 3.10
+# To update, run:
+#
+# pip-compile --allow-unsafe --generate-hashes requirements.in
+#
+argcomplete==2.0.0 \
+ --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \
+ --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e
+ # via nox
+attrs==22.1.0 \
+ --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \
+ --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c
+ # via gcp-releasetool
+bleach==5.0.1 \
+ --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \
+ --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c
+ # via readme-renderer
+cachetools==5.2.0 \
+ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \
+ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db
+ # via google-auth
+certifi==2022.6.15 \
+ --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \
+ --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412
+ # via requests
+cffi==1.15.1 \
+ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
+ --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
+ --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
+ --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
+ --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
+ --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
+ --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
+ --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
+ --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
+ --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
+ --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
+ --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
+ --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
+ --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
+ --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
+ --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
+ --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
+ --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
+ --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
+ --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
+ --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
+ --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
+ --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
+ --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
+ --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
+ --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
+ --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
+ --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
+ --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
+ --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
+ --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
+ --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
+ --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
+ --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
+ --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
+ --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
+ --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
+ --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
+ --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
+ --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
+ --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
+ --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
+ --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
+ --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
+ --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
+ --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
+ --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
+ --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
+ --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
+ --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
+ --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
+ --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
+ --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
+ --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
+ --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
+ --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
+ --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
+ --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
+ --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
+ --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
+ --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
+ --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
+ --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
+ --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
+ # via cryptography
+charset-normalizer==2.1.1 \
+ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \
+ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f
+ # via requests
+click==8.0.4 \
+ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \
+ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb
+ # via
+ # gcp-docuploader
+ # gcp-releasetool
+colorlog==6.7.0 \
+ --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \
+ --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5
+ # via
+ # gcp-docuploader
+ # nox
+commonmark==0.9.1 \
+ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \
+ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9
+ # via rich
+cryptography==37.0.4 \
+ --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \
+ --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \
+ --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \
+ --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \
+ --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \
+ --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \
+ --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \
+ --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \
+ --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \
+ --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \
+ --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \
+ --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \
+ --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \
+ --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \
+ --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \
+ --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \
+ --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \
+ --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \
+ --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \
+ --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \
+ --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \
+ --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9
+ # via
+ # gcp-releasetool
+ # secretstorage
+distlib==0.3.6 \
+ --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \
+ --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e
+ # via virtualenv
+docutils==0.19 \
+ --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \
+ --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc
+ # via readme-renderer
+filelock==3.8.0 \
+ --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \
+ --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4
+ # via virtualenv
+gcp-docuploader==0.6.3 \
+ --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \
+ --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b
+ # via -r requirements.in
+gcp-releasetool==1.8.7 \
+ --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \
+ --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d
+ # via -r requirements.in
+google-api-core==2.8.2 \
+ --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \
+ --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50
+ # via
+ # google-cloud-core
+ # google-cloud-storage
+google-auth==2.11.0 \
+ --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \
+ --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb
+ # via
+ # gcp-releasetool
+ # google-api-core
+ # google-cloud-core
+ # google-cloud-storage
+google-cloud-core==2.3.2 \
+ --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \
+ --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a
+ # via google-cloud-storage
+google-cloud-storage==2.5.0 \
+ --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \
+ --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235
+ # via gcp-docuploader
+google-crc32c==1.3.0 \
+ --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \
+ --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \
+ --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \
+ --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \
+ --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \
+ --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \
+ --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \
+ --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \
+ --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \
+ --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \
+ --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \
+ --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \
+ --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \
+ --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \
+ --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \
+ --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \
+ --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \
+ --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \
+ --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \
+ --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \
+ --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \
+ --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \
+ --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \
+ --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \
+ --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \
+ --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \
+ --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \
+ --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \
+ --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \
+ --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \
+ --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \
+ --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \
+ --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \
+ --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \
+ --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \
+ --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \
+ --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \
+ --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \
+ --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \
+ --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \
+ --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \
+ --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \
+ --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3
+ # via google-resumable-media
+google-resumable-media==2.3.3 \
+ --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \
+ --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5
+ # via google-cloud-storage
+googleapis-common-protos==1.56.4 \
+ --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \
+ --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417
+ # via google-api-core
+idna==3.3 \
+ --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \
+ --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d
+ # via requests
+importlib-metadata==4.12.0 \
+ --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \
+ --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23
+ # via
+ # -r requirements.in
+ # twine
+jaraco-classes==3.2.2 \
+ --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \
+ --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647
+ # via keyring
+jeepney==0.8.0 \
+ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \
+ --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755
+ # via
+ # keyring
+ # secretstorage
+jinja2==3.1.2 \
+ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
+ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
+ # via gcp-releasetool
+keyring==23.9.0 \
+ --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \
+ --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db
+ # via
+ # gcp-releasetool
+ # twine
+markupsafe==2.1.1 \
+ --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \
+ --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \
+ --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \
+ --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \
+ --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \
+ --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \
+ --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \
+ --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \
+ --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \
+ --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \
+ --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \
+ --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \
+ --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \
+ --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \
+ --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \
+ --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \
+ --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \
+ --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \
+ --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \
+ --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \
+ --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \
+ --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \
+ --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \
+ --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \
+ --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \
+ --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \
+ --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \
+ --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \
+ --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \
+ --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \
+ --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \
+ --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \
+ --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \
+ --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \
+ --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \
+ --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \
+ --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \
+ --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \
+ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \
+ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7
+ # via jinja2
+more-itertools==8.14.0 \
+ --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \
+ --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750
+ # via jaraco-classes
+nox==2022.8.7 \
+ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \
+ --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c
+ # via -r requirements.in
+packaging==21.3 \
+ --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \
+ --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522
+ # via
+ # gcp-releasetool
+ # nox
+pkginfo==1.8.3 \
+ --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \
+ --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c
+ # via twine
+platformdirs==2.5.2 \
+ --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \
+ --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19
+ # via virtualenv
+protobuf==3.20.1 \
+ --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \
+ --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \
+ --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \
+ --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \
+ --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \
+ --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \
+ --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \
+ --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \
+ --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \
+ --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \
+ --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \
+ --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \
+ --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \
+ --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \
+ --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \
+ --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \
+ --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \
+ --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \
+ --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \
+ --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \
+ --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \
+ --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \
+ --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \
+ --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3
+ # via
+ # gcp-docuploader
+ # gcp-releasetool
+ # google-api-core
+py==1.11.0 \
+ --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \
+ --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378
+ # via nox
+pyasn1==0.4.8 \
+ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \
+ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba
+ # via
+ # pyasn1-modules
+ # rsa
+pyasn1-modules==0.2.8 \
+ --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \
+ --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74
+ # via google-auth
+pycparser==2.21 \
+ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
+ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
+ # via cffi
+pygments==2.13.0 \
+ --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \
+ --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42
+ # via
+ # readme-renderer
+ # rich
+pyjwt==2.4.0 \
+ --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \
+ --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba
+ # via gcp-releasetool
+pyparsing==3.0.9 \
+ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \
+ --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc
+ # via packaging
+pyperclip==1.8.2 \
+ --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57
+ # via gcp-releasetool
+python-dateutil==2.8.2 \
+ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
+ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
+ # via gcp-releasetool
+readme-renderer==37.0 \
+ --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \
+ --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69
+ # via twine
+requests==2.28.1 \
+ --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \
+ --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349
+ # via
+ # gcp-releasetool
+ # google-api-core
+ # google-cloud-storage
+ # requests-toolbelt
+ # twine
+requests-toolbelt==0.9.1 \
+ --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \
+ --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0
+ # via twine
+rfc3986==2.0.0 \
+ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \
+ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c
+ # via twine
+rich==12.5.1 \
+ --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \
+ --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca
+ # via twine
+rsa==4.9 \
+ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
+ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
+ # via google-auth
+secretstorage==3.3.3 \
+ --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \
+ --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99
+ # via keyring
+six==1.16.0 \
+ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
+ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
+ # via
+ # bleach
+ # gcp-docuploader
+ # google-auth
+ # python-dateutil
+twine==4.0.1 \
+ --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \
+ --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0
+ # via -r requirements.in
+typing-extensions==4.3.0 \
+ --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \
+ --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6
+ # via -r requirements.in
+urllib3==1.26.12 \
+ --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \
+ --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997
+ # via
+ # requests
+ # twine
+virtualenv==20.16.4 \
+ --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \
+ --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22
+ # via nox
+webencodings==0.5.1 \
+ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
+ --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923
+ # via bleach
+wheel==0.37.1 \
+ --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \
+ --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4
+ # via -r requirements.in
+zipp==3.8.1 \
+ --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \
+ --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009
+ # via importlib-metadata
+
+# The following packages are considered to be unsafe in a requirements file:
+setuptools==65.2.0 \
+ --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \
+ --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750
+ # via -r requirements.in
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
index f5dddb4b..1a2b87b2 100644
--- a/.kokoro/samples/lint/common.cfg
+++ b/.kokoro/samples/lint/common.cfg
@@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.10/common.cfg
similarity index 87%
rename from .kokoro/samples/python3.6/common.cfg
rename to .kokoro/samples/python3.10/common.cfg
index 7b4f5cd0..40fb8d81 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.10/common.cfg
@@ -10,13 +10,13 @@ action {
# Specify which tests to run
env_vars: {
key: "RUN_TESTS_SESSION"
- value: "py-3.6"
+ value: "py-3.10"
}
# Declare build specific Cloud project.
env_vars: {
key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
- value: "python-docs-samples-tests-py36"
+ value: "python-docs-samples-tests-310"
}
env_vars: {
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.10/continuous.cfg
similarity index 100%
rename from .kokoro/samples/python3.6/presubmit.cfg
rename to .kokoro/samples/python3.10/continuous.cfg
diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.10/periodic-head.cfg
similarity index 100%
rename from .kokoro/samples/python3.6/periodic-head.cfg
rename to .kokoro/samples/python3.10/periodic-head.cfg
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.10/periodic.cfg
similarity index 98%
rename from .kokoro/samples/python3.6/periodic.cfg
rename to .kokoro/samples/python3.10/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.6/periodic.cfg
+++ b/.kokoro/samples/python3.10/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.10/presubmit.cfg
similarity index 97%
rename from .kokoro/samples/python3.6/continuous.cfg
rename to .kokoro/samples/python3.10/presubmit.cfg
index 7218af14..a1c8d975 100644
--- a/.kokoro/samples/python3.6/continuous.cfg
+++ b/.kokoro/samples/python3.10/presubmit.cfg
@@ -3,5 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "True"
-}
-
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index 1198d7ba..a3aa10b5 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.7/periodic.cfg
+++ b/.kokoro/samples/python3.7/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index b7ec7f5e..20c941aa 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.8/periodic.cfg
+++ b/.kokoro/samples/python3.8/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg
index cf034ec1..234887c6 100644
--- a/.kokoro/samples/python3.9/common.cfg
+++ b/.kokoro/samples/python3.9/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.9/periodic.cfg
+++ b/.kokoro/samples/python3.9/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
index a7858e4c..ba3a707b 100755
--- a/.kokoro/test-samples-against-head.sh
+++ b/.kokoro/test-samples-against-head.sh
@@ -23,6 +23,4 @@ set -eo pipefail
# Enables `**` to include files nested inside sub-folders
shopt -s globstar
-cd github/python-api-core
-
exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
index 311a8d54..2c6500ca 100755
--- a/.kokoro/test-samples-impl.sh
+++ b/.kokoro/test-samples-impl.sh
@@ -33,7 +33,7 @@ export PYTHONUNBUFFERED=1
env | grep KOKORO
# Install nox
-python3.6 -m pip install --upgrade --quiet nox
+python3.9 -m pip install --upgrade --quiet nox
# Use secrets acessor service account to get secrets
if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
@@ -76,11 +76,11 @@ for file in samples/**/requirements.txt; do
echo "------------------------------------------------------------"
# Use nox to execute the tests for the project.
- python3.6 -m nox -s "$RUN_TESTS_SESSION"
+ python3.9 -m nox -s "$RUN_TESTS_SESSION"
EXIT=$?
# If this is a periodic build, send the test log to the FlakyBot.
- # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
$KOKORO_GFILE_DIR/linux_amd64/flakybot
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index ee3146bd..11c042d3 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -24,8 +24,6 @@ set -eo pipefail
# Enables `**` to include files nested inside sub-folders
shopt -s globstar
-cd github/python-api-core
-
# Run periodic samples tests at latest release
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
# preserving the test runner implementation.
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 62eb5a77..46d23716 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -22,7 +22,7 @@ repos:
- id: end-of-file-fixer
- id: check-yaml
- repo: https://github.com/psf/black
- rev: 19.10b0
+ rev: 22.3.0
hooks:
- id: black
- repo: https://gitlab.com/pycqa/flake8
diff --git a/.repo-metadata.json b/.repo-metadata.json
index 59aa936d..0f0abd93 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -1,10 +1,12 @@
{
- "name": "google-api-core",
- "name_pretty": "Google API client core library",
- "client_documentation": "https://googleapis.dev/python/google-api-core/latest",
- "release_level": "ga",
- "language": "python",
- "library_type": "CORE",
- "repo": "googleapis/python-api-core",
- "distribution_name": "google-api-core"
-}
\ No newline at end of file
+ "name": "google-api-core",
+ "name_pretty": "Google API client core library",
+ "client_documentation": "https://googleapis.dev/python/google-api-core/latest",
+ "release_level": "stable",
+ "language": "python",
+ "library_type": "CORE",
+ "repo": "googleapis/python-api-core",
+ "distribution_name": "google-api-core",
+ "default_version": "",
+ "codeowner_team": "@googleapis/actools-python"
+}
diff --git a/.trampolinerc b/.trampolinerc
index 383b6ec8..0eee72ab 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -16,15 +16,26 @@
# Add required env vars here.
required_envvars+=(
- "STAGING_BUCKET"
- "V2_STAGING_BUCKET"
)
# Add env vars which are passed down into the container here.
pass_down_envvars+=(
+ "NOX_SESSION"
+ ###############
+ # Docs builds
+ ###############
"STAGING_BUCKET"
"V2_STAGING_BUCKET"
- "NOX_SESSION"
+ ##################
+ # Samples builds
+ ##################
+ "INSTALL_LIBRARY_FROM_SOURCE"
+ "RUN_TESTS_SESSION"
+ "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ # Target directories.
+ "RUN_TESTS_DIRS"
+ # The nox session to run.
+ "RUN_TESTS_SESSION"
)
# Prevent unintentional override on the default image.
diff --git a/CHANGELOG.md b/CHANGELOG.md
index df8c7819..f41c7dd8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -116,7 +116,7 @@
* Add support for `rest/` token in `x-goog-api-client` header ([#189](https://www.github.com/googleapis/python-api-core/issues/189)) ([15aca6b](https://www.github.com/googleapis/python-api-core/commit/15aca6b288b2ec5ce0251e442e1dfa7f52e1b124))
* retry google.auth TransportError and requests ConnectionError ([#178](https://www.github.com/googleapis/python-api-core/issues/178)) ([6ae04a8](https://www.github.com/googleapis/python-api-core/commit/6ae04a8d134fffe13f06081e15f9723c1b2ea334))
-### [1.26.3](https://www.github.com/googleapis/python-api-core/compare/v1.26.2...v1.26.3) (2021-03-25)
+## [1.26.3](https://www.github.com/googleapis/python-api-core/compare/v1.26.2...v1.26.3) (2021-03-25)
### Bug Fixes
@@ -128,14 +128,14 @@
* update python contributing guide ([#147](https://www.github.com/googleapis/python-api-core/issues/147)) ([1d76b57](https://www.github.com/googleapis/python-api-core/commit/1d76b57d1f218f7885f85dc7c052bad1ad3857ac))
-### [1.26.2](https://www.github.com/googleapis/python-api-core/compare/v1.26.1...v1.26.2) (2021-03-23)
+## [1.26.2](https://www.github.com/googleapis/python-api-core/compare/v1.26.1...v1.26.2) (2021-03-23)
### Bug Fixes
* save empty IAM policy bindings ([#155](https://www.github.com/googleapis/python-api-core/issues/155)) ([536c2ca](https://www.github.com/googleapis/python-api-core/commit/536c2cad814b8fa8cd346a3d7bd5f6b9889c4a6f))
-### [1.26.1](https://www.github.com/googleapis/python-api-core/compare/v1.26.0...v1.26.1) (2021-02-12)
+## [1.26.1](https://www.github.com/googleapis/python-api-core/compare/v1.26.0...v1.26.1) (2021-02-12)
### Bug Fixes
@@ -149,7 +149,7 @@
* allow default_host and default_scopes to be passed to create_channel ([#134](https://www.github.com/googleapis/python-api-core/issues/134)) ([94c76e0](https://www.github.com/googleapis/python-api-core/commit/94c76e0873e5b2f42331d5b1ad286c1e63b61395))
-### [1.25.1](https://www.github.com/googleapis/python-api-core/compare/v1.25.0...v1.25.1) (2021-01-25)
+## [1.25.1](https://www.github.com/googleapis/python-api-core/compare/v1.25.0...v1.25.1) (2021-01-25)
### Bug Fixes
@@ -173,7 +173,7 @@
* **python:** document adding Python 3.9 support, dropping 3.5 support ([#120](https://www.github.com/googleapis/python-api-core/issues/120)) ([b51b7f5](https://www.github.com/googleapis/python-api-core/commit/b51b7f587042fe9340371c1b5c8e9adf8001c43a)), closes [#787](https://www.github.com/googleapis/python-api-core/issues/787)
-### [1.24.1](https://www.github.com/googleapis/python-api-core/compare/v1.24.0...v1.24.1) (2020-12-16)
+## [1.24.1](https://www.github.com/googleapis/python-api-core/compare/v1.24.0...v1.24.1) (2020-12-16)
### Bug Fixes
@@ -206,28 +206,28 @@
* harden install to use full paths, and windows separators on windows ([#88](https://www.github.com/googleapis/python-api-core/issues/88)) ([db8e636](https://www.github.com/googleapis/python-api-core/commit/db8e636f545a8872f959e3f403cfec30ffed6c34))
* update out-of-date comment in exceptions.py ([#93](https://www.github.com/googleapis/python-api-core/issues/93)) ([70ebe42](https://www.github.com/googleapis/python-api-core/commit/70ebe42601b3d088b3421233ef7d8245229b7265))
-### [1.22.4](https://www.github.com/googleapis/python-api-core/compare/v1.22.3...v1.22.4) (2020-10-05)
+## [1.22.4](https://www.github.com/googleapis/python-api-core/compare/v1.22.3...v1.22.4) (2020-10-05)
### Bug Fixes
* use version.py instead of pkg_resources.get_distribution ([#80](https://www.github.com/googleapis/python-api-core/issues/80)) ([d480d97](https://www.github.com/googleapis/python-api-core/commit/d480d97e41cd6705325b3b649360553a83c23f47))
-### [1.22.3](https://www.github.com/googleapis/python-api-core/compare/v1.22.2...v1.22.3) (2020-10-02)
+## [1.22.3](https://www.github.com/googleapis/python-api-core/compare/v1.22.2...v1.22.3) (2020-10-02)
### Bug Fixes
* **deps:** require six >= 1.13.0 ([#78](https://www.github.com/googleapis/python-api-core/issues/78)) ([a7a8b98](https://www.github.com/googleapis/python-api-core/commit/a7a8b98602a3eb277fdc607ac69f3bcb147f3351)), closes [/github.com/benjaminp/six/blob/c0be8815d13df45b6ae471c4c436cce8c192245d/CHANGES#L30-L31](https://www.github.com/googleapis//github.com/benjaminp/six/blob/c0be8815d13df45b6ae471c4c436cce8c192245d/CHANGES/issues/L30-L31)
-### [1.22.2](https://www.github.com/googleapis/python-api-core/compare/v1.22.1...v1.22.2) (2020-09-03)
+## [1.22.2](https://www.github.com/googleapis/python-api-core/compare/v1.22.1...v1.22.2) (2020-09-03)
### Bug Fixes
* only add quota project id if supported ([#75](https://www.github.com/googleapis/python-api-core/issues/75)) ([8f8ee78](https://www.github.com/googleapis/python-api-core/commit/8f8ee7879e4f834f3c676e535ffc41b5b9b2de62))
-### [1.22.1](https://www.github.com/googleapis/python-api-core/compare/v1.22.0...v1.22.1) (2020-08-12)
+## [1.22.1](https://www.github.com/googleapis/python-api-core/compare/v1.22.0...v1.22.1) (2020-08-12)
### Documentation
@@ -258,7 +258,7 @@
* allow credentials files to be passed for channel creation ([#50](https://www.github.com/googleapis/python-api-core/issues/50)) ([ded92d0](https://www.github.com/googleapis/python-api-core/commit/ded92d0acdcde4295d0e5df05fda0d83783a3991))
-### [1.20.1](https://www.github.com/googleapis/python-api-core/compare/v1.20.0...v1.20.1) (2020-06-16)
+## [1.20.1](https://www.github.com/googleapis/python-api-core/compare/v1.20.0...v1.20.1) (2020-06-16)
### Bug Fixes
@@ -272,7 +272,7 @@
* allow disabling response stream pre-fetch ([#30](https://www.github.com/googleapis/python-api-core/issues/30)) ([74e0b0f](https://www.github.com/googleapis/python-api-core/commit/74e0b0f8387207933c120af15b2bb5d175dd8f84)), closes [#25](https://www.github.com/googleapis/python-api-core/issues/25)
-### [1.19.1](https://www.github.com/googleapis/python-api-core/compare/v1.19.0...v1.19.1) (2020-06-06)
+## [1.19.1](https://www.github.com/googleapis/python-api-core/compare/v1.19.0...v1.19.1) (2020-06-06)
### Bug Fixes
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index c7860adb..dddeddb9 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -1,4 +1,3 @@
-.. Generated by synthtool. DO NOT EDIT!
############
Contributing
############
@@ -22,7 +21,7 @@ In order to add a feature:
documentation.
- The feature must work fully on the following CPython versions:
- 2.7, 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows.
+ 3.7, 3.8, 3.9, and 3.10 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -50,9 +49,9 @@ You'll have to create a development environment using a Git checkout:
# Configure remotes such that you can pull changes from the googleapis/python-api-core
# repository into your local repository.
$ git remote add upstream git@github.com:googleapis/python-api-core.git
- # fetch and merge changes from upstream into master
+ # fetch and merge changes from upstream into main
$ git fetch upstream
- $ git merge upstream/master
+ $ git merge upstream/main
Now your local repo is set up such that you will push changes to your GitHub
repo, from which you can submit a pull request.
@@ -72,13 +71,13 @@ We use `nox `__ to instrument our tests.
- To run a single unit test::
- $ nox -s unit-3.9 -- -k
+ $ nox -s unit-3.10 -- -k
.. note::
- The unit tests and system tests are described in the
- ``noxfile.py`` files in each directory.
+ The unit tests tests are described in the ``noxfile.py`` files
+ in each directory.
.. nox: https://pypi.org/project/nox/
@@ -110,12 +109,12 @@ Coding Style
variables::
export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
- export GOOGLE_CLOUD_TESTING_BRANCH="master"
+ export GOOGLE_CLOUD_TESTING_BRANCH="main"
By doing this, you are specifying the location of the most up-to-date
version of ``python-api-core``. The the suggested remote name ``upstream``
should point to the official ``googleapis`` checkout and the
- the branch should be the main branch on that remote (``master``).
+ the branch should be the main branch on that remote (``main``).
- This repository contains configuration for the
`pre-commit `__ tool, which automates checking
@@ -133,29 +132,6 @@ Exceptions to PEP8:
"Function-Under-Test"), which is PEP8-incompliant, but more readable.
Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
-********************
-Running System Tests
-********************
-
-- To run system tests, you can execute::
-
- # Run all system tests
- $ nox -s system
-
- # Run a single system test
- $ nox -s system-3.8 -- -k
-
-
- .. note::
-
- System tests are only configured to run under Python 2.7 and 3.8.
- For expediency, we do not run them in older versions of Python 3.
-
- This alone will not run the tests. You'll need to change some local
- auth settings and change some configuration in your project to
- run all the tests.
-
-- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__.
*************
Test Coverage
@@ -209,7 +185,7 @@ The `description on PyPI`_ for the project comes directly from the
``README``. Due to the reStructuredText (``rst``) parser used by
PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
instead of
-``https://github.com/googleapis/python-api-core/blob/master/CONTRIBUTING.rst``)
+``https://github.com/googleapis/python-api-core/blob/main/CONTRIBUTING.rst``)
may cause problems creating links or rendering the description.
.. _description on PyPI: https://pypi.org/project/google-api-core
@@ -221,35 +197,21 @@ Supported Python Versions
We support:
-- `Python 2.7`_
-- `Python 3.6`_
- `Python 3.7`_
- `Python 3.8`_
- `Python 3.9`_
+- `Python 3.10`_
-.. _Python 2.7: https://docs.python.org/2.7/
-.. _Python 3.6: https://docs.python.org/3.6/
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
Supported versions can be found in our ``noxfile.py`` `config`_.
-.. _config: https://github.com/googleapis/python-api-core/blob/master/noxfile.py
-
-
-We also explicitly decided to support Python 3 beginning with version 2.7.
-Reasons for this include:
-
-- Encouraging use of newest versions of Python 3
-- Taking the lead of `prominent`_ open-source `projects`_
-- `Unicode literal support`_ which allows for a cleaner codebase that
- works in both Python 2 and Python 3
+.. _config: https://github.com/googleapis/python-api-core/blob/main/noxfile.py
-.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django
-.. _projects: http://flask.pocoo.org/docs/0.10/python3/
-.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/
**********
Versioning
diff --git a/README.rst b/README.rst
index 244043ea..58ae26cb 100644
--- a/README.rst
+++ b/README.rst
@@ -1,7 +1,7 @@
Core Library for Google Client Libraries
========================================
-|pypi| |versions|
+|pypi| |versions|
This library is not meant to stand-alone. Instead it defines
common helpers used by all Google API clients. For more information, see the
@@ -16,8 +16,16 @@ common helpers used by all Google API clients. For more information, see the
Supported Python Versions
-------------------------
-Python >= 3.5
+Python >= 3.7
-Deprecated Python Versions
---------------------------
-Python == 2.7. Python 2.7 support will be removed on January 1, 2020.
+
+Unsupported Python Versions
+---------------------------
+
+Python == 2.7, Python == 3.5, Python == 3.6.
+
+The last version of this library compatible with Python 2.7 and 3.5 is
+`google-api-core==1.31.1`.
+
+The last version of this library compatible with Python 3.6 is
+`google-api-core==2.8.2`.
diff --git a/docs/auth.rst b/docs/auth.rst
index faf0228f..3dcc5fd3 100644
--- a/docs/auth.rst
+++ b/docs/auth.rst
@@ -103,25 +103,6 @@ After creation, you can pass it directly to a :class:`Client `
-just for Google App Engine:
-
-.. code:: python
-
- from google.auth import app_engine
- credentials = app_engine.Credentials()
-
Google Compute Engine Environment
---------------------------------
@@ -184,7 +165,7 @@ possible to call Google Cloud APIs with a user account via
getting started with the ``google-cloud-*`` library.
The simplest way to use credentials from a user account is via
-Application Default Credentials using ``gcloud auth login``
+Application Default Credentials using ``gcloud auth application-default login``
(as mentioned above) and :func:`google.auth.default`:
.. code:: python
diff --git a/docs/conf.py b/docs/conf.py
index 93516048..9a80171b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -76,8 +76,8 @@
# The encoding of source files.
# source_encoding = 'utf-8-sig'
-# The master toctree document.
-master_doc = "index"
+# The root toctree document.
+root_doc = "index"
# General information about the project.
project = "google-api-core"
@@ -110,6 +110,7 @@
# directories to ignore when looking for source files.
exclude_patterns = [
"_build",
+ "**/.nox/**/*",
"samples/AUTHORING_GUIDE.md",
"samples/CONTRIBUTING.md",
"samples/snippets/README.rst",
@@ -279,7 +280,7 @@
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
- master_doc,
+ root_doc,
"google-api-core.tex",
"google-api-core Documentation",
author,
@@ -313,7 +314,13 @@
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
- (master_doc, "google-api-core", "google-api-core Documentation", [author], 1,)
+ (
+ root_doc,
+ "google-api-core",
+ "google-api-core Documentation",
+ [author],
+ 1,
+ )
]
# If true, show URL addresses after external links.
@@ -327,7 +334,7 @@
# dir menu entry, description, category)
texinfo_documents = [
(
- master_doc,
+ root_doc,
"google-api-core",
"google-api-core Documentation",
author,
@@ -354,7 +361,10 @@
intersphinx_mapping = {
"python": ("https://python.readthedocs.org/en/latest/", None),
"google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
- "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
+ "google.api_core": (
+ "https://googleapis.dev/python/google-api-core/latest/",
+ None,
+ ),
"grpc": ("https://grpc.github.io/grpc/python/", None),
"proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
"protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
diff --git a/google/__init__.py b/google/__init__.py
index 0d0a4c3a..9f1d5491 100644
--- a/google/__init__.py
+++ b/google/__init__.py
@@ -21,4 +21,5 @@
except ImportError:
import pkgutil
- __path__ = pkgutil.extend_path(__path__, __name__)
+ # See: https://github.com/python/mypy/issues/1422
+ __path__ = pkgutil.extend_path(__path__, __name__) # type: ignore
diff --git a/google/api_core/bidi.py b/google/api_core/bidi.py
index be52d97d..57f5f9dd 100644
--- a/google/api_core/bidi.py
+++ b/google/api_core/bidi.py
@@ -17,11 +17,10 @@
import collections
import datetime
import logging
+import queue as queue_module
import threading
import time
-from six.moves import queue
-
from google.api_core import exceptions
_LOGGER = logging.getLogger(__name__)
@@ -71,7 +70,7 @@ class _RequestQueueGenerator(object):
CPU consumed by spinning is pretty minuscule.
Args:
- queue (queue.Queue): The request queue.
+ queue (queue_module.Queue): The request queue.
period (float): The number of seconds to wait for items from the queue
before checking if the RPC is cancelled. In practice, this
determines the maximum amount of time the request consumption
@@ -108,7 +107,7 @@ def __iter__(self):
while True:
try:
item = self._queue.get(timeout=self._period)
- except queue.Empty:
+ except queue_module.Empty:
if not self._is_active():
_LOGGER.debug(
"Empty queue and inactive call, exiting request " "generator."
@@ -247,7 +246,7 @@ def __init__(self, start_rpc, initial_request=None, metadata=None):
self._start_rpc = start_rpc
self._initial_request = initial_request
self._rpc_metadata = metadata
- self._request_queue = queue.Queue()
+ self._request_queue = queue_module.Queue()
self._request_generator = None
self._is_active = False
self._callbacks = []
@@ -365,7 +364,7 @@ class ResumableBidiRpc(BidiRpc):
def should_recover(exc):
return (
isinstance(exc, grpc.RpcError) and
- exc.code() == grpc.StatusCode.UNVAILABLE)
+ exc.code() == grpc.StatusCode.UNAVAILABLE)
initial_request = example_pb2.StreamingRpcRequest(
setting='example')
@@ -590,7 +589,7 @@ class BackgroundConsumer(object):
def should_recover(exc):
return (
isinstance(exc, grpc.RpcError) and
- exc.code() == grpc.StatusCode.UNVAILABLE)
+ exc.code() == grpc.StatusCode.UNAVAILABLE)
initial_request = example_pb2.StreamingRpcRequest(
setting='example')
@@ -645,6 +644,7 @@ def _thread_main(self, ready):
# Keeping the lock throughout avoids that.
# In the future, we could use `Condition.wait_for` if we drop
# Python 2.7.
+ # See: https://github.com/googleapis/python-api-core/issues/211
with self._wake:
while self._paused:
_LOGGER.debug("paused, waiting for waking.")
@@ -727,7 +727,7 @@ def resume(self):
"""Resumes the response stream."""
with self._wake:
self._paused = False
- self._wake.notifyAll()
+ self._wake.notify_all()
@property
def is_paused(self):
diff --git a/google/api_core/client_info.py b/google/api_core/client_info.py
index adca5f32..3e4376c9 100644
--- a/google/api_core/client_info.py
+++ b/google/api_core/client_info.py
@@ -19,17 +19,20 @@
"""
import platform
-
-import pkg_resources
+from typing import Union
from google.api_core import version as api_core_version
_PY_VERSION = platform.python_version()
_API_CORE_VERSION = api_core_version.__version__
+_GRPC_VERSION: Union[str, None]
+
try:
- _GRPC_VERSION = pkg_resources.get_distribution("grpcio").version
-except pkg_resources.DistributionNotFound: # pragma: NO COVER
+ import grpc
+
+ _GRPC_VERSION = grpc.__version__
+except ImportError: # pragma: NO COVER
_GRPC_VERSION = None
@@ -42,7 +45,7 @@ class ClientInfo(object):
Args:
python_version (str): The Python interpreter version, for example,
- ``'2.7.13'``.
+ ``'3.9.6'``.
grpc_version (Optional[str]): The gRPC library version.
api_core_version (str): The google-api-core library version.
gapic_version (Optional[str]): The sversion of gapic-generated client
diff --git a/google/api_core/client_options.py b/google/api_core/client_options.py
index 0d9afc39..ee9f28a9 100644
--- a/google/api_core/client_options.py
+++ b/google/api_core/client_options.py
@@ -115,7 +115,7 @@ def from_dict(options):
"""Construct a client options object from a mapping object.
Args:
- options (six.moves.collections_abc.Mapping): A mapping object with client options.
+ options (collections.abc.Mapping): A mapping object with client options.
See the docstring for ClientOptions for details on valid arguments.
"""
diff --git a/google/api_core/datetime_helpers.py b/google/api_core/datetime_helpers.py
index e52fb1dd..9470863a 100644
--- a/google/api_core/datetime_helpers.py
+++ b/google/api_core/datetime_helpers.py
@@ -18,12 +18,10 @@
import datetime
import re
-import pytz
-
from google.protobuf import timestamp_pb2
-_UTC_EPOCH = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=pytz.utc)
+_UTC_EPOCH = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
_RFC3339_MICROS = "%Y-%m-%dT%H:%M:%S.%fZ"
_RFC3339_NO_FRACTION = "%Y-%m-%dT%H:%M:%S"
# datetime.strptime cannot handle nanosecond precision: parse w/ regex
@@ -83,9 +81,9 @@ def to_microseconds(value):
int: Microseconds since the unix epoch.
"""
if not value.tzinfo:
- value = value.replace(tzinfo=pytz.utc)
+ value = value.replace(tzinfo=datetime.timezone.utc)
# Regardless of what timezone is on the value, convert it to UTC.
- value = value.astimezone(pytz.utc)
+ value = value.astimezone(datetime.timezone.utc)
# Convert the datetime to a microsecond timestamp.
return int(calendar.timegm(value.timetuple()) * 1e6) + value.microsecond
@@ -153,10 +151,10 @@ def from_rfc3339(value):
micros = 0
else:
scale = 9 - len(fraction)
- nanos = int(fraction) * (10 ** scale)
+ nanos = int(fraction) * (10**scale)
micros = nanos // 1000
- return bare_seconds.replace(microsecond=micros, tzinfo=pytz.utc)
+ return bare_seconds.replace(microsecond=micros, tzinfo=datetime.timezone.utc)
from_rfc3339_nanos = from_rfc3339 # from_rfc3339_nanos method was deprecated.
@@ -247,7 +245,7 @@ def from_rfc3339(cls, stamp):
nanos = 0
else:
scale = 9 - len(fraction)
- nanos = int(fraction) * (10 ** scale)
+ nanos = int(fraction) * (10**scale)
return cls(
bare.year,
bare.month,
@@ -256,7 +254,7 @@ def from_rfc3339(cls, stamp):
bare.minute,
bare.second,
nanosecond=nanos,
- tzinfo=pytz.UTC,
+ tzinfo=datetime.timezone.utc,
)
def timestamp_pb(self):
@@ -265,7 +263,11 @@ def timestamp_pb(self):
Returns:
(:class:`~google.protobuf.timestamp_pb2.Timestamp`): Timestamp message
"""
- inst = self if self.tzinfo is not None else self.replace(tzinfo=pytz.UTC)
+ inst = (
+ self
+ if self.tzinfo is not None
+ else self.replace(tzinfo=datetime.timezone.utc)
+ )
delta = inst - _UTC_EPOCH
seconds = int(delta.total_seconds())
nanos = self._nanosecond or self.microsecond * 1000
@@ -292,5 +294,5 @@ def from_timestamp_pb(cls, stamp):
bare.minute,
bare.second,
nanosecond=stamp.nanos,
- tzinfo=pytz.UTC,
+ tzinfo=datetime.timezone.utc,
)
diff --git a/google/api_core/exceptions.py b/google/api_core/exceptions.py
index 412fc2ee..aaba8791 100644
--- a/google/api_core/exceptions.py
+++ b/google/api_core/exceptions.py
@@ -21,19 +21,31 @@
from __future__ import absolute_import
from __future__ import unicode_literals
-import six
-from six.moves import http_client
+import http.client
+from typing import Dict
+from typing import Union
+import warnings
+
+from google.rpc import error_details_pb2
try:
import grpc
+ try:
+ from grpc_status import rpc_status
+ except ImportError: # pragma: NO COVER
+ warnings.warn(
+ "Please install grpcio-status to obtain helpful grpc error messages.",
+ ImportWarning,
+ )
+ rpc_status = None
except ImportError: # pragma: NO COVER
grpc = None
# Lookup tables for mapping exceptions from HTTP and gRPC transports.
# Populated by _GoogleAPICallErrorMeta
-_HTTP_CODE_TO_EXCEPTION = {}
-_GRPC_CODE_TO_EXCEPTION = {}
+_HTTP_CODE_TO_EXCEPTION: Dict[int, Exception] = {}
+_GRPC_CODE_TO_EXCEPTION: Dict[int, Exception] = {}
# Additional lookup table to map integer status codes to grpc status code
# grpc does not currently support initializing enums from ints
@@ -56,7 +68,6 @@ class DuplicateCredentialArgs(GoogleAPIError):
pass
-@six.python_2_unicode_compatible
class RetryError(GoogleAPIError):
"""Raised when a function has exhausted all of its available retries.
@@ -92,19 +103,20 @@ def __new__(mcs, name, bases, class_dict):
return cls
-@six.python_2_unicode_compatible
-@six.add_metaclass(_GoogleAPICallErrorMeta)
-class GoogleAPICallError(GoogleAPIError):
+class GoogleAPICallError(GoogleAPIError, metaclass=_GoogleAPICallErrorMeta):
"""Base class for exceptions raised by calling API methods.
Args:
message (str): The exception message.
errors (Sequence[Any]): An optional list of error details.
+ details (Sequence[Any]): An optional list of objects defined in google.rpc.error_details.
response (Union[requests.Request, grpc.Call]): The response or
gRPC call metadata.
+ error_info (Union[error_details_pb2.ErrorInfo, None]): An optional object containing error info
+ (google.rpc.error_details.ErrorInfo).
"""
- code = None
+ code: Union[int, None] = None
"""Optional[int]: The HTTP status code associated with this error.
This may be ``None`` if the exception does not have a direct mapping
@@ -120,15 +132,56 @@ class GoogleAPICallError(GoogleAPIError):
This may be ``None`` if the exception does not match up to a gRPC error.
"""
- def __init__(self, message, errors=(), response=None):
+ def __init__(self, message, errors=(), details=(), response=None, error_info=None):
super(GoogleAPICallError, self).__init__(message)
self.message = message
"""str: The exception message."""
self._errors = errors
+ self._details = details
self._response = response
+ self._error_info = error_info
def __str__(self):
- return "{} {}".format(self.code, self.message)
+ if self.details:
+ return "{} {} {}".format(self.code, self.message, self.details)
+ else:
+ return "{} {}".format(self.code, self.message)
+
+ @property
+ def reason(self):
+ """The reason of the error.
+
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112
+
+ Returns:
+ Union[str, None]: An optional string containing reason of the error.
+ """
+ return self._error_info.reason if self._error_info else None
+
+ @property
+ def domain(self):
+ """The logical grouping to which the "reason" belongs.
+
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112
+
+ Returns:
+ Union[str, None]: An optional string containing a logical grouping to which the "reason" belongs.
+ """
+ return self._error_info.domain if self._error_info else None
+
+ @property
+ def metadata(self):
+ """Additional structured details about this error.
+
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112
+
+ Returns:
+ Union[Dict[str, str], None]: An optional object containing structured details about the error.
+ """
+ return self._error_info.metadata if self._error_info else None
@property
def errors(self):
@@ -139,6 +192,19 @@ def errors(self):
"""
return list(self._errors)
+ @property
+ def details(self):
+ """Information contained in google.rpc.status.details.
+
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto
+
+ Returns:
+ Sequence[Any]: A list of structured objects from error_details.proto
+ """
+ return list(self._details)
+
@property
def response(self):
"""Optional[Union[requests.Request, grpc.Call]]: The response or
@@ -153,25 +219,25 @@ class Redirection(GoogleAPICallError):
class MovedPermanently(Redirection):
"""Exception mapping a ``301 Moved Permanently`` response."""
- code = http_client.MOVED_PERMANENTLY
+ code = http.client.MOVED_PERMANENTLY
class NotModified(Redirection):
"""Exception mapping a ``304 Not Modified`` response."""
- code = http_client.NOT_MODIFIED
+ code = http.client.NOT_MODIFIED
class TemporaryRedirect(Redirection):
"""Exception mapping a ``307 Temporary Redirect`` response."""
- code = http_client.TEMPORARY_REDIRECT
+ code = http.client.TEMPORARY_REDIRECT
class ResumeIncomplete(Redirection):
"""Exception mapping a ``308 Resume Incomplete`` response.
- .. note:: :attr:`http_client.PERMANENT_REDIRECT` is ``308``, but Google
+ .. note:: :attr:`http.client.PERMANENT_REDIRECT` is ``308``, but Google
APIs differ in their use of this status code.
"""
@@ -185,7 +251,7 @@ class ClientError(GoogleAPICallError):
class BadRequest(ClientError):
"""Exception mapping a ``400 Bad Request`` response."""
- code = http_client.BAD_REQUEST
+ code = http.client.BAD_REQUEST
class InvalidArgument(BadRequest):
@@ -210,7 +276,7 @@ class OutOfRange(BadRequest):
class Unauthorized(ClientError):
"""Exception mapping a ``401 Unauthorized`` response."""
- code = http_client.UNAUTHORIZED
+ code = http.client.UNAUTHORIZED
class Unauthenticated(Unauthorized):
@@ -222,7 +288,7 @@ class Unauthenticated(Unauthorized):
class Forbidden(ClientError):
"""Exception mapping a ``403 Forbidden`` response."""
- code = http_client.FORBIDDEN
+ code = http.client.FORBIDDEN
class PermissionDenied(Forbidden):
@@ -235,20 +301,20 @@ class NotFound(ClientError):
"""Exception mapping a ``404 Not Found`` response or a
:attr:`grpc.StatusCode.NOT_FOUND` error."""
- code = http_client.NOT_FOUND
+ code = http.client.NOT_FOUND
grpc_status_code = grpc.StatusCode.NOT_FOUND if grpc is not None else None
class MethodNotAllowed(ClientError):
"""Exception mapping a ``405 Method Not Allowed`` response."""
- code = http_client.METHOD_NOT_ALLOWED
+ code = http.client.METHOD_NOT_ALLOWED
class Conflict(ClientError):
"""Exception mapping a ``409 Conflict`` response."""
- code = http_client.CONFLICT
+ code = http.client.CONFLICT
class AlreadyExists(Conflict):
@@ -266,26 +332,25 @@ class Aborted(Conflict):
class LengthRequired(ClientError):
"""Exception mapping a ``411 Length Required`` response."""
- code = http_client.LENGTH_REQUIRED
+ code = http.client.LENGTH_REQUIRED
class PreconditionFailed(ClientError):
"""Exception mapping a ``412 Precondition Failed`` response."""
- code = http_client.PRECONDITION_FAILED
+ code = http.client.PRECONDITION_FAILED
class RequestRangeNotSatisfiable(ClientError):
"""Exception mapping a ``416 Request Range Not Satisfiable`` response."""
- code = http_client.REQUESTED_RANGE_NOT_SATISFIABLE
+ code = http.client.REQUESTED_RANGE_NOT_SATISFIABLE
class TooManyRequests(ClientError):
"""Exception mapping a ``429 Too Many Requests`` response."""
- # http_client does not define a constant for this in Python 2.
- code = 429
+ code = http.client.TOO_MANY_REQUESTS
class ResourceExhausted(TooManyRequests):
@@ -298,8 +363,7 @@ class Cancelled(ClientError):
"""Exception mapping a :attr:`grpc.StatusCode.CANCELLED` error."""
# This maps to HTTP status code 499. See
- # https://github.com/googleapis/googleapis/blob/master/google/rpc\
- # /code.proto
+ # https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto
code = 499
grpc_status_code = grpc.StatusCode.CANCELLED if grpc is not None else None
@@ -312,7 +376,7 @@ class InternalServerError(ServerError):
"""Exception mapping a ``500 Internal Server Error`` response. or a
:attr:`grpc.StatusCode.INTERNAL` error."""
- code = http_client.INTERNAL_SERVER_ERROR
+ code = http.client.INTERNAL_SERVER_ERROR
grpc_status_code = grpc.StatusCode.INTERNAL if grpc is not None else None
@@ -332,28 +396,28 @@ class MethodNotImplemented(ServerError):
"""Exception mapping a ``501 Not Implemented`` response or a
:attr:`grpc.StatusCode.UNIMPLEMENTED` error."""
- code = http_client.NOT_IMPLEMENTED
+ code = http.client.NOT_IMPLEMENTED
grpc_status_code = grpc.StatusCode.UNIMPLEMENTED if grpc is not None else None
class BadGateway(ServerError):
"""Exception mapping a ``502 Bad Gateway`` response."""
- code = http_client.BAD_GATEWAY
+ code = http.client.BAD_GATEWAY
class ServiceUnavailable(ServerError):
"""Exception mapping a ``503 Service Unavailable`` response or a
:attr:`grpc.StatusCode.UNAVAILABLE` error."""
- code = http_client.SERVICE_UNAVAILABLE
+ code = http.client.SERVICE_UNAVAILABLE
grpc_status_code = grpc.StatusCode.UNAVAILABLE if grpc is not None else None
class GatewayTimeout(ServerError):
"""Exception mapping a ``504 Gateway Timeout`` response."""
- code = http_client.GATEWAY_TIMEOUT
+ code = http.client.GATEWAY_TIMEOUT
class DeadlineExceeded(GatewayTimeout):
@@ -414,13 +478,30 @@ def from_http_response(response):
error_message = payload.get("error", {}).get("message", "unknown error")
errors = payload.get("error", {}).get("errors", ())
+ # In JSON, details are already formatted in developer-friendly way.
+ details = payload.get("error", {}).get("details", ())
+ error_info = list(
+ filter(
+ lambda detail: detail.get("@type", "")
+ == "type.googleapis.com/google.rpc.ErrorInfo",
+ details,
+ )
+ )
+ error_info = error_info[0] if error_info else None
message = "{method} {url}: {error}".format(
- method=response.request.method, url=response.request.url, error=error_message
+ method=response.request.method,
+ url=response.request.url,
+ error=error_message,
)
exception = from_http_status(
- response.status_code, message, errors=errors, response=response
+ response.status_code,
+ message,
+ errors=errors,
+ details=details,
+ response=response,
+ error_info=error_info,
)
return exception
@@ -467,6 +548,45 @@ def _is_informative_grpc_error(rpc_exc):
return hasattr(rpc_exc, "code") and hasattr(rpc_exc, "details")
+def _parse_grpc_error_details(rpc_exc):
+ try:
+ status = rpc_status.from_call(rpc_exc)
+ except NotImplementedError: # workaround
+ return [], None
+
+ if not status:
+ return [], None
+
+ possible_errors = [
+ error_details_pb2.BadRequest,
+ error_details_pb2.PreconditionFailure,
+ error_details_pb2.QuotaFailure,
+ error_details_pb2.ErrorInfo,
+ error_details_pb2.RetryInfo,
+ error_details_pb2.ResourceInfo,
+ error_details_pb2.RequestInfo,
+ error_details_pb2.DebugInfo,
+ error_details_pb2.Help,
+ error_details_pb2.LocalizedMessage,
+ ]
+ error_info = None
+ error_details = []
+ for detail in status.details:
+ matched_detail_cls = list(
+ filter(lambda x: detail.Is(x.DESCRIPTOR), possible_errors)
+ )
+ # If nothing matched, use detail directly.
+ if len(matched_detail_cls) == 0:
+ info = detail
+ else:
+ info = matched_detail_cls[0]()
+ detail.Unpack(info)
+ error_details.append(info)
+ if isinstance(info, error_details_pb2.ErrorInfo):
+ error_info = info
+ return error_details, error_info
+
+
def from_grpc_error(rpc_exc):
"""Create a :class:`GoogleAPICallError` from a :class:`grpc.RpcError`.
@@ -479,9 +599,17 @@ def from_grpc_error(rpc_exc):
"""
# NOTE(lidiz) All gRPC error shares the parent class grpc.RpcError.
# However, check for grpc.RpcError breaks backward compatibility.
- if isinstance(rpc_exc, grpc.Call) or _is_informative_grpc_error(rpc_exc):
+ if (
+ grpc is not None and isinstance(rpc_exc, grpc.Call)
+ ) or _is_informative_grpc_error(rpc_exc):
+ details, err_info = _parse_grpc_error_details(rpc_exc)
return from_grpc_status(
- rpc_exc.code(), rpc_exc.details(), errors=(rpc_exc,), response=rpc_exc
+ rpc_exc.code(),
+ rpc_exc.details(),
+ errors=(rpc_exc,),
+ details=details,
+ response=rpc_exc,
+ error_info=err_info,
)
else:
return GoogleAPICallError(str(rpc_exc), errors=(rpc_exc,), response=rpc_exc)
diff --git a/google/api_core/extended_operation.py b/google/api_core/extended_operation.py
new file mode 100644
index 00000000..cabae107
--- /dev/null
+++ b/google/api_core/extended_operation.py
@@ -0,0 +1,209 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Futures for extended long-running operations returned from Google Cloud APIs.
+
+These futures can be used to synchronously wait for the result of a
+long-running operations using :meth:`ExtendedOperation.result`:
+
+.. code-block:: python
+
+ extended_operation = my_api_client.long_running_method()
+
+ extended_operation.result()
+
+Or asynchronously using callbacks and :meth:`Operation.add_done_callback`:
+
+.. code-block:: python
+
+ extended_operation = my_api_client.long_running_method()
+
+ def my_callback(ex_op):
+ print(f"Operation {ex_op.name} completed")
+
+ extended_operation.add_done_callback(my_callback)
+
+"""
+
+import threading
+
+from google.api_core import exceptions
+from google.api_core.future import polling
+
+
+class ExtendedOperation(polling.PollingFuture):
+ """An ExtendedOperation future for interacting with a Google API Long-Running Operation.
+
+ Args:
+ extended_operation (proto.Message): The initial operation.
+ refresh (Callable[[], type(extended_operation)]): A callable that returns
+ the latest state of the operation.
+ cancel (Callable[[], None]): A callable that tries to cancel the operation.
+ retry: Optional(google.api_core.retry.Retry): The retry configuration used
+ when polling. This can be used to control how often :meth:`done`
+ is polled. Regardless of the retry's ``deadline``, it will be
+ overridden by the ``timeout`` argument to :meth:`result`.
+
+ Note: Most long-running API methods use google.api_core.operation.Operation
+ This class is a wrapper for a subset of methods that use alternative
+ Long-Running Operation (LRO) semantics.
+
+ Note: there is not a concrete type the extended operation must be.
+ It MUST have fields that correspond to the following, POSSIBLY WITH DIFFERENT NAMES:
+ * name: str
+ * status: Union[str, bool, enum.Enum]
+ * error_code: int
+ * error_message: str
+ """
+
+ def __init__(
+ self, extended_operation, refresh, cancel, retry=polling.DEFAULT_RETRY
+ ):
+ super().__init__(retry=retry)
+ self._extended_operation = extended_operation
+ self._refresh = refresh
+ self._cancel = cancel
+ # Note: the extended operation does not give a good way to indicate cancellation.
+ # We make do with manually tracking cancellation and checking for doneness.
+ self._cancelled = False
+ self._completion_lock = threading.Lock()
+ # Invoke in case the operation came back already complete.
+ self._handle_refreshed_operation()
+
+ # Note: the following four properties MUST be overridden in a subclass
+ # if, and only if, the fields in the corresponding extended operation message
+ # have different names.
+ #
+ # E.g. we have an extended operation class that looks like
+ #
+ # class MyOperation(proto.Message):
+ # moniker = proto.Field(proto.STRING, number=1)
+ # status_msg = proto.Field(proto.STRING, number=2)
+ # optional http_error_code = proto.Field(proto.INT32, number=3)
+ # optional http_error_msg = proto.Field(proto.STRING, number=4)
+ #
+ # the ExtendedOperation subclass would provide property overrides that map
+ # to these (poorly named) fields.
+ @property
+ def name(self):
+ return self._extended_operation.name
+
+ @property
+ def status(self):
+ return self._extended_operation.status
+
+ @property
+ def error_code(self):
+ return self._extended_operation.error_code
+
+ @property
+ def error_message(self):
+ return self._extended_operation.error_message
+
+ def __getattr__(self, name):
+ return getattr(self._extended_operation, name)
+
+ def done(self, retry=polling.DEFAULT_RETRY):
+ self._refresh_and_update(retry)
+ return self._extended_operation.done
+
+ def cancel(self):
+ if self.done():
+ return False
+
+ self._cancel()
+ self._cancelled = True
+ return True
+
+ def cancelled(self):
+ # TODO(dovs): there is not currently a good way to determine whether the
+ # operation has been cancelled.
+ # The best we can do is manually keep track of cancellation
+ # and check for doneness.
+ if not self._cancelled:
+ return False
+
+ self._refresh_and_update()
+ return self._extended_operation.done
+
+ def _refresh_and_update(self, retry=polling.DEFAULT_RETRY):
+ if not self._extended_operation.done:
+ self._extended_operation = self._refresh(retry=retry)
+ self._handle_refreshed_operation()
+
+ def _handle_refreshed_operation(self):
+ with self._completion_lock:
+ if not self._extended_operation.done:
+ return
+
+ if self.error_code and self.error_message:
+ exception = exceptions.from_http_status(
+ status_code=self.error_code,
+ message=self.error_message,
+ response=self._extended_operation,
+ )
+ self.set_exception(exception)
+ elif self.error_code or self.error_message:
+ exception = exceptions.GoogleAPICallError(
+ f"Unexpected error {self.error_code}: {self.error_message}"
+ )
+ self.set_exception(exception)
+ else:
+ # Extended operations have no payload.
+ self.set_result(None)
+
+ @classmethod
+ def make(cls, refresh, cancel, extended_operation, **kwargs):
+ """
+ Return an instantiated ExtendedOperation (or child) that wraps
+ * a refresh callable
+ * a cancel callable (can be a no-op)
+ * an initial result
+
+ .. note::
+ It is the caller's responsibility to set up refresh and cancel
+ with their correct request argument.
+ The reason for this is that the services that use Extended Operations
+ have rpcs that look something like the following:
+
+ // service.proto
+ service MyLongService {
+ rpc StartLongTask(StartLongTaskRequest) returns (ExtendedOperation) {
+ option (google.cloud.operation_service) = "CustomOperationService";
+ }
+ }
+
+ service CustomOperationService {
+ rpc Get(GetOperationRequest) returns (ExtendedOperation) {
+ option (google.cloud.operation_polling_method) = true;
+ }
+ }
+
+ Any info needed for the poll, e.g. a name, path params, etc.
+ is held in the request, which the initial client method is in a much
+ better position to make because the caller made the initial request.
+
+ TL;DR: the caller sets up closures for refresh and cancel that carry
+ the properly configured requests.
+
+ Args:
+ refresh (Callable[Optional[Retry]][type(extended_operation)]): A callable that
+ returns the latest state of the operation.
+ cancel (Callable[][Any]): A callable that tries to cancel the operation
+ on a best effort basis.
+ extended_operation (Any): The initial response of the long running method.
+ See the docstring for ExtendedOperation.__init__ for requirements on
+ the type and fields of extended_operation
+ """
+ return cls(extended_operation, refresh, cancel, **kwargs)
diff --git a/google/api_core/future/async_future.py b/google/api_core/future/async_future.py
index 0343fbe2..88c183f9 100644
--- a/google/api_core/future/async_future.py
+++ b/google/api_core/future/async_future.py
@@ -43,8 +43,10 @@ class AsyncFuture(base.Future):
The :meth:`done` method should be implemented by subclasses. The polling
behavior will repeatedly call ``done`` until it returns True.
- .. note: Privacy here is intended to prevent the final class from
- overexposing, not to prevent subclasses from accessing methods.
+ .. note::
+
+ Privacy here is intended to prevent the final class from
+ overexposing, not to prevent subclasses from accessing methods.
Args:
retry (google.api_core.retry.Retry): The retry configuration used
diff --git a/google/api_core/future/base.py b/google/api_core/future/base.py
index e7888ca3..f3005860 100644
--- a/google/api_core/future/base.py
+++ b/google/api_core/future/base.py
@@ -16,11 +16,8 @@
import abc
-import six
-
-@six.add_metaclass(abc.ABCMeta)
-class Future(object):
+class Future(object, metaclass=abc.ABCMeta):
# pylint: disable=missing-docstring
# We inherit the interfaces here from concurrent.futures.
diff --git a/google/api_core/future/polling.py b/google/api_core/future/polling.py
index 2f80efb5..02e680f6 100644
--- a/google/api_core/future/polling.py
+++ b/google/api_core/future/polling.py
@@ -45,8 +45,10 @@ class PollingFuture(base.Future):
The :meth:`done` method should be implemented by subclasses. The polling
behavior will repeatedly call ``done`` until it returns True.
- .. note: Privacy here is intended to prevent the final class from
- overexposing, not to prevent subclasses from accessing methods.
+ .. note::
+
+ Privacy here is intended to prevent the final class from
+ overexposing, not to prevent subclasses from accessing methods.
Args:
retry (google.api_core.retry.Retry): The retry configuration used
diff --git a/google/api_core/gapic_v1/__init__.py b/google/api_core/gapic_v1/__init__.py
index 6632047a..e5b7ad35 100644
--- a/google/api_core/gapic_v1/__init__.py
+++ b/google/api_core/gapic_v1/__init__.py
@@ -12,18 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import sys
-
from google.api_core.gapic_v1 import client_info
from google.api_core.gapic_v1 import config
+from google.api_core.gapic_v1 import config_async
from google.api_core.gapic_v1 import method
+from google.api_core.gapic_v1 import method_async
from google.api_core.gapic_v1 import routing_header
-__all__ = ["client_info", "config", "method", "routing_header"]
-
-if sys.version_info >= (3, 6):
- from google.api_core.gapic_v1 import config_async # noqa: F401
- from google.api_core.gapic_v1 import method_async # noqa: F401
-
- __all__.append("config_async")
- __all__.append("method_async")
+__all__ = [
+ "client_info",
+ "config",
+ "config_async",
+ "method",
+ "method_async",
+ "routing_header",
+]
diff --git a/google/api_core/gapic_v1/client_info.py b/google/api_core/gapic_v1/client_info.py
index bdc2ce44..fab0f542 100644
--- a/google/api_core/gapic_v1/client_info.py
+++ b/google/api_core/gapic_v1/client_info.py
@@ -33,7 +33,7 @@ class ClientInfo(client_info.ClientInfo):
Args:
python_version (str): The Python interpreter version, for example,
- ``'2.7.13'``.
+ ``'3.9.6'``.
grpc_version (Optional[str]): The gRPC library version.
api_core_version (str): The google-api-core library version.
gapic_version (Optional[str]): The sversion of gapic-generated client
diff --git a/google/api_core/gapic_v1/config.py b/google/api_core/gapic_v1/config.py
index 29e8645b..9c722871 100644
--- a/google/api_core/gapic_v1/config.py
+++ b/google/api_core/gapic_v1/config.py
@@ -21,7 +21,6 @@
import collections
import grpc
-import six
from google.api_core import exceptions
from google.api_core import retry
@@ -130,24 +129,20 @@ def parse_method_configs(interface_config, retry_impl=retry.Retry):
# Grab all the retry codes
retry_codes_map = {
name: retry_codes
- for name, retry_codes in six.iteritems(interface_config.get("retry_codes", {}))
+ for name, retry_codes in interface_config.get("retry_codes", {}).items()
}
# Grab all of the retry params
retry_params_map = {
name: retry_params
- for name, retry_params in six.iteritems(
- interface_config.get("retry_params", {})
- )
+ for name, retry_params in interface_config.get("retry_params", {}).items()
}
# Iterate through all the API methods and create a flat MethodConfig
# instance for each one.
method_configs = {}
- for method_name, method_params in six.iteritems(
- interface_config.get("methods", {})
- ):
+ for method_name, method_params in interface_config.get("methods", {}).items():
retry_params_name = method_params.get("retry_params_name")
if retry_params_name is not None:
diff --git a/google/api_core/gapic_v1/method.py b/google/api_core/gapic_v1/method.py
index 8bf82569..73c8d4bc 100644
--- a/google/api_core/gapic_v1/method.py
+++ b/google/api_core/gapic_v1/method.py
@@ -18,13 +18,26 @@
pagination, and long-running operations to gRPC methods.
"""
-from google.api_core import general_helpers
+import enum
+import functools
+
from google.api_core import grpc_helpers
from google.api_core import timeout
from google.api_core.gapic_v1 import client_info
USE_DEFAULT_METADATA = object()
-DEFAULT = object()
+
+
+class _MethodDefault(enum.Enum):
+ # Uses enum so that pytype/mypy knows that this is the only possible value.
+ # https://stackoverflow.com/a/60605919/101923
+ #
+ # Literal[_DEFAULT_VALUE] is an alternative, but only added in Python 3.8.
+ # https://docs.python.org/3/library/typing.html#typing.Literal
+ _DEFAULT_VALUE = object()
+
+
+DEFAULT = _MethodDefault._DEFAULT_VALUE
"""Sentinel value indicating that a retry or timeout argument was unspecified,
so the default should be used."""
@@ -110,26 +123,22 @@ def __init__(self, target, retry, timeout, metadata=None):
self._timeout = timeout
self._metadata = metadata
- def __call__(self, *args, **kwargs):
+ def __call__(self, *args, timeout=DEFAULT, retry=DEFAULT, **kwargs):
"""Invoke the low-level RPC with retry, timeout, and metadata."""
- # Note: Due to Python 2 lacking keyword-only arguments we use kwargs to
- # extract the retry and timeout params.
- timeout_ = _determine_timeout(
+ timeout = _determine_timeout(
self._timeout,
- kwargs.pop("timeout", self._timeout),
+ timeout,
# Use only the invocation-specified retry only for this, as we only
# want to adjust the timeout deadline if the *user* specified
# a different retry.
- kwargs.get("retry", None),
+ retry,
)
- retry = kwargs.pop("retry", self._retry)
-
if retry is DEFAULT:
retry = self._retry
# Apply all applicable decorators.
- wrapped_func = _apply_decorators(self._target, [retry, timeout_])
+ wrapped_func = _apply_decorators(self._target, [retry, timeout])
# Add the user agent metadata to the call.
if self._metadata is not None:
@@ -237,7 +246,7 @@ def get_topic(name, timeout=None):
else:
user_agent_metadata = None
- return general_helpers.wraps(func)(
+ return functools.wraps(func)(
_GapicCallable(
func, default_retry, default_timeout, metadata=user_agent_metadata
)
diff --git a/google/api_core/gapic_v1/method_async.py b/google/api_core/gapic_v1/method_async.py
index 76e57577..84c99aa2 100644
--- a/google/api_core/gapic_v1/method_async.py
+++ b/google/api_core/gapic_v1/method_async.py
@@ -17,7 +17,9 @@
pagination, and long-running operations to gRPC methods.
"""
-from google.api_core import general_helpers, grpc_helpers_async
+import functools
+
+from google.api_core import grpc_helpers_async
from google.api_core.gapic_v1 import client_info
from google.api_core.gapic_v1.method import _GapicCallable
from google.api_core.gapic_v1.method import DEFAULT # noqa: F401
@@ -41,6 +43,6 @@ def wrap_method(
metadata = [client_info.to_grpc_metadata()] if client_info is not None else None
- return general_helpers.wraps(func)(
+ return functools.wraps(func)(
_GapicCallable(func, default_retry, default_timeout, metadata=metadata)
)
diff --git a/google/api_core/gapic_v1/routing_header.py b/google/api_core/gapic_v1/routing_header.py
index 3fb12a6f..a7bcb5a8 100644
--- a/google/api_core/gapic_v1/routing_header.py
+++ b/google/api_core/gapic_v1/routing_header.py
@@ -20,9 +20,7 @@
Generally, these headers are specified as gRPC metadata.
"""
-import sys
-
-from six.moves.urllib.parse import urlencode
+from urllib.parse import urlencode
ROUTING_METADATA_KEY = "x-goog-request-params"
@@ -37,9 +35,6 @@ def to_routing_header(params):
Returns:
str: The routing header string.
"""
- if sys.version_info[0] < 3:
- # Python 2 does not have the "safe" parameter for urlencode.
- return urlencode(params).replace("%2F", "/")
return urlencode(
params,
# Per Google API policy (go/api-url-encoding), / is not encoded.
diff --git a/google/api_core/general_helpers.py b/google/api_core/general_helpers.py
index d2d0c440..fba78026 100644
--- a/google/api_core/general_helpers.py
+++ b/google/api_core/general_helpers.py
@@ -12,22 +12,5 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Helpers for general Python functionality."""
-
-import functools
-
-import six
-
-
-# functools.partial objects lack several attributes present on real function
-# objects. In Python 2 wraps fails on this so use a restricted set instead.
-_PARTIAL_VALID_ASSIGNMENTS = ("__doc__",)
-
-
-def wraps(wrapped):
- """A functools.wraps helper that handles partial objects on Python 2."""
- # https://github.com/google/pytype/issues/322
- if isinstance(wrapped, functools.partial): # pytype: disable=wrong-arg-types
- return six.wraps(wrapped, assigned=_PARTIAL_VALID_ASSIGNMENTS)
- else:
- return six.wraps(wrapped)
+# This import is for backward compatibility only.
+from functools import wraps # noqa: F401 pragma: NO COVER
diff --git a/google/api_core/grpc_helpers.py b/google/api_core/grpc_helpers.py
index 62d9e533..bb0f8fa1 100644
--- a/google/api_core/grpc_helpers.py
+++ b/google/api_core/grpc_helpers.py
@@ -15,13 +15,12 @@
"""Helpers for :mod:`grpc`."""
import collections
+import functools
+import warnings
import grpc
-import pkg_resources
-import six
from google.api_core import exceptions
-from google.api_core import general_helpers
import google.auth
import google.auth.credentials
import google.auth.transport.grpc
@@ -30,18 +29,17 @@
try:
import grpc_gcp
+ warnings.warn(
+ """Support for grpcio-gcp is deprecated. This feature will be
+ removed from `google-api-core` after January 1, 2024. If you need to
+ continue to use this feature, please pin to a specific version of
+ `google-api-core`.""",
+ DeprecationWarning,
+ )
HAS_GRPC_GCP = True
except ImportError:
HAS_GRPC_GCP = False
-try:
- # google.auth.__version__ was added in 1.26.0
- _GOOGLE_AUTH_VERSION = google.auth.__version__
-except AttributeError:
- try: # try pkg_resources if it is available
- _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
- except pkg_resources.DistributionNotFound: # pragma: NO COVER
- _GOOGLE_AUTH_VERSION = None
# The list of gRPC Callable interfaces that return iterators.
_STREAM_WRAP_CLASSES = (grpc.UnaryStreamMultiCallable, grpc.StreamStreamMultiCallable)
@@ -61,12 +59,12 @@ def _wrap_unary_errors(callable_):
"""Map errors for Unary-Unary and Stream-Unary gRPC callables."""
_patch_callable_name(callable_)
- @six.wraps(callable_)
+ @functools.wraps(callable_)
def error_remapped_callable(*args, **kwargs):
try:
return callable_(*args, **kwargs)
except grpc.RpcError as exc:
- six.raise_from(exceptions.from_grpc_error(exc), exc)
+ raise exceptions.from_grpc_error(exc) from exc
return error_remapped_callable
@@ -80,7 +78,7 @@ def __init__(self, wrapped, prefetch_first_result=True):
# to retrieve the first result, in order to fail, in order to trigger a retry.
try:
if prefetch_first_result:
- self._stored_first_result = six.next(self._wrapped)
+ self._stored_first_result = next(self._wrapped)
except TypeError:
# It is possible the wrapped method isn't an iterable (a grpc.Call
# for instance). If this happens don't store the first result.
@@ -93,7 +91,7 @@ def __iter__(self):
"""This iterator is also an iterable that returns itself."""
return self
- def next(self):
+ def __next__(self):
"""Get the next response from the stream.
Returns:
@@ -104,13 +102,10 @@ def next(self):
result = self._stored_first_result
del self._stored_first_result
return result
- return six.next(self._wrapped)
+ return next(self._wrapped)
except grpc.RpcError as exc:
# If the stream has already returned data, we cannot recover here.
- six.raise_from(exceptions.from_grpc_error(exc), exc)
-
- # Alias needed for Python 2/3 support.
- __next__ = next
+ raise exceptions.from_grpc_error(exc) from exc
# grpc.Call & grpc.RpcContext interface
@@ -148,7 +143,7 @@ def _wrap_stream_errors(callable_):
"""
_patch_callable_name(callable_)
- @general_helpers.wraps(callable_)
+ @functools.wraps(callable_)
def error_remapped_callable(*args, **kwargs):
try:
result = callable_(*args, **kwargs)
@@ -161,7 +156,7 @@ def error_remapped_callable(*args, **kwargs):
result, prefetch_first_result=prefetch_first
)
except grpc.RpcError as exc:
- six.raise_from(exceptions.from_grpc_error(exc), exc)
+ raise exceptions.from_grpc_error(exc) from exc
return error_remapped_callable
@@ -250,7 +245,9 @@ def _create_composite_credentials(
# Create the metadata plugin for inserting the authorization header.
metadata_plugin = google.auth.transport.grpc.AuthMetadataPlugin(
- credentials, request, default_host=default_host,
+ credentials,
+ request,
+ default_host=default_host,
)
# Create a set of grpc.CallCredentials using the metadata plugin.
@@ -295,6 +292,7 @@ def create_channel(
default_host (str): The default endpoint. e.g., "pubsub.googleapis.com".
kwargs: Additional key-word args passed to
:func:`grpc_gcp.secure_channel` or :func:`grpc.secure_channel`.
+ Note: `grpc_gcp` is only supported in environments with protobuf < 4.0.0.
Returns:
grpc.Channel: The created channel.
@@ -314,11 +312,8 @@ def create_channel(
)
if HAS_GRPC_GCP:
- # If grpc_gcp module is available use grpc_gcp.secure_channel,
- # otherwise, use grpc.secure_channel to create grpc channel.
return grpc_gcp.secure_channel(target, composite_credentials, **kwargs)
- else:
- return grpc.secure_channel(target, composite_credentials, **kwargs)
+ return grpc.secure_channel(target, composite_credentials, **kwargs)
_MethodCall = collections.namedtuple(
diff --git a/google/api_core/grpc_helpers_async.py b/google/api_core/grpc_helpers_async.py
index 92df645b..5a5bf2a6 100644
--- a/google/api_core/grpc_helpers_async.py
+++ b/google/api_core/grpc_helpers_async.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""AsyncIO helpers for :mod:`grpc` supporting 3.6+.
+"""AsyncIO helpers for :mod:`grpc` supporting 3.7+.
Please combine more detailed docstring in grpc_helpers.py to use following
functions. This module is implementing the same surface with AsyncIO semantics.
@@ -22,14 +22,11 @@
import functools
import grpc
-from grpc.experimental import aio
+from grpc import aio
from google.api_core import exceptions, grpc_helpers
-# TODO(lidiz) Support gRPC GCP wrapper
-HAS_GRPC_GCP = False
-
# NOTE(lidiz) Alternatively, we can hack "__getattribute__" to perform
# automatic patching for us. But that means the overhead of creating an
# extra Python function spreads to every single send and receive.
diff --git a/google/api_core/iam.py b/google/api_core/iam.py
index 59e53874..4437c701 100644
--- a/google/api_core/iam.py
+++ b/google/api_core/iam.py
@@ -52,14 +52,10 @@
"""
import collections
+import collections.abc
import operator
import warnings
-try:
- from collections import abc as collections_abc
-except ImportError: # Python 2.7
- import collections as collections_abc
-
# Generic IAM roles
OWNER_ROLE = "roles/owner"
@@ -84,7 +80,7 @@ class InvalidOperationException(Exception):
pass
-class Policy(collections_abc.MutableMapping):
+class Policy(collections.abc.MutableMapping):
"""IAM Policy
Args:
diff --git a/google/api_core/operations_v1/__init__.py b/google/api_core/operations_v1/__init__.py
index bc9befcb..61186451 100644
--- a/google/api_core/operations_v1/__init__.py
+++ b/google/api_core/operations_v1/__init__.py
@@ -14,11 +14,14 @@
"""Package for interacting with the google.longrunning.operations meta-API."""
-import sys
-
+from google.api_core.operations_v1.abstract_operations_client import AbstractOperationsClient
+from google.api_core.operations_v1.operations_async_client import OperationsAsyncClient
from google.api_core.operations_v1.operations_client import OperationsClient
+from google.api_core.operations_v1.transports.rest import OperationsRestTransport
-__all__ = ["OperationsClient"]
-if sys.version_info >= (3, 6, 0):
- from google.api_core.operations_v1.operations_async_client import OperationsAsyncClient # noqa: F401
- __all__.append("OperationsAsyncClient")
+__all__ = [
+ "AbstractOperationsClient",
+ "OperationsAsyncClient",
+ "OperationsClient",
+ "OperationsRestTransport"
+]
diff --git a/google/api_core/operations_v1/abstract_operations_client.py b/google/api_core/operations_v1/abstract_operations_client.py
new file mode 100644
index 00000000..e02bc199
--- /dev/null
+++ b/google/api_core/operations_v1/abstract_operations_client.py
@@ -0,0 +1,602 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Dict, Optional, Sequence, Tuple, Type, Union
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.api_core.operations_v1 import pagers
+from google.api_core.operations_v1.transports.base import (
+ DEFAULT_CLIENT_INFO,
+ OperationsTransport,
+)
+from google.api_core.operations_v1.transports.rest import OperationsRestTransport
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account # type: ignore
+
+OptionalRetry = Union[retries.Retry, object]
+
+
+class AbstractOperationsClientMeta(type):
+ """Metaclass for the Operations client.
+
+ This provides class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = OrderedDict() # type: Dict[str, Type[OperationsTransport]]
+ _transport_registry["rest"] = OperationsRestTransport
+
+ def get_transport_class(
+ cls,
+ label: Optional[str] = None,
+ ) -> Type[OperationsTransport]:
+ """Returns an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class AbstractOperationsClient(metaclass=AbstractOperationsClientMeta):
+ """Manages long-running operations with an API service.
+
+ When an API method normally takes long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "longrunning.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ AbstractOperationsClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ AbstractOperationsClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> OperationsTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ OperationsTransport: The transport used by the client
+ instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def common_billing_account_path(
+ billing_account: str,
+ ) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(
+ folder: str,
+ ) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(
+ folder=folder,
+ )
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(
+ organization: str,
+ ) -> str:
+ """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(
+ organization=organization,
+ )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(
+ project: str,
+ ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(
+ project=project,
+ )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(
+ project: str,
+ location: str,
+ ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project,
+ location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, OperationsTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the operations client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, OperationsTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ if isinstance(client_options, dict):
+ client_options = client_options_lib.from_dict(client_options)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
+
+ client_cert_source_func = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
+ else:
+ is_mtls = mtls.has_default_client_cert_source()
+ if is_mtls:
+ client_cert_source_func = mtls.default_client_cert_source()
+ else:
+ client_cert_source_func = None
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_mtls_env == "never":
+ api_endpoint = self.DEFAULT_ENDPOINT
+ elif use_mtls_env == "always":
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ elif use_mtls_env == "auto":
+ if is_mtls:
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = self.DEFAULT_ENDPOINT
+ else:
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
+ "values: never, auto, always"
+ )
+
+ # Save or instantiate the transport.
+ # Ordinarily, we provide the transport, but allowing a custom transport
+ # instance provides an extensibility point for unusual situations.
+ if isinstance(transport, OperationsTransport):
+ # transport is a OperationsTransport instance.
+ if credentials or client_options.credentials_file:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its credentials directly."
+ )
+ if client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, provide its scopes "
+ "directly."
+ )
+ self._transport = transport
+ else:
+ Transport = type(self).get_transport_class(transport)
+ self._transport = Transport(
+ credentials=credentials,
+ credentials_file=client_options.credentials_file,
+ host=api_endpoint,
+ scopes=client_options.scopes,
+ client_cert_source_for_mtls=client_cert_source_func,
+ quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=True,
+ )
+
+ def list_operations(
+ self,
+ name: str,
+ filter_: Optional[str] = None,
+ *,
+ page_size: Optional[int] = None,
+ page_token: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListOperationsPager:
+ r"""Lists operations that match the specified filter in the request.
+ If the server doesn't support this method, it returns
+ ``UNIMPLEMENTED``.
+
+ NOTE: the ``name`` binding allows API services to override the
+ binding to use different resource name schemes, such as
+ ``users/*/operations``. To override the binding, API services
+ can add a binding such as ``"/v1/{name=users/*}/operations"`` to
+ their service configuration. For backwards compatibility, the
+ default name includes the operations collection id, however
+ overriding users must ensure the name binding is the parent
+ resource, without the operations collection id.
+
+ Args:
+ name (str):
+ The name of the operation's parent
+ resource.
+ filter_ (str):
+ The standard list filter.
+ This corresponds to the ``filter`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operations_v1.pagers.ListOperationsPager:
+ The response message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create a protobuf request object.
+ request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
+ if page_size is not None:
+ request.page_size = page_size
+ if page_token is not None:
+ request.page_token = page_token
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_operations]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListOperationsPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_operation(
+ self,
+ name: str,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Gets the latest state of a long-running operation.
+ Clients can use this method to poll the operation result
+ at intervals as recommended by the API service.
+
+ Args:
+ name (str):
+ The name of the operation resource.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation:
+ This resource represents a long-
+                running operation that is the result of a
+ network API call.
+
+ """
+
+ request = operations_pb2.GetOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def delete_operation(
+ self,
+ name: str,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a long-running operation. This method indicates that the
+ client is no longer interested in the operation result. It does
+ not cancel the operation. If the server doesn't support this
+ method, it returns ``google.rpc.Code.UNIMPLEMENTED``.
+
+ Args:
+ name (str):
+ The name of the operation resource to
+ be deleted.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create the request object.
+ request = operations_pb2.DeleteOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ def cancel_operation(
+ self,
+ name: Optional[str] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Starts asynchronous cancellation on a long-running operation.
+ The server makes a best effort to cancel the operation, but
+ success is not guaranteed. If the server doesn't support this
+ method, it returns ``google.rpc.Code.UNIMPLEMENTED``. Clients
+ can use
+ [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation]
+ or other methods to check whether the cancellation succeeded or
+ whether the operation completed despite cancellation. On
+ successful cancellation, the operation is not deleted; instead,
+ it becomes an operation with an
+ [Operation.error][google.api_core.operations_v1.Operation.error] value with
+ a [google.rpc.Status.code][google.rpc.Status.code] of 1,
+ corresponding to ``Code.CANCELLED``.
+
+ Args:
+ name (str):
+ The name of the operation resource to
+ be cancelled.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create the request object.
+ request = operations_pb2.CancelOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
diff --git a/google/api_core/operations_v1/pagers.py b/google/api_core/operations_v1/pagers.py
new file mode 100644
index 00000000..b8a47757
--- /dev/null
+++ b/google/api_core/operations_v1/pagers.py
@@ -0,0 +1,86 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import (
+ Any,
+ Callable,
+ Iterator,
+ Sequence,
+ Tuple,
+)
+
+from google.longrunning import operations_pb2
+
+
+class ListOperationsPager:
+ """A pager for iterating through ``list_operations`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``operations`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``ListOperations`` requests and continue to iterate
+ through the ``operations`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., operations_pb2.ListOperationsResponse],
+ request: operations_pb2.ListOperationsRequest,
+ response: operations_pb2.ListOperationsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.longrunning.operations_pb2.ListOperationsRequest):
+ The initial request object.
+ response (google.longrunning.operations_pb2.ListOperationsResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = request
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ def pages(self) -> Iterator[operations_pb2.ListOperationsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __iter__(self) -> Iterator[operations_pb2.Operation]:
+ for page in self.pages:
+ yield from page.operations
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/tests/unit/test_general_helpers.py b/google/api_core/operations_v1/transports/__init__.py
similarity index 53%
rename from tests/unit/test_general_helpers.py
rename to google/api_core/operations_v1/transports/__init__.py
index 027d4892..b443c078 100644
--- a/tests/unit/test_general_helpers.py
+++ b/google/api_core/operations_v1/transports/__init__.py
@@ -1,4 +1,5 @@
-# Copyright 2017, Google LLC
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,31 +12,19 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
-import functools
-
-from google.api_core import general_helpers
-
-
-def test_wraps_normal_func():
- def func():
- return 42
-
- @general_helpers.wraps(func)
- def replacement():
- return func()
-
- assert replacement() == 42
-
-
-def test_wraps_partial():
- def func():
- return 42
+from .base import OperationsTransport
+from .rest import OperationsRestTransport
- partial = functools.partial(func)
- @general_helpers.wraps(partial)
- def replacement():
- return func()
+# Compile a registry of transports.
+_transport_registry = OrderedDict() # type: Dict[str, Type[OperationsTransport]]
+_transport_registry["rest"] = OperationsRestTransport
- assert replacement() == 42
+__all__ = (
+ "OperationsTransport",
+ "OperationsRestTransport",
+)
diff --git a/google/api_core/operations_v1/transports/base.py b/google/api_core/operations_v1/transports/base.py
new file mode 100644
index 00000000..e19bc3e8
--- /dev/null
+++ b/google/api_core/operations_v1/transports/base.py
@@ -0,0 +1,227 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Optional, Sequence, Union
+
+import google.api_core # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.api_core import version
+import google.auth # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=version.__version__,
+)
+
+
+class OperationsTransport(abc.ABC):
+ """Abstract transport class for Operations."""
+
+ AUTH_SCOPES = ()
+
+ DEFAULT_HOST: str = "longrunning.googleapis.com"
+
+ def __init__(
+ self,
+ *,
+ host: str = DEFAULT_HOST,
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ **kwargs,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ """
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+ # Save the scopes.
+ self._scopes = scopes
+
+ # If no credentials are provided, then determine the appropriate
+ # defaults.
+ if credentials and credentials_file:
+ raise core_exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
+ )
+
+ # If the credentials are service account credentials, then always try to use self signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.list_operations: gapic_v1.method.wrap_method(
+ self.list_operations,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ ),
+ self.get_operation: gapic_v1.method.wrap_method(
+ self.get_operation,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ ),
+ self.delete_operation: gapic_v1.method.wrap_method(
+ self.delete_operation,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ ),
+ self.cancel_operation: gapic_v1.method.wrap_method(
+ self.cancel_operation,
+ default_retry=retries.Retry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ ),
+ }
+
+ def close(self):
+ """Closes resources associated with the transport.
+
+ .. warning::
+ Only call this method if the transport is NOT shared
+ with other clients - this may cause errors in other clients!
+ """
+ raise NotImplementedError()
+
+ @property
+ def list_operations(
+ self,
+ ) -> Callable[
+ [operations_pb2.ListOperationsRequest],
+ Union[
+ operations_pb2.ListOperationsResponse,
+ Awaitable[operations_pb2.ListOperationsResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def get_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.GetOperationRequest],
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def delete_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.DeleteOperationRequest],
+ Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def cancel_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.CancelOperationRequest],
+ Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
+ ]:
+ raise NotImplementedError()
+
+
+__all__ = ("OperationsTransport",)
diff --git a/google/api_core/operations_v1/transports/rest.py b/google/api_core/operations_v1/transports/rest.py
new file mode 100644
index 00000000..27ed7661
--- /dev/null
+++ b/google/api_core/operations_v1/transports/rest.py
@@ -0,0 +1,455 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from requests import __version__ as requests_version
+
+from google.api_core import exceptions as core_exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import path_template # type: ignore
+from google.api_core import rest_helpers # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.requests import AuthorizedSession # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
+from google.protobuf import json_format # type: ignore
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, OperationsTransport
+
+OptionalRetry = Union[retries.Retry, object]
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+ grpc_version=None,
+ rest_version=requests_version,
+)
+
+
+class OperationsRestTransport(OperationsTransport):
+ """REST backend transport for Operations.
+
+ Manages long-running operations with an API service.
+
+    When an API method normally takes a long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends JSON representations of protocol buffers over HTTP/1.1
+ """
+
+ def __init__(
+ self,
+ *,
+ host: str = "longrunning.googleapis.com",
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ url_scheme: str = "https",
+ http_options: Optional[Dict] = None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+ certificate to configure mutual TLS HTTP channel. It is ignored
+ if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ http_options: a dictionary of http_options for transcoding, to override
+                the defaults from operations.proto. Each method has an entry
+ with the corresponding http rules as value.
+
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST
+ )
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._prep_wrapped_messages(client_info)
+ self._http_options = http_options or {}
+
+ def _list_operations(
+ self,
+ request: operations_pb2.ListOperationsRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.ListOperationsResponse:
+ r"""Call the list operations method over HTTP.
+
+ Args:
+ request (~.operations_pb2.ListOperationsRequest):
+ The request object. The request message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.ListOperationsResponse:
+ The response message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ """
+
+ http_options = [
+ {"method": "get", "uri": "/v1/{name=operations}"},
+ ]
+ if "google.longrunning.Operations.ListOperations" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.ListOperations"
+ ]
+
+ request_kwargs = json_format.MessageToDict(
+ request,
+ preserving_proto_field_name=True,
+ including_default_value_fields=True,
+ )
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.ListOperationsRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "https://{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ api_response = operations_pb2.ListOperationsResponse()
+ json_format.Parse(response.content, api_response, ignore_unknown_fields=False)
+ return api_response
+
+ def _get_operation(
+ self,
+ request: operations_pb2.GetOperationRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Call the get operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.GetOperationRequest):
+ The request object. The request message for
+ [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.Operation:
+ This resource represents a long-
+ unning operation that is the result of a
+ network API call.
+
+ """
+
+ http_options = [
+ {"method": "get", "uri": "/v1/{name=operations/**}"},
+ ]
+ if "google.longrunning.Operations.GetOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.GetOperation"
+ ]
+
+ request_kwargs = json_format.MessageToDict(
+ request,
+ preserving_proto_field_name=True,
+ including_default_value_fields=True,
+ )
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.GetOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "https://{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ api_response = operations_pb2.Operation()
+ json_format.Parse(response.content, api_response, ignore_unknown_fields=False)
+ return api_response
+
+ def _delete_operation(
+ self,
+ request: operations_pb2.DeleteOperationRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> empty_pb2.Empty:
+ r"""Call the delete operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.DeleteOperationRequest):
+ The request object. The request message for
+ [Operations.DeleteOperation][google.api_core.operations_v1.Operations.DeleteOperation].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+
+ http_options = [
+ {"method": "delete", "uri": "/v1/{name=operations/**}"},
+ ]
+ if "google.longrunning.Operations.DeleteOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.DeleteOperation"
+ ]
+
+ request_kwargs = json_format.MessageToDict(
+ request,
+ preserving_proto_field_name=True,
+ including_default_value_fields=True,
+ )
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.DeleteOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "https://{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ return empty_pb2.Empty()
+
+ def _cancel_operation(
+ self,
+ request: operations_pb2.CancelOperationRequest,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> empty_pb2.Empty:
+ r"""Call the cancel operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.CancelOperationRequest):
+ The request object. The request message for
+ [Operations.CancelOperation][google.api_core.operations_v1.Operations.CancelOperation].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+
+ http_options = [
+ {"method": "post", "uri": "/v1/{name=operations/**}:cancel", "body": "*"},
+ ]
+ if "google.longrunning.Operations.CancelOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.CancelOperation"
+ ]
+
+ request_kwargs = json_format.MessageToDict(
+ request,
+ preserving_proto_field_name=True,
+ including_default_value_fields=True,
+ )
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ # Jsonify the request body
+ body_request = operations_pb2.CancelOperationRequest()
+ json_format.ParseDict(transcoded_request["body"], body_request)
+ body = json_format.MessageToDict(
+ body_request,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.CancelOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ response = getattr(self._session, method)(
+ "https://{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ data=body,
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ return empty_pb2.Empty()
+
+ @property
+ def list_operations(
+ self,
+ ) -> Callable[
+ [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+ ]:
+ return self._list_operations
+
+ @property
+ def get_operation(
+ self,
+ ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+ return self._get_operation
+
+ @property
+ def delete_operation(
+ self,
+ ) -> Callable[[operations_pb2.DeleteOperationRequest], empty_pb2.Empty]:
+ return self._delete_operation
+
+ @property
+ def cancel_operation(
+ self,
+ ) -> Callable[[operations_pb2.CancelOperationRequest], empty_pb2.Empty]:
+ return self._cancel_operation
+
+
+__all__ = ("OperationsRestTransport",)
diff --git a/google/api_core/page_iterator.py b/google/api_core/page_iterator.py
index 49879bc9..7ddc5cbc 100644
--- a/google/api_core/page_iterator.py
+++ b/google/api_core/page_iterator.py
@@ -81,8 +81,6 @@
import abc
-import six
-
class Page(object):
"""Single page of results in an iterator.
@@ -127,18 +125,15 @@ def __iter__(self):
"""The :class:`Page` is an iterator of items."""
return self
- def next(self):
+ def __next__(self):
"""Get the next value in the page."""
- item = six.next(self._item_iter)
+ item = next(self._item_iter)
result = self._item_to_value(self._parent, item)
# Since we've successfully got the next value from the
# iterator, we update the number of remaining.
self._remaining -= 1
return result
- # Alias needed for Python 2/3 support.
- __next__ = next
-
def _item_to_value_identity(iterator, item):
"""An item to value transformer that returns the item un-changed."""
@@ -147,8 +142,7 @@ def _item_to_value_identity(iterator, item):
return item
-@six.add_metaclass(abc.ABCMeta)
-class Iterator(object):
+class Iterator(object, metaclass=abc.ABCMeta):
"""A generic class for iterating through API list responses.
Args:
@@ -235,9 +229,6 @@ def __next__(self):
self.__active_iterator = iter(self)
return next(self.__active_iterator)
- # Preserve Python 2 compatibility.
- next = __next__
-
def _page_iter(self, increment):
"""Generator of pages of API responses.
@@ -484,7 +475,7 @@ def _next_page(self):
there are no pages left.
"""
try:
- items = six.next(self._gax_page_iter)
+ items = next(self._gax_page_iter)
page = Page(self, items, self.item_to_value)
self.next_page_token = self._gax_page_iter.page_token or None
return page
diff --git a/google/api_core/path_template.py b/google/api_core/path_template.py
index f202d40f..2639459a 100644
--- a/google/api_core/path_template.py
+++ b/google/api_core/path_template.py
@@ -25,11 +25,11 @@
from __future__ import unicode_literals
+from collections import deque
+import copy
import functools
import re
-import six
-
# Regular expression for extracting variable parts from a path template.
# The variables can be expressed as:
#
@@ -66,7 +66,7 @@ def _expand_variable_match(positional_vars, named_vars, match):
"""Expand a matched variable with its value.
Args:
- positional_vars (list): A list of positonal variables. This list will
+ positional_vars (list): A list of positional variables. This list will
be modified.
named_vars (dict): A dictionary of named variables.
match (re.Match): A regular expression match.
@@ -83,7 +83,7 @@ def _expand_variable_match(positional_vars, named_vars, match):
name = match.group("name")
if name is not None:
try:
- return six.text_type(named_vars[name])
+ return str(named_vars[name])
except KeyError:
raise ValueError(
"Named variable '{}' not specified and needed by template "
@@ -91,7 +91,7 @@ def _expand_variable_match(positional_vars, named_vars, match):
)
elif positional is not None:
try:
- return six.text_type(positional_vars.pop(0))
+ return str(positional_vars.pop(0))
except IndexError:
raise ValueError(
"Positional variable not specified and needed by template "
@@ -172,6 +172,56 @@ def _generate_pattern_for_template(tmpl):
return _VARIABLE_RE.sub(_replace_variable_with_pattern, tmpl)
+def get_field(request, field):
+ """Get the value of a field from a given dictionary.
+
+ Args:
+ request (dict | Message): A dictionary or a Message object.
+ field (str): The key to the request in dot notation.
+
+ Returns:
+ The value of the field.
+ """
+ parts = field.split(".")
+ value = request
+
+ for part in parts:
+ if not isinstance(value, dict):
+ value = getattr(value, part, None)
+ else:
+ value = value.get(part)
+ if isinstance(value, dict):
+ return
+ return value
+
+
+def delete_field(request, field):
+ """Delete the value of a field from a given dictionary.
+
+ Args:
+ request (dict | Message): A dictionary object or a Message.
+ field (str): The key to the request in dot notation.
+ """
+ parts = deque(field.split("."))
+ while len(parts) > 1:
+ part = parts.popleft()
+ if not isinstance(request, dict):
+ if hasattr(request, part):
+ request = getattr(request, part, None)
+ else:
+ return
+ else:
+ request = request.get(part)
+ part = parts.popleft()
+ if not isinstance(request, dict):
+ if hasattr(request, part):
+ request.ClearField(part)
+ else:
+ return
+ else:
+ request.pop(part, None)
+
+
def validate(tmpl, path):
"""Validate a path against the path template.
@@ -195,3 +245,79 @@ def validate(tmpl, path):
"""
pattern = _generate_pattern_for_template(tmpl) + "$"
return True if re.match(pattern, path) is not None else False
+
+
+def transcode(http_options, message=None, **request_kwargs):
+ """Transcodes a grpc request pattern into a proper HTTP request following the rules outlined here,
+ https://github.com/googleapis/googleapis/blob/master/google/api/http.proto#L44-L312
+
+ Args:
+ http_options (list(dict)): A list of dicts which consist of these keys,
+ 'method' (str): The http method
+ 'uri' (str): The path template
+ 'body' (str): The body field name (optional)
+ (This is a simplified representation of the proto option `google.api.http`)
+
+ message (Message) : A request object (optional)
+ request_kwargs (dict) : A dict representing the request object
+
+ Returns:
+ dict: The transcoded request with these keys,
+ 'method' (str) : The http method
+ 'uri' (str) : The expanded uri
+ 'body' (dict | Message) : A dict or a Message representing the body (optional)
+ 'query_params' (dict | Message) : A dict or Message mapping query parameter variables and values
+
+ Raises:
+ ValueError: If the request does not match the given template.
+ """
+ transcoded_value = message or request_kwargs
+ for http_option in http_options:
+ request = {}
+
+ # Assign path
+ uri_template = http_option["uri"]
+ path_fields = [
+ match.group("name") for match in _VARIABLE_RE.finditer(uri_template)
+ ]
+ path_args = {field: get_field(transcoded_value, field) for field in path_fields}
+ request["uri"] = expand(uri_template, **path_args)
+
+ if not validate(uri_template, request["uri"]) or not all(path_args.values()):
+ continue
+
+ # Remove fields used in uri path from request
+ leftovers = copy.deepcopy(transcoded_value)
+ for path_field in path_fields:
+ delete_field(leftovers, path_field)
+
+ # Assign body and query params
+ body = http_option.get("body")
+
+ if body:
+ if body == "*":
+ request["body"] = leftovers
+ if message:
+ request["query_params"] = message.__class__()
+ else:
+ request["query_params"] = {}
+ else:
+ try:
+ if message:
+ request["body"] = getattr(leftovers, body)
+ delete_field(leftovers, body)
+ else:
+ request["body"] = leftovers.pop(body)
+ except (KeyError, AttributeError):
+ continue
+ request["query_params"] = leftovers
+ else:
+ request["query_params"] = leftovers
+ request["method"] = http_option["method"]
+ return request
+
+ raise ValueError(
+ "Request {} does not match any URL path template in available HttpRule's {}".format(
+ request_kwargs, [opt["uri"] for opt in http_options]
+ )
+ )
diff --git a/google/api_core/protobuf_helpers.py b/google/api_core/protobuf_helpers.py
index 8aff79aa..896e89c1 100644
--- a/google/api_core/protobuf_helpers.py
+++ b/google/api_core/protobuf_helpers.py
@@ -15,6 +15,7 @@
"""Helpers for :mod:`protobuf`."""
import collections
+import collections.abc
import copy
import inspect
@@ -22,11 +23,6 @@
from google.protobuf import message
from google.protobuf import wrappers_pb2
-try:
- from collections import abc as collections_abc
-except ImportError: # Python 2.7
- import collections as collections_abc
-
_SENTINEL = object()
_WRAPPER_TYPES = (
@@ -179,7 +175,7 @@ def get(msg_or_dict, key, default=_SENTINEL):
# If we get something else, complain.
if isinstance(msg_or_dict, message.Message):
answer = getattr(msg_or_dict, key, default)
- elif isinstance(msg_or_dict, collections_abc.Mapping):
+ elif isinstance(msg_or_dict, collections.abc.Mapping):
answer = msg_or_dict.get(key, default)
else:
raise TypeError(
@@ -204,7 +200,7 @@ def _set_field_on_message(msg, key, value):
"""Set helper for protobuf Messages."""
# Attempt to set the value on the types of objects we know how to deal
# with.
- if isinstance(value, (collections_abc.MutableSequence, tuple)):
+ if isinstance(value, (collections.abc.MutableSequence, tuple)):
# Clear the existing repeated protobuf message of any elements
# currently inside it.
while getattr(msg, key):
@@ -212,13 +208,13 @@ def _set_field_on_message(msg, key, value):
# Write our new elements to the repeated field.
for item in value:
- if isinstance(item, collections_abc.Mapping):
+ if isinstance(item, collections.abc.Mapping):
getattr(msg, key).add(**item)
else:
# protobuf's RepeatedCompositeContainer doesn't support
# append.
getattr(msg, key).extend([item])
- elif isinstance(value, collections_abc.Mapping):
+ elif isinstance(value, collections.abc.Mapping):
# Assign the dictionary values to the protobuf message.
for item_key, item_value in value.items():
set(getattr(msg, key), item_key, item_value)
@@ -241,7 +237,7 @@ def set(msg_or_dict, key, value):
TypeError: If ``msg_or_dict`` is not a Message or dictionary.
"""
# Sanity check: Is our target object valid?
- if not isinstance(msg_or_dict, (collections_abc.MutableMapping, message.Message)):
+ if not isinstance(msg_or_dict, (collections.abc.MutableMapping, message.Message)):
raise TypeError(
"set() expected a dict or protobuf message, got {!r}.".format(
type(msg_or_dict)
@@ -254,12 +250,12 @@ def set(msg_or_dict, key, value):
# If a subkey exists, then get that object and call this method
# recursively against it using the subkey.
if subkey is not None:
- if isinstance(msg_or_dict, collections_abc.MutableMapping):
+ if isinstance(msg_or_dict, collections.abc.MutableMapping):
msg_or_dict.setdefault(basekey, {})
set(get(msg_or_dict, basekey), subkey, value)
return
- if isinstance(msg_or_dict, collections_abc.MutableMapping):
+ if isinstance(msg_or_dict, collections.abc.MutableMapping):
msg_or_dict[key] = value
else:
_set_field_on_message(msg_or_dict, key, value)
diff --git a/google/api_core/py.typed b/google/api_core/py.typed
new file mode 100644
index 00000000..1d5517b1
--- /dev/null
+++ b/google/api_core/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-api-core package uses inline types.
diff --git a/google/api_core/rest_helpers.py b/google/api_core/rest_helpers.py
new file mode 100644
index 00000000..a78822f1
--- /dev/null
+++ b/google/api_core/rest_helpers.py
@@ -0,0 +1,109 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for rest transports."""
+
+import functools
+import operator
+
+
+def flatten_query_params(obj, strict=False):
+ """Flatten a dict into a list of (name,value) tuples.
+
+ The result is suitable for setting query params on an http request.
+
+ .. code-block:: python
+
+ >>> obj = {'a':
+ ... {'b':
+ ... {'c': ['x', 'y', 'z']} },
+ ... 'd': 'uvw',
+ ... 'e': True, }
+ >>> flatten_query_params(obj, strict=True)
+ [('a.b.c', 'x'), ('a.b.c', 'y'), ('a.b.c', 'z'), ('d', 'uvw'), ('e', 'true')]
+
+ Note that, as described in
+ https://github.com/googleapis/googleapis/blob/48d9fb8c8e287c472af500221c6450ecd45d7d39/google/api/http.proto#L117,
+ repeated fields (i.e. list-valued fields) may only contain primitive types (not lists or dicts).
+ This is enforced in this function.
+
+ Args:
+ obj: a possibly nested dictionary (from json), or None
+ strict: a bool, defaulting to False, to enforce that all values in the
+ result tuples be strings and, if boolean, lower-cased.
+
+ Returns: a list of tuples, with each tuple having a (possibly) multi-part name
+ and a scalar value.
+
+ Raises:
+ TypeError: If obj is not a dict or None.
+ ValueError: If obj contains a list of non-primitive values.
+ """
+
+ if obj is not None and not isinstance(obj, dict):
+ raise TypeError("flatten_query_params must be called with dict object")
+
+ return _flatten(obj, key_path=[], strict=strict)
+
+
+def _flatten(obj, key_path, strict=False):
+ if obj is None:
+ return []
+ if isinstance(obj, dict):
+ return _flatten_dict(obj, key_path=key_path, strict=strict)
+ if isinstance(obj, list):
+ return _flatten_list(obj, key_path=key_path, strict=strict)
+ return _flatten_value(obj, key_path=key_path, strict=strict)
+
+
+def _is_primitive_value(obj):
+ if obj is None:
+ return False
+
+ if isinstance(obj, (list, dict)):
+ raise ValueError("query params may not contain repeated dicts or lists")
+
+ return True
+
+
+def _flatten_value(obj, key_path, strict=False):
+ return [(".".join(key_path), _canonicalize(obj, strict=strict))]
+
+
+def _flatten_dict(obj, key_path, strict=False):
+ items = (
+ _flatten(value, key_path=key_path + [key], strict=strict)
+ for key, value in obj.items()
+ )
+ return functools.reduce(operator.concat, items, [])
+
+
+def _flatten_list(elems, key_path, strict=False):
+ # Only lists of scalar values are supported.
+ # The name (key_path) is repeated for each value.
+ items = (
+ _flatten_value(elem, key_path=key_path, strict=strict)
+ for elem in elems
+ if _is_primitive_value(elem)
+ )
+ return functools.reduce(operator.concat, items, [])
+
+
+def _canonicalize(obj, strict=False):
+ if strict:
+ value = str(obj)
+ if isinstance(obj, bool):
+ value = value.lower()
+ return value
+ return obj
diff --git a/google/api_core/rest_streaming.py b/google/api_core/rest_streaming.py
new file mode 100644
index 00000000..f91381c1
--- /dev/null
+++ b/google/api_core/rest_streaming.py
@@ -0,0 +1,113 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for server-side streaming in REST."""
+
+from collections import deque
+import string
+from typing import Deque
+
+import requests
+
+
+class ResponseIterator:
+ """Iterator over REST API responses.
+
+ Args:
+ response (requests.Response): An API response object.
+ response_message_cls (Callable[proto.Message]): A proto
+ class expected to be returned from an API.
+ """
+
+ def __init__(self, response: requests.Response, response_message_cls):
+ self._response = response
+ self._response_message_cls = response_message_cls
+ # Inner iterator over HTTP response's content.
+ self._response_itr = self._response.iter_content(decode_unicode=True)
+ # Contains a list of JSON responses ready to be sent to user.
+ self._ready_objs: Deque[str] = deque()
+ # Current JSON response being built.
+ self._obj = ""
+ # Keeps track of the nesting level within a JSON object.
+ self._level = 0
+ # Keeps track whether HTTP response is currently sending values
+ # inside of a string value.
+ self._in_string = False
+ # Whether an escape symbol "\" was encountered.
+ self._escape_next = False
+
+ def cancel(self):
+ """Cancel existing streaming operation."""
+ self._response.close()
+
+ def _process_chunk(self, chunk: str):
+ if self._level == 0:
+ if chunk[0] != "[":
+ raise ValueError(
+ "Can only parse array of JSON objects, instead got %s" % chunk
+ )
+ for char in chunk:
+ if char == "{":
+ if self._level == 1:
+ # Level 1 corresponds to the outermost JSON object
+ # (i.e. the one we care about).
+ self._obj = ""
+ if not self._in_string:
+ self._level += 1
+ self._obj += char
+ elif char == "}":
+ self._obj += char
+ if not self._in_string:
+ self._level -= 1
+ if not self._in_string and self._level == 1:
+ self._ready_objs.append(self._obj)
+ elif char == '"':
+ # Helps to deal with escaped quotes inside of a string.
+ if not self._escape_next:
+ self._in_string = not self._in_string
+ self._obj += char
+ elif char in string.whitespace:
+ if self._in_string:
+ self._obj += char
+ elif char == "[":
+ if self._level == 0:
+ self._level += 1
+ else:
+ self._obj += char
+ elif char == "]":
+ if self._level == 1:
+ self._level -= 1
+ else:
+ self._obj += char
+ else:
+ self._obj += char
+ self._escape_next = not self._escape_next if char == "\\" else False
+
+ def __next__(self):
+ while not self._ready_objs:
+ try:
+ chunk = next(self._response_itr)
+ self._process_chunk(chunk)
+ except StopIteration as e:
+ if self._level > 0:
+ raise ValueError("Unfinished stream: %s" % self._obj)
+ raise e
+ return self._grab()
+
+ def _grab(self):
+ # Parse the oldest complete JSON object into the expected message class.
+ return self._response_message_cls.from_json(self._ready_objs.popleft())
+
+ def __iter__(self):
+ return self
diff --git a/google/api_core/retry.py b/google/api_core/retry.py
index 313fc63c..ce496937 100644
--- a/google/api_core/retry.py
+++ b/google/api_core/retry.py
@@ -63,11 +63,9 @@ def check_if_exists():
import time
import requests.exceptions
-import six
from google.api_core import datetime_helpers
from google.api_core import exceptions
-from google.api_core import general_helpers
from google.auth import exceptions as auth_exceptions
_LOGGER = logging.getLogger(__name__)
@@ -115,8 +113,11 @@ def if_exception_type_predicate(exception):
``INTERNAL(13)`` and its subclasses.
- :class:`google.api_core.exceptions.TooManyRequests` - HTTP 429
- :class:`google.api_core.exceptions.ServiceUnavailable` - HTTP 503
-- :class:`google.api_core.exceptions.ResourceExhausted` - gRPC
- ``RESOURCE_EXHAUSTED(8)``
+- :class:`requests.exceptions.ConnectionError`
+- :class:`requests.exceptions.ChunkedEncodingError` - The server declared
+ chunked encoding but sent an invalid chunk.
+- :class:`google.auth.exceptions.TransportError` - Used to indicate an
+ error occurred during an HTTP request.
"""
# pylint: enable=invalid-name
@@ -201,15 +202,12 @@ def retry_target(target, predicate, sleep_generator, deadline, on_error=None):
if deadline_datetime is not None:
if deadline_datetime <= now:
- six.raise_from(
- exceptions.RetryError(
- "Deadline of {:.1f}s exceeded while calling target function".format(
- deadline
- ),
- last_exc,
+ raise exceptions.RetryError(
+ "Deadline of {:.1f}s exceeded while calling target function".format(
+ deadline
),
last_exc,
- )
+ ) from last_exc
else:
time_to_deadline = (deadline_datetime - now).total_seconds()
sleep = min(time_to_deadline, sleep)
@@ -222,7 +220,6 @@ def retry_target(target, predicate, sleep_generator, deadline, on_error=None):
raise ValueError("Sleep generator stopped yielding sleep values.")
-@six.python_2_unicode_compatible
class Retry(object):
"""Exponential retry decorator.
@@ -276,7 +273,7 @@ def __call__(self, func, on_error=None):
if self._on_error is not None:
on_error = self._on_error
- @general_helpers.wraps(func)
+ @functools.wraps(func)
def retry_wrapped_func(*args, **kwargs):
"""A wrapper that calls target function with retry."""
target = functools.partial(func, *args, **kwargs)
diff --git a/google/api_core/timeout.py b/google/api_core/timeout.py
index 17c1beab..73232180 100644
--- a/google/api_core/timeout.py
+++ b/google/api_core/timeout.py
@@ -54,11 +54,9 @@ def is_thing_ready(timeout=None):
from __future__ import unicode_literals
import datetime
-
-import six
+import functools
from google.api_core import datetime_helpers
-from google.api_core import general_helpers
_DEFAULT_INITIAL_TIMEOUT = 5.0 # seconds
_DEFAULT_MAXIMUM_TIMEOUT = 30.0 # seconds
@@ -68,7 +66,6 @@ def is_thing_ready(timeout=None):
_DEFAULT_DEADLINE = None
-@six.python_2_unicode_compatible
class ConstantTimeout(object):
"""A decorator that adds a constant timeout argument.
@@ -95,7 +92,7 @@ def __call__(self, func):
Callable: The wrapped function.
"""
- @general_helpers.wraps(func)
+ @functools.wraps(func)
def func_with_timeout(*args, **kwargs):
"""Wrapped function that adds timeout."""
kwargs["timeout"] = self._timeout
@@ -140,7 +137,6 @@ def _exponential_timeout_generator(initial, maximum, multiplier, deadline):
timeout = timeout * multiplier
-@six.python_2_unicode_compatible
class ExponentialTimeout(object):
"""A decorator that adds an exponentially increasing timeout argument.
@@ -207,7 +203,7 @@ def __call__(self, func):
self._initial, self._maximum, self._multiplier, self._deadline
)
- @general_helpers.wraps(func)
+ @functools.wraps(func)
def func_with_timeout(*args, **kwargs):
"""Wrapped function that adds timeout."""
kwargs["timeout"] = next(timeouts)
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 00000000..ce33582a
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,4 @@
+[mypy]
+python_version = 3.7
+namespace_packages = True
+ignore_missing_imports = True
diff --git a/noxfile.py b/noxfile.py
index 2f11137d..2d8f1e02 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -21,7 +21,7 @@
import nox # pytype: disable=import-error
-BLACK_VERSION = "black==19.10b0"
+BLACK_VERSION = "black==22.3.0"
BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
# Black and flake8 clash on the syntax for ignoring flake8's F401 in this file.
BLACK_EXCLUDES = ["--exclude", "^/google/api_core/operations_v1/__init__.py"]
@@ -29,17 +29,29 @@
DEFAULT_PYTHON_VERSION = "3.7"
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-_MINIMAL_ASYNCIO_SUPPORT_PYTHON_VERSION = [3, 6]
-
-
-def _greater_or_equal_than_36(version_string):
+# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
+nox.options.sessions = [
+ "unit",
+ "unit_grpc_gcp",
+ "unit_wo_grpc",
+ "cover",
+ "pytype",
+ "mypy",
+ "lint",
+ "lint_setup_py",
+ "blacken",
+ "docs",
+]
+
+
+def _greater_or_equal_than_37(version_string):
tokens = version_string.split(".")
for i, token in enumerate(tokens):
try:
tokens[i] = int(token)
except ValueError:
pass
- return tokens >= [3, 6]
+ return tokens >= [3, 7]
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -52,7 +64,10 @@ def lint(session):
session.install("flake8", "flake8-import-order", BLACK_VERSION)
session.install(".")
session.run(
- "black", "--check", *BLACK_EXCLUDES, *BLACK_PATHS,
+ "black",
+ "--check",
+ *BLACK_EXCLUDES,
+ *BLACK_PATHS,
)
session.run("flake8", "google", "tests")
@@ -67,7 +82,7 @@ def blacken(session):
session.run("black", *BLACK_EXCLUDES, *BLACK_PATHS)
-def default(session):
+def default(session, install_grpc=True):
"""Default unit test session.
This is intended to be run **without** an interpreter set, so
@@ -80,56 +95,75 @@ def default(session):
)
# Install all test dependencies, then install this package in-place.
- session.install("mock", "pytest", "pytest-cov", "grpcio >= 1.0.2")
- session.install("-e", ".", "-c", constraints_path)
+ session.install("dataclasses", "mock", "pytest", "pytest-cov", "pytest-xdist")
+ if install_grpc:
+ session.install("-e", ".[grpc]", "-c", constraints_path)
+ else:
+ session.install("-e", ".", "-c", constraints_path)
pytest_args = [
"python",
"-m",
"py.test",
- "--quiet",
- "--cov=google.api_core",
- "--cov=tests.unit",
- "--cov-append",
- "--cov-config=.coveragerc",
- "--cov-report=",
- "--cov-fail-under=0",
- os.path.join("tests", "unit"),
+ *(
+ # Helpful for running a single test or testfile.
+ session.posargs
+ or [
+ "--quiet",
+ "--cov=google.api_core",
+ "--cov=tests.unit",
+ "--cov-append",
+ "--cov-config=.coveragerc",
+ "--cov-report=",
+ "--cov-fail-under=0",
+ # Running individual tests with parallelism enabled is usually not helpful.
+ "-n=auto",
+ os.path.join("tests", "unit"),
+ ]
+ ),
]
- pytest_args.extend(session.posargs)
- # Inject AsyncIO content and proto-plus, if version >= 3.6.
+ # Inject AsyncIO content and proto-plus, if version >= 3.7.
# proto-plus is needed for a field mask test in test_protobuf_helpers.py
- if _greater_or_equal_than_36(session.python):
+ if _greater_or_equal_than_37(session.python):
session.install("asyncmock", "pytest-asyncio", "proto-plus")
- pytest_args.append("--cov=tests.asyncio")
- pytest_args.append(os.path.join("tests", "asyncio"))
- session.run(*pytest_args)
- else:
- # Run py.test against the unit tests.
- session.run(*pytest_args)
+ # Having positional arguments means the user wants to run specific tests.
+ # Best not to add additional tests to that list.
+ if not session.posargs:
+ pytest_args.append("--cov=tests.asyncio")
+ pytest_args.append(os.path.join("tests", "asyncio"))
+ session.run(*pytest_args)
-@nox.session(python=["2.7", "3.6", "3.7", "3.8", "3.9"])
+
+@nox.session(python=["3.7", "3.8", "3.9", "3.10"])
def unit(session):
"""Run the unit test suite."""
default(session)
-@nox.session(python=["2.7", "3.6", "3.7", "3.8", "3.9"])
+@nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10"])
def unit_grpc_gcp(session):
"""Run the unit test suite with grpcio-gcp installed."""
constraints_path = str(
CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
)
# Install grpcio-gcp
- session.install("grpcio-gcp", "-c", constraints_path)
+ session.install("-e", ".[grpcgcp]", "-c", constraints_path)
+ # Install protobuf < 4.0.0
+ session.install("protobuf<4.0.0")
default(session)
-@nox.session(python="3.6")
+@nox.session(python=["3.8", "3.10"])
+def unit_wo_grpc(session):
+ """Run the unit test suite w/o grpcio installed"""
+ default(session, install_grpc=False)
+
+
+@nox.session(python="3.8")
def lint_setup_py(session):
"""Verify that setup.py is valid (including RST check)."""
@@ -137,18 +171,28 @@ def lint_setup_py(session):
session.run("python", "setup.py", "check", "--restructuredtext", "--strict")
-# No 2.7 due to https://github.com/google/importlab/issues/26.
-# No 3.7 because pytype supports up to 3.6 only.
-@nox.session(python="3.6")
+@nox.session(python="3.8")
def pytype(session):
"""Run type-checking."""
+ session.install(".[grpc]", "pytype >= 2019.3.21")
+ session.run("pytype")
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def mypy(session):
+ """Run type-checking."""
+ session.install(".[grpc]", "mypy")
session.install(
- ".", "grpcio >= 1.8.2", "grpcio-gcp >= 0.2.2", "pytype >= 2019.3.21"
+ "types-setuptools",
+ "types-requests",
+ "types-protobuf",
+ "types-mock",
+ "types-dataclasses",
)
- session.run("pytype")
+ session.run("mypy", "google", "tests")
-@nox.session(python="3.6")
+@nox.session(python="3.8")
def cover(session):
"""Run the final coverage report.
@@ -164,8 +208,7 @@ def cover(session):
def docs(session):
"""Build the docs for this library."""
- session.install(".", "grpcio >= 1.8.2", "grpcio-gcp >= 0.2.2")
- session.install("-e", ".")
+ session.install("-e", ".[grpc]")
session.install("sphinx==4.0.1", "alabaster", "recommonmark")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
diff --git a/owlbot.py b/owlbot.py
index 5c590e4c..ab4f4f0a 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -16,14 +16,24 @@
import synthtool as s
from synthtool import gcp
+from synthtool.languages import python
common = gcp.CommonTemplates()
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
-templated_files = common.py_library(cov_level=100)
-s.move(templated_files, excludes=["noxfile.py", ".flake8", ".coveragerc", "setup.cfg"])
+excludes = [
+ "noxfile.py", # pytype
+ "setup.cfg", # pytype
+ ".flake8", # flake8-import-order, layout
+ ".coveragerc", # layout
+ "CONTRIBUTING.rst", # no systests
+ ".github/workflows/unittest.yml", # exclude unittest gh action
+ "README.rst",
+]
+templated_files = common.py_library(microgenerator=True, cov_level=100)
+s.move(templated_files, excludes=excludes)
# Add pytype support
s.replace(
@@ -37,4 +47,8 @@
""",
)
+s.replace(".github/workflows/lint.yml", "python-version: \"3.10\"", "python-version: \"3.7\"")
+
+python.configure_previous_major_version_branches()
+
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/renovate.json b/renovate.json
index c0489556..39b2a0ec 100644
--- a/renovate.json
+++ b/renovate.json
@@ -1,8 +1,11 @@
{
"extends": [
- "config:base", ":preserveSemverRanges"
+ "config:base",
+ "group:all",
+ ":preserveSemverRanges",
+ ":disableDependencyDashboard"
],
- "ignorePaths": [".pre-commit-config.yaml"],
+ "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
"pip_requirements": {
"fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
}
diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py
index d309d6e9..91b59676 100644
--- a/scripts/readme-gen/readme_gen.py
+++ b/scripts/readme-gen/readme_gen.py
@@ -28,7 +28,10 @@
jinja_env = jinja2.Environment(
trim_blocks=True,
loader=jinja2.FileSystemLoader(
- os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates'))))
+ os.path.abspath(os.path.join(os.path.dirname(__file__), "templates"))
+ ),
+ autoescape=True,
+)
README_TMPL = jinja_env.get_template('README.tmpl.rst')
diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst
index a0406dba..6f069c6c 100644
--- a/scripts/readme-gen/templates/install_deps.tmpl.rst
+++ b/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -12,7 +12,7 @@ Install Dependencies
.. _Python Development Environment Setup Guide:
https://cloud.google.com/python/setup
-#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+#. Create a virtualenv. Samples are compatible with Python 3.7+.
.. code-block:: bash
diff --git a/setup.cfg b/setup.cfg
index 0be0b3ff..f7b5a3bc 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,8 +1,5 @@
-[bdist_wheel]
-universal = 1
-
[pytype]
-python_version = 3.6
+python_version = 3.7
inputs =
google/
exclude =
diff --git a/setup.py b/setup.py
index 09bd7510..a088e6ac 100644
--- a/setup.py
+++ b/setup.py
@@ -29,21 +29,15 @@
# 'Development Status :: 5 - Production/Stable'
release_status = "Development Status :: 5 - Production/Stable"
dependencies = [
- "googleapis-common-protos >= 1.6.0, < 2.0dev",
- 'protobuf >= 3.12.0, < 3.18.0; python_version < "3"',
- 'protobuf >= 3.12.0, < 4.0.0dev; python_version > "3"',
- "google-auth >= 1.25.0, < 2.0dev",
+ "googleapis-common-protos >= 1.56.2, < 2.0dev",
+ "protobuf >= 3.20.1, <4.0.0dev",
+ "google-auth >= 1.25.0, < 3.0dev",
"requests >= 2.18.0, < 3.0.0dev",
- "setuptools >= 40.3.0",
- "packaging >= 14.3",
- "six >= 1.13.0",
- "pytz",
- 'futures >= 3.2.0; python_version < "3.2"',
]
extras = {
- "grpc": "grpcio >= 1.29.0, < 2.0dev",
- "grpcgcp": "grpcio-gcp >= 0.2.2",
- "grpcio-gcp": "grpcio-gcp >= 0.2.2",
+ "grpc": ["grpcio >= 1.33.2, < 2.0dev", "grpcio-status >= 1.33.2, < 2.0dev"],
+ "grpcgcp": "grpcio-gcp >= 0.2.2, < 1.0dev",
+ "grpcio-gcp": "grpcio-gcp >= 0.2.2, < 1.0dev",
}
@@ -87,14 +81,11 @@
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
- "Programming Language :: Python :: 2",
- "Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.5",
- "Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
"Operating System :: OS Independent",
"Topic :: Internet",
],
@@ -103,7 +94,7 @@
namespace_packages=namespaces,
install_requires=dependencies,
extras_require=extras,
- python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*",
+ python_requires=">=3.7",
include_package_data=True,
zip_safe=False,
)
diff --git a/testing/constraints-2.7.txt b/testing/constraints-2.7.txt
deleted file mode 100644
index 246c89d5..00000000
--- a/testing/constraints-2.7.txt
+++ /dev/null
@@ -1 +0,0 @@
-googleapis-common-protos >= 1.6.0, < 1.53dev
diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt
deleted file mode 100644
index ff5b4a7f..00000000
--- a/testing/constraints-3.6.txt
+++ /dev/null
@@ -1,17 +0,0 @@
-# This constraints file is used to check that lower bounds
-# are correct in setup.py
-# List *all* library dependencies and extras in this file.
-# Pin the version to the lower bound.
-#
-# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
-# Then this file should have foo==1.14.0
-googleapis-common-protos==1.6.0
-protobuf==3.12.0
-google-auth==1.25.0
-requests==2.18.0
-setuptools==40.3.0
-packaging==14.3
-six==1.13.0
-grpcio==1.29.0
-grpcio-gcp==0.2.2
-grpcio-gcp==0.2.2
diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt
index e69de29b..fe671145 100644
--- a/testing/constraints-3.7.txt
+++ b/testing/constraints-3.7.txt
@@ -0,0 +1,15 @@
+# This constraints file is used to check that lower bounds
+# are correct in setup.py
+# List *all* library dependencies and extras in this file.
+# Pin the version to the lower bound.
+#
+# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
+# Then this file should have foo==1.14.0
+googleapis-common-protos==1.56.2
+protobuf==3.20.1
+google-auth==1.25.0
+requests==2.18.0
+packaging==14.3
+grpcio==1.33.2
+grpcio-status==1.33.2
+grpcio-gcp==0.2.2
diff --git a/tests/asyncio/gapic/test_config_async.py b/tests/asyncio/gapic/test_config_async.py
index 1f6ea9e2..dbb05d5e 100644
--- a/tests/asyncio/gapic/test_config_async.py
+++ b/tests/asyncio/gapic/test_config_async.py
@@ -12,6 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core import exceptions
from google.api_core.gapic_v1 import config_async
diff --git a/tests/asyncio/gapic/test_method_async.py b/tests/asyncio/gapic/test_method_async.py
index 2c6bbab9..11847da7 100644
--- a/tests/asyncio/gapic/test_method_async.py
+++ b/tests/asyncio/gapic/test_method_async.py
@@ -14,10 +14,14 @@
import datetime
-from grpc.experimental import aio
import mock
import pytest
+try:
+ from grpc import aio
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core import exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers_async
@@ -158,7 +162,8 @@ async def test_wrap_method_with_default_retry_and_timeout_using_sentinel(unused_
)
result = await wrapped_method(
- retry=gapic_v1.method_async.DEFAULT, timeout=gapic_v1.method_async.DEFAULT,
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout=gapic_v1.method_async.DEFAULT,
)
assert result == 42
diff --git a/tests/asyncio/operations_v1/test_operations_async_client.py b/tests/asyncio/operations_v1/test_operations_async_client.py
index 5473e8ae..47c3b4b4 100644
--- a/tests/asyncio/operations_v1/test_operations_async_client.py
+++ b/tests/asyncio/operations_v1/test_operations_async_client.py
@@ -12,11 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from grpc.experimental import aio
import mock
import pytest
-from google.api_core import grpc_helpers_async, operations_v1, page_iterator_async
+try:
+ from grpc import aio
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
+from google.api_core import grpc_helpers_async
+from google.api_core import operations_v1
+from google.api_core import page_iterator_async
from google.longrunning import operations_pb2
from google.protobuf import empty_pb2
diff --git a/tests/asyncio/test_grpc_helpers_async.py b/tests/asyncio/test_grpc_helpers_async.py
index a511ed46..2d0a1bcd 100644
--- a/tests/asyncio/test_grpc_helpers_async.py
+++ b/tests/asyncio/test_grpc_helpers_async.py
@@ -12,10 +12,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import grpc
-from grpc.experimental import aio
import mock
-import pytest
+import pytest # noqa: I202
+
+try:
+ import grpc
+ from grpc import aio
+except ImportError:
+ grpc = aio = None
+
+
+if grpc is None:
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core import exceptions
from google.api_core import grpc_helpers_async
@@ -33,6 +42,9 @@ def code(self):
def details(self):
return None
+ def trailing_metadata(self):
+ return None
+
@pytest.mark.asyncio
async def test_wrap_unary_errors():
@@ -270,7 +282,7 @@ def test_wrap_errors_streaming(wrap_stream_errors):
autospec=True,
return_value=(mock.sentinel.credentials, mock.sentinel.projet),
)
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.aio.secure_channel")
def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_call):
target = "example.com:443"
composite_creds = composite_creds_call.return_value
@@ -295,7 +307,7 @@ def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_c
autospec=True,
return_value=(mock.sentinel.credentials, mock.sentinel.projet),
)
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.aio.secure_channel")
def test_create_channel_implicit_with_default_host(
grpc_secure_channel, default, composite_creds_call, request, auth_metadata_plugin
):
@@ -319,7 +331,7 @@ def test_create_channel_implicit_with_default_host(
"google.auth.default",
return_value=(mock.sentinel.credentials, mock.sentinel.projet),
)
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.aio.secure_channel")
def test_create_channel_implicit_with_ssl_creds(
grpc_secure_channel, default, composite_creds_call
):
@@ -341,7 +353,7 @@ def test_create_channel_implicit_with_ssl_creds(
autospec=True,
return_value=(mock.sentinel.credentials, mock.sentinel.projet),
)
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.aio.secure_channel")
def test_create_channel_implicit_with_scopes(
grpc_secure_channel, default, composite_creds_call
):
@@ -362,7 +374,7 @@ def test_create_channel_implicit_with_scopes(
autospec=True,
return_value=(mock.sentinel.credentials, mock.sentinel.projet),
)
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.aio.secure_channel")
def test_create_channel_implicit_with_default_scopes(
grpc_secure_channel, default, composite_creds_call
):
@@ -394,7 +406,7 @@ def test_create_channel_explicit_with_duplicate_credentials():
@mock.patch("grpc.composite_channel_credentials")
@mock.patch("google.auth.credentials.with_scopes_if_required", autospec=True)
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.aio.secure_channel")
def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_creds_call):
target = "example.com:443"
composite_creds = composite_creds_call.return_value
@@ -411,7 +423,7 @@ def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_cred
@mock.patch("grpc.composite_channel_credentials")
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.aio.secure_channel")
def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_call):
target = "example.com:443"
scopes = ["1", "2"]
@@ -430,7 +442,7 @@ def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_cal
@mock.patch("grpc.composite_channel_credentials")
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.aio.secure_channel")
def test_create_channel_explicit_default_scopes(
grpc_secure_channel, composite_creds_call
):
@@ -453,7 +465,7 @@ def test_create_channel_explicit_default_scopes(
@mock.patch("grpc.composite_channel_credentials")
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.aio.secure_channel")
def test_create_channel_explicit_with_quota_project(
grpc_secure_channel, composite_creds_call
):
@@ -474,7 +486,7 @@ def test_create_channel_explicit_with_quota_project(
@mock.patch("grpc.composite_channel_credentials")
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.aio.secure_channel")
@mock.patch(
"google.auth.load_credentials_from_file",
autospec=True,
@@ -500,7 +512,7 @@ def test_create_channnel_with_credentials_file(
@mock.patch("grpc.composite_channel_credentials")
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.aio.secure_channel")
@mock.patch(
"google.auth.load_credentials_from_file",
autospec=True,
@@ -527,7 +539,7 @@ def test_create_channel_with_credentials_file_and_scopes(
@mock.patch("grpc.composite_channel_credentials")
-@mock.patch("grpc.experimental.aio.secure_channel")
+@mock.patch("grpc.aio.secure_channel")
@mock.patch(
"google.auth.load_credentials_from_file",
autospec=True,
@@ -553,11 +565,8 @@ def test_create_channel_with_credentials_file_and_default_scopes(
grpc_secure_channel.assert_called_once_with(target, composite_creds)
-@pytest.mark.skipif(
- grpc_helpers_async.HAS_GRPC_GCP, reason="grpc_gcp module not available"
-)
-@mock.patch("grpc.experimental.aio.secure_channel")
-def test_create_channel_without_grpc_gcp(grpc_secure_channel):
+@mock.patch("grpc.aio.secure_channel")
+def test_create_channel(grpc_secure_channel):
target = "example.com:443"
scopes = ["test_scope"]
diff --git a/tests/asyncio/test_operation_async.py b/tests/asyncio/test_operation_async.py
index 907cda7c..26ad7cef 100644
--- a/tests/asyncio/test_operation_async.py
+++ b/tests/asyncio/test_operation_async.py
@@ -16,6 +16,11 @@
import mock
import pytest
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core import exceptions
from google.api_core import operation_async
from google.api_core import operations_v1
diff --git a/tests/unit/gapic/test_client_info.py b/tests/unit/gapic/test_client_info.py
index 64080ffd..2ca5c404 100644
--- a/tests/unit/gapic/test_client_info.py
+++ b/tests/unit/gapic/test_client_info.py
@@ -12,6 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core.gapic_v1 import client_info
diff --git a/tests/unit/gapic/test_config.py b/tests/unit/gapic/test_config.py
index 1c15261d..5e42fde8 100644
--- a/tests/unit/gapic/test_config.py
+++ b/tests/unit/gapic/test_config.py
@@ -12,6 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core import exceptions
from google.api_core.gapic_v1 import config
diff --git a/tests/unit/gapic/test_method.py b/tests/unit/gapic/test_method.py
index e0ea57ac..9778d23a 100644
--- a/tests/unit/gapic/test_method.py
+++ b/tests/unit/gapic/test_method.py
@@ -15,6 +15,13 @@
import datetime
import mock
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core import exceptions
from google.api_core import retry
diff --git a/tests/unit/gapic/test_routing_header.py b/tests/unit/gapic/test_routing_header.py
index 77300e87..30378676 100644
--- a/tests/unit/gapic/test_routing_header.py
+++ b/tests/unit/gapic/test_routing_header.py
@@ -12,6 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core.gapic_v1 import routing_header
diff --git a/tests/unit/operations_v1/test_operations_client.py b/tests/unit/operations_v1/test_operations_client.py
index 001b8fea..187f0be3 100644
--- a/tests/unit/operations_v1/test_operations_client.py
+++ b/tests/unit/operations_v1/test_operations_client.py
@@ -12,6 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core import grpc_helpers
from google.api_core import operations_v1
from google.api_core import page_iterator
diff --git a/tests/unit/operations_v1/test_operations_rest_client.py b/tests/unit/operations_v1/test_operations_rest_client.py
new file mode 100644
index 00000000..625539e2
--- /dev/null
+++ b/tests/unit/operations_v1/test_operations_rest_client.py
@@ -0,0 +1,964 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+
+import mock
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+from requests import Response # noqa I201
+from requests.sessions import Session
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core.operations_v1 import AbstractOperationsClient
+from google.api_core.operations_v1 import pagers
+from google.api_core.operations_v1 import transports
+import google.auth
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account
+from google.protobuf import json_format # type: ignore
+from google.rpc import status_pb2 # type: ignore
+
+
+HTTP_OPTIONS = {
+ "google.longrunning.Operations.CancelOperation": [
+ {"method": "post", "uri": "/v3/{name=operations/*}:cancel", "body": "*"},
+ ],
+ "google.longrunning.Operations.DeleteOperation": [
+ {"method": "delete", "uri": "/v3/{name=operations/*}"},
+ ],
+ "google.longrunning.Operations.GetOperation": [
+ {"method": "get", "uri": "/v3/{name=operations/*}"},
+ ],
+ "google.longrunning.Operations.ListOperations": [
+ {"method": "get", "uri": "/v3/{name=operations}"},
+ ],
+}
+
+
+def client_cert_source_callback():
+ return b"cert bytes", b"key bytes"
+
+
+def _get_operations_client(http_options=HTTP_OPTIONS):
+ transport = transports.rest.OperationsRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(), http_options=http_options
+ )
+
+ return AbstractOperationsClient(transport=transport)
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+ return (
+ "foo.googleapis.com"
+ if ("localhost" in client.DEFAULT_ENDPOINT)
+ else client.DEFAULT_ENDPOINT
+ )
+
+
+def test__get_default_mtls_endpoint():
+ api_endpoint = "example.googleapis.com"
+ api_mtls_endpoint = "example.mtls.googleapis.com"
+ sandbox_endpoint = "example.sandbox.googleapis.com"
+ sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+ non_googleapi = "api.example.com"
+
+ assert AbstractOperationsClient._get_default_mtls_endpoint(None) is None
+ assert (
+ AbstractOperationsClient._get_default_mtls_endpoint(api_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ AbstractOperationsClient._get_default_mtls_endpoint(api_mtls_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ AbstractOperationsClient._get_default_mtls_endpoint(sandbox_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ AbstractOperationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ AbstractOperationsClient._get_default_mtls_endpoint(non_googleapi)
+ == non_googleapi
+ )
+
+
+@pytest.mark.parametrize("client_class", [AbstractOperationsClient])
+def test_operations_client_from_service_account_info(client_class):
+ creds = ga_credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "longrunning.googleapis.com:443"
+
+
+@pytest.mark.parametrize(
+ "transport_class,transport_name", [(transports.OperationsRestTransport, "rest")]
+)
+def test_operations_client_service_account_always_use_jwt(
+ transport_class, transport_name
+):
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport_class(credentials=creds, always_use_jwt_access=True)
+ use_jwt.assert_called_once_with(True)
+
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport_class(credentials=creds, always_use_jwt_access=False)
+ use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize("client_class", [AbstractOperationsClient])
+def test_operations_client_from_service_account_file(client_class):
+ creds = ga_credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_file"
+ ) as factory:
+ factory.return_value = creds
+ client = client_class.from_service_account_file("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ client = client_class.from_service_account_json("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "longrunning.googleapis.com:443"
+
+
+def test_operations_client_get_transport_class():
+ transport = AbstractOperationsClient.get_transport_class()
+ available_transports = [
+ transports.OperationsRestTransport,
+ ]
+ assert transport in available_transports
+
+ transport = AbstractOperationsClient.get_transport_class("rest")
+ assert transport == transports.OperationsRestTransport
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")],
+)
+@mock.patch.object(
+ AbstractOperationsClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(AbstractOperationsClient),
+)
+def test_operations_client_client_options(
+ client_class, transport_class, transport_name
+):
+ # Check that if channel is provided we won't create a new one.
+ with mock.patch.object(AbstractOperationsClient, "get_transport_class") as gtc:
+ transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
+ client = client_class(transport=transport)
+ gtc.assert_not_called()
+
+ # Check that if channel is provided via str we will create a new one.
+ with mock.patch.object(AbstractOperationsClient, "get_transport_class") as gtc:
+ client = client_class(transport=transport_name)
+ gtc.assert_called()
+
+ # Check the case api_endpoint is provided.
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "always".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (AbstractOperationsClient, transports.OperationsRestTransport, "rest", "true"),
+ (AbstractOperationsClient, transports.OperationsRestTransport, "rest", "false"),
+ ],
+)
+@mock.patch.object(
+ AbstractOperationsClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(AbstractOperationsClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_operations_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ options = client_options.ClientOptions(
+ client_cert_source=client_cert_source_callback
+ )
+
+ def fake_init(client_cert_source_for_mtls=None, **kwargs):
+ """Invoke client_cert source if provided."""
+
+ if client_cert_source_for_mtls:
+ client_cert_source_for_mtls()
+ return None
+
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.side_effect = fake_init
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
+ ):
+ with mock.patch(
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")],
+)
+def test_operations_client_client_options_scopes(
+ client_class, transport_class, transport_name
+):
+ # Check the case scopes are provided.
+ options = client_options.ClientOptions(
+ scopes=["1", "2"],
+ )
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=["1", "2"],
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")],
+)
+def test_operations_client_client_options_credentials_file(
+ client_class, transport_class, transport_name
+):
+ # Check the case credentials file is provided.
+ options = client_options.ClientOptions(credentials_file="credentials.json")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file="credentials.json",
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
+
+
+def test_list_operations_rest(
+ transport: str = "rest", request_type=operations_pb2.ListOperationsRequest
+):
+ client = _get_operations_client()
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.ListOperationsResponse(
+ next_page_token="next_page_token_value",
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.list_operations(
+ name="operations", filter_="my_filter", page_size=10, page_token="abc"
+ )
+
+ actual_args = req.call_args
+ assert actual_args.args[0] == "GET"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com:443/v3/operations"
+ )
+ assert actual_args.kwargs["params"] == [
+ ("filter", "my_filter"),
+ ("pageSize", 10),
+ ("pageToken", "abc"),
+ ]
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListOperationsPager)
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_operations_rest_failure():
+ client = _get_operations_client(http_options=None)
+
+ with mock.patch.object(Session, "request") as req:
+ response_value = Response()
+ response_value.status_code = 400
+ mock_request = mock.MagicMock()
+ mock_request.method = "GET"
+ mock_request.url = "https://longrunning.googleapis.com:443/v1/operations"
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ client.list_operations(name="operations")
+
+
+def test_list_operations_rest_pager():
+ client = AbstractOperationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # TODO(kbandes): remove this mock unless there's a good reason for it.
+ # with mock.patch.object(path_template, 'transcode') as transcode:
+ # Set the response as a series of pages
+ response = (
+ operations_pb2.ListOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ ],
+ next_page_token="abc",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[],
+ next_page_token="def",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[operations_pb2.Operation()],
+ next_page_token="ghi",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[operations_pb2.Operation(), operations_pb2.Operation()],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(json_format.MessageToJson(x) for x in response)
+ return_values = tuple(Response() for i in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode("UTF-8")
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ pager = client.list_operations(name="operations")
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, operations_pb2.Operation) for i in results)
+
+ pages = list(client.list_operations(name="operations").pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.next_page_token == token
+
+
+def test_get_operation_rest(
+ transport: str = "rest", request_type=operations_pb2.GetOperationRequest
+):
+ client = _get_operations_client()
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(
+ name="operations/sample1",
+ done=True,
+ error=status_pb2.Status(code=411),
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ response = client.get_operation("operations/sample1")
+
+ actual_args = req.call_args
+ assert actual_args.args[0] == "GET"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com:443/v3/operations/sample1"
+ )
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operations_pb2.Operation)
+ assert response.name == "operations/sample1"
+ assert response.done is True
+
+
+def test_get_operation_rest_failure():
+ client = _get_operations_client(http_options=None)
+
+ with mock.patch.object(Session, "request") as req:
+ response_value = Response()
+ response_value.status_code = 400
+ mock_request = mock.MagicMock()
+ mock_request.method = "GET"
+ mock_request.url = (
+ "https://longrunning.googleapis.com:443/v1/operations/sample1"
+ )
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ client.get_operation("operations/sample1")
+
+
+def test_delete_operation_rest(
+ transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest
+):
+ client = _get_operations_client()
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ""
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ client.delete_operation(name="operations/sample1")
+ assert req.call_count == 1
+ actual_args = req.call_args
+ assert actual_args.args[0] == "DELETE"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com:443/v3/operations/sample1"
+ )
+
+
+def test_delete_operation_rest_failure():
+ client = _get_operations_client(http_options=None)
+
+ with mock.patch.object(Session, "request") as req:
+ response_value = Response()
+ response_value.status_code = 400
+ mock_request = mock.MagicMock()
+ mock_request.method = "DELETE"
+ mock_request.url = (
+ "https://longrunning.googleapis.com:443/v1/operations/sample1"
+ )
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ client.delete_operation(name="operations/sample1")
+
+
+def test_cancel_operation_rest(transport: str = "rest"):
+ client = _get_operations_client()
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ""
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ client.cancel_operation(name="operations/sample1")
+ assert req.call_count == 1
+ actual_args = req.call_args
+ assert actual_args.args[0] == "POST"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com:443/v3/operations/sample1:cancel"
+ )
+
+
+def test_cancel_operation_rest_failure():
+ client = _get_operations_client(http_options=None)
+
+ with mock.patch.object(Session, "request") as req:
+ response_value = Response()
+ response_value.status_code = 400
+ mock_request = mock.MagicMock()
+ mock_request.method = "POST"
+ mock_request.url = (
+ "https://longrunning.googleapis.com:443/v1/operations/sample1:cancel"
+ )
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ client.cancel_operation(name="operations/sample1")
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.OperationsRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ AbstractOperationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.OperationsRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ AbstractOperationsClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide scopes and a transport instance.
+ transport = transports.OperationsRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ AbstractOperationsClient(
+ client_options={"scopes": ["1", "2"]},
+ transport=transport,
+ )
+
+
+def test_transport_instance():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.OperationsRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ client = AbstractOperationsClient(transport=transport)
+ assert client.transport is transport
+
+
+@pytest.mark.parametrize("transport_class", [transports.OperationsRestTransport])
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
+def test_operations_base_transport_error():
+ # Passing both a credentials object and credentials_file should raise an error
+ with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+ transports.OperationsTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ credentials_file="credentials.json",
+ )
+
+
+def test_operations_base_transport():
+ # Instantiate the base transport.
+ with mock.patch(
+ "google.api_core.operations_v1.transports.OperationsTransport.__init__"
+ ) as Transport:
+ Transport.return_value = None
+ transport = transports.OperationsTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Every method on the transport should just blindly
+ # raise NotImplementedError.
+ methods = (
+ "list_operations",
+ "get_operation",
+ "delete_operation",
+ "cancel_operation",
+ )
+ for method in methods:
+ with pytest.raises(NotImplementedError):
+ getattr(transport, method)(request=object())
+
+ with pytest.raises(NotImplementedError):
+ transport.close()
+
+
+def test_operations_base_transport_with_credentials_file():
+ # Instantiate the base transport with a credentials file
+ with mock.patch.object(
+ google.auth, "load_credentials_from_file", autospec=True
+ ) as load_creds, mock.patch(
+ "google.api_core.operations_v1.transports.OperationsTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transports.OperationsTransport(
+ credentials_file="credentials.json",
+ quota_project_id="octopus",
+ )
+ load_creds.assert_called_once_with(
+ "credentials.json",
+ scopes=None,
+ default_scopes=(),
+ quota_project_id="octopus",
+ )
+
+
+def test_operations_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
+ "google.api_core.operations_v1.transports.OperationsTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transports.OperationsTransport()
+ adc.assert_called_once()
+
+
+def test_operations_auth_adc():
+ # If no credentials are provided, we should use ADC credentials.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ AbstractOperationsClient()
+ adc.assert_called_once_with(
+ scopes=None,
+ default_scopes=(),
+ quota_project_id=None,
+ )
+
+
+def test_operations_http_transport_client_cert_source_for_mtls():
+ cred = ga_credentials.AnonymousCredentials()
+ with mock.patch(
+ "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+ ) as mock_configure_mtls_channel:
+ transports.OperationsRestTransport(
+ credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
+ )
+ mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+def test_operations_host_no_port():
+ client = AbstractOperationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="longrunning.googleapis.com"
+ ),
+ )
+ assert client.transport._host == "longrunning.googleapis.com:443"
+
+
+def test_operations_host_with_port():
+ client = AbstractOperationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="longrunning.googleapis.com:8000"
+ ),
+ )
+ assert client.transport._host == "longrunning.googleapis.com:8000"
+
+
+def test_common_billing_account_path():
+ billing_account = "squid"
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+ actual = AbstractOperationsClient.common_billing_account_path(billing_account)
+ assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "clam",
+ }
+ path = AbstractOperationsClient.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = AbstractOperationsClient.parse_common_billing_account_path(path)
+ assert expected == actual
+
+
+def test_common_folder_path():
+ folder = "whelk"
+ expected = "folders/{folder}".format(
+ folder=folder,
+ )
+ actual = AbstractOperationsClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "octopus",
+ }
+ path = AbstractOperationsClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = AbstractOperationsClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "oyster"
+ expected = "organizations/{organization}".format(
+ organization=organization,
+ )
+ actual = AbstractOperationsClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "nudibranch",
+ }
+ path = AbstractOperationsClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = AbstractOperationsClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "cuttlefish"
+ expected = "projects/{project}".format(
+ project=project,
+ )
+ actual = AbstractOperationsClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "mussel",
+ }
+ path = AbstractOperationsClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = AbstractOperationsClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "winkle"
+ location = "nautilus"
+ expected = "projects/{project}/locations/{location}".format(
+ project=project,
+ location=location,
+ )
+ actual = AbstractOperationsClient.common_location_path(project, location)
+ assert expected == actual
+
+
+def test_parse_common_location_path():
+ expected = {
+ "project": "scallop",
+ "location": "abalone",
+ }
+ path = AbstractOperationsClient.common_location_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = AbstractOperationsClient.parse_common_location_path(path)
+ assert expected == actual
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.OperationsTransport, "_prep_wrapped_messages"
+ ) as prep:
+ AbstractOperationsClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.OperationsTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = AbstractOperationsClient.get_transport_class()
+ transport_class(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
diff --git a/tests/unit/test_bidi.py b/tests/unit/test_bidi.py
index 602d640f..7fb16209 100644
--- a/tests/unit/test_bidi.py
+++ b/tests/unit/test_bidi.py
@@ -14,12 +14,16 @@
import datetime
import logging
+import queue
import threading
-import grpc
import mock
import pytest
-from six.moves import queue
+
+try:
+ import grpc
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
from google.api_core import bidi
from google.api_core import exceptions
@@ -221,18 +225,12 @@ def cancel_side_effect():
class ClosedCall(object):
- # NOTE: This is needed because defining `.next` on an **instance**
- # rather than the **class** will not be iterable in Python 2.
- # This is problematic since a `Mock` just sets members.
-
def __init__(self, exception):
self.exception = exception
def __next__(self):
raise self.exception
- next = __next__ # Python 2
-
def is_active(self):
return False
@@ -354,8 +352,6 @@ def __next__(self):
raise item
return item
- next = __next__ # Python 2
-
def is_active(self):
return self._is_active
@@ -844,7 +840,7 @@ def test_consumer_unexpected_error(self, caplog):
# Wait for the consumer's thread to exit.
while consumer.is_active:
- pass
+ pass # pragma: NO COVER (race condition)
on_response.assert_not_called()
bidi_rpc.recv.assert_called_once()
diff --git a/tests/unit/test_client_info.py b/tests/unit/test_client_info.py
index f2274ec2..f5eebfbe 100644
--- a/tests/unit/test_client_info.py
+++ b/tests/unit/test_client_info.py
@@ -13,6 +13,11 @@
# limitations under the License.
+try:
+ import grpc
+except ImportError:
+ grpc = None
+
from google.api_core import client_info
@@ -20,7 +25,12 @@ def test_constructor_defaults():
info = client_info.ClientInfo()
assert info.python_version is not None
- assert info.grpc_version is not None
+
+ if grpc is not None:
+ assert info.grpc_version is not None
+ else:
+ assert info.grpc_version is None
+
assert info.api_core_version is not None
assert info.gapic_version is None
assert info.client_library_version is None
diff --git a/tests/unit/test_client_options.py b/tests/unit/test_client_options.py
index 40edcc19..d56a1b3a 100644
--- a/tests/unit/test_client_options.py
+++ b/tests/unit/test_client_options.py
@@ -100,7 +100,8 @@ def test_constructor_with_api_key():
def test_constructor_with_both_api_key_and_credentials_file():
with pytest.raises(ValueError):
client_options.ClientOptions(
- api_key="api-key", credentials_file="path/to/credentials.json",
+ api_key="api-key",
+ credentials_file="path/to/credentials.json",
)
diff --git a/tests/unit/test_datetime_helpers.py b/tests/unit/test_datetime_helpers.py
index 4ddcf361..5f5470a6 100644
--- a/tests/unit/test_datetime_helpers.py
+++ b/tests/unit/test_datetime_helpers.py
@@ -16,7 +16,6 @@
import datetime
import pytest
-import pytz
from google.api_core import datetime_helpers
from google.protobuf import timestamp_pb2
@@ -31,7 +30,7 @@ def test_utcnow():
def test_to_milliseconds():
- dt = datetime.datetime(1970, 1, 1, 0, 0, 1, tzinfo=pytz.utc)
+ dt = datetime.datetime(1970, 1, 1, 0, 0, 1, tzinfo=datetime.timezone.utc)
assert datetime_helpers.to_milliseconds(dt) == 1000
@@ -42,7 +41,7 @@ def test_to_microseconds():
def test_to_microseconds_non_utc():
- zone = pytz.FixedOffset(-1)
+ zone = datetime.timezone(datetime.timedelta(minutes=-1))
dt = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=zone)
assert datetime_helpers.to_microseconds(dt) == ONE_MINUTE_IN_MICROSECONDS
@@ -56,7 +55,7 @@ def test_to_microseconds_naive():
def test_from_microseconds():
five_mins_from_epoch_in_microseconds = 5 * ONE_MINUTE_IN_MICROSECONDS
five_mins_from_epoch_datetime = datetime.datetime(
- 1970, 1, 1, 0, 5, 0, tzinfo=pytz.utc
+ 1970, 1, 1, 0, 5, 0, tzinfo=datetime.timezone.utc
)
result = datetime_helpers.from_microseconds(five_mins_from_epoch_in_microseconds)
@@ -78,28 +77,28 @@ def test_from_iso8601_time():
def test_from_rfc3339():
value = "2009-12-17T12:44:32.123456Z"
assert datetime_helpers.from_rfc3339(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, 123456, pytz.utc
+ 2009, 12, 17, 12, 44, 32, 123456, datetime.timezone.utc
)
def test_from_rfc3339_nanos():
value = "2009-12-17T12:44:32.123456Z"
assert datetime_helpers.from_rfc3339_nanos(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, 123456, pytz.utc
+ 2009, 12, 17, 12, 44, 32, 123456, datetime.timezone.utc
)
def test_from_rfc3339_without_nanos():
value = "2009-12-17T12:44:32Z"
assert datetime_helpers.from_rfc3339(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, 0, pytz.utc
+ 2009, 12, 17, 12, 44, 32, 0, datetime.timezone.utc
)
def test_from_rfc3339_nanos_without_nanos():
value = "2009-12-17T12:44:32Z"
assert datetime_helpers.from_rfc3339_nanos(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, 0, pytz.utc
+ 2009, 12, 17, 12, 44, 32, 0, datetime.timezone.utc
)
@@ -119,7 +118,7 @@ def test_from_rfc3339_nanos_without_nanos():
def test_from_rfc3339_with_truncated_nanos(truncated, micros):
value = "2009-12-17T12:44:32.{}Z".format(truncated)
assert datetime_helpers.from_rfc3339(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, micros, pytz.utc
+ 2009, 12, 17, 12, 44, 32, micros, datetime.timezone.utc
)
@@ -148,7 +147,7 @@ def test_from_rfc3339_nanos_is_deprecated():
def test_from_rfc3339_nanos_with_truncated_nanos(truncated, micros):
value = "2009-12-17T12:44:32.{}Z".format(truncated)
assert datetime_helpers.from_rfc3339_nanos(value) == datetime.datetime(
- 2009, 12, 17, 12, 44, 32, micros, pytz.utc
+ 2009, 12, 17, 12, 44, 32, micros, datetime.timezone.utc
)
@@ -171,20 +170,20 @@ def test_to_rfc3339():
def test_to_rfc3339_with_utc():
- value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=pytz.utc)
+ value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=datetime.timezone.utc)
expected = "2016-04-05T13:30:00.000000Z"
assert datetime_helpers.to_rfc3339(value, ignore_zone=False) == expected
def test_to_rfc3339_with_non_utc():
- zone = pytz.FixedOffset(-60)
+ zone = datetime.timezone(datetime.timedelta(minutes=-60))
value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone)
expected = "2016-04-05T14:30:00.000000Z"
assert datetime_helpers.to_rfc3339(value, ignore_zone=False) == expected
def test_to_rfc3339_with_non_utc_ignore_zone():
- zone = pytz.FixedOffset(-60)
+ zone = datetime.timezone(datetime.timedelta(minutes=-60))
value = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=zone)
expected = "2016-04-05T13:30:00.000000Z"
assert datetime_helpers.to_rfc3339(value, ignore_zone=True) == expected
@@ -283,7 +282,7 @@ def test_from_rfc3339_w_invalid():
def test_from_rfc3339_wo_fraction():
timestamp = "2016-12-20T21:13:47Z"
expected = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, tzinfo=pytz.UTC
+ 2016, 12, 20, 21, 13, 47, tzinfo=datetime.timezone.utc
)
stamp = datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(timestamp)
assert stamp == expected
@@ -292,7 +291,7 @@ def test_from_rfc3339_wo_fraction():
def test_from_rfc3339_w_partial_precision():
timestamp = "2016-12-20T21:13:47.1Z"
expected = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, microsecond=100000, tzinfo=pytz.UTC
+ 2016, 12, 20, 21, 13, 47, microsecond=100000, tzinfo=datetime.timezone.utc
)
stamp = datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(timestamp)
assert stamp == expected
@@ -301,7 +300,7 @@ def test_from_rfc3339_w_partial_precision():
def test_from_rfc3339_w_full_precision():
timestamp = "2016-12-20T21:13:47.123456789Z"
expected = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=pytz.UTC
+ 2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=datetime.timezone.utc
)
stamp = datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(timestamp)
assert stamp == expected
@@ -332,7 +331,9 @@ def test_timestamp_pb_wo_nanos_naive():
stamp = datetime_helpers.DatetimeWithNanoseconds(
2016, 12, 20, 21, 13, 47, 123456
)
- delta = stamp.replace(tzinfo=pytz.UTC) - datetime_helpers._UTC_EPOCH
+ delta = (
+ stamp.replace(tzinfo=datetime.timezone.utc) - datetime_helpers._UTC_EPOCH
+ )
seconds = int(delta.total_seconds())
nanos = 123456000
timestamp = timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos)
@@ -341,7 +342,7 @@ def test_timestamp_pb_wo_nanos_naive():
@staticmethod
def test_timestamp_pb_w_nanos():
stamp = datetime_helpers.DatetimeWithNanoseconds(
- 2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=pytz.UTC
+ 2016, 12, 20, 21, 13, 47, nanosecond=123456789, tzinfo=datetime.timezone.utc
)
delta = stamp - datetime_helpers._UTC_EPOCH
timestamp = timestamp_pb2.Timestamp(
@@ -351,7 +352,9 @@ def test_timestamp_pb_w_nanos():
@staticmethod
def test_from_timestamp_pb_wo_nanos():
- when = datetime.datetime(2016, 12, 20, 21, 13, 47, 123456, tzinfo=pytz.UTC)
+ when = datetime.datetime(
+ 2016, 12, 20, 21, 13, 47, 123456, tzinfo=datetime.timezone.utc
+ )
delta = when - datetime_helpers._UTC_EPOCH
seconds = int(delta.total_seconds())
timestamp = timestamp_pb2.Timestamp(seconds=seconds)
@@ -361,11 +364,13 @@ def test_from_timestamp_pb_wo_nanos():
assert _to_seconds(when) == _to_seconds(stamp)
assert stamp.microsecond == 0
assert stamp.nanosecond == 0
- assert stamp.tzinfo == pytz.UTC
+ assert stamp.tzinfo == datetime.timezone.utc
@staticmethod
def test_from_timestamp_pb_w_nanos():
- when = datetime.datetime(2016, 12, 20, 21, 13, 47, 123456, tzinfo=pytz.UTC)
+ when = datetime.datetime(
+ 2016, 12, 20, 21, 13, 47, 123456, tzinfo=datetime.timezone.utc
+ )
delta = when - datetime_helpers._UTC_EPOCH
seconds = int(delta.total_seconds())
timestamp = timestamp_pb2.Timestamp(seconds=seconds, nanos=123456789)
@@ -375,7 +380,7 @@ def test_from_timestamp_pb_w_nanos():
assert _to_seconds(when) == _to_seconds(stamp)
assert stamp.microsecond == 123456
assert stamp.nanosecond == 123456789
- assert stamp.tzinfo == pytz.UTC
+ assert stamp.tzinfo == datetime.timezone.utc
def _to_seconds(value):
@@ -387,5 +392,5 @@ def _to_seconds(value):
Returns:
int: Microseconds since the unix epoch.
"""
- assert value.tzinfo is pytz.UTC
+ assert value.tzinfo is datetime.timezone.utc
return calendar.timegm(value.timetuple())
diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py
index fb29015f..4169ad44 100644
--- a/tests/unit/test_exceptions.py
+++ b/tests/unit/test_exceptions.py
@@ -12,14 +12,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import http.client
import json
-import grpc
import mock
+import pytest
import requests
-from six.moves import http_client
+
+try:
+ import grpc
+ from grpc_status import rpc_status
+except ImportError:
+ grpc = rpc_status = None
from google.api_core import exceptions
+from google.protobuf import any_pb2, json_format
+from google.rpc import error_details_pb2, status_pb2
def test_create_google_cloud_error():
@@ -33,11 +41,8 @@ def test_create_google_cloud_error():
def test_create_google_cloud_error_with_args():
error = {
- "domain": "global",
- "location": "test",
- "locationType": "testing",
+ "code": 600,
"message": "Testing",
- "reason": "test",
}
response = mock.sentinel.response
exception = exceptions.GoogleAPICallError("Testing", [error], response=response)
@@ -50,8 +55,8 @@ def test_create_google_cloud_error_with_args():
def test_from_http_status():
message = "message"
- exception = exceptions.from_http_status(http_client.NOT_FOUND, message)
- assert exception.code == http_client.NOT_FOUND
+ exception = exceptions.from_http_status(http.client.NOT_FOUND, message)
+ assert exception.code == http.client.NOT_FOUND
assert exception.message == message
assert exception.errors == []
@@ -61,11 +66,11 @@ def test_from_http_status_with_errors_and_response():
errors = ["1", "2"]
response = mock.sentinel.response
exception = exceptions.from_http_status(
- http_client.NOT_FOUND, message, errors=errors, response=response
+ http.client.NOT_FOUND, message, errors=errors, response=response
)
assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http_client.NOT_FOUND
+ assert exception.code == http.client.NOT_FOUND
assert exception.message == message
assert exception.errors == errors
assert exception.response == response
@@ -82,7 +87,7 @@ def test_from_http_status_unknown_code():
def make_response(content):
response = requests.Response()
response._content = content
- response.status_code = http_client.NOT_FOUND
+ response.status_code = http.client.NOT_FOUND
response.request = requests.Request(
method="POST", url="https://example.com"
).prepare()
@@ -95,18 +100,19 @@ def test_from_http_response_no_content():
exception = exceptions.from_http_response(response)
assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http_client.NOT_FOUND
+ assert exception.code == http.client.NOT_FOUND
assert exception.message == "POST https://example.com/: unknown error"
assert exception.response == response
def test_from_http_response_text_content():
response = make_response(b"message")
+ response.encoding = "UTF8" # suppress charset_normalizer warning
exception = exceptions.from_http_response(response)
assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http_client.NOT_FOUND
+ assert exception.code == http.client.NOT_FOUND
assert exception.message == "POST https://example.com/: message"
@@ -120,7 +126,7 @@ def test_from_http_response_json_content():
exception = exceptions.from_http_response(response)
assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http_client.NOT_FOUND
+ assert exception.code == http.client.NOT_FOUND
assert exception.message == "POST https://example.com/: json message"
assert exception.errors == ["1", "2"]
@@ -131,47 +137,50 @@ def test_from_http_response_bad_json_content():
exception = exceptions.from_http_response(response)
assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http_client.NOT_FOUND
+ assert exception.code == http.client.NOT_FOUND
assert exception.message == "POST https://example.com/: unknown error"
def test_from_http_response_json_unicode_content():
response = make_response(
json.dumps(
- {"error": {"message": u"\u2019 message", "errors": ["1", "2"]}}
+ {"error": {"message": "\u2019 message", "errors": ["1", "2"]}}
).encode("utf-8")
)
exception = exceptions.from_http_response(response)
assert isinstance(exception, exceptions.NotFound)
- assert exception.code == http_client.NOT_FOUND
- assert exception.message == u"POST https://example.com/: \u2019 message"
+ assert exception.code == http.client.NOT_FOUND
+ assert exception.message == "POST https://example.com/: \u2019 message"
assert exception.errors == ["1", "2"]
+@pytest.mark.skipif(grpc is None, reason="No grpc")
def test_from_grpc_status():
message = "message"
exception = exceptions.from_grpc_status(grpc.StatusCode.OUT_OF_RANGE, message)
assert isinstance(exception, exceptions.BadRequest)
assert isinstance(exception, exceptions.OutOfRange)
- assert exception.code == http_client.BAD_REQUEST
+ assert exception.code == http.client.BAD_REQUEST
assert exception.grpc_status_code == grpc.StatusCode.OUT_OF_RANGE
assert exception.message == message
assert exception.errors == []
+@pytest.mark.skipif(grpc is None, reason="No grpc")
def test_from_grpc_status_as_int():
message = "message"
exception = exceptions.from_grpc_status(11, message)
assert isinstance(exception, exceptions.BadRequest)
assert isinstance(exception, exceptions.OutOfRange)
- assert exception.code == http_client.BAD_REQUEST
+ assert exception.code == http.client.BAD_REQUEST
assert exception.grpc_status_code == grpc.StatusCode.OUT_OF_RANGE
assert exception.message == message
assert exception.errors == []
+@pytest.mark.skipif(grpc is None, reason="No grpc")
def test_from_grpc_status_with_errors_and_response():
message = "message"
response = mock.sentinel.response
@@ -186,6 +195,7 @@ def test_from_grpc_status_with_errors_and_response():
assert exception.response == response
+@pytest.mark.skipif(grpc is None, reason="No grpc")
def test_from_grpc_status_unknown_code():
message = "message"
exception = exceptions.from_grpc_status(grpc.StatusCode.OK, message)
@@ -193,6 +203,7 @@ def test_from_grpc_status_unknown_code():
assert exception.message == message
+@pytest.mark.skipif(grpc is None, reason="No grpc")
def test_from_grpc_error():
message = "message"
error = mock.create_autospec(grpc.Call, instance=True)
@@ -203,13 +214,14 @@ def test_from_grpc_error():
assert isinstance(exception, exceptions.BadRequest)
assert isinstance(exception, exceptions.InvalidArgument)
- assert exception.code == http_client.BAD_REQUEST
+ assert exception.code == http.client.BAD_REQUEST
assert exception.grpc_status_code == grpc.StatusCode.INVALID_ARGUMENT
assert exception.message == message
assert exception.errors == [error]
assert exception.response == error
+@pytest.mark.skipif(grpc is None, reason="No grpc")
def test_from_grpc_error_non_call():
message = "message"
error = mock.create_autospec(grpc.RpcError, instance=True)
@@ -223,3 +235,161 @@ def test_from_grpc_error_non_call():
assert exception.message == message
assert exception.errors == [error]
assert exception.response == error
+
+
+@pytest.mark.skipif(grpc is None, reason="No grpc")
+def test_from_grpc_error_bare_call():
+ message = "Testing"
+
+ class TestingError(grpc.Call, grpc.RpcError):
+ def __init__(self, exception):
+ self.exception = exception
+
+ def code(self):
+ return self.exception.grpc_status_code
+
+ def details(self):
+ return message
+
+ nested_message = "message"
+ error = TestingError(exceptions.GoogleAPICallError(nested_message))
+
+ exception = exceptions.from_grpc_error(error)
+
+ assert isinstance(exception, exceptions.GoogleAPICallError)
+ assert exception.code is None
+ assert exception.grpc_status_code is None
+ assert exception.message == message
+ assert exception.errors == [error]
+ assert exception.response == error
+ assert exception.details == []
+
+
+def create_bad_request_details():
+ bad_request_details = error_details_pb2.BadRequest()
+ field_violation = bad_request_details.field_violations.add()
+ field_violation.field = "document.content"
+ field_violation.description = "Must have some text content to annotate."
+ status_detail = any_pb2.Any()
+ status_detail.Pack(bad_request_details)
+ return status_detail
+
+
+def create_error_info_details():
+ info = error_details_pb2.ErrorInfo(
+ reason="SERVICE_DISABLED",
+ domain="googleapis.com",
+ metadata={
+ "consumer": "projects/455411330361",
+ "service": "translate.googleapis.com",
+ },
+ )
+ status_detail = any_pb2.Any()
+ status_detail.Pack(info)
+ return status_detail
+
+
+def test_error_details_from_rest_response():
+ bad_request_detail = create_bad_request_details()
+ error_info_detail = create_error_info_details()
+ status = status_pb2.Status()
+ status.code = 3
+ status.message = (
+ "3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set."
+ )
+ status.details.append(bad_request_detail)
+ status.details.append(error_info_detail)
+
+ # See JSON schema in https://cloud.google.com/apis/design/errors#http_mapping
+ http_response = make_response(
+ json.dumps(
+ {"error": json.loads(json_format.MessageToJson(status, sort_keys=True))}
+ ).encode("utf-8")
+ )
+ exception = exceptions.from_http_response(http_response)
+ want_error_details = [
+ json.loads(json_format.MessageToJson(bad_request_detail)),
+ json.loads(json_format.MessageToJson(error_info_detail)),
+ ]
+ assert want_error_details == exception.details
+
+ # 404 POST comes from make_response.
+ assert str(exception) == (
+ "404 POST https://example.com/: 3 INVALID_ARGUMENT:"
+ " One of content, or gcs_content_uri must be set."
+ " [{'@type': 'type.googleapis.com/google.rpc.BadRequest',"
+ " 'fieldViolations': [{'description': 'Must have some text content to annotate.',"
+ " 'field': 'document.content'}]},"
+ " {'@type': 'type.googleapis.com/google.rpc.ErrorInfo',"
+ " 'domain': 'googleapis.com',"
+ " 'metadata': {'consumer': 'projects/455411330361',"
+ " 'service': 'translate.googleapis.com'},"
+ " 'reason': 'SERVICE_DISABLED'}]"
+ )
+
+
+def test_error_details_from_v1_rest_response():
+ response = make_response(
+ json.dumps(
+ {"error": {"message": "\u2019 message", "errors": ["1", "2"]}}
+ ).encode("utf-8")
+ )
+ exception = exceptions.from_http_response(response)
+ assert exception.details == []
+ assert (
+ exception.reason is None
+ and exception.domain is None
+ and exception.metadata is None
+ )
+
+
+@pytest.mark.skipif(grpc is None, reason="gRPC not importable")
+def test_error_details_from_grpc_response():
+ status = rpc_status.status_pb2.Status()
+ status.code = 3
+ status.message = (
+ "3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set."
+ )
+ status_br_detail = create_bad_request_details()
+ status_ei_detail = create_error_info_details()
+ status.details.append(status_br_detail)
+ status.details.append(status_ei_detail)
+
+ # Actually, the error doesn't matter as long as it's grpc.Call,
+ # because from_call is mocked.
+ error = mock.create_autospec(grpc.Call, instance=True)
+ with mock.patch("grpc_status.rpc_status.from_call") as m:
+ m.return_value = status
+ exception = exceptions.from_grpc_error(error)
+
+ bad_request_detail = error_details_pb2.BadRequest()
+ error_info_detail = error_details_pb2.ErrorInfo()
+ status_br_detail.Unpack(bad_request_detail)
+ status_ei_detail.Unpack(error_info_detail)
+ assert exception.details == [bad_request_detail, error_info_detail]
+ assert exception.reason == error_info_detail.reason
+ assert exception.domain == error_info_detail.domain
+ assert exception.metadata == error_info_detail.metadata
+
+
+@pytest.mark.skipif(grpc is None, reason="gRPC not importable")
+def test_error_details_from_grpc_response_unknown_error():
+ status_detail = any_pb2.Any()
+
+ status = rpc_status.status_pb2.Status()
+ status.code = 3
+ status.message = (
+ "3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set."
+ )
+ status.details.append(status_detail)
+
+ error = mock.create_autospec(grpc.Call, instance=True)
+ with mock.patch("grpc_status.rpc_status.from_call") as m:
+ m.return_value = status
+ exception = exceptions.from_grpc_error(error)
+ assert exception.details == [status_detail]
+ assert (
+ exception.reason is None
+ and exception.domain is None
+ and exception.metadata is None
+ )
diff --git a/tests/unit/test_extended_operation.py b/tests/unit/test_extended_operation.py
new file mode 100644
index 00000000..c551bfa8
--- /dev/null
+++ b/tests/unit/test_extended_operation.py
@@ -0,0 +1,205 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import dataclasses
+import enum
+import typing
+
+import mock
+import pytest
+
+from google.api_core import exceptions
+from google.api_core import extended_operation
+from google.api_core import retry
+
+TEST_OPERATION_NAME = "test/extended_operation"
+
+
+@dataclasses.dataclass(frozen=True)
+class CustomOperation:
+ class StatusCode(enum.Enum):
+ UNKNOWN = 0
+ DONE = 1
+ PENDING = 2
+
+ name: str
+ status: StatusCode
+ error_code: typing.Optional[int] = None
+ error_message: typing.Optional[str] = None
+ armor_class: typing.Optional[int] = None
+
+ # Note: in generated clients, this property must be generated for each
+ # extended operation message type.
+ # The status may be an enum, a string, or a bool. If it's a string or enum,
+ # its text is compared to the string "DONE".
+ @property
+ def done(self):
+ return self.status.name == "DONE"
+
+
+def make_extended_operation(responses=None):
+ client_operations_responses = responses or [
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
+ )
+ ]
+
+ refresh = mock.Mock(spec=["__call__"], side_effect=client_operations_responses)
+ refresh.responses = client_operations_responses
+ cancel = mock.Mock(spec=["__call__"])
+ extended_operation_future = extended_operation.ExtendedOperation.make(
+ refresh,
+ cancel,
+ client_operations_responses[0],
+ )
+
+ return extended_operation_future, refresh, cancel
+
+
+def test_constructor():
+ ex_op, refresh, _ = make_extended_operation()
+ assert ex_op._extended_operation == refresh.responses[0]
+ assert not ex_op.cancelled()
+ assert not ex_op.done()
+ assert ex_op.name == TEST_OPERATION_NAME
+ assert ex_op.status == CustomOperation.StatusCode.PENDING
+ assert ex_op.error_code is None
+ assert ex_op.error_message is None
+
+
+def test_done():
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
+ ),
+ # Second response indicates that the operation has finished.
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.DONE
+ ),
+ # Bumper to make sure we stop polling on DONE.
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.DONE,
+ error_message="Gone too far!",
+ ),
+ ]
+ ex_op, refresh, _ = make_extended_operation(responses)
+
+ # Start out not done.
+ assert not ex_op.done()
+ assert refresh.call_count == 1
+
+ # Refresh brings us to the done state.
+ assert ex_op.done()
+ assert refresh.call_count == 2
+ assert not ex_op.error_message
+
+ # Make sure that subsequent checks are no-ops.
+ assert ex_op.done()
+ assert refresh.call_count == 2
+ assert not ex_op.error_message
+
+
+def test_cancellation():
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
+ ),
+ # Second response indicates that the operation was cancelled.
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.DONE
+ ),
+ ]
+ ex_op, _, cancel = make_extended_operation(responses)
+
+ assert not ex_op.cancelled()
+
+ assert ex_op.cancel()
+ assert ex_op.cancelled()
+ cancel.assert_called_once_with()
+
+ # Cancelling twice should have no effect.
+ assert not ex_op.cancel()
+ cancel.assert_called_once_with()
+
+
+def test_done_w_retry():
+ # Not sure what's going on here with the coverage, so just ignore it.
+ test_retry = retry.Retry(predicate=lambda x: True) # pragma: NO COVER
+
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
+ ),
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.DONE
+ ),
+ ]
+
+ ex_op, refresh, _ = make_extended_operation(responses)
+
+ ex_op.done(retry=test_retry)
+
+ refresh.assert_called_once_with(retry=test_retry)
+
+
+def test_error():
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.DONE,
+ error_code=400,
+ error_message="Bad request",
+ ),
+ ]
+
+ ex_op, _, _ = make_extended_operation(responses)
+
+ # Defaults to CallError when grpc is not installed
+ with pytest.raises(exceptions.BadRequest):
+ ex_op.result()
+
+ # Inconsistent result
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.DONE,
+ error_code=2112,
+ ),
+ ]
+
+ ex_op, _, _ = make_extended_operation(responses)
+
+ with pytest.raises(exceptions.GoogleAPICallError):
+ ex_op.result()
+
+
+def test_pass_through():
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.PENDING,
+ armor_class=10,
+ ),
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.DONE,
+ armor_class=20,
+ ),
+ ]
+ ex_op, _, _ = make_extended_operation(responses)
+
+ assert ex_op.armor_class == 10
+ ex_op.result()
+ assert ex_op.armor_class == 20
diff --git a/tests/unit/test_grpc_helpers.py b/tests/unit/test_grpc_helpers.py
index 12bf1849..8b9fd9f1 100644
--- a/tests/unit/test_grpc_helpers.py
+++ b/tests/unit/test_grpc_helpers.py
@@ -12,10 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import grpc
import mock
import pytest
+try:
+ import grpc
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
+
from google.api_core import exceptions
from google.api_core import grpc_helpers
import google.auth.credentials
@@ -52,6 +56,9 @@ def code(self):
def details(self):
return None
+ def trailing_metadata(self):
+ return None
+
def test_wrap_unary_errors():
grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
@@ -66,6 +73,128 @@ def test_wrap_unary_errors():
assert exc_info.value.response == grpc_error
+class Test_StreamingResponseIterator:
+ @staticmethod
+ def _make_wrapped(*items):
+ return iter(items)
+
+ @staticmethod
+ def _make_one(wrapped, **kw):
+ return grpc_helpers._StreamingResponseIterator(wrapped, **kw)
+
+ def test_ctor_defaults(self):
+ wrapped = self._make_wrapped("a", "b", "c")
+ iterator = self._make_one(wrapped)
+ assert iterator._stored_first_result == "a"
+ assert list(wrapped) == ["b", "c"]
+
+ def test_ctor_explicit(self):
+ wrapped = self._make_wrapped("a", "b", "c")
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+ assert getattr(iterator, "_stored_first_result", self) is self
+ assert list(wrapped) == ["a", "b", "c"]
+
+ def test_ctor_w_rpc_error_on_prefetch(self):
+ wrapped = mock.MagicMock()
+ wrapped.__next__.side_effect = grpc.RpcError()
+
+ with pytest.raises(grpc.RpcError):
+ self._make_one(wrapped)
+
+ def test___iter__(self):
+ wrapped = self._make_wrapped("a", "b", "c")
+ iterator = self._make_one(wrapped)
+ assert iter(iterator) is iterator
+
+ def test___next___w_cached_first_result(self):
+ wrapped = self._make_wrapped("a", "b", "c")
+ iterator = self._make_one(wrapped)
+ assert next(iterator) == "a"
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+ assert next(iterator) == "b"
+ assert next(iterator) == "c"
+
+ def test___next___wo_cached_first_result(self):
+ wrapped = self._make_wrapped("a", "b", "c")
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+ assert next(iterator) == "a"
+ assert next(iterator) == "b"
+ assert next(iterator) == "c"
+
+ def test___next___w_rpc_error(self):
+ wrapped = mock.MagicMock()
+ wrapped.__next__.side_effect = grpc.RpcError()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ with pytest.raises(exceptions.GoogleAPICallError):
+ next(iterator)
+
+ def test_add_callback(self):
+ wrapped = mock.MagicMock()
+ callback = mock.Mock(spec={})
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.add_callback(callback) is wrapped.add_callback.return_value
+
+ wrapped.add_callback.assert_called_once_with(callback)
+
+ def test_cancel(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.cancel() is wrapped.cancel.return_value
+
+ wrapped.cancel.assert_called_once_with()
+
+ def test_code(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.code() is wrapped.code.return_value
+
+ wrapped.code.assert_called_once_with()
+
+ def test_details(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.details() is wrapped.details.return_value
+
+ wrapped.details.assert_called_once_with()
+
+ def test_initial_metadata(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.initial_metadata() is wrapped.initial_metadata.return_value
+
+ wrapped.initial_metadata.assert_called_once_with()
+
+ def test_is_active(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.is_active() is wrapped.is_active.return_value
+
+ wrapped.is_active.assert_called_once_with()
+
+ def test_time_remaining(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.time_remaining() is wrapped.time_remaining.return_value
+
+ wrapped.time_remaining.assert_called_once_with()
+
+ def test_trailing_metadata(self):
+ wrapped = mock.MagicMock()
+ iterator = self._make_one(wrapped, prefetch_first_result=False)
+
+ assert iterator.trailing_metadata() is wrapped.trailing_metadata.return_value
+
+ wrapped.trailing_metadata.assert_called_once_with()
+
+
def test_wrap_stream_okay():
expected_responses = [1, 2, 3]
callable_ = mock.Mock(spec=["__call__"], return_value=iter(expected_responses))
@@ -297,6 +426,7 @@ def test_create_channel_implicit_with_ssl_creds(
composite_creds_call.assert_called_once_with(ssl_creds, mock.ANY)
composite_creds = composite_creds_call.return_value
+
if grpc_helpers.HAS_GRPC_GCP:
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
@@ -378,6 +508,7 @@ def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_cred
)
assert channel is grpc_secure_channel.return_value
+
if grpc_helpers.HAS_GRPC_GCP:
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
@@ -401,6 +532,7 @@ def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_cal
credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
assert channel is grpc_secure_channel.return_value
+
if grpc_helpers.HAS_GRPC_GCP:
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
@@ -428,6 +560,7 @@ def test_create_channel_explicit_default_scopes(
)
assert channel is grpc_secure_channel.return_value
+
if grpc_helpers.HAS_GRPC_GCP:
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
@@ -453,6 +586,7 @@ def test_create_channel_explicit_with_quota_project(
credentials.with_quota_project.assert_called_once_with("project-foo")
assert channel is grpc_secure_channel.return_value
+
if grpc_helpers.HAS_GRPC_GCP:
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
@@ -481,6 +615,7 @@ def test_create_channel_with_credentials_file(
)
assert channel is grpc_secure_channel.return_value
+
if grpc_helpers.HAS_GRPC_GCP:
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
@@ -512,6 +647,7 @@ def test_create_channel_with_credentials_file_and_scopes(
)
assert channel is grpc_secure_channel.return_value
+
if grpc_helpers.HAS_GRPC_GCP:
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
@@ -543,6 +679,7 @@ def test_create_channel_with_credentials_file_and_default_scopes(
)
assert channel is grpc_secure_channel.return_value
+
if grpc_helpers.HAS_GRPC_GCP:
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
diff --git a/tests/unit/test_operation.py b/tests/unit/test_operation.py
index 28fbfe27..22e23bc3 100644
--- a/tests/unit/test_operation.py
+++ b/tests/unit/test_operation.py
@@ -14,6 +14,12 @@
import mock
+import pytest
+
+try:
+ import grpc # noqa: F401
+except ImportError:
+ pytest.skip("No GRPC", allow_module_level=True)
from google.api_core import exceptions
from google.api_core import operation
diff --git a/tests/unit/test_page_iterator.py b/tests/unit/test_page_iterator.py
index 668cf392..cf43aedf 100644
--- a/tests/unit/test_page_iterator.py
+++ b/tests/unit/test_page_iterator.py
@@ -17,7 +17,6 @@
import mock
import pytest
-import six
from google.api_core import page_iterator
@@ -56,17 +55,17 @@ def test_iterator_calls_parent_item_to_value(self):
assert item_to_value.call_count == 0
assert page.remaining == 100
- assert six.next(page) == 10
+ assert next(page) == 10
assert item_to_value.call_count == 1
item_to_value.assert_called_with(parent, 10)
assert page.remaining == 99
- assert six.next(page) == 11
+ assert next(page) == 11
assert item_to_value.call_count == 2
item_to_value.assert_called_with(parent, 11)
assert page.remaining == 98
- assert six.next(page) == 12
+ assert next(page) == 12
assert item_to_value.call_count == 3
item_to_value.assert_called_with(parent, 12)
assert page.remaining == 97
@@ -197,17 +196,17 @@ def test__items_iter(self):
# Consume items and check the state of the iterator.
assert iterator.num_results == 0
- assert six.next(items_iter) == item1
+ assert next(items_iter) == item1
assert iterator.num_results == 1
- assert six.next(items_iter) == item2
+ assert next(items_iter) == item2
assert iterator.num_results == 2
- assert six.next(items_iter) == item3
+ assert next(items_iter) == item3
assert iterator.num_results == 3
with pytest.raises(StopIteration):
- six.next(items_iter)
+ next(items_iter)
def test___iter__(self):
iterator = PageIteratorImpl(None, None)
@@ -289,16 +288,16 @@ def test_iterate(self):
items_iter = iter(iterator)
- val1 = six.next(items_iter)
+ val1 = next(items_iter)
assert val1 == item1
assert iterator.num_results == 1
- val2 = six.next(items_iter)
+ val2 = next(items_iter)
assert val2 == item2
assert iterator.num_results == 2
with pytest.raises(StopIteration):
- six.next(items_iter)
+ next(items_iter)
api_request.assert_called_once_with(method="GET", path=path, query_params={})
@@ -503,20 +502,21 @@ def api_request(*args, **kw):
items_iter = iter(iterator.pages)
npages = int(math.ceil(float(n_results) / page_size))
for ipage in range(npages):
- assert list(six.next(items_iter)) == [
+ assert list(next(items_iter)) == [
dict(name=str(i))
for i in range(
- ipage * page_size, min((ipage + 1) * page_size, n_results),
+ ipage * page_size,
+ min((ipage + 1) * page_size, n_results),
)
]
else:
items_iter = iter(iterator)
for i in range(n_results):
- assert six.next(items_iter) == dict(name=str(i))
+ assert next(items_iter) == dict(name=str(i))
assert iterator.num_results == i + 1
with pytest.raises(StopIteration):
- six.next(items_iter)
+ next(items_iter)
class TestGRPCIterator(object):
@@ -621,7 +621,7 @@ def __init__(self, pages, page_token=None):
self.page_token = page_token
def next(self):
- return six.next(self._pages)
+ return next(self._pages)
__next__ = next
diff --git a/tests/unit/test_path_template.py b/tests/unit/test_path_template.py
index 4c8a7c5e..73d351c0 100644
--- a/tests/unit/test_path_template.py
+++ b/tests/unit/test_path_template.py
@@ -17,6 +17,7 @@
import mock
import pytest
+from google.api import auth_pb2
from google.api_core import path_template
@@ -84,6 +85,61 @@ def test_expanded_failure(tmpl, args, kwargs, exc_match):
path_template.expand(tmpl, *args, **kwargs)
+@pytest.mark.parametrize(
+ "request_obj, field, expected_result",
+ [
+ [{"field": "stringValue"}, "field", "stringValue"],
+ [{"field": "stringValue"}, "nosuchfield", None],
+ [{"field": "stringValue"}, "field.subfield", None],
+ [{"field": {"subfield": "stringValue"}}, "field", None],
+ [{"field": {"subfield": "stringValue"}}, "field.subfield", "stringValue"],
+ [{"field": {"subfield": [1, 2, 3]}}, "field.subfield", [1, 2, 3]],
+ [{"field": {"subfield": "stringValue"}}, "field", None],
+ [{"field": {"subfield": "stringValue"}}, "field.nosuchfield", None],
+ [
+ {"field": {"subfield": {"subsubfield": "stringValue"}}},
+ "field.subfield.subsubfield",
+ "stringValue",
+ ],
+ ["string", "field", None],
+ ],
+)
+def test_get_field(request_obj, field, expected_result):
+ result = path_template.get_field(request_obj, field)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "request_obj, field, expected_result",
+ [
+ [{"field": "stringValue"}, "field", {}],
+ [{"field": "stringValue"}, "nosuchfield", {"field": "stringValue"}],
+ [{"field": "stringValue"}, "field.subfield", {"field": "stringValue"}],
+ [{"field": {"subfield": "stringValue"}}, "field.subfield", {"field": {}}],
+ [
+ {"field": {"subfield": "stringValue", "q": "w"}, "e": "f"},
+ "field.subfield",
+ {"field": {"q": "w"}, "e": "f"},
+ ],
+ [
+ {"field": {"subfield": "stringValue"}},
+ "field.nosuchfield",
+ {"field": {"subfield": "stringValue"}},
+ ],
+ [
+ {"field": {"subfield": {"subsubfield": "stringValue", "q": "w"}}},
+ "field.subfield.subsubfield",
+ {"field": {"subfield": {"q": "w"}}},
+ ],
+ ["string", "field", "string"],
+ ["string", "field.subfield", "string"],
+ ],
+)
+def test_delete_field(request_obj, field, expected_result):
+ path_template.delete_field(request_obj, field)
+ assert request_obj == expected_result
+
+
@pytest.mark.parametrize(
"tmpl, path",
[
@@ -113,3 +169,483 @@ def test__replace_variable_with_pattern():
match.group.return_value = None
with pytest.raises(ValueError, match="Unknown"):
path_template._replace_variable_with_pattern(match)
+
+
+@pytest.mark.parametrize(
+ "http_options, message, request_kwargs, expected_result",
+ [
+ [
+ [["get", "/v1/no/template", ""]],
+ None,
+ {"foo": "bar"},
+ ["get", "/v1/no/template", {}, {"foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/no/template", ""]],
+ auth_pb2.AuthenticationRule(selector="bar"),
+ {},
+ [
+ "get",
+ "/v1/no/template",
+ None,
+ auth_pb2.AuthenticationRule(selector="bar"),
+ ],
+ ],
+ # Single templates
+ [
+ [["get", "/v1/{field}", ""]],
+ None,
+ {"field": "parent"},
+ ["get", "/v1/parent", {}, {}],
+ ],
+ [
+ [["get", "/v1/{selector}", ""]],
+ auth_pb2.AuthenticationRule(selector="parent"),
+ {},
+ ["get", "/v1/parent", None, auth_pb2.AuthenticationRule()],
+ ],
+ [
+ [["get", "/v1/{field.sub}", ""]],
+ None,
+ {"field": {"sub": "parent"}, "foo": "bar"},
+ ["get", "/v1/parent", {}, {"field": {}, "foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{oauth.canonical_scopes}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="bar",
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="parent"),
+ ),
+ {},
+ [
+ "get",
+ "/v1/parent",
+ None,
+ auth_pb2.AuthenticationRule(
+ selector="bar", oauth=auth_pb2.OAuthRequirements()
+ ),
+ ],
+ ],
+ ],
+)
+def test_transcode_base_case(http_options, message, request_kwargs, expected_result):
+ http_options, expected_result = helper_test_transcode(http_options, expected_result)
+ result = path_template.transcode(http_options, message, **request_kwargs)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "http_options, message, request_kwargs, expected_result",
+ [
+ [
+ [["get", "/v1/{field.subfield}", ""]],
+ None,
+ {"field": {"subfield": "parent"}, "foo": "bar"},
+ ["get", "/v1/parent", {}, {"field": {}, "foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{oauth.canonical_scopes}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="bar",
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="parent"),
+ ),
+ {},
+ [
+ "get",
+ "/v1/parent",
+ None,
+ auth_pb2.AuthenticationRule(
+ selector="bar", oauth=auth_pb2.OAuthRequirements()
+ ),
+ ],
+ ],
+ [
+ [["get", "/v1/{field.subfield.subsubfield}", ""]],
+ None,
+ {"field": {"subfield": {"subsubfield": "parent"}}, "foo": "bar"},
+ ["get", "/v1/parent", {}, {"field": {"subfield": {}}, "foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{field.subfield1}/{field.subfield2}", ""]],
+ None,
+ {"field": {"subfield1": "parent", "subfield2": "child"}, "foo": "bar"},
+ ["get", "/v1/parent/child", {}, {"field": {}, "foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{selector}/{oauth.canonical_scopes}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="parent",
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="child"),
+ ),
+ {"field": {"subfield1": "parent", "subfield2": "child"}, "foo": "bar"},
+ [
+ "get",
+ "/v1/parent/child",
+ None,
+ auth_pb2.AuthenticationRule(oauth=auth_pb2.OAuthRequirements()),
+ ],
+ ],
+ ],
+)
+def test_transcode_subfields(http_options, message, request_kwargs, expected_result):
+ http_options, expected_result = helper_test_transcode(http_options, expected_result)
+ result = path_template.transcode(http_options, message, **request_kwargs)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "http_options, message, request_kwargs, expected_result",
+ [
+ # Single segment wildcard
+ [
+ [["get", "/v1/{field=*}", ""]],
+ None,
+ {"field": "parent"},
+ ["get", "/v1/parent", {}, {}],
+ ],
+ [
+ [["get", "/v1/{selector=*}", ""]],
+ auth_pb2.AuthenticationRule(selector="parent"),
+ {},
+ ["get", "/v1/parent", None, auth_pb2.AuthenticationRule()],
+ ],
+ [
+ [["get", "/v1/{field=a/*/b/*}", ""]],
+ None,
+ {"field": "a/parent/b/child", "foo": "bar"},
+ ["get", "/v1/a/parent/b/child", {}, {"foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{selector=a/*/b/*}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent/b/child", allow_without_credential=True
+ ),
+ {},
+ [
+ "get",
+ "/v1/a/parent/b/child",
+ None,
+ auth_pb2.AuthenticationRule(allow_without_credential=True),
+ ],
+ ],
+ # Double segment wildcard
+ [
+ [["get", "/v1/{field=**}", ""]],
+ None,
+ {"field": "parent/p1"},
+ ["get", "/v1/parent/p1", {}, {}],
+ ],
+ [
+ [["get", "/v1/{selector=**}", ""]],
+ auth_pb2.AuthenticationRule(selector="parent/p1"),
+ {},
+ ["get", "/v1/parent/p1", None, auth_pb2.AuthenticationRule()],
+ ],
+ [
+ [["get", "/v1/{field=a/**/b/**}", ""]],
+ None,
+ {"field": "a/parent/p1/b/child/c1", "foo": "bar"},
+ ["get", "/v1/a/parent/p1/b/child/c1", {}, {"foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{selector=a/**/b/**}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent/p1/b/child/c1", allow_without_credential=True
+ ),
+ {},
+ [
+ "get",
+ "/v1/a/parent/p1/b/child/c1",
+ None,
+ auth_pb2.AuthenticationRule(allow_without_credential=True),
+ ],
+ ],
+ # Combined single and double segment wildcard
+ [
+ [["get", "/v1/{field=a/*/b/**}", ""]],
+ None,
+ {"field": "a/parent/b/child/c1"},
+ ["get", "/v1/a/parent/b/child/c1", {}, {}],
+ ],
+ [
+ [["get", "/v1/{selector=a/*/b/**}", ""]],
+ auth_pb2.AuthenticationRule(selector="a/parent/b/child/c1"),
+ {},
+ ["get", "/v1/a/parent/b/child/c1", None, auth_pb2.AuthenticationRule()],
+ ],
+ [
+ [["get", "/v1/{field=a/**/b/*}/v2/{name}", ""]],
+ None,
+ {"field": "a/parent/p1/b/child", "name": "first", "foo": "bar"},
+ ["get", "/v1/a/parent/p1/b/child/v2/first", {}, {"foo": "bar"}],
+ ],
+ [
+ [["get", "/v1/{selector=a/**/b/*}/v2/{oauth.canonical_scopes}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent/p1/b/child",
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="first"),
+ ),
+ {"field": "a/parent/p1/b/child", "name": "first", "foo": "bar"},
+ [
+ "get",
+ "/v1/a/parent/p1/b/child/v2/first",
+ None,
+ auth_pb2.AuthenticationRule(oauth=auth_pb2.OAuthRequirements()),
+ ],
+ ],
+ ],
+)
+def test_transcode_with_wildcard(
+ http_options, message, request_kwargs, expected_result
+):
+ http_options, expected_result = helper_test_transcode(http_options, expected_result)
+ result = path_template.transcode(http_options, message, **request_kwargs)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "http_options, message, request_kwargs, expected_result",
+ [
+ # Single field body
+ [
+ [["post", "/v1/no/template", "data"]],
+ None,
+ {"data": {"id": 1, "info": "some info"}, "foo": "bar"},
+ ["post", "/v1/no/template", {"id": 1, "info": "some info"}, {"foo": "bar"}],
+ ],
+ [
+ [["post", "/v1/no/template", "oauth"]],
+ auth_pb2.AuthenticationRule(
+ selector="bar",
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="child"),
+ ),
+ {},
+ [
+ "post",
+ "/v1/no/template",
+ auth_pb2.OAuthRequirements(canonical_scopes="child"),
+ auth_pb2.AuthenticationRule(selector="bar"),
+ ],
+ ],
+ [
+ [["post", "/v1/{field=a/*}/b/{name=**}", "data"]],
+ None,
+ {
+ "field": "a/parent",
+ "name": "first/last",
+ "data": {"id": 1, "info": "some info"},
+ "foo": "bar",
+ },
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ {"id": 1, "info": "some info"},
+ {"foo": "bar"},
+ ],
+ ],
+ [
+ [["post", "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", "oauth"]],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent",
+ allow_without_credential=True,
+ requirements=[auth_pb2.AuthRequirement(provider_id="p")],
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="first/last"),
+ ),
+ {},
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ auth_pb2.OAuthRequirements(),
+ auth_pb2.AuthenticationRule(
+ requirements=[auth_pb2.AuthRequirement(provider_id="p")],
+ allow_without_credential=True,
+ ),
+ ],
+ ],
+ # Wildcard body
+ [
+ [["post", "/v1/{field=a/*}/b/{name=**}", "*"]],
+ None,
+ {
+ "field": "a/parent",
+ "name": "first/last",
+ "data": {"id": 1, "info": "some info"},
+ "foo": "bar",
+ },
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ {"data": {"id": 1, "info": "some info"}, "foo": "bar"},
+ {},
+ ],
+ ],
+ [
+ [["post", "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", "*"]],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent",
+ allow_without_credential=True,
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="first/last"),
+ ),
+ {
+ "field": "a/parent",
+ "name": "first/last",
+ "data": {"id": 1, "info": "some info"},
+ "foo": "bar",
+ },
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ auth_pb2.AuthenticationRule(
+ allow_without_credential=True, oauth=auth_pb2.OAuthRequirements()
+ ),
+ auth_pb2.AuthenticationRule(),
+ ],
+ ],
+ ],
+)
+def test_transcode_with_body(http_options, message, request_kwargs, expected_result):
+ http_options, expected_result = helper_test_transcode(http_options, expected_result)
+ result = path_template.transcode(http_options, message, **request_kwargs)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "http_options, message, request_kwargs, expected_result",
+ [
+ # Additional bindings
+ [
+ [
+ ["post", "/v1/{field=a/*}/b/{name=**}", "extra_data"],
+ ["post", "/v1/{field=a/*}/b/{name=**}", "*"],
+ ],
+ None,
+ {
+ "field": "a/parent",
+ "name": "first/last",
+ "data": {"id": 1, "info": "some info"},
+ "foo": "bar",
+ },
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ {"data": {"id": 1, "info": "some info"}, "foo": "bar"},
+ {},
+ ],
+ ],
+ [
+ [
+ [
+ "post",
+ "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}",
+ "extra_data",
+ ],
+ ["post", "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", "*"],
+ ],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent",
+ allow_without_credential=True,
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="first/last"),
+ ),
+ {},
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ auth_pb2.AuthenticationRule(
+ allow_without_credential=True, oauth=auth_pb2.OAuthRequirements()
+ ),
+ auth_pb2.AuthenticationRule(),
+ ],
+ ],
+ [
+ [
+ ["get", "/v1/{field=a/*}/b/{name=**}", ""],
+ ["get", "/v1/{field=a/*}/b/first/last", ""],
+ ],
+ None,
+ {"field": "a/parent", "foo": "bar"},
+ ["get", "/v1/a/parent/b/first/last", {}, {"foo": "bar"}],
+ ],
+ [
+ [
+ ["get", "/v1/{selector=a/*}/b/{oauth.allow_without_credential=**}", ""],
+ ["get", "/v1/{selector=a/*}/b/first/last", ""],
+ ],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent",
+ allow_without_credential=True,
+ oauth=auth_pb2.OAuthRequirements(),
+ ),
+ {},
+ [
+ "get",
+ "/v1/a/parent/b/first/last",
+ None,
+ auth_pb2.AuthenticationRule(
+ allow_without_credential=True, oauth=auth_pb2.OAuthRequirements()
+ ),
+ ],
+ ],
+ ],
+)
+def test_transcode_with_additional_bindings(
+ http_options, message, request_kwargs, expected_result
+):
+ http_options, expected_result = helper_test_transcode(http_options, expected_result)
+ result = path_template.transcode(http_options, message, **request_kwargs)
+ assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "http_options, message, request_kwargs",
+ [
+ [[["get", "/v1/{name}", ""]], None, {"foo": "bar"}],
+ [[["get", "/v1/{selector}", ""]], auth_pb2.AuthenticationRule(), {}],
+ [[["get", "/v1/{name}", ""]], auth_pb2.AuthenticationRule(), {}],
+ [[["get", "/v1/{name}", ""]], None, {"name": "first/last"}],
+ [
+ [["get", "/v1/{selector}", ""]],
+ auth_pb2.AuthenticationRule(selector="first/last"),
+ {},
+ ],
+ [[["get", "/v1/{name=mr/*/*}", ""]], None, {"name": "first/last"}],
+ [
+ [["get", "/v1/{selector=mr/*/*}", ""]],
+ auth_pb2.AuthenticationRule(selector="first/last"),
+ {},
+ ],
+ [[["post", "/v1/{name}", "data"]], None, {"name": "first/last"}],
+ [
+ [["post", "/v1/{selector}", "data"]],
+ auth_pb2.AuthenticationRule(selector="first"),
+ {},
+ ],
+ [[["post", "/v1/{first_name}", "data"]], None, {"last_name": "last"}],
+ [
+ [["post", "/v1/{first_name}", ""]],
+ auth_pb2.AuthenticationRule(selector="first"),
+ {},
+ ],
+ ],
+)
+def test_transcode_fails(http_options, message, request_kwargs):
+ http_options, _ = helper_test_transcode(http_options, range(4))
+ with pytest.raises(ValueError):
+ path_template.transcode(http_options, message, **request_kwargs)
+
+
+def helper_test_transcode(http_options_list, expected_result_list):
+ http_options = []
+ for opt_list in http_options_list:
+ http_option = {"method": opt_list[0], "uri": opt_list[1]}
+ if opt_list[2]:
+ http_option["body"] = opt_list[2]
+ http_options.append(http_option)
+
+ expected_result = {
+ "method": expected_result_list[0],
+ "uri": expected_result_list[1],
+ "query_params": expected_result_list[3],
+ }
+ if expected_result_list[2]:
+ expected_result["body"] = expected_result_list[2]
+ return (http_options, expected_result)
diff --git a/tests/unit/test_rest_helpers.py b/tests/unit/test_rest_helpers.py
new file mode 100644
index 00000000..ff1a43f0
--- /dev/null
+++ b/tests/unit/test_rest_helpers.py
@@ -0,0 +1,94 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from google.api_core import rest_helpers
+
+
+def test_flatten_simple_value():
+ with pytest.raises(TypeError):
+ rest_helpers.flatten_query_params("abc")
+
+
+def test_flatten_list():
+ with pytest.raises(TypeError):
+ rest_helpers.flatten_query_params(["abc", "def"])
+
+
+def test_flatten_none():
+ assert rest_helpers.flatten_query_params(None) == []
+
+
+def test_flatten_empty_dict():
+ assert rest_helpers.flatten_query_params({}) == []
+
+
+def test_flatten_simple_dict():
+ obj = {"a": "abc", "b": "def", "c": True, "d": False, "e": 10, "f": -3.76}
+ assert rest_helpers.flatten_query_params(obj) == [
+ ("a", "abc"),
+ ("b", "def"),
+ ("c", True),
+ ("d", False),
+ ("e", 10),
+ ("f", -3.76),
+ ]
+
+
+def test_flatten_simple_dict_strict():
+ obj = {"a": "abc", "b": "def", "c": True, "d": False, "e": 10, "f": -3.76}
+ assert rest_helpers.flatten_query_params(obj, strict=True) == [
+ ("a", "abc"),
+ ("b", "def"),
+ ("c", "true"),
+ ("d", "false"),
+ ("e", "10"),
+ ("f", "-3.76"),
+ ]
+
+
+def test_flatten_repeated_field():
+ assert rest_helpers.flatten_query_params({"a": ["x", "y", "z", None]}) == [
+ ("a", "x"),
+ ("a", "y"),
+ ("a", "z"),
+ ]
+
+
+def test_flatten_nested_dict():
+ obj = {"a": {"b": {"c": ["x", "y", "z"]}}, "d": {"e": "uvw"}}
+ expected_result = [("a.b.c", "x"), ("a.b.c", "y"), ("a.b.c", "z"), ("d.e", "uvw")]
+
+ assert rest_helpers.flatten_query_params(obj) == expected_result
+
+
+def test_flatten_repeated_dict():
+ obj = {
+ "a": {"b": {"c": [{"v": 1}, {"v": 2}]}},
+ "d": "uvw",
+ }
+
+ with pytest.raises(ValueError):
+ rest_helpers.flatten_query_params(obj)
+
+
+def test_flatten_repeated_list():
+ obj = {
+ "a": {"b": {"c": [["e", "f"], ["g", "h"]]}},
+ "d": "uvw",
+ }
+
+ with pytest.raises(ValueError):
+ rest_helpers.flatten_query_params(obj)
diff --git a/tests/unit/test_rest_streaming.py b/tests/unit/test_rest_streaming.py
new file mode 100644
index 00000000..a44c83c0
--- /dev/null
+++ b/tests/unit/test_rest_streaming.py
@@ -0,0 +1,216 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import logging
+import random
+import time
+from typing import List
+from unittest.mock import patch
+
+import proto
+import pytest
+import requests
+
+from google.api_core import rest_streaming
+from google.protobuf import duration_pb2
+from google.protobuf import timestamp_pb2
+
+
+__protobuf__ = proto.module(package=__name__)
+SEED = int(time.time())
+logging.info(f"Starting rest streaming tests with random seed: {SEED}")
+random.seed(SEED)
+
+
+class Genre(proto.Enum):
+ GENRE_UNSPECIFIED = 0
+ CLASSICAL = 1
+ JAZZ = 2
+ ROCK = 3
+
+
+class Composer(proto.Message):
+ given_name = proto.Field(proto.STRING, number=1)
+ family_name = proto.Field(proto.STRING, number=2)
+ relateds = proto.RepeatedField(proto.STRING, number=3)
+ indices = proto.MapField(proto.STRING, proto.STRING, number=4)
+
+
+class Song(proto.Message):
+ composer = proto.Field(Composer, number=1)
+ title = proto.Field(proto.STRING, number=2)
+ lyrics = proto.Field(proto.STRING, number=3)
+ year = proto.Field(proto.INT32, number=4)
+ genre = proto.Field(Genre, number=5)
+ is_five_mins_longer = proto.Field(proto.BOOL, number=6)
+ score = proto.Field(proto.DOUBLE, number=7)
+ likes = proto.Field(proto.INT64, number=8)
+ duration = proto.Field(duration_pb2.Duration, number=9)
+ date_added = proto.Field(timestamp_pb2.Timestamp, number=10)
+
+
+class EchoResponse(proto.Message):
+ content = proto.Field(proto.STRING, number=1)
+
+
+class ResponseMock(requests.Response):
+ class _ResponseItr:
+ def __init__(self, _response_bytes: bytes, random_split=False):
+ self._responses_bytes = _response_bytes
+ self._i = 0
+ self._random_split = random_split
+
+ def __next__(self):
+ if self._i == len(self._responses_bytes):
+ raise StopIteration
+ if self._random_split:
+ n = random.randint(1, len(self._responses_bytes[self._i :]))
+ else:
+ n = 1
+ x = self._responses_bytes[self._i : self._i + n]
+ self._i += n
+ return x.decode("utf-8")
+
+ def __init__(
+ self,
+ responses: List[proto.Message],
+ response_cls,
+ random_split=False,
+ ):
+ super().__init__()
+ self._responses = responses
+ self._random_split = random_split
+ self._response_message_cls = response_cls
+
+ def _parse_responses(self, responses: List[proto.Message]) -> bytes:
+        # to_json may return a string surrounded with quotes that need to be
+        # stripped in order to be valid JSON.
+ json_responses = [
+ self._response_message_cls.to_json(r).strip('"') for r in responses
+ ]
+ logging.info(f"Sending JSON stream: {json_responses}")
+ ret_val = "[{}]".format(",".join(json_responses))
+ return bytes(ret_val, "utf-8")
+
+ def close(self):
+ raise NotImplementedError()
+
+ def iter_content(self, *args, **kwargs):
+ return self._ResponseItr(
+ self._parse_responses(self._responses),
+ random_split=self._random_split,
+ )
+
+
+@pytest.mark.parametrize("random_split", [False])
+def test_next_simple(random_split):
+ responses = [EchoResponse(content="hello world"), EchoResponse(content="yes")]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=EchoResponse
+ )
+ itr = rest_streaming.ResponseIterator(resp, EchoResponse)
+ assert list(itr) == responses
+
+
+@pytest.mark.parametrize("random_split", [True, False])
+def test_next_nested(random_split):
+ responses = [
+ Song(title="some song", composer=Composer(given_name="some name")),
+ Song(title="another song", date_added=datetime.datetime(2021, 12, 17)),
+ ]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=Song
+ )
+ itr = rest_streaming.ResponseIterator(resp, Song)
+ assert list(itr) == responses
+
+
+@pytest.mark.parametrize("random_split", [True, False])
+def test_next_stress(random_split):
+ n = 50
+ responses = [
+ Song(title="title_%d" % i, composer=Composer(given_name="name_%d" % i))
+ for i in range(n)
+ ]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=Song
+ )
+ itr = rest_streaming.ResponseIterator(resp, Song)
+ assert list(itr) == responses
+
+
+@pytest.mark.parametrize("random_split", [True, False])
+def test_next_escaped_characters_in_string(random_split):
+ composer_with_relateds = Composer()
+ relateds = ["Artist A", "Artist B"]
+ composer_with_relateds.relateds = relateds
+
+ responses = [
+ Song(title='ti"tle\nfoo\tbar{}', composer=Composer(given_name="name\n\n\n")),
+ Song(
+ title='{"this is weird": "totally"}', composer=Composer(given_name="\\{}\\")
+ ),
+ Song(title='\\{"key": ["value",]}\\', composer=composer_with_relateds),
+ ]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=Song
+ )
+ itr = rest_streaming.ResponseIterator(resp, Song)
+ assert list(itr) == responses
+
+
+def test_next_not_array():
+ with patch.object(
+ ResponseMock, "iter_content", return_value=iter('{"hello": 0}')
+ ) as mock_method:
+
+ resp = ResponseMock(responses=[], response_cls=EchoResponse)
+ itr = rest_streaming.ResponseIterator(resp, EchoResponse)
+ with pytest.raises(ValueError):
+ next(itr)
+ mock_method.assert_called_once()
+
+
+def test_cancel():
+ with patch.object(ResponseMock, "close", return_value=None) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=EchoResponse)
+ itr = rest_streaming.ResponseIterator(resp, EchoResponse)
+ itr.cancel()
+ mock_method.assert_called_once()
+
+
+def test_check_buffer():
+ with patch.object(
+ ResponseMock,
+ "_parse_responses",
+ return_value=bytes('[{"content": "hello"}, {', "utf-8"),
+ ):
+ resp = ResponseMock(responses=[], response_cls=EchoResponse)
+ itr = rest_streaming.ResponseIterator(resp, EchoResponse)
+ with pytest.raises(ValueError):
+ next(itr)
+ next(itr)
+
+
+def test_next_html():
+ with patch.object(
+ ResponseMock, "iter_content", return_value=iter("")
+ ) as mock_method:
+
+ resp = ResponseMock(responses=[], response_cls=EchoResponse)
+ itr = rest_streaming.ResponseIterator(resp, EchoResponse)
+ with pytest.raises(ValueError):
+ next(itr)
+ mock_method.assert_called_once()