diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 6dd32fec..ee212e97 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel @@ -28,7 +28,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index cabd0e5b..1c35951e 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.7" + python-version: "3.10" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index 7718391a..cbd7e77f 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -5,4 +5,6 @@ typing-extensions twine wheel setuptools -nox \ No newline at end of file +nox +charset-normalizer<3 +click<8.1.0 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 385f2d4d..9c1b9be3 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.6.15 \ - --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ - --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 +certifi==2022.9.24 \ + --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ + --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 # 
via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ @@ -93,11 +93,14 @@ cffi==1.15.1 \ charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via requests + # via + # -r requirements.in + # requests click==8.0.4 \ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb # via + # -r requirements.in # gcp-docuploader # gcp-releasetool colorlog==6.7.0 \ @@ -110,29 +113,33 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==37.0.4 \ - --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ - --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ - --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ - --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ - --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ - --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ - --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ - --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ - --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ - --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ - --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ - --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ - --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ - 
--hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ - --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ - --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ - --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ - --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ - --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ - --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ - --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ - --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 +cryptography==38.0.3 \ + --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ + --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ + --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ + --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ + --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ + --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ + --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ + --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ + --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ + --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ + --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ + --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ + --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ + --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ + 
--hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ + --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ + --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ + --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ + --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ + --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ + --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ + --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ + --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ + --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ + --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ + --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 # via # gcp-releasetool # secretstorage @@ -148,23 +155,23 @@ filelock==3.8.0 \ --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 # via virtualenv -gcp-docuploader==0.6.3 \ - --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ - --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b +gcp-docuploader==0.6.4 \ + --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ + --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.8.7 \ - --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ - --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d +gcp-releasetool==1.10.0 \ + --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ + 
--hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d # via -r requirements.in -google-api-core==2.8.2 \ - --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ - --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 +google-api-core==2.10.2 \ + --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ + --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e # via # google-cloud-core # google-cloud-storage -google-auth==2.11.0 \ - --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ - --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb +google-auth==2.14.1 \ + --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ + --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 # via # gcp-releasetool # google-api-core @@ -174,76 +181,102 @@ google-cloud-core==2.3.2 \ --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a # via google-cloud-storage -google-cloud-storage==2.5.0 \ - --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ - --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 +google-cloud-storage==2.6.0 \ + --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ + --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 # via gcp-docuploader -google-crc32c==1.3.0 \ - --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ - --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ - --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ - --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ - 
--hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ - --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ - --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ - --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ - --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ - --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ - --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ - --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ - --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ - --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ - --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ - --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ - --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ - --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ - --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ - --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ - --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ - --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ - --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ - --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ - --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ - --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ - --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ - --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ - 
--hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ - --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ - --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ - --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ - --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ - --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ - --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ - --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ - --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ - --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ - --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ - --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ - --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ - --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ - --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 +google-crc32c==1.5.0 \ + --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ + --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ + --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ + --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ + --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ + --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ + --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ + --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ + 
--hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ + --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ + --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ + --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ + --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ + --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ + --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ + --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ + --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ + --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ + --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ + --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ + --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ + --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ + --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ + --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ + --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ + --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ + --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ + --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ + --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ + --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ + --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ + --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ + 
--hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ + --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ + --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ + --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ + --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ + --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ + --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ + --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ + --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ + --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ + --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ + --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ + --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ + --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ + --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ + --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ + --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ + --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ + --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ + --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ + --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ + --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ + --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ + --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ + 
--hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ + --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ + --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ + --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ + --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ + --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ + --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ + --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ + --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ + --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ + --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ + --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 # via google-resumable-media -google-resumable-media==2.3.3 \ - --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ - --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 +google-resumable-media==2.4.0 \ + --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ + --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # via google-cloud-storage -googleapis-common-protos==1.56.4 \ - --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ - --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 +googleapis-common-protos==1.57.0 \ + --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ + --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c # via google-api-core -idna==3.3 \ - --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ - 
--hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==4.12.0 \ - --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ - --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 +importlib-metadata==5.0.0 \ + --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ + --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in + # keyring # twine -jaraco-classes==3.2.2 \ - --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ - --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 +jaraco-classes==3.2.3 \ + --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ + --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -255,9 +288,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.0 \ - --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ - --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db +keyring==23.11.0 \ + --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ + --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 # via # gcp-releasetool # twine @@ -303,9 +336,9 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ 
--hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 -more-itertools==8.14.0 \ - --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ - --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 +more-itertools==9.0.0 \ + --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ + --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ @@ -321,35 +354,33 @@ pkginfo==1.8.3 \ --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c # via twine -platformdirs==2.5.2 \ - --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ - --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 +platformdirs==2.5.4 \ + --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ + --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 # via virtualenv -protobuf==3.20.1 \ - --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \ - --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \ - --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \ - --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \ - --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \ - --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \ - --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \ - --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \ - --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \ - 
--hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \ - --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \ - --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \ - --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \ - --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \ - --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \ - --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \ - --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \ - --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \ - --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \ - --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \ - --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \ - --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \ - --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \ - --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3 +protobuf==3.20.3 \ + --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ + --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ + --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ + --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ + --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ + --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ + --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ + --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ + 
--hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ + --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ + --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ + --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ + --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ + --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ + --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ + --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ + --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ + --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ + --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ + --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ + --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ + --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee # via # gcp-docuploader # gcp-releasetool @@ -378,9 +409,9 @@ pygments==2.13.0 \ # via # readme-renderer # rich -pyjwt==2.4.0 \ - --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ - --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba +pyjwt==2.6.0 \ + --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 # via gcp-releasetool pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ @@ -393,9 +424,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool 
-readme-renderer==37.0 \ - --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ - --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 +readme-renderer==37.3 \ + --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ + --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine requests==2.28.1 \ --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ @@ -406,17 +437,17 @@ requests==2.28.1 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.9.1 \ - --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ - --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 +requests-toolbelt==0.10.1 \ + --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ + --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.5.1 \ - --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ - --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca +rich==12.6.0 \ + --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ + --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -438,9 +469,9 @@ twine==4.0.1 \ --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 # via -r requirements.in -typing-extensions==4.3.0 \ - --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ - 
--hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 +typing-extensions==4.4.0 \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in urllib3==1.26.12 \ --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ @@ -448,25 +479,25 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.4 \ - --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ - --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 +virtualenv==20.16.7 \ + --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ + --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 # via bleach -wheel==0.37.1 \ - --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ - --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via -r requirements.in -zipp==3.8.1 \ - --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ - --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 +zipp==3.10.0 \ + --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ + --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.2.0 \ - --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ - 
--hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 +setuptools==65.5.1 \ + --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ + --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f # via -r requirements.in diff --git a/google/api_core/extended_operation.py b/google/api_core/extended_operation.py index cabae107..092eba4e 100644 --- a/google/api_core/extended_operation.py +++ b/google/api_core/extended_operation.py @@ -50,10 +50,13 @@ class ExtendedOperation(polling.PollingFuture): refresh (Callable[[], type(extended_operation)]): A callable that returns the latest state of the operation. cancel (Callable[[], None]): A callable that tries to cancel the operation. - retry: Optional(google.api_core.retry.Retry): The retry configuration used - when polling. This can be used to control how often :meth:`done` - is polled. Regardless of the retry's ``deadline``, it will be - overridden by the ``timeout`` argument to :meth:`result`. + polling: Optional(google.api_core.retry.Retry): The configuration used + for polling. This can be used to control how often :meth:`done` + is polled. If the ``timeout`` argument to :meth:`result` is + specified it will override the ``polling.timeout`` property. + retry: Optional(google.api_core.retry.Retry): DEPRECATED use ``polling`` + instead. If specified it will override ``polling`` parameter to + maintain backward compatibility. 
Note: Most long-running API methods use google.api_core.operation.Operation This class is a wrapper for a subset of methods that use alternative @@ -68,9 +71,14 @@ class ExtendedOperation(polling.PollingFuture): """ def __init__( - self, extended_operation, refresh, cancel, retry=polling.DEFAULT_RETRY + self, + extended_operation, + refresh, + cancel, + polling=polling.DEFAULT_POLLING, + **kwargs, ): - super().__init__(retry=retry) + super().__init__(polling=polling, **kwargs) self._extended_operation = extended_operation self._refresh = refresh self._cancel = cancel @@ -114,7 +122,7 @@ def error_message(self): def __getattr__(self, name): return getattr(self._extended_operation, name) - def done(self, retry=polling.DEFAULT_RETRY): + def done(self, retry=None): self._refresh_and_update(retry) return self._extended_operation.done @@ -137,9 +145,11 @@ def cancelled(self): self._refresh_and_update() return self._extended_operation.done - def _refresh_and_update(self, retry=polling.DEFAULT_RETRY): + def _refresh_and_update(self, retry=None): if not self._extended_operation.done: - self._extended_operation = self._refresh(retry=retry) + self._extended_operation = ( + self._refresh(retry=retry) if retry else self._refresh() + ) self._handle_refreshed_operation() def _handle_refreshed_operation(self): diff --git a/google/api_core/future/async_future.py b/google/api_core/future/async_future.py index 88c183f9..325ee9cd 100644 --- a/google/api_core/future/async_future.py +++ b/google/api_core/future/async_future.py @@ -95,7 +95,7 @@ async def _blocking_poll(self, timeout=None): if self._future.done(): return - retry_ = self._retry.with_deadline(timeout) + retry_ = self._retry.with_timeout(timeout) try: await retry_(self._done_or_raise)() diff --git a/google/api_core/future/polling.py b/google/api_core/future/polling.py index 02e680f6..6e6aa5d4 100644 --- a/google/api_core/future/polling.py +++ b/google/api_core/future/polling.py @@ -18,7 +18,7 @@ import concurrent.futures 
from google.api_core import exceptions -from google.api_core import retry +from google.api_core import retry as retries from google.api_core.future import _helpers from google.api_core.future import base @@ -29,14 +29,37 @@ class _OperationNotComplete(Exception): pass -RETRY_PREDICATE = retry.if_exception_type( +# DEPRECATED as it conflates RPC retry and polling concepts into one. +# Use POLLING_PREDICATE instead to configure polling. +RETRY_PREDICATE = retries.if_exception_type( _OperationNotComplete, exceptions.TooManyRequests, exceptions.InternalServerError, exceptions.BadGateway, exceptions.ServiceUnavailable, ) -DEFAULT_RETRY = retry.Retry(predicate=RETRY_PREDICATE) + +# DEPRECATED: use DEFAULT_POLLING to configure LRO polling logic. Construct +# Retry object using its default values as a baseline for any custom retry logic +# (not to be confused with polling logic). +DEFAULT_RETRY = retries.Retry(predicate=RETRY_PREDICATE) + +# POLLING_PREDICATE is supposed to poll only on _OperationNotComplete. +# Any RPC-specific errors (like ServiceUnavailable) will be handled +# by retry logic (not to be confused with polling logic) which is triggered for +# every polling RPC independently of polling logic but within its context. +POLLING_PREDICATE = retries.if_exception_type( + _OperationNotComplete, +) + +# Default polling configuration +DEFAULT_POLLING = retries.Retry( + predicate=POLLING_PREDICATE, + initial=1.0, # seconds + maximum=20.0, # seconds + multiplier=1.5, + timeout=900, # seconds +) class PollingFuture(base.Future): @@ -45,21 +68,29 @@ class PollingFuture(base.Future): The :meth:`done` method should be implemented by subclasses. The polling behavior will repeatedly call ``done`` until it returns True. + The actuall polling logic is encapsulated in :meth:`result` method. See + documentation for that method for details on how polling works. + .. 
note:: Privacy here is intended to prevent the final class from overexposing, not to prevent subclasses from accessing methods. Args: - retry (google.api_core.retry.Retry): The retry configuration used - when polling. This can be used to control how often :meth:`done` - is polled. Regardless of the retry's ``deadline``, it will be - overridden by the ``timeout`` argument to :meth:`result`. + polling (google.api_core.retry.Retry): The configuration used for polling. + This parameter controls how often :meth:`done` is polled. If the + ``timeout`` argument is specified in :meth:`result` method it will + override the ``polling.timeout`` property. + retry (google.api_core.retry.Retry): DEPRECATED use ``polling`` instead. + If set, it will override ``polling`` paremeter for backward + compatibility. """ - def __init__(self, retry=DEFAULT_RETRY): + _DEFAULT_VALUE = object() + + def __init__(self, polling=DEFAULT_POLLING, **kwargs): super(PollingFuture, self).__init__() - self._retry = retry + self._polling = kwargs.get("retry", polling) self._result = None self._exception = None self._result_set = False @@ -69,11 +100,13 @@ def __init__(self, retry=DEFAULT_RETRY): self._done_callbacks = [] @abc.abstractmethod - def done(self, retry=DEFAULT_RETRY): + def done(self, retry=None): """Checks to see if the operation is complete. Args: - retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. + retry (google.api_core.retry.Retry): (Optional) How to retry the + polling RPC (to not be confused with polling configuration. See + the documentation for :meth:`result` for details). Returns: bool: True if the operation is complete, False otherwise. 
@@ -81,45 +114,136 @@ def done(self, retry=DEFAULT_RETRY): # pylint: disable=redundant-returns-doc, missing-raises-doc raise NotImplementedError() - def _done_or_raise(self, retry=DEFAULT_RETRY): + def _done_or_raise(self, retry=None): """Check if the future is done and raise if it's not.""" - kwargs = {} if retry is DEFAULT_RETRY else {"retry": retry} - - if not self.done(**kwargs): + if not self.done(retry=retry): raise _OperationNotComplete() def running(self): """True if the operation is currently running.""" return not self.done() - def _blocking_poll(self, timeout=None, retry=DEFAULT_RETRY): - """Poll and wait for the Future to be resolved. + def _blocking_poll(self, timeout=_DEFAULT_VALUE, retry=None, polling=None): + """Poll and wait for the Future to be resolved.""" - Args: - timeout (int): - How long (in seconds) to wait for the operation to complete. - If None, wait indefinitely. - """ if self._result_set: return - retry_ = self._retry.with_deadline(timeout) + polling = polling or self._polling + if timeout is not PollingFuture._DEFAULT_VALUE: + polling = polling.with_timeout(timeout) try: - kwargs = {} if retry is DEFAULT_RETRY else {"retry": retry} - retry_(self._done_or_raise)(**kwargs) + polling(self._done_or_raise)(retry=retry) except exceptions.RetryError: raise concurrent.futures.TimeoutError( - "Operation did not complete within the designated " "timeout." + f"Operation did not complete within the designated timeout of " + f"{polling.timeout} seconds." ) - def result(self, timeout=None, retry=DEFAULT_RETRY): - """Get the result of the operation, blocking if necessary. + def result(self, timeout=_DEFAULT_VALUE, retry=None, polling=None): + """Get the result of the operation. + + This method will poll for operation status periodically, blocking if + necessary. 
If you just want to make sure that this method does not block + for more than X seconds and you do not care about the nitty-gritty of + how this method operates, just call it with ``result(timeout=X)``. The + other parameters are for advanced use only. + + Every call to this method is controlled by the following three + parameters, each of which has a specific, distinct role, even though all three + may look very similar: ``timeout``, ``retry`` and ``polling``. In most + cases users do not need to specify any custom values for any of these + parameters and may simply rely on default ones instead. + + If you choose to specify custom parameters, please make sure you've + read the documentation below carefully. + + First, please check :class:`google.api_core.retry.Retry` + class documentation for the proper definition of timeout and deadline + terms and for the definition the three different types of timeouts. + This class operates in terms of Retry Timeout and Polling Timeout. It + does not let customizing RPC timeout and the user is expected to rely on + default behavior for it. + + The roles of each argument of this method are as follows: + + ``timeout`` (int): (Optional) The Polling Timeout as defined in + :class:`google.api_core.retry.Retry`. If the operation does not complete + within this timeout an exception will be thrown. This parameter affects + neither Retry Timeout nor RPC Timeout. + + ``retry`` (google.api_core.retry.Retry): (Optional) How to retry the + polling RPC. The ``retry.timeout`` property of this parameter is the + Retry Timeout as defined in :class:`google.api_core.retry.Retry`. + This parameter defines ONLY how the polling RPC call is retried + (i.e. what to do if the RPC we used for polling returned an error). It + does NOT define how the polling is done (i.e. how frequently and for + how long to call the polling RPC); use the ``polling`` parameter for that. 
+ If a polling RPC throws and error and retrying it fails, the whole + future fails with the corresponding exception. If you want to tune which + server response error codes are not fatal for operation polling, use this + parameter to control that (``retry.predicate`` in particular). + + ``polling`` (google.api_core.retry.Retry): (Optional) How often and + for how long to call the polling RPC periodically (i.e. what to do if + a polling rpc returned successfully but its returned result indicates + that the long running operation is not completed yet, so we need to + check it again at some point in future). This parameter does NOT define + how to retry each individual polling RPC in case of an error; use the + ``retry`` parameter for that. The ``polling.timeout`` of this parameter + is Polling Timeout as defined in as defined in + :class:`google.api_core.retry.Retry`. + + For each of the arguments, there are also default values in place, which + will be used if a user does not specify their own. The default values + for the three parameters are not to be confused with the default values + for the corresponding arguments in this method (those serve as "not set" + markers for the resolution logic). + + If ``timeout`` is provided (i.e.``timeout is not _DEFAULT VALUE``; note + the ``None`` value means "infinite timeout"), it will be used to control + the actual Polling Timeout. Otherwise, the ``polling.timeout`` value + will be used instead (see below for how the ``polling`` config itself + gets resolved). In other words, this parameter effectively overrides + the ``polling.timeout`` value if specified. This is so to preserve + backward compatibility. + + If ``retry`` is provided (i.e. ``retry is not None``) it will be used to + control retry behavior for the polling RPC and the ``retry.timeout`` + will determine the Retry Timeout. 
If not provided, the + polling RPC will be called with whichever default retry config was + specified for the polling RPC at the moment of the construction of the + polling RPC's client. For example, if the polling RPC is + ``operations_client.get_operation()``, the ``retry`` parameter will be + controlling its retry behavior (not polling behavior) and, if not + specified, that specific method (``operations_client.get_operation()``) + will be retried according to the default retry config provided during + creation of ``operations_client`` client instead. This argument exists + mainly for backward compatibility; users are very unlikely to ever need + to set this parameter explicitly. + + If ``polling`` is provided (i.e. ``polling is not None``), it will be used + to controll the overall polling behavior and ``polling.timeout`` will + controll Polling Timeout unless it is overridden by ``timeout`` parameter + as described above. If not provided, the``polling`` parameter specified + during construction of this future (the ``polling`` argument in the + constructor) will be used instead. Note: since the ``timeout`` argument may + override ``polling.timeout`` value, this parameter should be viewed as + coupled with the ``timeout`` parameter as described above. Args: - timeout (int): - How long (in seconds) to wait for the operation to complete. - If None, wait indefinitely. + timeout (int): (Optional) How long (in seconds) to wait for the + operation to complete. If None, wait indefinitely. + retry (google.api_core.retry.Retry): (Optional) How to retry the + polling RPC. This defines ONLY how the polling RPC call is + retried (i.e. what to do if the RPC we used for polling returned + an error). It does NOT define how the polling is done (i.e. how + frequently and for how long to call the polling RPC). + polling (google.api_core.retry.Retry): (Optional) How often and + for how long to call polling RPC periodically. 
This parameter + does NOT define how to retry each individual polling RPC call + (use the ``retry`` parameter for that). Returns: google.protobuf.Message: The Operation's result. @@ -128,8 +252,8 @@ def result(self, timeout=None, retry=DEFAULT_RETRY): google.api_core.GoogleAPICallError: If the operation errors or if the timeout is reached before the operation completes. """ - kwargs = {} if retry is DEFAULT_RETRY else {"retry": retry} - self._blocking_poll(timeout=timeout, **kwargs) + + self._blocking_poll(timeout=timeout, retry=retry, polling=polling) if self._exception is not None: # pylint: disable=raising-bad-type @@ -138,12 +262,18 @@ def result(self, timeout=None, retry=DEFAULT_RETRY): return self._result - def exception(self, timeout=None): + def exception(self, timeout=_DEFAULT_VALUE): """Get the exception from the operation, blocking if necessary. + See the documentation for the :meth:`result` method for details on how + this method operates, as both ``result`` and this method rely on the + exact same polling logic. The only difference is that this method does + not accept ``retry`` and ``polling`` arguments but relies on the default ones + instead. + Args: timeout (int): How long to wait for the operation to complete. - If None, wait indefinitely. + If None, wait indefinitely. Returns: Optional[google.api_core.GoogleAPICallError]: The operation's diff --git a/google/api_core/gapic_v1/config.py b/google/api_core/gapic_v1/config.py index 9c722871..36b50d9f 100644 --- a/google/api_core/gapic_v1/config.py +++ b/google/api_core/gapic_v1/config.py @@ -33,6 +33,9 @@ def _exception_class_for_grpc_status_name(name): """Returns the Google API exception class for a gRPC error code name. + DEPRECATED: use ``exceptions.exception_class_for_grpc_status`` method + directly instead. + Args: name (str): The name of the gRPC status code, for example, ``UNAVAILABLE``. 
@@ -47,6 +50,8 @@ def _exception_class_for_grpc_status_name(name): def _retry_from_retry_config(retry_params, retry_codes, retry_impl=retry.Retry): """Creates a Retry object given a gapic retry configuration. + DEPRECATED: instantiate retry and timeout classes directly instead. + Args: retry_params (dict): The retry parameter values, for example:: @@ -81,6 +86,8 @@ def _retry_from_retry_config(retry_params, retry_codes, retry_impl=retry.Retry): def _timeout_from_retry_config(retry_params): """Creates a ExponentialTimeout object given a gapic retry configuration. + DEPRECATED: instantiate retry and timeout classes directly instead. + Args: retry_params (dict): The retry parameter values, for example:: @@ -113,6 +120,8 @@ def parse_method_configs(interface_config, retry_impl=retry.Retry): """Creates default retry and timeout objects for each method in a gapic interface config. + DEPRECATED: instantiate retry and timeout classes directly instead. + Args: interface_config (Mapping): The interface config section of the full gapic library config. For example, If the full configuration has diff --git a/google/api_core/gapic_v1/method.py b/google/api_core/gapic_v1/method.py index 73c8d4bc..0c1624a3 100644 --- a/google/api_core/gapic_v1/method.py +++ b/google/api_core/gapic_v1/method.py @@ -22,8 +22,8 @@ import functools from google.api_core import grpc_helpers -from google.api_core import timeout from google.api_core.gapic_v1 import client_info +from google.api_core.timeout import TimeToDeadlineTimeout USE_DEFAULT_METADATA = object() @@ -52,55 +52,14 @@ def _apply_decorators(func, decorators): ``decorators`` may contain items that are ``None`` or ``False`` which will be ignored. 
""" - decorators = filter(_is_not_none_or_false, reversed(decorators)) + filtered_decorators = filter(_is_not_none_or_false, reversed(decorators)) - for decorator in decorators: + for decorator in filtered_decorators: func = decorator(func) return func -def _determine_timeout(default_timeout, specified_timeout, retry): - """Determines how timeout should be applied to a wrapped method. - - Args: - default_timeout (Optional[Timeout]): The default timeout specified - at method creation time. - specified_timeout (Optional[Timeout]): The timeout specified at - invocation time. If :attr:`DEFAULT`, this will be set to - the ``default_timeout``. - retry (Optional[Retry]): The retry specified at invocation time. - - Returns: - Optional[Timeout]: The timeout to apply to the method or ``None``. - """ - # If timeout is specified as a number instead of a Timeout instance, - # convert it to a ConstantTimeout. - if isinstance(specified_timeout, (int, float)): - specified_timeout = timeout.ConstantTimeout(specified_timeout) - if isinstance(default_timeout, (int, float)): - default_timeout = timeout.ConstantTimeout(default_timeout) - - if specified_timeout is DEFAULT: - specified_timeout = default_timeout - - if specified_timeout is default_timeout: - # If timeout is the default and the default timeout is exponential and - # a non-default retry is specified, make sure the timeout's deadline - # matches the retry's. This handles the case where the user leaves - # the timeout default but specifies a lower deadline via the retry. - if ( - retry - and retry is not DEFAULT - and isinstance(default_timeout, timeout.ExponentialTimeout) - ): - return default_timeout.with_deadline(retry._deadline) - else: - return default_timeout - - return specified_timeout - - class _GapicCallable(object): """Callable that applies retry, timeout, and metadata logic. @@ -108,9 +67,11 @@ class _GapicCallable(object): target (Callable): The low-level RPC method. 
retry (google.api_core.retry.Retry): The default retry for the callable. If ``None``, this callable will not retry by default - timeout (google.api_core.timeout.Timeout): The default timeout - for the callable. If ``None``, this callable will not specify - a timeout argument to the low-level RPC method by default. + timeout (google.api_core.timeout.Timeout): The default timeout for the + callable (i.e. duration of time within which an RPC must terminate + after its start, not to be confused with deadline). If ``None``, + this callable will not specify a timeout argument to the low-level + RPC method. metadata (Sequence[Tuple[str, str]]): Additional metadata that is provided to the RPC method on every invocation. This is merged with any metadata specified during invocation. If ``None``, no @@ -125,18 +86,16 @@ def __init__(self, target, retry, timeout, metadata=None): def __call__(self, *args, timeout=DEFAULT, retry=DEFAULT, **kwargs): """Invoke the low-level RPC with retry, timeout, and metadata.""" - timeout = _determine_timeout( - self._timeout, - timeout, - # Use only the invocation-specified retry only for this, as we only - # want to adjust the timeout deadline if the *user* specified - # a different retry. - retry, - ) if retry is DEFAULT: retry = self._retry + if timeout is DEFAULT: + timeout = self._timeout + + if isinstance(timeout, (int, float)): + timeout = TimeToDeadlineTimeout(timeout=timeout) + # Apply all applicable decorators. wrapped_func = _apply_decorators(self._target, [retry, timeout]) diff --git a/google/api_core/gapic_v1/routing_header.py b/google/api_core/gapic_v1/routing_header.py index a7bcb5a8..28b13abf 100644 --- a/google/api_core/gapic_v1/routing_header.py +++ b/google/api_core/gapic_v1/routing_header.py @@ -20,21 +20,32 @@ Generally, these headers are specified as gRPC metadata. 
""" +from enum import Enum from urllib.parse import urlencode ROUTING_METADATA_KEY = "x-goog-request-params" -def to_routing_header(params): +def to_routing_header(params, qualified_enums=True): """Returns a routing header string for the given request parameters. Args: params (Mapping[str, Any]): A dictionary containing the request parameters used for routing. + qualified_enums (bool): Whether to represent enum values + as their type-qualified symbol names instead of as their + unqualified symbol names. Returns: str: The routing header string. + """ + if not qualified_enums: + if isinstance(params, dict): + tuples = params.items() + else: + tuples = params + params = [(x[0], x[1].name) if isinstance(x[1], Enum) else x for x in tuples] return urlencode( params, # Per Google API policy (go/api-url-encoding), / is not encoded. @@ -42,16 +53,19 @@ def to_routing_header(params): ) -def to_grpc_metadata(params): +def to_grpc_metadata(params, qualified_enums=True): """Returns the gRPC metadata containing the routing headers for the given request parameters. Args: params (Mapping[str, Any]): A dictionary containing the request parameters used for routing. + qualified_enums (bool): Whether to represent enum values + as their type-qualified symbol names instead of as their + unqualified symbol names. Returns: Tuple(str, str): The gRPC metadata containing the routing header key and value. """ - return (ROUTING_METADATA_KEY, to_routing_header(params)) + return (ROUTING_METADATA_KEY, to_routing_header(params, qualified_enums)) diff --git a/google/api_core/operation.py b/google/api_core/operation.py index b17f753b..90cbdc99 100644 --- a/google/api_core/operation.py +++ b/google/api_core/operation.py @@ -61,10 +61,13 @@ class Operation(polling.PollingFuture): result. metadata_type (func:`type`): The protobuf type for the operation's metadata. - retry (google.api_core.retry.Retry): The retry configuration used - when polling. 
This can be used to control how often :meth:`done` - is polled. Regardless of the retry's ``deadline``, it will be - overridden by the ``timeout`` argument to :meth:`result`. + polling (google.api_core.retry.Retry): The configuration used for polling. + This parameter controls how often :meth:`done` is polled. If the + ``timeout`` argument is specified in the :meth:`result` method, it will + override the ``polling.timeout`` property. + retry (google.api_core.retry.Retry): DEPRECATED: use ``polling`` instead. + If specified it will override ``polling`` parameter to maintain + backward compatibility. """ def __init__( @@ -74,9 +77,10 @@ def __init__( cancel, result_type, metadata_type=None, - retry=polling.DEFAULT_RETRY, + polling=polling.DEFAULT_POLLING, + **kwargs ): - super(Operation, self).__init__(retry=retry) + super(Operation, self).__init__(polling=polling, **kwargs) self._operation = operation self._refresh = refresh self._cancel = cancel @@ -146,7 +150,7 @@ def _set_result_from_operation(self): ) self.set_exception(exception) - def _refresh_and_update(self, retry=polling.DEFAULT_RETRY): + def _refresh_and_update(self, retry=None): """Refresh the operation and update the result if needed. Args: @@ -155,10 +159,10 @@ def _refresh_and_update(self, retry=polling.DEFAULT_RETRY): # If the currently cached operation is done, no need to make another # RPC as it will not change once done. if not self._operation.done: - self._operation = self._refresh(retry=retry) + self._operation = self._refresh(retry=retry) if retry else self._refresh() self._set_result_from_operation() - def done(self, retry=polling.DEFAULT_RETRY): + def done(self, retry=None): """Checks to see if the operation is complete. 
Args: diff --git a/google/api_core/operations_v1/operations_async_client.py b/google/api_core/operations_v1/operations_async_client.py index 5a5e5562..81c4513c 100644 --- a/google/api_core/operations_v1/operations_async_client.py +++ b/google/api_core/operations_v1/operations_async_client.py @@ -24,8 +24,10 @@ import functools +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1, page_iterator_async -from google.api_core.operations_v1 import operations_client_config +from google.api_core import retry as retries +from google.api_core import timeout as timeouts from google.longrunning import operations_pb2 @@ -41,39 +43,44 @@ class OperationsAsyncClient: the default configuration is used. """ - def __init__(self, channel, client_config=operations_client_config.config): + def __init__(self, channel, client_config=None): # Create the gRPC client stub with gRPC AsyncIO channel. self.operations_stub = operations_pb2.OperationsStub(channel) - # Create all wrapped methods using the interface configuration. - # The interface config contains all of the default settings for retry - # and timeout for each RPC method. 
- interfaces = client_config["interfaces"] - interface_config = interfaces["google.longrunning.Operations"] - method_configs = gapic_v1.config_async.parse_method_configs(interface_config) + default_retry = retries.Retry( + initial=0.1, # seconds + maximum=60.0, # seconds + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + timeout=600.0, # seconds + ) + default_timeout = timeouts.TimeToDeadlineTimeout(timeout=600.0) self._get_operation = gapic_v1.method_async.wrap_method( self.operations_stub.GetOperation, - default_retry=method_configs["GetOperation"].retry, - default_timeout=method_configs["GetOperation"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, ) self._list_operations = gapic_v1.method_async.wrap_method( self.operations_stub.ListOperations, - default_retry=method_configs["ListOperations"].retry, - default_timeout=method_configs["ListOperations"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, ) self._cancel_operation = gapic_v1.method_async.wrap_method( self.operations_stub.CancelOperation, - default_retry=method_configs["CancelOperation"].retry, - default_timeout=method_configs["CancelOperation"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, ) self._delete_operation = gapic_v1.method_async.wrap_method( self.operations_stub.DeleteOperation, - default_retry=method_configs["DeleteOperation"].retry, - default_timeout=method_configs["DeleteOperation"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, ) async def get_operation( diff --git a/google/api_core/operations_v1/operations_client.py b/google/api_core/operations_v1/operations_client.py index e48eac01..3ddd3c47 100644 --- a/google/api_core/operations_v1/operations_client.py +++ b/google/api_core/operations_v1/operations_client.py @@ -37,9 +37,11 @@ import functools +from google.api_core import exceptions as 
core_exceptions from google.api_core import gapic_v1 from google.api_core import page_iterator -from google.api_core.operations_v1 import operations_client_config +from google.api_core import retry as retries +from google.api_core import timeout as timeouts from google.longrunning import operations_pb2 @@ -54,39 +56,44 @@ class OperationsClient(object): the default configuration is used. """ - def __init__(self, channel, client_config=operations_client_config.config): + def __init__(self, channel, client_config=None): # Create the gRPC client stub. self.operations_stub = operations_pb2.OperationsStub(channel) - # Create all wrapped methods using the interface configuration. - # The interface config contains all of the default settings for retry - # and timeout for each RPC method. - interfaces = client_config["interfaces"] - interface_config = interfaces["google.longrunning.Operations"] - method_configs = gapic_v1.config.parse_method_configs(interface_config) + default_retry = retries.Retry( + initial=0.1, # seconds + maximum=60.0, # seconds + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + timeout=600.0, # seconds + ) + default_timeout = timeouts.TimeToDeadlineTimeout(timeout=600.0) self._get_operation = gapic_v1.method.wrap_method( self.operations_stub.GetOperation, - default_retry=method_configs["GetOperation"].retry, - default_timeout=method_configs["GetOperation"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, ) self._list_operations = gapic_v1.method.wrap_method( self.operations_stub.ListOperations, - default_retry=method_configs["ListOperations"].retry, - default_timeout=method_configs["ListOperations"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, ) self._cancel_operation = gapic_v1.method.wrap_method( self.operations_stub.CancelOperation, - default_retry=method_configs["CancelOperation"].retry, - 
default_timeout=method_configs["CancelOperation"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, ) self._delete_operation = gapic_v1.method.wrap_method( self.operations_stub.DeleteOperation, - default_retry=method_configs["DeleteOperation"].retry, - default_timeout=method_configs["DeleteOperation"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, ) # Service calls diff --git a/google/api_core/operations_v1/operations_client_config.py b/google/api_core/operations_v1/operations_client_config.py index 6cf95753..70cfd70a 100644 --- a/google/api_core/operations_v1/operations_client_config.py +++ b/google/api_core/operations_v1/operations_client_config.py @@ -14,6 +14,7 @@ """gapic configuration for the googe.longrunning.operations client.""" +# DEPRECATED: retry and timeout classes are instantiated directly config = { "interfaces": { "google.longrunning.Operations": { diff --git a/google/api_core/operations_v1/transports/__init__.py b/google/api_core/operations_v1/transports/__init__.py index b443c078..df53e15e 100644 --- a/google/api_core/operations_v1/transports/__init__.py +++ b/google/api_core/operations_v1/transports/__init__.py @@ -14,14 +14,13 @@ # limitations under the License. # from collections import OrderedDict -from typing import Dict, Type from .base import OperationsTransport from .rest import OperationsRestTransport # Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[OperationsTransport]] +_transport_registry = OrderedDict() _transport_registry["rest"] = OperationsRestTransport __all__ = ( diff --git a/google/api_core/operations_v1/transports/rest.py b/google/api_core/operations_v1/transports/rest.py index 27ed7661..bb6cd99c 100644 --- a/google/api_core/operations_v1/transports/rest.py +++ b/google/api_core/operations_v1/transports/rest.py @@ -14,6 +14,7 @@ # limitations under the License. 
# +import re from typing import Callable, Dict, Optional, Sequence, Tuple, Union from requests import __version__ as requests_version @@ -73,6 +74,7 @@ def __init__( always_use_jwt_access: Optional[bool] = False, url_scheme: str = "https", http_options: Optional[Dict] = None, + path_prefix: str = "v1", ) -> None: """Instantiate the transport. @@ -108,12 +110,24 @@ def __init__( http_options: a dictionary of http_options for transcoding, to override the defaults from operatons.proto. Each method has an entry with the corresponding http rules as value. + path_prefix: path prefix (usually represents API version). Set to + "v1" by default. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + super().__init__( host=host, credentials=credentials, @@ -127,6 +141,7 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) self._http_options = http_options or {} + self._path_prefix = path_prefix def _list_operations( self, @@ -157,7 +172,10 @@ def _list_operations( """ http_options = [ - {"method": "get", "uri": "/v1/{name=operations}"}, + { + "method": "get", + "uri": "/{}/{{name=**}}/operations".format(self._path_prefix), + }, ] if "google.longrunning.Operations.ListOperations" in self._http_options: http_options = self._http_options[ @@ -188,7 +206,7 @@ def _list_operations( headers = dict(metadata) headers["Content-Type"] = "application/json" response = getattr(self._session, method)( - "https://{host}{uri}".format(host=self._host, uri=uri), + 
"{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params), @@ -234,7 +252,10 @@ def _get_operation( """ http_options = [ - {"method": "get", "uri": "/v1/{name=operations/**}"}, + { + "method": "get", + "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix), + }, ] if "google.longrunning.Operations.GetOperation" in self._http_options: http_options = self._http_options[ @@ -265,7 +286,7 @@ def _get_operation( headers = dict(metadata) headers["Content-Type"] = "application/json" response = getattr(self._session, method)( - "https://{host}{uri}".format(host=self._host, uri=uri), + "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params), @@ -304,7 +325,10 @@ def _delete_operation( """ http_options = [ - {"method": "delete", "uri": "/v1/{name=operations/**}"}, + { + "method": "delete", + "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix), + }, ] if "google.longrunning.Operations.DeleteOperation" in self._http_options: http_options = self._http_options[ @@ -335,7 +359,7 @@ def _delete_operation( headers = dict(metadata) headers["Content-Type"] = "application/json" response = getattr(self._session, method)( - "https://{host}{uri}".format(host=self._host, uri=uri), + "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params), @@ -371,7 +395,11 @@ def _cancel_operation( """ http_options = [ - {"method": "post", "uri": "/v1/{name=operations/**}:cancel", "body": "*"}, + { + "method": "post", + "uri": "/{}/{{name=**/operations/*}}:cancel".format(self._path_prefix), + "body": "*", + }, ] if "google.longrunning.Operations.CancelOperation" in self._http_options: http_options = self._http_options[ @@ -411,7 +439,7 @@ def _cancel_operation( headers = dict(metadata) headers["Content-Type"] = "application/json" response = 
getattr(self._session, method)( - "https://{host}{uri}".format(host=self._host, uri=uri), + "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params), diff --git a/google/api_core/retry.py b/google/api_core/retry.py index ce496937..f9207a12 100644 --- a/google/api_core/retry.py +++ b/google/api_core/retry.py @@ -139,15 +139,15 @@ def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULT Yields: float: successive sleep intervals. """ - delay = initial + delay = min(initial, maximum) while True: - # Introduce jitter by yielding a delay that is uniformly distributed - # to average out to the delay time. - yield min(random.uniform(0.0, delay * 2.0), maximum) - delay = delay * multiplier + yield random.uniform(0.0, delay) + delay = min(delay * multiplier, maximum) -def retry_target(target, predicate, sleep_generator, deadline, on_error=None): +def retry_target( + target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs +): """Call a function and retry if it fails. This is the lowest-level retry helper. Generally, you'll use the @@ -161,12 +161,12 @@ def retry_target(target, predicate, sleep_generator, deadline, on_error=None): It should return True to retry or False otherwise. sleep_generator (Iterable[float]): An infinite iterator that determines how long to sleep between retries. - deadline (float): How long to keep retrying the target. The last sleep - period is shortened as necessary, so that the last retry runs at - ``deadline`` (and not considerably beyond it). + timeout (float): How long to keep retrying the target. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. + deadline (float): DEPRECATED: use ``timeout`` instead. For backward + compatibility, if specified it will override ``timeout`` parameter. 
Returns: Any: the return value of the target function. @@ -176,12 +176,13 @@ def retry_target(target, predicate, sleep_generator, deadline, on_error=None): ValueError: If the sleep generator stops yielding values. Exception: If the target raises a method that isn't retryable. """ - if deadline is not None: - deadline_datetime = datetime_helpers.utcnow() + datetime.timedelta( - seconds=deadline - ) + + timeout = kwargs.get("deadline", timeout) + + if timeout is not None: + deadline = datetime_helpers.utcnow() + datetime.timedelta(seconds=timeout) else: - deadline_datetime = None + deadline = None last_exc = None @@ -198,19 +199,17 @@ def retry_target(target, predicate, sleep_generator, deadline, on_error=None): if on_error is not None: on_error(exc) - now = datetime_helpers.utcnow() - - if deadline_datetime is not None: - if deadline_datetime <= now: + if deadline is not None: + next_attempt_time = datetime_helpers.utcnow() + datetime.timedelta( + seconds=sleep + ) + if deadline < next_attempt_time: raise exceptions.RetryError( "Deadline of {:.1f}s exceeded while calling target function".format( - deadline + timeout ), last_exc, ) from last_exc - else: - time_to_deadline = (deadline_datetime - now).total_seconds() - sleep = min(time_to_deadline, sleep) _LOGGER.debug( "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) @@ -223,12 +222,77 @@ def retry_target(target, predicate, sleep_generator, deadline, on_error=None): class Retry(object): """Exponential retry decorator. - This class is a decorator used to add exponential back-off retry behavior - to an RPC call. + This class is a decorator used to add retry or polling behavior to an RPC + call. Although the default behavior is to retry transient API errors, a different predicate can be provided to retry other exceptions. 
+ There two important concepts that retry/polling behavior may operate on, + Deadline and Timeout, which need to be properly defined for the correct + usage of this class and the rest of the library. + + Deadline: a fixed point in time by which a certain operation must + terminate. For example, if a certain operation has a deadline + "2022-10-18T23:30:52.123Z" it must terminate (successfully or with an + error) by that time, regardless of when it was started or whether it + was started at all. + + Timeout: the maximum duration of time after which a certain operation + must terminate (successfully or with an error). The countdown begins right + after an operation was started. For example, if an operation was started at + 09:24:00 with timeout of 75 seconds, it must terminate no later than + 09:25:15. + + Unfortunately, in the past this class (and the api-core library as a whole) has not been + properly distinguishing the concepts of "timeout" and "deadline", and the + ``deadline`` parameter has meant ``timeout``. That is why + ``deadline`` has been deprecated and ``timeout`` should be used instead. If the + ``deadline`` parameter is set, it will override the ``timeout`` parameter. In other words, + ``retry.deadline`` should be treated as just a deprecated alias for + ``retry.timeout``. + + Said another way, it is safe to assume that this class and the rest of this + library operate in terms of timeouts (not deadlines) unless explicitly + noted the usage of deadline semantics. + + It is also important to + understand the three most common applications of the Timeout concept in the + context of this library. + + Usually the generic Timeout term may stand for one of the following actual + timeouts: RPC Timeout, Retry Timeout, or Polling Timeout. + + RPC Timeout: a value supplied by the client to the server so + that the server side knows the maximum amount of time it is expected to + spend handling that specifc RPC. 
For example, in the case of gRPC transport, + RPC Timeout is represented by setting "grpc-timeout" header in the HTTP2 + request. The `timeout` property of this class normally never represents the + RPC Timeout as it is handled separately by the ``google.api_core.timeout`` + module of this library. + + Retry Timeout: this is the most common meaning of the ``timeout`` property + of this class, and defines how long a certain RPC may be retried in case + the server returns an error. + + Polling Timeout: defines how long the + client side is allowed to call the polling RPC repeatedly to check a status of a + long-running operation. Each polling RPC is + expected to succeed (its errors are supposed to be handled by the retry + logic). The decision as to whether a new polling attempt needs to be made is based + not on the RPC status code but on the status of the returned + status of an operation. In other words: we will poll a long-running operation until the operation is done or the polling timeout expires. Each poll will inform us of the status of the operation. The poll consists of an RPC to the server that may itself be retried as per the poll-specific retry settings in case of errors. The operation-level retry settings do NOT apply to polling-RPC retries. + + With the actual timeout types being defined above, the client libraries + often refer to just Timeout without clarifying which type specifically + that is. In that case the actual timeout type (sometimes also refered to as + Logical Timeout) can be determined from the context. If it is a unary rpc + call (i.e. a regular one) Timeout usually stands for the RPC Timeout (if + provided directly as a standalone value) or Retry Timeout (if provided as + ``retry.timeout`` property of the unary RPC's retry config). For + ``Operation`` or ``PollingFuture`` in general Timeout stands for + Polling Timeout. + Args: predicate (Callable[Exception]): A callable that should return ``True`` if the given exception is retryable. 
@@ -236,9 +300,9 @@ class Retry(object): must be greater than 0. maximum (float): The maximum amount of time to delay in seconds. multiplier (float): The multiplier applied to the delay. - deadline (float): How long to keep retrying in seconds. The last sleep - period is shortened as necessary, so that the last retry runs at - ``deadline`` (and not considerably beyond it). + timeout (float): How long to keep retrying, in seconds. + deadline (float): DEPRECATED: use `timeout` instead. For backward + compatibility, if specified it will override the ``timeout`` parameter. """ def __init__( @@ -247,14 +311,16 @@ def __init__( initial=_DEFAULT_INITIAL_DELAY, maximum=_DEFAULT_MAXIMUM_DELAY, multiplier=_DEFAULT_DELAY_MULTIPLIER, - deadline=_DEFAULT_DEADLINE, + timeout=_DEFAULT_DEADLINE, on_error=None, + **kwargs ): self._predicate = predicate self._initial = initial self._multiplier = multiplier self._maximum = maximum - self._deadline = deadline + self._timeout = kwargs.get("deadline", timeout) + self._deadline = self._timeout self._on_error = on_error def __call__(self, func, on_error=None): @@ -284,7 +350,7 @@ def retry_wrapped_func(*args, **kwargs): target, self._predicate, sleep_generator, - self._deadline, + self._timeout, on_error=on_error, ) @@ -292,23 +358,45 @@ def retry_wrapped_func(*args, **kwargs): @property def deadline(self): - return self._deadline + """ + DEPRECATED: use ``timeout`` instead. Refer to the ``Retry`` class + documentation for details. + """ + return self._timeout + + @property + def timeout(self): + return self._timeout def with_deadline(self, deadline): - """Return a copy of this retry with the given deadline. + """Return a copy of this retry with the given timeout. + + DEPRECATED: use :meth:`with_timeout` instead. Refer to the ``Retry`` class + documentation for details. + + Args: + deadline (float): How long to keep retrying in seconds. + + Returns: + Retry: A new retry instance with the given timeout. 
+ """ + return self.with_timeout(timeout=deadline) + + def with_timeout(self, timeout): + """Return a copy of this retry with the given timeout. Args: - deadline (float): How long to keep retrying. + timeout (float): How long to keep retrying, in seconds. Returns: - Retry: A new retry instance with the given deadline. + Retry: A new retry instance with the given timeout. """ return Retry( predicate=self._predicate, initial=self._initial, maximum=self._maximum, multiplier=self._multiplier, - deadline=deadline, + timeout=timeout, on_error=self._on_error, ) @@ -327,7 +415,7 @@ def with_predicate(self, predicate): initial=self._initial, maximum=self._maximum, multiplier=self._multiplier, - deadline=self._deadline, + timeout=self._timeout, on_error=self._on_error, ) @@ -348,19 +436,19 @@ def with_delay(self, initial=None, maximum=None, multiplier=None): initial=initial if initial is not None else self._initial, maximum=maximum if maximum is not None else self._maximum, multiplier=multiplier if multiplier is not None else self._multiplier, - deadline=self._deadline, + timeout=self._timeout, on_error=self._on_error, ) def __str__(self): return ( "".format( + "multiplier={:.1f}, timeout={}, on_error={}>".format( self._predicate, self._initial, self._maximum, self._multiplier, - self._deadline, + self._timeout, # timeout can be None, thus no {:.1f} self._on_error, ) ) diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 68a25597..81698838 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -68,9 +68,12 @@ async def check_if_exists(): _DEFAULT_MAXIMUM_DELAY = 60.0 # seconds _DEFAULT_DELAY_MULTIPLIER = 2.0 _DEFAULT_DEADLINE = 60.0 * 2.0 # seconds +_DEFAULT_TIMEOUT = 60.0 * 2.0 # seconds -async def retry_target(target, predicate, sleep_generator, deadline, on_error=None): +async def retry_target( + target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs +): """Call a function and retry if it fails. 
This is the lowest-level retry helper. Generally, you'll use the @@ -84,12 +87,12 @@ async def retry_target(target, predicate, sleep_generator, deadline, on_error=No It should return True to retry or False otherwise. sleep_generator (Iterable[float]): An infinite iterator that determines how long to sleep between retries. - deadline (float): How long to keep retrying the target. The last sleep - period is shortened as necessary, so that the last retry runs at - ``deadline`` (and not considerably beyond it). + timeout (float): How long to keep retrying the target, in seconds. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. + deadline (float): DEPRECATED use ``timeout`` instead. For backward + compatibility, if set it will override the ``timeout`` parameter. Returns: Any: the return value of the target function. @@ -99,9 +102,12 @@ async def retry_target(target, predicate, sleep_generator, deadline, on_error=No ValueError: If the sleep generator stops yielding values. Exception: If the target raises a method that isn't retryable. """ + + timeout = kwargs.get("deadline", timeout) + deadline_dt = ( - (datetime_helpers.utcnow() + datetime.timedelta(seconds=deadline)) - if deadline + (datetime_helpers.utcnow() + datetime.timedelta(seconds=timeout)) + if timeout else None ) @@ -132,8 +138,8 @@ async def retry_target(target, predicate, sleep_generator, deadline, on_error=No # Chains the raising RetryError with the root cause error, # which helps observability and debugability. raise exceptions.RetryError( - "Deadline of {:.1f}s exceeded while calling target function".format( - deadline + "Timeout of {:.1f}s exceeded while calling target function".format( + timeout ), last_exc, ) from last_exc @@ -165,12 +171,12 @@ class AsyncRetry: must be greater than 0. maximum (float): The maximum amout of time to delay in seconds. multiplier (float): The multiplier applied to the delay. 
- deadline (float): How long to keep retrying in seconds. The last sleep - period is shortened as necessary, so that the last retry runs at - ``deadline`` (and not considerably beyond it). + timeout (float): How long to keep retrying in seconds. on_error (Callable[Exception]): A function to call while processing a retryable exception. Any error raised by this function will *not* be caught. + deadline (float): DEPRECATED use ``timeout`` instead. If set it will + override ``timeout`` parameter. """ def __init__( @@ -179,14 +185,16 @@ def __init__( initial=_DEFAULT_INITIAL_DELAY, maximum=_DEFAULT_MAXIMUM_DELAY, multiplier=_DEFAULT_DELAY_MULTIPLIER, - deadline=_DEFAULT_DEADLINE, + timeout=_DEFAULT_TIMEOUT, on_error=None, + **kwargs ): self._predicate = predicate self._initial = initial self._multiplier = multiplier self._maximum = maximum - self._deadline = deadline + self._timeout = kwargs.get("deadline", timeout) + self._deadline = self._timeout self._on_error = on_error def __call__(self, func, on_error=None): @@ -216,7 +224,7 @@ async def retry_wrapped_func(*args, **kwargs): target, self._predicate, sleep_generator, - self._deadline, + self._timeout, on_error=on_error, ) @@ -228,7 +236,7 @@ def _replace( initial=None, maximum=None, multiplier=None, - deadline=None, + timeout=None, on_error=None, ): return AsyncRetry( @@ -236,12 +244,13 @@ def _replace( initial=initial or self._initial, maximum=maximum or self._maximum, multiplier=multiplier or self._multiplier, - deadline=deadline or self._deadline, + timeout=timeout or self._timeout, on_error=on_error or self._on_error, ) def with_deadline(self, deadline): """Return a copy of this retry with the given deadline. + DEPRECATED: use :meth:`with_timeout` instead. Args: deadline (float): How long to keep retrying. @@ -249,7 +258,18 @@ def with_deadline(self, deadline): Returns: AsyncRetry: A new retry instance with the given deadline. 
""" - return self._replace(deadline=deadline) + return self._replace(timeout=deadline) + + def with_timeout(self, timeout): + """Return a copy of this retry with the given timeout. + + Args: + timeout (float): How long to keep retrying, in seconds. + + Returns: + AsyncRetry: A new retry instance with the given timeout. + """ + return self._replace(timeout=timeout) def with_predicate(self, predicate): """Return a copy of this retry with the given predicate. @@ -280,12 +300,12 @@ def with_delay(self, initial=None, maximum=None, multiplier=None): def __str__(self): return ( "".format( + "multiplier={:.1f}, timeout={:.1f}, on_error={}>".format( self._predicate, self._initial, self._maximum, self._multiplier, - self._deadline, + self._timeout, self._on_error, ) ) diff --git a/google/api_core/timeout.py b/google/api_core/timeout.py index 73232180..3546d540 100644 --- a/google/api_core/timeout.py +++ b/google/api_core/timeout.py @@ -14,8 +14,9 @@ """Decorators for applying timeout arguments to functions. -These decorators are used to wrap API methods to apply either a constant -or exponential timeout argument. +These decorators are used to wrap API methods to apply either a +Deadline-dependent (recommended), constant (DEPRECATED) or exponential +(DEPRECATED) timeout argument. For example, imagine an API method that can take a while to return results, such as one that might block until a resource is ready: @@ -66,9 +67,69 @@ def is_thing_ready(timeout=None): _DEFAULT_DEADLINE = None +class TimeToDeadlineTimeout(object): + """A decorator that decreases timeout set for an RPC based on how much time + has left till its deadline. The deadline is calculated as + ``now + initial_timeout`` when this decorator is first called for an rpc. + + In other words this decorator implements deadline semantics in terms of a + sequence of decreasing timeouts t0 > t1 > t2 ... tn >= 0. + + Args: + timeout (Optional[float]): the timeout (in seconds) to applied to the + wrapped function. 
If `None`, the target function is expected to + never timeout. + """ + + def __init__(self, timeout=None, clock=datetime_helpers.utcnow): + self._timeout = timeout + self._clock = clock + + def __call__(self, func): + """Apply the timeout decorator. + + Args: + func (Callable): The function to apply the timeout argument to. + This function must accept a timeout keyword argument. + + Returns: + Callable: The wrapped function. + """ + + first_attempt_timestamp = self._clock().timestamp() + + @functools.wraps(func) + def func_with_timeout(*args, **kwargs): + """Wrapped function that adds timeout.""" + + remaining_timeout = self._timeout + if remaining_timeout is not None: + # All calculations are in seconds + now_timestamp = self._clock().timestamp() + + # To avoid usage of nonlocal but still have round timeout + # numbers for first attempt (in most cases the only attempt made + # for an RPC. + if now_timestamp - first_attempt_timestamp < 0.001: + now_timestamp = first_attempt_timestamp + + time_since_first_attempt = now_timestamp - first_attempt_timestamp + # Avoid setting negative timeout + kwargs["timeout"] = max(0, self._timeout - time_since_first_attempt) + + return func(*args, **kwargs) + + return func_with_timeout + + def __str__(self): + return "".format(self._timeout) + + class ConstantTimeout(object): """A decorator that adds a constant timeout argument. + DEPRECATED: use ``TimeToDeadlineTimeout`` instead. + This is effectively equivalent to ``functools.partial(func, timeout=timeout)``. @@ -140,6 +201,9 @@ def _exponential_timeout_generator(initial, maximum, multiplier, deadline): class ExponentialTimeout(object): """A decorator that adds an exponentially increasing timeout argument. + DEPRECATED: the concept of incrementing timeout exponentially has been + deprecated. Use ``TimeToDeadlineTimeout`` instead. + This is useful if a function is called multiple times. 
Each time the function is called this decorator will calculate a new timeout parameter based on the the number of times the function has been called. diff --git a/noxfile.py b/noxfile.py index 2d8f1e02..d7cba97c 100644 --- a/noxfile.py +++ b/noxfile.py @@ -26,7 +26,7 @@ # Black and flake8 clash on the syntax for ignoring flake8's F401 in this file. BLACK_EXCLUDES = ["--exclude", "^/google/api_core/operations_v1/__init__.py"] -DEFAULT_PYTHON_VERSION = "3.7" +DEFAULT_PYTHON_VERSION = "3.10" CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() # 'docfx' is excluded since it only needs to run in 'docs-presubmit' @@ -61,7 +61,7 @@ def lint(session): Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ - session.install("flake8", "flake8-import-order", BLACK_VERSION) + session.install("flake8", BLACK_VERSION) session.install(".") session.run( "black", @@ -94,8 +94,16 @@ def default(session, install_grpc=True): CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - # Install all test dependencies, then install this package in-place. - session.install("dataclasses", "mock", "pytest", "pytest-cov", "pytest-xdist") + session.install( + "dataclasses", + "mock", + # Revert to just "pytest" once + # https://github.com/pytest-dev/pytest/issues/10451 is fixed + "pytest<7.2.0", + "pytest-cov", + "pytest-xdist", + ) + if install_grpc: session.install("-e", ".[grpc]", "-c", constraints_path) else: @@ -104,7 +112,7 @@ def default(session, install_grpc=True): pytest_args = [ "python", "-m", - "py.test", + "pytest", *( # Helpful for running a single test or testfile. 
session.posargs @@ -163,7 +171,7 @@ def unit_wo_grpc(session): default(session, install_grpc=False) -@nox.session(python="3.8") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" @@ -204,7 +212,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.8") +@nox.session(python="3.9") def docs(session): """Build the docs for this library.""" @@ -226,7 +234,7 @@ def docs(session): ) -@nox.session(python="3.8") +@nox.session(python="3.9") def docfx(session): """Build the docfx yaml files for this library.""" diff --git a/tests/asyncio/gapic/test_method_async.py b/tests/asyncio/gapic/test_method_async.py index 11847da7..02d883f6 100644 --- a/tests/asyncio/gapic/test_method_async.py +++ b/tests/asyncio/gapic/test_method_async.py @@ -198,41 +198,6 @@ async def test_wrap_method_with_overriding_retry_and_timeout(unused_sleep): method.assert_called_with(timeout=22, metadata=mock.ANY) -@mock.patch("asyncio.sleep") -@mock.patch( - "google.api_core.datetime_helpers.utcnow", - side_effect=_utcnow_monotonic(), - autospec=True, -) -@pytest.mark.asyncio -async def test_wrap_method_with_overriding_retry_deadline(utcnow, unused_sleep): - fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42) - method = mock.Mock( - spec=aio.UnaryUnaryMultiCallable, - side_effect=([exceptions.InternalServerError(None)] * 4) + [fake_call], - ) - - default_retry = retry_async.AsyncRetry() - default_timeout = timeout.ExponentialTimeout(deadline=60) - wrapped_method = gapic_v1.method_async.wrap_method( - method, default_retry, default_timeout - ) - - # Overriding only the retry's deadline should also override the timeout's - # deadline. 
- result = await wrapped_method(retry=default_retry.with_deadline(30)) - - assert result == 42 - timeout_args = [call[1]["timeout"] for call in method.call_args_list] - assert timeout_args == [5.0, 10.0, 20.0, 26.0, 25.0] - assert utcnow.call_count == ( - 1 - + 1 # Compute wait_for timeout in retry_async - + 5 # First to set the deadline. - + 5 # One for each min(timeout, maximum, (DEADLINE - NOW).seconds) - ) - - @pytest.mark.asyncio async def test_wrap_method_with_overriding_timeout_as_a_number(): fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42) diff --git a/tests/asyncio/operations_v1/test_operations_async_client.py b/tests/asyncio/operations_v1/test_operations_async_client.py index 47c3b4b4..34236da7 100644 --- a/tests/asyncio/operations_v1/test_operations_async_client.py +++ b/tests/asyncio/operations_v1/test_operations_async_client.py @@ -17,7 +17,7 @@ try: from grpc import aio -except ImportError: +except ImportError: # pragma: NO COVER pytest.skip("No GRPC", allow_module_level=True) from google.api_core import grpc_helpers_async diff --git a/tests/asyncio/test_grpc_helpers_async.py b/tests/asyncio/test_grpc_helpers_async.py index 2d0a1bcd..2f8f3460 100644 --- a/tests/asyncio/test_grpc_helpers_async.py +++ b/tests/asyncio/test_grpc_helpers_async.py @@ -18,11 +18,11 @@ try: import grpc from grpc import aio -except ImportError: +except ImportError: # pragma: NO COVER grpc = aio = None -if grpc is None: +if grpc is None: # pragma: NO COVER pytest.skip("No GRPC", allow_module_level=True) diff --git a/tests/asyncio/test_operation_async.py b/tests/asyncio/test_operation_async.py index 26ad7cef..127ba634 100644 --- a/tests/asyncio/test_operation_async.py +++ b/tests/asyncio/test_operation_async.py @@ -18,7 +18,7 @@ try: import grpc # noqa: F401 -except ImportError: +except ImportError: # pragma: NO COVER pytest.skip("No GRPC", allow_module_level=True) from google.api_core import exceptions diff --git a/tests/asyncio/test_retry_async.py 
b/tests/asyncio/test_retry_async.py index 873caaf1..14807eb5 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/test_retry_async.py @@ -116,7 +116,7 @@ async def test_retry_target_deadline_exceeded(utcnow, sleep): await retry_async.retry_target(target, predicate, range(10), deadline=10) assert exc_info.value.cause == exception - assert exc_info.match("Deadline of 10.0s exceeded") + assert exc_info.match("Timeout of 10.0s exceeded") assert exc_info.match("last exception: meep") assert target.call_count == 2 @@ -253,7 +253,7 @@ def if_exception_type(exc): assert re.match( ( r", " - r"initial=1.0, maximum=60.0, multiplier=2.0, deadline=120.0, " + r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, " r"on_error=None>" ), str(retry_), @@ -276,8 +276,7 @@ async def test___call___and_execute_success(self, sleep): target.assert_called_once_with("meep") sleep.assert_not_called() - # Make uniform return half of its maximum, which is the calculated sleep time. - @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0) + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___and_execute_retry(self, sleep, uniform): @@ -302,8 +301,7 @@ async def test___call___and_execute_retry(self, sleep, uniform): sleep.assert_called_once_with(retry_._initial) assert on_error.call_count == 1 - # Make uniform return half of its maximum, which is the calculated sleep time. 
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0) + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform): @@ -376,8 +374,7 @@ async def test___init___without_retry_executed(self, sleep): sleep.assert_not_called() _some_function.assert_not_called() - # Make uniform return half of its maximum, which is the calculated sleep time. - @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0) + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___init___when_retry_is_executed(self, sleep, uniform): diff --git a/tests/unit/future/test_polling.py b/tests/unit/future/test_polling.py index 2381d036..f5d9b4f1 100644 --- a/tests/unit/future/test_polling.py +++ b/tests/unit/future/test_polling.py @@ -24,7 +24,7 @@ class PollingFutureImpl(polling.PollingFuture): - def done(self): + def done(self, retry=None): return False def cancel(self): @@ -33,9 +33,6 @@ def cancel(self): def cancelled(self): return False - def running(self): - return True - def test_polling_future_constructor(): future = PollingFutureImpl() @@ -84,20 +81,23 @@ def test_invoke_callback_exception(): class PollingFutureImplWithPoll(PollingFutureImpl): - def __init__(self): + def __init__(self, max_poll_count=1): super(PollingFutureImplWithPoll, self).__init__() self.poll_count = 0 self.event = threading.Event() + self.max_poll_count = max_poll_count - def done(self, retry=polling.DEFAULT_RETRY): + def done(self, retry=None): self.poll_count += 1 + if self.max_poll_count > self.poll_count: + return False self.event.wait() self.set_result(42) return True -def test_result_with_polling(): - future = PollingFutureImplWithPoll() +def test_result_with_one_polling(): + future = 
PollingFutureImplWithPoll(max_poll_count=1) future.event.set() result = future.result() @@ -109,8 +109,34 @@ def test_result_with_polling(): assert future.poll_count == 1 +def test_result_with_two_pollings(): + future = PollingFutureImplWithPoll(max_poll_count=2) + + future.event.set() + result = future.result() + + assert result == 42 + assert future.poll_count == 2 + # Repeated calls should not cause additional polling + assert future.result() == result + assert future.poll_count == 2 + + +def test_result_with_two_pollings_custom_retry(): + future = PollingFutureImplWithPoll(max_poll_count=2) + + future.event.set() + result = future.result() + + assert result == 42 + assert future.poll_count == 2 + # Repeated calls should not cause additional polling + assert future.result() == result + assert future.poll_count == 2 + + class PollingFutureImplTimeout(PollingFutureImplWithPoll): - def done(self, retry=polling.DEFAULT_RETRY): + def done(self, retry=None): time.sleep(1) return False @@ -132,11 +158,11 @@ def __init__(self, errors): super(PollingFutureImplTransient, self).__init__() self._errors = errors - def done(self, retry=polling.DEFAULT_RETRY): + def done(self, retry=None): + self.poll_count += 1 if self._errors: error, self._errors = self._errors[0], self._errors[1:] raise error("testing") - self.poll_count += 1 self.set_result(42) return True @@ -144,17 +170,17 @@ def done(self, retry=polling.DEFAULT_RETRY): def test_result_transient_error(): future = PollingFutureImplTransient( ( - exceptions.TooManyRequests, - exceptions.InternalServerError, - exceptions.BadGateway, + polling._OperationNotComplete, + polling._OperationNotComplete, + polling._OperationNotComplete, ) ) result = future.result() assert result == 42 - assert future.poll_count == 1 + assert future.poll_count == 4 # Repeated calls should not cause additional polling assert future.result() == result - assert future.poll_count == 1 + assert future.poll_count == 4 def 
test_callback_background_thread(): @@ -197,23 +223,23 @@ def test_double_callback_background_thread(): class PollingFutureImplWithoutRetry(PollingFutureImpl): - def done(self): + def done(self, retry=None): return True - def result(self): + def result(self, timeout=None, retry=None, polling=None): return super(PollingFutureImplWithoutRetry, self).result() - def _blocking_poll(self, timeout): + def _blocking_poll(self, timeout=None, retry=None, polling=None): return super(PollingFutureImplWithoutRetry, self)._blocking_poll( timeout=timeout ) class PollingFutureImplWith_done_or_raise(PollingFutureImpl): - def done(self): + def done(self, retry=None): return True - def _done_or_raise(self): + def _done_or_raise(self, retry=None): return super(PollingFutureImplWith_done_or_raise, self)._done_or_raise() @@ -223,12 +249,12 @@ def test_polling_future_without_retry(): ) future = PollingFutureImplWithoutRetry() assert future.done() - assert future.running() + assert not future.running() assert future.result() is None with mock.patch.object(future, "done") as done_mock: future._done_or_raise() - done_mock.assert_called_once_with() + done_mock.assert_called_once_with(retry=None) with mock.patch.object(future, "done") as done_mock: future._done_or_raise(retry=custom_retry) @@ -238,5 +264,5 @@ def test_polling_future_without_retry(): def test_polling_future_with__done_or_raise(): future = PollingFutureImplWith_done_or_raise() assert future.done() - assert future.running() + assert not future.running() assert future.result() is None diff --git a/tests/unit/gapic/test_method.py b/tests/unit/gapic/test_method.py index 9778d23a..b1035413 100644 --- a/tests/unit/gapic/test_method.py +++ b/tests/unit/gapic/test_method.py @@ -39,27 +39,6 @@ def _utcnow_monotonic(): curr_value += delta -def test__determine_timeout(): - # Check _determine_timeout always returns a Timeout object. 
- timeout_type_timeout = timeout.ConstantTimeout(600.0) - returned_timeout = google.api_core.gapic_v1.method._determine_timeout( - 600.0, 600.0, None - ) - assert isinstance(returned_timeout, timeout.ConstantTimeout) - returned_timeout = google.api_core.gapic_v1.method._determine_timeout( - 600.0, timeout_type_timeout, None - ) - assert isinstance(returned_timeout, timeout.ConstantTimeout) - returned_timeout = google.api_core.gapic_v1.method._determine_timeout( - timeout_type_timeout, 600.0, None - ) - assert isinstance(returned_timeout, timeout.ConstantTimeout) - returned_timeout = google.api_core.gapic_v1.method._determine_timeout( - timeout_type_timeout, timeout_type_timeout, None - ) - assert isinstance(returned_timeout, timeout.ConstantTimeout) - - def test_wrap_method_basic(): method = mock.Mock(spec=["__call__"], return_value=42) @@ -199,37 +178,6 @@ def test_wrap_method_with_overriding_retry_and_timeout(unusued_sleep): method.assert_called_with(timeout=22, metadata=mock.ANY) -@mock.patch("time.sleep") -@mock.patch( - "google.api_core.datetime_helpers.utcnow", - side_effect=_utcnow_monotonic(), - autospec=True, -) -def test_wrap_method_with_overriding_retry_deadline(utcnow, unused_sleep): - method = mock.Mock( - spec=["__call__"], - side_effect=([exceptions.InternalServerError(None)] * 4) + [42], - ) - default_retry = retry.Retry() - default_timeout = timeout.ExponentialTimeout(deadline=60) - wrapped_method = google.api_core.gapic_v1.method.wrap_method( - method, default_retry, default_timeout - ) - - # Overriding only the retry's deadline should also override the timeout's - # deadline. - result = wrapped_method(retry=default_retry.with_deadline(30)) - - assert result == 42 - timeout_args = [call[1]["timeout"] for call in method.call_args_list] - assert timeout_args == [5.0, 10.0, 20.0, 26.0, 25.0] - assert utcnow.call_count == ( - 1 - + 5 # First to set the deadline. 
- + 5 # One for each min(timeout, maximum, (DEADLINE - NOW).seconds) - ) - - def test_wrap_method_with_overriding_timeout_as_a_number(): method = mock.Mock(spec=["__call__"], return_value=42) default_retry = retry.Retry() diff --git a/tests/unit/gapic/test_routing_header.py b/tests/unit/gapic/test_routing_header.py index 30378676..9d31eb39 100644 --- a/tests/unit/gapic/test_routing_header.py +++ b/tests/unit/gapic/test_routing_header.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from enum import Enum + import pytest try: @@ -35,6 +37,35 @@ def test_to_routing_header_with_slashes(): assert value == "name=me/ep&book.read=1%262" +def test_enum_fully_qualified(): + class Message: + class Color(Enum): + RED = 1 + GREEN = 2 + BLUE = 3 + + params = [("color", Message.Color.RED)] + value = routing_header.to_routing_header(params) + assert value == "color=Color.RED" + value = routing_header.to_routing_header(params, qualified_enums=True) + assert value == "color=Color.RED" + + +def test_enum_nonqualified(): + class Message: + class Color(Enum): + RED = 1 + GREEN = 2 + BLUE = 3 + + params = [("color", Message.Color.RED), ("num", 5)] + value = routing_header.to_routing_header(params, qualified_enums=False) + assert value == "color=RED&num=5" + params = {"color": Message.Color.RED, "num": 5} + value = routing_header.to_routing_header(params, qualified_enums=False) + assert value == "color=RED&num=5" + + def test_to_grpc_metadata(): params = [("name", "meep"), ("book.read", "1")] metadata = routing_header.to_grpc_metadata(params) diff --git a/tests/unit/operations_v1/test_operations_client.py b/tests/unit/operations_v1/test_operations_client.py index 187f0be3..fb4b14f1 100644 --- a/tests/unit/operations_v1/test_operations_client.py +++ b/tests/unit/operations_v1/test_operations_client.py @@ -16,12 +16,13 @@ try: import grpc # noqa: F401 -except ImportError: +except ImportError: # pragma: NO COVER 
pytest.skip("No GRPC", allow_module_level=True) from google.api_core import grpc_helpers from google.api_core import operations_v1 from google.api_core import page_iterator +from google.api_core.operations_v1 import operations_client_config from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 @@ -96,3 +97,7 @@ def test_cancel_operation(): ].metadata assert len(channel.CancelOperation.requests) == 1 assert channel.CancelOperation.requests[0].name == "name" + + +def test_operations_client_config(): + assert operations_client_config.config["interfaces"] diff --git a/tests/unit/operations_v1/test_operations_rest_client.py b/tests/unit/operations_v1/test_operations_rest_client.py index 625539e2..149c463c 100644 --- a/tests/unit/operations_v1/test_operations_rest_client.py +++ b/tests/unit/operations_v1/test_operations_rest_client.py @@ -20,7 +20,7 @@ try: import grpc # noqa: F401 -except ImportError: +except ImportError: # pragma: NO COVER pytest.skip("No GRPC", allow_module_level=True) from requests import Response # noqa I201 from requests.sessions import Session @@ -121,7 +121,7 @@ def test_operations_client_from_service_account_info(client_class): assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "longrunning.googleapis.com:443" + assert client.transport._host == "https://longrunning.googleapis.com" @pytest.mark.parametrize( @@ -160,7 +160,7 @@ def test_operations_client_from_service_account_file(client_class): assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "longrunning.googleapis.com:443" + assert client.transport._host == "https://longrunning.googleapis.com" def test_operations_client_get_transport_class(): @@ -465,10 +465,7 @@ def test_list_operations_rest( actual_args = req.call_args assert actual_args.args[0] == "GET" - assert ( - actual_args.args[1] - == 
"https://longrunning.googleapis.com:443/v3/operations" - ) + assert actual_args.args[1] == "https://longrunning.googleapis.com/v3/operations" assert actual_args.kwargs["params"] == [ ("filter", "my_filter"), ("pageSize", 10), @@ -574,7 +571,7 @@ def test_get_operation_rest( assert actual_args.args[0] == "GET" assert ( actual_args.args[1] - == "https://longrunning.googleapis.com:443/v3/operations/sample1" + == "https://longrunning.googleapis.com/v3/operations/sample1" ) # Establish that the response is the type that we expect. @@ -591,13 +588,11 @@ def test_get_operation_rest_failure(): response_value.status_code = 400 mock_request = mock.MagicMock() mock_request.method = "GET" - mock_request.url = ( - "https://longrunning.googleapis.com:443/v1/operations/sample1" - ) + mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1" response_value.request = mock_request req.return_value = response_value with pytest.raises(core_exceptions.GoogleAPIError): - client.get_operation("operations/sample1") + client.get_operation("sammple0/operations/sample1") def test_delete_operation_rest( @@ -619,7 +614,7 @@ def test_delete_operation_rest( assert actual_args.args[0] == "DELETE" assert ( actual_args.args[1] - == "https://longrunning.googleapis.com:443/v3/operations/sample1" + == "https://longrunning.googleapis.com/v3/operations/sample1" ) @@ -631,13 +626,11 @@ def test_delete_operation_rest_failure(): response_value.status_code = 400 mock_request = mock.MagicMock() mock_request.method = "DELETE" - mock_request.url = ( - "https://longrunning.googleapis.com:443/v1/operations/sample1" - ) + mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1" response_value.request = mock_request req.return_value = response_value with pytest.raises(core_exceptions.GoogleAPIError): - client.delete_operation(name="operations/sample1") + client.delete_operation(name="sample0/operations/sample1") def test_cancel_operation_rest(transport: str = "rest"): @@ 
-657,7 +650,7 @@ def test_cancel_operation_rest(transport: str = "rest"): assert actual_args.args[0] == "POST" assert ( actual_args.args[1] - == "https://longrunning.googleapis.com:443/v3/operations/sample1:cancel" + == "https://longrunning.googleapis.com/v3/operations/sample1:cancel" ) @@ -670,12 +663,12 @@ def test_cancel_operation_rest_failure(): mock_request = mock.MagicMock() mock_request.method = "POST" mock_request.url = ( - "https://longrunning.googleapis.com:443/v1/operations/sample1:cancel" + "https://longrunning.googleapis.com/v1/operations/sample1:cancel" ) response_value.request = mock_request req.return_value = response_value with pytest.raises(core_exceptions.GoogleAPIError): - client.cancel_operation(name="operations/sample1") + client.cancel_operation(name="sample0/operations/sample1") def test_credentials_transport_error(): @@ -825,7 +818,7 @@ def test_operations_host_no_port(): api_endpoint="longrunning.googleapis.com" ), ) - assert client.transport._host == "longrunning.googleapis.com:443" + assert client.transport._host == "https://longrunning.googleapis.com" def test_operations_host_with_port(): @@ -835,7 +828,7 @@ def test_operations_host_with_port(): api_endpoint="longrunning.googleapis.com:8000" ), ) - assert client.transport._host == "longrunning.googleapis.com:8000" + assert client.transport._host == "https://longrunning.googleapis.com:8000" def test_common_billing_account_path(): diff --git a/tests/unit/test_bidi.py b/tests/unit/test_bidi.py index 7fb16209..f5e2b72b 100644 --- a/tests/unit/test_bidi.py +++ b/tests/unit/test_bidi.py @@ -22,7 +22,7 @@ try: import grpc -except ImportError: +except ImportError: # pragma: NO COVER pytest.skip("No GRPC", allow_module_level=True) from google.api_core import bidi diff --git a/tests/unit/test_client_info.py b/tests/unit/test_client_info.py index f5eebfbe..3361fef6 100644 --- a/tests/unit/test_client_info.py +++ b/tests/unit/test_client_info.py @@ -15,7 +15,7 @@ try: import grpc -except 
ImportError: +except ImportError: # pragma: NO COVER grpc = None from google.api_core import client_info @@ -26,9 +26,9 @@ def test_constructor_defaults(): assert info.python_version is not None - if grpc is not None: + if grpc is not None: # pragma: NO COVER assert info.grpc_version is not None - else: + else: # pragma: NO COVER assert info.grpc_version is None assert info.api_core_version is not None diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py index 4169ad44..eb0b12d1 100644 --- a/tests/unit/test_exceptions.py +++ b/tests/unit/test_exceptions.py @@ -22,7 +22,7 @@ try: import grpc from grpc_status import rpc_status -except ImportError: +except ImportError: # pragma: NO COVER grpc = rpc_status = None from google.api_core import exceptions diff --git a/tests/unit/test_grpc_helpers.py b/tests/unit/test_grpc_helpers.py index 8b9fd9f1..9fe938d6 100644 --- a/tests/unit/test_grpc_helpers.py +++ b/tests/unit/test_grpc_helpers.py @@ -17,7 +17,7 @@ try: import grpc -except ImportError: +except ImportError: # pragma: NO COVER pytest.skip("No GRPC", allow_module_level=True) from google.api_core import exceptions @@ -365,7 +365,7 @@ def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_c default.assert_called_once_with(scopes=None, default_scopes=None) - if grpc_helpers.HAS_GRPC_GCP: + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: grpc_secure_channel.assert_called_once_with(target, composite_creds) @@ -400,7 +400,7 @@ def test_create_channel_implicit_with_default_host( mock.sentinel.credentials, mock.sentinel.Request, default_host=default_host ) - if grpc_helpers.HAS_GRPC_GCP: + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: grpc_secure_channel.assert_called_once_with(target, composite_creds) @@ -427,7 +427,7 @@ def 
test_create_channel_implicit_with_ssl_creds( composite_creds_call.assert_called_once_with(ssl_creds, mock.ANY) composite_creds = composite_creds_call.return_value - if grpc_helpers.HAS_GRPC_GCP: + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: grpc_secure_channel.assert_called_once_with(target, composite_creds) @@ -452,7 +452,7 @@ def test_create_channel_implicit_with_scopes( default.assert_called_once_with(scopes=["one", "two"], default_scopes=None) - if grpc_helpers.HAS_GRPC_GCP: + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: grpc_secure_channel.assert_called_once_with(target, composite_creds) @@ -477,7 +477,7 @@ def test_create_channel_implicit_with_default_scopes( default.assert_called_once_with(scopes=None, default_scopes=["three", "four"]) - if grpc_helpers.HAS_GRPC_GCP: + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: grpc_secure_channel.assert_called_once_with(target, composite_creds) @@ -509,7 +509,7 @@ def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_cred assert channel is grpc_secure_channel.return_value - if grpc_helpers.HAS_GRPC_GCP: + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: grpc_secure_channel.assert_called_once_with(target, composite_creds) @@ -533,7 +533,7 @@ def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_cal assert channel is grpc_secure_channel.return_value - if grpc_helpers.HAS_GRPC_GCP: + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: grpc_secure_channel.assert_called_once_with(target, composite_creds) @@ -561,7 +561,7 @@ def test_create_channel_explicit_default_scopes( 
assert channel is grpc_secure_channel.return_value - if grpc_helpers.HAS_GRPC_GCP: + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: grpc_secure_channel.assert_called_once_with(target, composite_creds) @@ -587,7 +587,7 @@ def test_create_channel_explicit_with_quota_project( assert channel is grpc_secure_channel.return_value - if grpc_helpers.HAS_GRPC_GCP: + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: grpc_secure_channel.assert_called_once_with(target, composite_creds) @@ -616,7 +616,7 @@ def test_create_channel_with_credentials_file( assert channel is grpc_secure_channel.return_value - if grpc_helpers.HAS_GRPC_GCP: + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: grpc_secure_channel.assert_called_once_with(target, composite_creds) @@ -648,7 +648,7 @@ def test_create_channel_with_credentials_file_and_scopes( assert channel is grpc_secure_channel.return_value - if grpc_helpers.HAS_GRPC_GCP: + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: grpc_secure_channel.assert_called_once_with(target, composite_creds) @@ -680,7 +680,7 @@ def test_create_channel_with_credentials_file_and_default_scopes( assert channel is grpc_secure_channel.return_value - if grpc_helpers.HAS_GRPC_GCP: + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: grpc_secure_channel.assert_called_once_with(target, composite_creds) @@ -690,7 +690,7 @@ def test_create_channel_with_credentials_file_and_default_scopes( not grpc_helpers.HAS_GRPC_GCP, reason="grpc_gcp module not available" ) @mock.patch("grpc_gcp.secure_channel") -def test_create_channel_with_grpc_gcp(grpc_gcp_secure_channel): +def 
test_create_channel_with_grpc_gcp(grpc_gcp_secure_channel): # pragma: NO COVER target = "example.com:443" scopes = ["test_scope"] diff --git a/tests/unit/test_operation.py b/tests/unit/test_operation.py index 22e23bc3..f029866c 100644 --- a/tests/unit/test_operation.py +++ b/tests/unit/test_operation.py @@ -18,7 +18,7 @@ try: import grpc # noqa: F401 -except ImportError: +except ImportError: # pragma: NO COVER pytest.skip("No GRPC", allow_module_level=True) from google.api_core import exceptions diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 74c5d77c..ec27056d 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -52,7 +52,7 @@ def test_if_transient_error(): # Make uniform return half of its maximum, which will be the calculated # sleep time. -@mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0) +@mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) def test_exponential_sleep_generator_base_2(uniform): gen = retry.exponential_sleep_generator(1, 60, multiplier=2) @@ -172,6 +172,7 @@ def test_constructor_defaults(self): assert retry_._deadline == 120 assert retry_._on_error is None assert retry_.deadline == 120 + assert retry_.timeout == 120 def test_constructor_options(self): _some_function = mock.Mock() @@ -315,7 +316,7 @@ def if_exception_type(exc): assert re.match( ( r", " - r"initial=1.0, maximum=60.0, multiplier=2.0, deadline=120.0, " + r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, " r"on_error=None>" ), str(retry_), @@ -337,8 +338,7 @@ def test___call___and_execute_success(self, sleep): target.assert_called_once_with("meep") sleep.assert_not_called() - # Make uniform return half of its maximum, which is the calculated sleep time. 
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0) + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("time.sleep", autospec=True) def test___call___and_execute_retry(self, sleep, uniform): @@ -360,8 +360,7 @@ def test___call___and_execute_retry(self, sleep, uniform): sleep.assert_called_once_with(retry_._initial) assert on_error.call_count == 1 - # Make uniform return half of its maximum, which is the calculated sleep time. - @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0) + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("time.sleep", autospec=True) def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform): @@ -371,7 +370,7 @@ def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform): initial=1.0, maximum=1024.0, multiplier=2.0, - deadline=9.9, + deadline=30.9, ) utcnow = datetime.datetime.utcnow() @@ -406,8 +405,17 @@ def increase_time(sleep_delay): last_wait = sleep.call_args.args[0] total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) - assert last_wait == 2.9 # and not 8.0, because the last delay was shortened - assert total_wait == 9.9 # the same as the deadline + assert last_wait == 8.0 + # Next attempt would be scheduled in 16 secs, 15 + 16 = 31 > 30.9, thus + # we do not even wait for it to be scheduled (30.9 is configured timeout). + # This changes the previous logic of shortening the last attempt to fit + # in the deadline. The previous logic was removed to make Python retry + # logic consistent with the other languages and to not disrupt the + # randomized retry delays distribution by artificially increasing a + # probability of scheduling two (instead of one) last attempts with very + # short delay between them, while the second retry having very low chance + # of succeeding anyways. 
+ assert total_wait == 15.0 @mock.patch("time.sleep", autospec=True) def test___init___without_retry_executed(self, sleep): @@ -432,8 +440,7 @@ def test___init___without_retry_executed(self, sleep): sleep.assert_not_called() _some_function.assert_not_called() - # Make uniform return half of its maximum, which is the calculated sleep time. - @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0) + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("time.sleep", autospec=True) def test___init___when_retry_is_executed(self, sleep, uniform): _some_function = mock.Mock() diff --git a/tests/unit/test_timeout.py b/tests/unit/test_timeout.py index 30d624e2..a83a2ecb 100644 --- a/tests/unit/test_timeout.py +++ b/tests/unit/test_timeout.py @@ -17,11 +17,11 @@ import mock -from google.api_core import timeout +from google.api_core import timeout as timeouts def test__exponential_timeout_generator_base_2(): - gen = timeout._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=None) + gen = timeouts._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=None) result = list(itertools.islice(gen, 8)) assert result == [1, 2, 4, 8, 16, 32, 60, 60] @@ -34,7 +34,7 @@ def test__exponential_timeout_generator_base_deadline(utcnow): datetime.datetime.min + datetime.timedelta(seconds=n) for n in range(15) ] - gen = timeout._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=30.0) + gen = timeouts._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=30.0) result = list(itertools.islice(gen, 14)) # Should grow until the cumulative time is > 30s, then start decreasing as @@ -42,22 +42,105 @@ def test__exponential_timeout_generator_base_deadline(utcnow): assert result == [1, 2, 4, 8, 16, 24, 23, 22, 21, 20, 19, 18, 17, 16] +class TestTimeToDeadlineTimeout(object): + def test_constructor(self): + timeout_ = timeouts.TimeToDeadlineTimeout() + assert timeout_._timeout is None + + def test_constructor_args(self): + timeout_ = 
timeouts.TimeToDeadlineTimeout(42.0) + assert timeout_._timeout == 42.0 + + def test___str__(self): + timeout_ = timeouts.TimeToDeadlineTimeout(1) + assert str(timeout_) == "" + + def test_apply(self): + target = mock.Mock(spec=["__call__", "__name__"], __name__="target") + + datetime.datetime.utcnow() + datetime.timedelta(seconds=1) + + now = datetime.datetime.utcnow() + + times = [ + now, + now + datetime.timedelta(seconds=0.0009), + now + datetime.timedelta(seconds=1), + now + datetime.timedelta(seconds=39), + now + datetime.timedelta(seconds=42), + now + datetime.timedelta(seconds=43), + ] + + def _clock(): + return times.pop(0) + + timeout_ = timeouts.TimeToDeadlineTimeout(42.0, _clock) + wrapped = timeout_(target) + + wrapped() + target.assert_called_with(timeout=42.0) + wrapped() + target.assert_called_with(timeout=41.0) + wrapped() + target.assert_called_with(timeout=3.0) + wrapped() + target.assert_called_with(timeout=0.0) + wrapped() + target.assert_called_with(timeout=0.0) + + def test_apply_no_timeout(self): + target = mock.Mock(spec=["__call__", "__name__"], __name__="target") + + datetime.datetime.utcnow() + datetime.timedelta(seconds=1) + + now = datetime.datetime.utcnow() + + times = [ + now, + now + datetime.timedelta(seconds=0.0009), + now + datetime.timedelta(seconds=1), + now + datetime.timedelta(seconds=2), + ] + + def _clock(): + return times.pop(0) + + timeout_ = timeouts.TimeToDeadlineTimeout(clock=_clock) + wrapped = timeout_(target) + + wrapped() + target.assert_called_with() + wrapped() + target.assert_called_with() + + def test_apply_passthrough(self): + target = mock.Mock(spec=["__call__", "__name__"], __name__="target") + timeout_ = timeouts.TimeToDeadlineTimeout(42.0) + wrapped = timeout_(target) + + wrapped(1, 2, meep="moop") + + target.assert_called_once_with(1, 2, meep="moop", timeout=42.0) + + class TestConstantTimeout(object): def test_constructor(self): - timeout_ = timeout.ConstantTimeout() + timeout_ = 
timeouts.ConstantTimeout() assert timeout_._timeout is None def test_constructor_args(self): - timeout_ = timeout.ConstantTimeout(42.0) + timeout_ = timeouts.ConstantTimeout(42.0) assert timeout_._timeout == 42.0 def test___str__(self): - timeout_ = timeout.ConstantTimeout(1) + timeout_ = timeouts.ConstantTimeout(1) assert str(timeout_) == "" def test_apply(self): target = mock.Mock(spec=["__call__", "__name__"], __name__="target") - timeout_ = timeout.ConstantTimeout(42.0) + timeout_ = timeouts.ConstantTimeout(42.0) wrapped = timeout_(target) wrapped() @@ -66,7 +149,7 @@ def test_apply(self): def test_apply_passthrough(self): target = mock.Mock(spec=["__call__", "__name__"], __name__="target") - timeout_ = timeout.ConstantTimeout(42.0) + timeout_ = timeouts.ConstantTimeout(42.0) wrapped = timeout_(target) wrapped(1, 2, meep="moop") @@ -76,30 +159,30 @@ def test_apply_passthrough(self): class TestExponentialTimeout(object): def test_constructor(self): - timeout_ = timeout.ExponentialTimeout() - assert timeout_._initial == timeout._DEFAULT_INITIAL_TIMEOUT - assert timeout_._maximum == timeout._DEFAULT_MAXIMUM_TIMEOUT - assert timeout_._multiplier == timeout._DEFAULT_TIMEOUT_MULTIPLIER - assert timeout_._deadline == timeout._DEFAULT_DEADLINE + timeout_ = timeouts.ExponentialTimeout() + assert timeout_._initial == timeouts._DEFAULT_INITIAL_TIMEOUT + assert timeout_._maximum == timeouts._DEFAULT_MAXIMUM_TIMEOUT + assert timeout_._multiplier == timeouts._DEFAULT_TIMEOUT_MULTIPLIER + assert timeout_._deadline == timeouts._DEFAULT_DEADLINE def test_constructor_args(self): - timeout_ = timeout.ExponentialTimeout(1, 2, 3, 4) + timeout_ = timeouts.ExponentialTimeout(1, 2, 3, 4) assert timeout_._initial == 1 assert timeout_._maximum == 2 assert timeout_._multiplier == 3 assert timeout_._deadline == 4 def test_with_timeout(self): - original_timeout = timeout.ExponentialTimeout() + original_timeout = timeouts.ExponentialTimeout() timeout_ = original_timeout.with_deadline(42) 
assert original_timeout is not timeout_ - assert timeout_._initial == timeout._DEFAULT_INITIAL_TIMEOUT - assert timeout_._maximum == timeout._DEFAULT_MAXIMUM_TIMEOUT - assert timeout_._multiplier == timeout._DEFAULT_TIMEOUT_MULTIPLIER + assert timeout_._initial == timeouts._DEFAULT_INITIAL_TIMEOUT + assert timeout_._maximum == timeouts._DEFAULT_MAXIMUM_TIMEOUT + assert timeout_._multiplier == timeouts._DEFAULT_TIMEOUT_MULTIPLIER assert timeout_._deadline == 42 def test___str__(self): - timeout_ = timeout.ExponentialTimeout(1, 2, 3, 4) + timeout_ = timeouts.ExponentialTimeout(1, 2, 3, 4) assert str(timeout_) == ( "" @@ -107,7 +190,7 @@ def test___str__(self): def test_apply(self): target = mock.Mock(spec=["__call__", "__name__"], __name__="target") - timeout_ = timeout.ExponentialTimeout(1, 10, 2) + timeout_ = timeouts.ExponentialTimeout(1, 10, 2) wrapped = timeout_(target) wrapped() @@ -121,7 +204,7 @@ def test_apply(self): def test_apply_passthrough(self): target = mock.Mock(spec=["__call__", "__name__"], __name__="target") - timeout_ = timeout.ExponentialTimeout(42.0, 100, 2) + timeout_ = timeouts.ExponentialTimeout(42.0, 100, 2) wrapped = timeout_(target) wrapped(1, 2, meep="moop")