From 63ab99ec31894206dfdcc127529799a1110e07f9 Mon Sep 17 00:00:00 2001 From: Anand Inguva Date: Tue, 20 Jun 2023 17:36:08 -0400 Subject: [PATCH 1/8] Remove python 3.7 support --- .../test-properties.json | 14 +- .../setup-self-hosted-action/action.yml | 9 -- .github/workflows/build_wheels.yml | 6 +- .github/workflows/dask_runner_tests.yml | 3 +- .github/workflows/python_dependency_tests.yml | 1 - .github/workflows/python_tests.yml | 7 +- .../jenkins/PythonTestProperties.groovy | 5 +- .test-infra/jenkins/README.md | 6 +- .../job_PostCommit_PortableJar_Flink.groovy | 2 +- .../job_PostCommit_PortableJar_Spark.groovy | 2 +- .../Python_WordCount_IT_Benchmarks.json | 2 +- .test-infra/tools/README.md | 2 +- .test-infra/tools/python_installer.sh | 2 +- build.gradle.kts | 19 +-- .../beam/gradle/BeamModulePlugin.groovy | 2 +- local-env-setup.sh | 4 +- release/src/main/Dockerfile | 3 +- .../python_release_automation.sh | 2 +- .../release/test/resources/mass_comment.txt | 3 +- .../internal/cloudpickle_pickler_test.py | 2 - sdks/python/apache_beam/io/filesystem_test.py | 7 +- .../inference/tensorflow_inference_it_test.py | 5 - .../apache_beam/typehints/decorators_test.py | 8 +- .../typehints/native_type_compatibility.py | 5 +- .../typehints/typed_pipeline_test.py | 14 +- .../base_image_requirements_manual.txt | 2 +- sdks/python/container/build.gradle | 3 - .../py37/base_image_requirements.txt | 135 ------------------ sdks/python/container/py37/build.gradle | 28 ---- .../container/run_validatescontainer.sh | 2 - sdks/python/mypy.ini | 2 +- sdks/python/setup.py | 8 +- .../test-suites/dataflow/py37/build.gradle | 24 ---- .../test-suites/direct/py37/build.gradle | 24 ---- sdks/python/test-suites/gradle.properties | 24 ++-- .../test-suites/portable/py37/build.gradle | 26 ---- sdks/python/test-suites/tox/py37/build.gradle | 43 ------ sdks/python/tox.ini | 28 ++-- settings.gradle.kts | 5 - .../en/documentation/runtime/environments.md | 2 +- .../content/en/get-started/quickstart-py.md | 2 +- 41 files changed, 68 insertions(+), 425 deletions(-) delete mode 100644 sdks/python/container/py37/base_image_requirements.txt delete mode 100644 sdks/python/container/py37/build.gradle delete mode 100644 sdks/python/test-suites/dataflow/py37/build.gradle delete mode 100644 sdks/python/test-suites/direct/py37/build.gradle delete mode 100644 sdks/python/test-suites/portable/py37/build.gradle delete mode 100644 sdks/python/test-suites/tox/py37/build.gradle diff --git a/.github/actions/setup-default-test-properties/test-properties.json b/.github/actions/setup-default-test-properties/test-properties.json index 32b449ea1ed9e..2bac60d4abbe5 100644 --- a/.github/actions/setup-default-test-properties/test-properties.json +++ b/.github/actions/setup-default-test-properties/test-properties.json @@ -1,14 +1,14 @@ { "PythonTestProperties": { - "ALL_SUPPORTED_VERSIONS": ["3.7", "3.8", "3.9", "3.10", "3.11"], - "LOWEST_SUPPORTED": ["3.7"], + "ALL_SUPPORTED_VERSIONS": ["3.8", "3.9", "3.10", "3.11"], + "LOWEST_SUPPORTED": ["3.8"], "HIGHEST_SUPPORTED": ["3.11"], - "ESSENTIAL_VERSIONS": ["3.7", "3.11"], - "CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS": ["3.7", "3.11"], + "ESSENTIAL_VERSIONS": ["3.11"], + "CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS": ["3.11"], "CROSS_LANGUAGE_VALIDATES_RUNNER_DATAFLOW_USING_SQL_PYTHON_VERSIONS": ["3.11"], - "VALIDATES_CONTAINER_DATAFLOW_PYTHON_VERSIONS": ["3.7", "3.8", "3.9", "3.10", "3.11" ] - "LOAD_TEST_PYTHON_VERSION": "3.7", - "CHICAGO_TAXI_EXAMPLE_FLINK_PYTHON_VERSION": "3.7", + 
"VALIDATES_CONTAINER_DATAFLOW_PYTHON_VERSIONS": ["3.8", "3.9", "3.10", "3.11" ] + "LOAD_TEST_PYTHON_VERSION": "3.8", + "CHICAGO_TAXI_EXAMPLE_FLINK_PYTHON_VERSION": "3.8", "DEFAULT_INTERPRETER": "python3.8", "TOX_ENV": ["Cloud", "Cython"] }, diff --git a/.github/actions/setup-self-hosted-action/action.yml b/.github/actions/setup-self-hosted-action/action.yml index 0e7cc8534e9e5..9702b4450ec98 100644 --- a/.github/actions/setup-self-hosted-action/action.yml +++ b/.github/actions/setup-self-hosted-action/action.yml @@ -18,10 +18,6 @@ name: 'Setup environment for self-hosted runners' description: 'Setup action to run jobs in a self-hosted runner' inputs: - requires-py-37: - required: false - description: 'Set as false if does not require py37 setup' - default: 'true' requires-py-38: required: false description: 'Set as false if does not require py38 setup' @@ -42,11 +38,6 @@ inputs: runs: using: "composite" steps: - - name: Install python 3.7 - if: ${{ inputs.requires-py-37 == 'true' }} - uses: actions/setup-python@v4 - with: - python-version: "3.7" - name: Install python 3.8 if: ${{ inputs.requires-py-38 == 'true' }} uses: actions/setup-python@v4 diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index ead302975d686..3befac33f2234 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -48,7 +48,7 @@ jobs: runs-on: ubuntu-latest env: EVENT_NAME: ${{ github.event_name }} - PY_VERSIONS_FULL: "cp37-* cp38-* cp39-* cp310-* cp311-*" + PY_VERSIONS_FULL: "cp38-* cp39-* cp310-* cp311-*" outputs: gcp-variables-set: ${{ steps.check_gcp_variables.outputs.gcp-variables-set }} py-versions-full: ${{ steps.set-py-versions.outputs.py-versions-full }} @@ -91,7 +91,7 @@ jobs: - name: Install python uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: 3.8 - name: Get build dependencies working-directory: ./sdks/python run: python -m pip install -r build-requirements.txt @@ -255,7 +255,7 @@ jobs: - name: Install Python uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: 3.8 - uses: docker/setup-qemu-action@v1 if: ${{matrix.arch == 'aarch64'}} name: Set up QEMU diff --git a/.github/workflows/dask_runner_tests.yml b/.github/workflows/dask_runner_tests.yml index 33d0575e2c8c6..655c09dc1503a 100644 --- a/.github/workflows/dask_runner_tests.yml +++ b/.github/workflows/dask_runner_tests.yml @@ -43,7 +43,7 @@ jobs: - name: Install python uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: 3.8 - name: Get build dependencies working-directory: ./sdks/python run: pip install pip setuptools --upgrade && pip install -r build-requirements.txt @@ -67,7 +67,6 @@ jobs: matrix: os: [ubuntu-latest, macos-latest, windows-latest] params: [ - {"py_ver": "3.7", "tox_env": "py37"}, {"py_ver": "3.8", "tox_env": "py38"}, {"py_ver": "3.9", "tox_env": "py39"}, {"py_ver": "3.10", "tox_env": "py310" }, diff --git a/.github/workflows/python_dependency_tests.yml b/.github/workflows/python_dependency_tests.yml index daf30b4bae9c0..809a422c9e87e 100644 --- a/.github/workflows/python_dependency_tests.yml +++ b/.github/workflows/python_dependency_tests.yml @@ -26,7 +26,6 @@ jobs: matrix: os: [ubuntu-latest] params: [ - {"py_ver": "3.7", "py_env": "py37"}, {"py_ver": "3.8", "py_env": "py38"}, {"py_ver": "3.9", "py_env": "py39"}, {"py_ver": "3.10", "py_env": "py310" }, diff --git a/.github/workflows/python_tests.yml b/.github/workflows/python_tests.yml index 819330d394ad6..f832184c45ba2 100644 --- 
a/.github/workflows/python_tests.yml +++ b/.github/workflows/python_tests.yml @@ -77,7 +77,7 @@ jobs: - name: Install python uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: 3.8 - name: Get build dependencies working-directory: ./sdks/python run: pip install pip setuptools --upgrade && pip install -r build-requirements.txt @@ -101,7 +101,6 @@ jobs: matrix: os: [ubuntu-latest, macos-latest, windows-latest] params: [ - {"py_ver": "3.7", "tox_env": "py37"}, {"py_ver": "3.8", "tox_env": "py38"}, {"py_ver": "3.9", "tox_env": "py39"}, {"py_ver": "3.10", "tox_env": "py310" }, @@ -141,7 +140,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python: ["3.8", "3.9", "3.10", "3.11"] steps: - name: Checkout code uses: actions/checkout@v3 @@ -169,7 +168,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python: ["3.8", "3.9", "3.10", "3.11"] steps: - name: Checkout code uses: actions/checkout@v3 diff --git a/.test-infra/jenkins/PythonTestProperties.groovy b/.test-infra/jenkins/PythonTestProperties.groovy index 1ebf7cc84a87f..98257a6e1c288 100644 --- a/.test-infra/jenkins/PythonTestProperties.groovy +++ b/.test-infra/jenkins/PythonTestProperties.groovy @@ -20,7 +20,6 @@ class PythonTestProperties { // Indicates all supported Python versions. // This must be sorted in ascending order. final static List ALL_SUPPORTED_VERSIONS = [ - '3.7', '3.8', '3.9', '3.10', @@ -38,9 +37,9 @@ class PythonTestProperties { final static List CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS = ESSENTIAL_VERSIONS final static List CROSS_LANGUAGE_VALIDATES_RUNNER_DATAFLOW_USING_SQL_PYTHON_VERSIONS = [HIGHEST_SUPPORTED] final static List VALIDATES_CONTAINER_DATAFLOW_PYTHON_VERSIONS = ALL_SUPPORTED_VERSIONS - final static String LOAD_TEST_PYTHON_VERSION = '3.7' + final static String LOAD_TEST_PYTHON_VERSION = '3.8' final static String RUN_INFERENCE_TEST_PYTHON_VERSION = '3.8' - final static String CHICAGO_TAXI_EXAMPLE_FLINK_PYTHON_VERSION = '3.7' + final static String CHICAGO_TAXI_EXAMPLE_FLINK_PYTHON_VERSION = '3.8' // Use for various shell scripts triggered by Jenkins. // Gradle scripts should use project.ext.pythonVersion defined by PythonNature/BeamModulePlugin. 
final static String DEFAULT_INTERPRETER = 'python3.8' diff --git a/.test-infra/jenkins/README.md b/.test-infra/jenkins/README.md index 5f9b55d4e2366..2f4335770208e 100644 --- a/.test-infra/jenkins/README.md +++ b/.test-infra/jenkins/README.md @@ -163,12 +163,10 @@ Beam Jenkins overview page: [link](https://ci-beam.apache.org/) | beam_PostCommit_Python_VR_Spark | [cron](https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/), [phrase](https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/) | `Run Python Spark ValidatesRunner` | [![Build Status](https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/badge/icon)](https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark) | | beam_PostCommit_Python_Xlang_Gcp_Direct | [cron](https://ci-beam.apache.org/job/beam_PostCommit_Python_Xlang_Gcp_Direct/), [phrase](https://ci-beam.apache.org/job/beam_PostCommit_Python_Xlang_Gcp_Direct_PR/) | `Run Python_Xlang_Gcp_Direct PostCommit` | [![Build Status](https://ci-beam.apache.org/job/beam_PostCommit_Python_Xlang_Gcp_Direct/badge/icon)](https://ci-beam.apache.org/job/beam_PostCommit_Python_Xlang_Gcp_Direct/) | | beam_PostCommit_Python_Xlang_Gcp_Dataflow | [cron](https://ci-beam.apache.org/job/beam_PostCommit_Python_Xlang_Gcp_Dataflow/), [phrase](https://ci-beam.apache.org/job/beam_PostCommit_Python_Xlang_Gcp_Dataflow_PR/) | `Run Python_Xlang_Gcp_Dataflow PostCommit` | [![Build Status](https://ci-beam.apache.org/job/beam_PostCommit_Python_Xlang_Gcp_Dataflow/badge/icon)](https://ci-beam.apache.org/job/beam_PostCommit_Python_Xlang_Gcp_Dataflow/) | -| beam_PostCommit_Python37 | [cron](https://ci-beam.apache.org/job/beam_PostCommit_Python37), [phrase](https://ci-beam.apache.org/job/beam_PostCommit_Python37_PR/) | `Run Python 3.7 PostCommit` | [![Build Status](https://ci-beam.apache.org/job/beam_PostCommit_Python37/badge/icon)](https://ci-beam.apache.org/job/beam_PostCommit_Python37) | | beam_PostCommit_Python38 | [cron](https://ci-beam.apache.org/job/beam_PostCommit_Python38), [phrase](https://ci-beam.apache.org/job/beam_PostCommit_Python38_PR/) | `Run Python 3.8 PostCommit` | [![Build Status](https://ci-beam.apache.org/job/beam_PostCommit_Python38/badge/icon)](https://ci-beam.apache.org/job/beam_PostCommit_Python38) | | beam_PostCommit_Python39 | [cron](https://ci-beam.apache.org/job/beam_PostCommit_Python39), [phrase](https://ci-beam.apache.org/job/beam_PostCommit_Python39_PR/) | `Run Python 3.9 PostCommit` | [![Build Status](https://ci-beam.apache.org/job/beam_PostCommit_Python39/badge/icon)](https://ci-beam.apache.org/job/beam_PostCommit_Python39) | | beam_PostCommit_Python310 | [cron](https://ci-beam.apache.org/job/beam_PostCommit_Python310), [phrase](https://ci-beam.apache.org/job/beam_PostCommit_Python310_PR/) | `Run Python 3.10 PostCommit` | [![Build Status](https://ci-beam.apache.org/job/beam_PostCommit_Python310/badge/icon)](https://ci-beam.apache.org/job/beam_PostCommit_Python310) | | beam_PostCommit_Python311 | [cron](https://ci-beam.apache.org/job/beam_PostCommit_Python311), [phrase](https://ci-beam.apache.org/job/beam_PostCommit_Python311_PR/) | `Run Python 3.11 PostCommit` | [![Build Status](https://ci-beam.apache.org/job/beam_PostCommit_Python311/badge/icon)](https://ci-beam.apache.org/job/beam_PostCommit_Python311) | -| beam_PostCommit_Sickbay_Python37 | [cron](https://ci-beam.apache.org/job/beam_PostCommit_Sickbay_Python37), [phrase](https://ci-beam.apache.org/job/beam_PostCommit_SickBay_Python37_PR/) | `Run Python 3.7 PostCommit Sickbay tests` | [![Build 
Status](https://ci-beam.apache.org/job/beam_PostCommit_Sickbay_Python37/badge/icon)](https://ci-beam.apache.org/job/beam_PostCommit_Sickbay_Python37) | | beam_PostCommit_Sickbay_Python38 | [cron](https://ci-beam.apache.org/job/beam_PostCommit_Sickbay_Python38), [phrase](https://ci-beam.apache.org/job/beam_PostCommit_SickBay_Python38_PR/) | `Run Python 3.8 PostCommit Sickbay tests` | [![Build Status](https://ci-beam.apache.org/job/beam_PostCommit_Sickbay_Python38/badge/icon)](https://ci-beam.apache.org/job/beam_PostCommit_Sickbay_Python38) | | beam_PostCommit_Sickbay_Python39 | [cron](https://ci-beam.apache.org/job/beam_PostCommit_Sickbay_Python39), [phrase](https://ci-beam.apache.org/job/beam_PostCommit_SickBay_Python39_PR/) | `Run Python 3.9 PostCommit Sickbay tests` | [![Build Status](https://ci-beam.apache.org/job/beam_PostCommit_Sickbay_Python39/badge/icon)](https://ci-beam.apache.org/job/beam_PostCommit_Sickbay_Python39) | | beam_PostCommit_Sickbay_Python310 | [cron](https://ci-beam.apache.org/job/beam_PostCommit_Sickbay_Python310), [phrase](https://ci-beam.apache.org/job/beam_PostCommit_SickBay_Python310_PR/) | `Run Python 3.10 PostCommit Sickbay tests` | [![Build Status](https://ci-beam.apache.org/job/beam_PostCommit_Sickbay_Python310/badge/icon)](https://ci-beam.apache.org/job/beam_PostCommit_Sickbay_Python310) | @@ -202,7 +200,7 @@ Beam Jenkins overview page: [link](https://ci-beam.apache.org/) | beam_PerformanceTests_SparkReceiver_IO | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_SparkReceiver_IO/) | `Run Java SparkReceiverIO Performance Test` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_SparkReceiver_IO/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_SparkReceiver_IO) | | beam_PerformanceTests_TFRecordIOIT | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_TFRecordIOIT/) | `Run Java TFRecordIO Performance Test` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_TFRecordIOIT/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_TFRecordIOIT) | | beam_PerformanceTests_TextIOIT | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_TextIOIT/), [hdfs_cron](https://ci-beam.apache.org/job/beam_PerformanceTests_TextIOIT_HDFS/) | `Run Java TextIO Performance Test` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_TextIOIT/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_TextIOIT) [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_TextIOIT_HDFS/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_TextIOIT_HDFS) | -| beam_PerformanceTests_WordCountIT_Py37 | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/) | `Run Python37 WordCountIT Performance Test` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_WordCountIT_Py37/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_WordCountIT_Py37) | +| beam_PerformanceTests_WordCountIT_Py38 | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_WordCountIT_Py38/) | `Run Python38 WordCountIT Performance Test` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_WordCountIT_Py38/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_WordCountIT_Py38) | | beam_PerformanceTests_XmlIOIT | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_XmlIOIT/), [hdfs_cron](https://ci-beam.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/) | `Run Java XmlIO Performance Test` | 
[![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_XmlIOIT/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_XmlIOIT) [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_XmlIOIT_HDFS) | | beam_PerformanceTests_SQLBigQueryIO_Batch_Java | [cron](https://ci-beam.apache.org/job/beam_PerformanceTests_SQLBigQueryIO_Batch_Java/) | `Run SQLBigQueryIO Batch Performance Test Java` | [![Build Status](https://ci-beam.apache.org/job/beam_PerformanceTests_SQLBigQueryIO_Batch_Java/badge/icon)](https://ci-beam.apache.org/job/beam_PerformanceTests_SQLBigQueryIO_Batch_Java/) | | beam_Java_JMH | [cron](https://ci-beam.apache.org/job/beam_Java_JMH/) | | [![Build Status](https://ci-beam.apache.org/job/beam_Java_JMH/badge/icon)](https://ci-beam.apache.org/job/beam_Java_JMH/) | @@ -308,4 +306,4 @@ Beam Jenkins overview page: [link](https://ci-beam.apache.org/) retest this please ``` -* Last update (mm/dd/yyyy): 04/04/2022 +* Last update (mm/dd/yyyy): 06/20/2022 diff --git a/.test-infra/jenkins/job_PostCommit_PortableJar_Flink.groovy b/.test-infra/jenkins/job_PostCommit_PortableJar_Flink.groovy index 1332b61ccb04e..0c6f51f8be543 100644 --- a/.test-infra/jenkins/job_PostCommit_PortableJar_Flink.groovy +++ b/.test-infra/jenkins/job_PostCommit_PortableJar_Flink.groovy @@ -31,7 +31,7 @@ PostcommitJobBuilder.postCommitJob('beam_PostCommit_PortableJar_Flink', steps { gradle { rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:python:test-suites:portable:py37:testPipelineJarFlinkRunner') + tasks(':sdks:python:test-suites:portable:py38:testPipelineJarFlinkRunner') commonJobProperties.setGradleSwitches(delegate) } } diff --git a/.test-infra/jenkins/job_PostCommit_PortableJar_Spark.groovy b/.test-infra/jenkins/job_PostCommit_PortableJar_Spark.groovy index 93e58af8979ac..1f1963a9b2e4e 100644 --- a/.test-infra/jenkins/job_PostCommit_PortableJar_Spark.groovy +++ b/.test-infra/jenkins/job_PostCommit_PortableJar_Spark.groovy @@ -31,7 +31,7 @@ PostcommitJobBuilder.postCommitJob('beam_PostCommit_PortableJar_Spark', steps { gradle { rootBuildScriptDir(commonJobProperties.checkoutDir) - tasks(':sdks:python:test-suites:portable:py37:testPipelineJarSparkRunner') + tasks(':sdks:python:test-suites:portable:py38:testPipelineJarSparkRunner') commonJobProperties.setGradleSwitches(delegate) } } diff --git a/.test-infra/metrics/grafana/dashboards/perftests_metrics/Python_WordCount_IT_Benchmarks.json b/.test-infra/metrics/grafana/dashboards/perftests_metrics/Python_WordCount_IT_Benchmarks.json index ef49855314535..02e707b68bdf0 100644 --- a/.test-infra/metrics/grafana/dashboards/perftests_metrics/Python_WordCount_IT_Benchmarks.json +++ b/.test-infra/metrics/grafana/dashboards/perftests_metrics/Python_WordCount_IT_Benchmarks.json @@ -224,7 +224,7 @@ "timeFrom": null, "timeRegions": [], "timeShift": null, - "title": "WordCountIT Batch 1Gb Files - py37", + "title": "WordCountIT Batch 1Gb Files - py38", "tooltip": { "shared": true, "sort": 0, diff --git a/.test-infra/tools/README.md b/.test-infra/tools/README.md index df6ab9525e9a2..758c3885a8141 100644 --- a/.test-infra/tools/README.md +++ b/.test-infra/tools/README.md @@ -47,7 +47,7 @@ Example: - Original ```bash -python_versions_arr=("3.8.16" "3.7.16" "3.9.16" "3.10.10") +python_versions_arr=("3.8.16" "3.9.16" "3.10.10" "3.11.4") ``` - Change diff --git a/.test-infra/tools/python_installer.sh b/.test-infra/tools/python_installer.sh index 
0b40eb12b993c..b1b05e597cb3b 100644 --- a/.test-infra/tools/python_installer.sh +++ b/.test-infra/tools/python_installer.sh @@ -20,7 +20,7 @@ set -euo pipefail # Variable containing the python versions to install -python_versions_arr=("3.8.16" "3.7.16" "3.9.16" "3.10.10") +python_versions_arr=("3.8.16" "3.9.16" "3.10.10" "3.11.4") # Install pyenv dependencies. pyenv_dep(){ diff --git a/build.gradle.kts b/build.gradle.kts index 77520c3ce7233..6ba7ebeaca482 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -463,7 +463,6 @@ tasks.register("playgroundPreCommit") { tasks.register("pythonPreCommit") { dependsOn(":sdks:python:test-suites:tox:pycommon:preCommitPyCommon") - dependsOn(":sdks:python:test-suites:tox:py37:preCommitPy37") dependsOn(":sdks:python:test-suites:tox:py38:preCommitPy38") dependsOn(":sdks:python:test-suites:tox:py39:preCommitPy39") dependsOn(":sdks:python:test-suites:tox:py310:preCommitPy310") @@ -481,7 +480,6 @@ tasks.register("pythonDocsPreCommit") { } tasks.register("pythonDockerBuildPreCommit") { - dependsOn(":sdks:python:container:py37:docker") dependsOn(":sdks:python:container:py38:docker") dependsOn(":sdks:python:container:py39:docker") dependsOn(":sdks:python:container:py310:docker") @@ -490,25 +488,13 @@ tasks.register("pythonDockerBuildPreCommit") { tasks.register("pythonLintPreCommit") { // TODO(https://github.com/apache/beam/issues/20209): Find a better way to specify lint and formatter tasks without hardcoding py version. - dependsOn(":sdks:python:test-suites:tox:py37:lint") + dependsOn(":sdks:python:test-suites:tox:py38:lint") } tasks.register("pythonFormatterPreCommit") { dependsOn("sdks:python:test-suites:tox:py38:formatter") } -tasks.register("python37PostCommit") { - dependsOn(":sdks:python:test-suites:dataflow:py37:postCommitIT") - dependsOn(":sdks:python:test-suites:direct:py37:postCommitIT") - dependsOn(":sdks:python:test-suites:direct:py37:directRunnerIT") - dependsOn(":sdks:python:test-suites:direct:py37:hdfsIntegrationTest") - dependsOn(":sdks:python:test-suites:direct:py37:azureIntegrationTest") - dependsOn(":sdks:python:test-suites:portable:py37:postCommitPy37") - dependsOn(":sdks:python:test-suites:dataflow:py37:spannerioIT") - dependsOn(":sdks:python:test-suites:direct:py37:spannerioIT") - dependsOn(":sdks:python:test-suites:portable:py37:xlangSpannerIOIT") -} - tasks.register("python38PostCommit") { dependsOn(":sdks:python:test-suites:dataflow:py38:postCommitIT") dependsOn(":sdks:python:test-suites:direct:py38:postCommitIT") @@ -547,12 +533,11 @@ tasks.register("python311PostCommit") { } tasks.register("portablePythonPreCommit") { - dependsOn(":sdks:python:test-suites:portable:py37:preCommitPy37") + dependsOn(":sdks:python:test-suites:portable:py38:preCommitPy38") dependsOn(":sdks:python:test-suites:portable:py311:preCommitPy311") } tasks.register("pythonSparkPostCommit") { - dependsOn(":sdks:python:test-suites:portable:py37:sparkValidatesRunner") dependsOn(":sdks:python:test-suites:portable:py38:sparkValidatesRunner") dependsOn(":sdks:python:test-suites:portable:py39:sparkValidatesRunner") dependsOn(":sdks:python:test-suites:portable:py311:sparkValidatesRunner") diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy index 1e8f0ce5a9db0..4462a9780417b 100644 --- a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy +++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy @@ -2909,10 +2909,10 @@ class 
BeamModulePlugin implements Plugin { mustRunAfter = [ ":runners:flink:${project.ext.latestFlinkVersion}:job-server:shadowJar", ':runners:spark:3:job-server:shadowJar', - ':sdks:python:container:py37:docker', ':sdks:python:container:py38:docker', ':sdks:python:container:py39:docker', ':sdks:python:container:py310:docker', + ':sdks:python:container:py311:docker', ] doLast { // TODO: Figure out GCS credentials and use real GCS input and output. diff --git a/local-env-setup.sh b/local-env-setup.sh index cc041e872cd39..6cd1092023a59 100755 --- a/local-env-setup.sh +++ b/local-env-setup.sh @@ -55,7 +55,7 @@ if [ "$kernelname" = "Linux" ]; then exit fi - for ver in 3.7 3.8 3.9 3.10 3; do + for ver in 3.8 3.9 3.10 3.11 3; do apt install --yes python$ver-venv done @@ -89,7 +89,7 @@ elif [ "$kernelname" = "Darwin" ]; then echo "Installing openjdk@8" brew install openjdk@8 fi - for ver in 3.7 3.8 3.9; do + for ver in 3.8 3.9 3.10 3.11; do if brew ls --versions python@$ver > /dev/null; then echo "python@$ver already installed. Skipping" brew info python@$ver diff --git a/release/src/main/Dockerfile b/release/src/main/Dockerfile index 0dc80450a2a5b..04d39765cfc20 100644 --- a/release/src/main/Dockerfile +++ b/release/src/main/Dockerfile @@ -42,12 +42,11 @@ RUN curl https://pyenv.run | bash && \ echo 'command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"' >> /root/.bashrc && \ echo ''eval "$(pyenv init -)"'' >> /root/.bashrc && \ source /root/.bashrc && \ - pyenv install 3.7.10 && \ pyenv install 3.8.9 && \ pyenv install 3.9.4 && \ pyenv install 3.10.7 && \ pyenv install 3.11.3 && \ - pyenv global 3.8.9 3.7.10 3.9.4 3.10.7 3.11.3 + pyenv global 3.8.9 3.9.4 3.10.7 3.11.3 # Install a Go version >= 1.16 so we can bootstrap higher # Go versions diff --git a/release/src/main/python-release/python_release_automation.sh b/release/src/main/python-release/python_release_automation.sh index 7c036caa58b9a..e245406b57d07 100755 --- a/release/src/main/python-release/python_release_automation.sh +++ b/release/src/main/python-release/python_release_automation.sh @@ -19,7 +19,7 @@ source release/src/main/python-release/run_release_candidate_python_quickstart.sh source release/src/main/python-release/run_release_candidate_python_mobile_gaming.sh -for version in 3.7 3.8 3.9 3.10 3.11 +for version in 3.8 3.9 3.10 3.11 do run_release_candidate_python_quickstart "tar" "python${version}" run_release_candidate_python_mobile_gaming "tar" "python${version}" diff --git a/scripts/ci/release/test/resources/mass_comment.txt b/scripts/ci/release/test/resources/mass_comment.txt index b3a307c9e698c..93468b0c961b4 100644 --- a/scripts/ci/release/test/resources/mass_comment.txt +++ b/scripts/ci/release/test/resources/mass_comment.txt @@ -59,9 +59,10 @@ Run Portable_Python PreCommit Run PostCommit_Java_Dataflow Run PostCommit_Java_DataflowV2 Run PostCommit_Java_Hadoop_Versions -Run Python 3.7 PostCommit Run Python 3.8 PostCommit Run Python 3.9 PostCommit +Run Python 3.10 PostCommit +Run Python 3.11 PostCommit Run Python Dataflow V2 ValidatesRunner Run Python Dataflow ValidatesContainer Run Python Dataflow ValidatesRunner diff --git a/sdks/python/apache_beam/internal/cloudpickle_pickler_test.py b/sdks/python/apache_beam/internal/cloudpickle_pickler_test.py index 1bbf21cfec14d..2c12877aff4e4 100644 --- a/sdks/python/apache_beam/internal/cloudpickle_pickler_test.py +++ b/sdks/python/apache_beam/internal/cloudpickle_pickler_test.py @@ -19,7 +19,6 @@ # pytype: skip-file -import sys import threading import types import unittest @@ 
-106,7 +105,6 @@ def test_dump_and_load_mapping_proxy(self): types.MappingProxyType, type(loads(dumps(types.MappingProxyType({}))))) # pylint: disable=exec-used - @unittest.skipIf(sys.version_info < (3, 7), 'Python 3.7 or above only') def test_dataclass(self): exec( ''' diff --git a/sdks/python/apache_beam/io/filesystem_test.py b/sdks/python/apache_beam/io/filesystem_test.py index 52f0e502a2254..a4d456a366da2 100644 --- a/sdks/python/apache_beam/io/filesystem_test.py +++ b/sdks/python/apache_beam/io/filesystem_test.py @@ -26,7 +26,6 @@ import ntpath import os import posixpath -import sys import tempfile import unittest import zlib @@ -237,11 +236,7 @@ def test_match_glob(self, file_pattern, expected_object_names): expected_num_items) @parameterized.expand([ - param( - os_path=posixpath, - # re.escape does not escape forward slashes since Python 3.7 - # https://docs.python.org/3/whatsnew/3.7.html ("bpo-29995") - sep_re='\\/' if sys.version_info < (3, 7, 0) else '/'), + param(os_path=posixpath, sep_re='/'), param(os_path=ntpath, sep_re='\\\\'), ]) def test_translate_pattern(self, os_path, sep_re): diff --git a/sdks/python/apache_beam/ml/inference/tensorflow_inference_it_test.py b/sdks/python/apache_beam/ml/inference/tensorflow_inference_it_test.py index 9c814062e6ed4..bdc0291dd1ed1 100644 --- a/sdks/python/apache_beam/ml/inference/tensorflow_inference_it_test.py +++ b/sdks/python/apache_beam/ml/inference/tensorflow_inference_it_test.py @@ -18,7 +18,6 @@ """End-to-End test for Tensorflow Inference""" import logging -import sys import unittest import uuid from pathlib import Path @@ -67,10 +66,6 @@ def clear_tf_hub_temp_dir(model_path): rmdir(local_path) -@unittest.skipIf( - sys.version_info.major == 3 and sys.version_info.minor == 7, - "Tensorflow tests on Python 3.7 with Apache Beam 2.47.0 or " - "greater are skipped since tensorflow>=2.12 doesn't support Python 3.7") @unittest.skipIf( tf is None, 'Missing dependencies. ' 'Test depends on tensorflow') diff --git a/sdks/python/apache_beam/typehints/decorators_test.py b/sdks/python/apache_beam/typehints/decorators_test.py index ba46038e472b5..239c9bd570789 100644 --- a/sdks/python/apache_beam/typehints/decorators_test.py +++ b/sdks/python/apache_beam/typehints/decorators_test.py @@ -51,14 +51,8 @@ def fn(a, b=1, *c, **d): self.assertListEqual(list(s.parameters), ['a', 'b', 'c', 'd']) def test_get_signature_builtin(self): - # Tests a builtin function for 3.7+ and fallback result for older versions. 
s = decorators.get_signature(list) - if sys.version_info < (3, 7): - self.assertListEqual( - list(s.parameters), - ['_', '__unknown__varargs', '__unknown__keywords']) - else: - self.assertListEqual(list(s.parameters), ['iterable']) + self.assertListEqual(list(s.parameters), ['iterable']) self.assertEqual(s.return_annotation, List[Any]) def test_from_callable_without_annotations(self): diff --git a/sdks/python/apache_beam/typehints/native_type_compatibility.py b/sdks/python/apache_beam/typehints/native_type_compatibility.py index d03d5db404538..e916f34146f17 100644 --- a/sdks/python/apache_beam/typehints/native_type_compatibility.py +++ b/sdks/python/apache_beam/typehints/native_type_compatibility.py @@ -156,10 +156,7 @@ def is_new_type(typ): return hasattr(typ, '__supertype__') -try: - _ForwardRef = typing.ForwardRef # Python 3.7+ -except AttributeError: - _ForwardRef = typing._ForwardRef +_ForwardRef = typing.ForwardRef # Python 3.7+ def is_forward_ref(typ): diff --git a/sdks/python/apache_beam/typehints/typed_pipeline_test.py b/sdks/python/apache_beam/typehints/typed_pipeline_test.py index 9774a37ac88c3..9cb3fcdbb91db 100644 --- a/sdks/python/apache_beam/typehints/typed_pipeline_test.py +++ b/sdks/python/apache_beam/typehints/typed_pipeline_test.py @@ -64,10 +64,6 @@ def test_non_function(self): result = ['1', '10', '100'] | beam.Map(int, 16) self.assertEqual([1, 16, 256], sorted(result)) - @unittest.skipIf( - sys.version_info < (3, 7, 0), - 'Function signatures for builtins are not available in Python 3 before ' - 'version 3.7.') def test_non_function_fails(self): with self.assertRaises(typehints.TypeCheckError): [1, 2, 3] | beam.Map(str.upper) @@ -888,15 +884,7 @@ def test_pardo_wrapper_builtin_method(self): def test_pardo_wrapper_builtin_type(self): th = beam.ParDo(list).get_type_hints() - if sys.version_info < (3, 7): - self.assertEqual( - th.input_types, - ((typehints.Any, typehints.decorators._ANY_VAR_POSITIONAL), { - '__unknown__keywords': typehints.decorators._ANY_VAR_KEYWORD - })) - else: - # Python 3.7+ supports signatures for builtins like 'list'. - self.assertEqual(th.input_types, ((typehints.Any, ), {})) + self.assertEqual(th.input_types, ((typehints.Any, ), {})) self.assertEqual(th.output_types, ((typehints.Any, ), {})) diff --git a/sdks/python/container/base_image_requirements_manual.txt b/sdks/python/container/base_image_requirements_manual.txt index be917887402b5..a1d80320d42d8 100644 --- a/sdks/python/container/base_image_requirements_manual.txt +++ b/sdks/python/container/base_image_requirements_manual.txt @@ -43,4 +43,4 @@ nose==1.3.7 # For Dataflow internal testing. TODO: remove this. python-snappy;python_version<"3.11" # Optimizes execution of some Beam codepaths. 
scipy scikit-learn -tensorflow>=2.12.0;python_version>="3.8" +tensorflow>=2.12.0 diff --git a/sdks/python/container/build.gradle b/sdks/python/container/build.gradle index da54ef2ebe8a3..96a9c17b9a2a4 100644 --- a/sdks/python/container/build.gradle +++ b/sdks/python/container/build.gradle @@ -36,7 +36,6 @@ goBuild { } tasks.register("buildAll") { - dependsOn ':sdks:python:container:py37:docker' dependsOn ':sdks:python:container:py38:docker' dependsOn ':sdks:python:container:py39:docker' dependsOn ':sdks:python:container:py310:docker' @@ -44,7 +43,6 @@ tasks.register("buildAll") { } tasks.register("pushAll") { - dependsOn ':sdks:python:container:py37:dockerPush' dependsOn ':sdks:python:container:py38:dockerPush' dependsOn ':sdks:python:container:py39:dockerPush' dependsOn ':sdks:python:container:py310:dockerPush' @@ -52,7 +50,6 @@ tasks.register("pushAll") { } tasks.register("generatePythonRequirementsAll") { - dependsOn ':sdks:python:container:py37:generatePythonRequirements' dependsOn ':sdks:python:container:py38:generatePythonRequirements' dependsOn ':sdks:python:container:py39:generatePythonRequirements' dependsOn ':sdks:python:container:py310:generatePythonRequirements' diff --git a/sdks/python/container/py37/base_image_requirements.txt b/sdks/python/container/py37/base_image_requirements.txt deleted file mode 100644 index a514f4b8b1090..0000000000000 --- a/sdks/python/container/py37/base_image_requirements.txt +++ /dev/null @@ -1,135 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Autogenerated requirements file for Apache Beam py37 container image. -# Run ./gradlew :sdks:python:container:generatePythonRequirementsAll to update. -# Do not edit manually, adjust ../base_image_requirements_manual.txt or -# Apache Beam's setup.py instead, and regenerate the list. -# You will need Python interpreters for all versions supported by Beam, see: -# https://s.apache.org/beam-python-dev-wiki -# Reach out to a committer if you need help. 
- -attrs==23.1.0 -beautifulsoup4==4.12.2 -bs4==0.0.1 -cachetools==5.3.0 -certifi==2023.5.7 -cffi==1.15.1 -charset-normalizer==3.1.0 -click==8.1.3 -cloudpickle==2.2.1 -crcmod==1.7 -cryptography==41.0.0 -Cython==0.29.34 -deprecation==2.1.0 -dill==0.3.1.1 -dnspython==2.3.0 -docker==6.1.2 -docopt==0.6.2 -exceptiongroup==1.1.1 -execnet==1.9.0 -fastavro==1.7.4 -fasteners==0.18 -freezegun==1.2.2 -future==0.18.3 -google-api-core==2.11.0 -google-api-python-client==2.86.0 -google-apitools==0.5.31 -google-auth==2.18.0 -google-auth-httplib2==0.1.0 -google-cloud-bigquery==3.10.0 -google-cloud-bigquery-storage==2.19.1 -google-cloud-bigtable==2.17.0 -google-cloud-core==2.3.2 -google-cloud-datastore==2.15.2 -google-cloud-dlp==3.12.1 -google-cloud-language==2.9.1 -google-cloud-profiler==4.0.0 -google-cloud-pubsub==2.17.0 -google-cloud-pubsublite==1.7.0 -google-cloud-recommendations-ai==0.10.3 -google-cloud-spanner==3.34.0 -google-cloud-videointelligence==2.11.1 -google-cloud-vision==3.4.1 -google-crc32c==1.5.0 -google-resumable-media==2.5.0 -googleapis-common-protos==1.59.0 -greenlet==2.0.2 -grpc-google-iam-v1==0.12.6 -grpcio==1.54.2 -grpcio-status==1.54.2 -guppy3==3.1.3 -hdfs==2.7.0 -httplib2==0.22.0 -hypothesis==6.75.3 -idna==3.4 -importlib-metadata==6.6.0 -iniconfig==2.0.0 -joblib==1.2.0 -mmh3==3.1.0 -mock==5.0.2 -nltk==3.8.1 -nose==1.3.7 -numpy==1.21.6 -oauth2client==4.1.3 -objsize==0.6.1 -orjson==3.8.12 -overrides==6.5.0 -packaging==23.1 -pandas==1.3.5 -parameterized==0.9.0 -pluggy==1.0.0 -proto-plus==1.22.2 -protobuf==4.23.0 -psycopg2-binary==2.9.6 -pyarrow==11.0.0 -pyasn1==0.5.0 -pyasn1-modules==0.3.0 -pycparser==2.21 -pydot==1.4.2 -PyHamcrest==2.0.4 -pymongo==4.3.3 -PyMySQL==1.0.3 -pyparsing==3.0.9 -pytest==7.3.1 -pytest-timeout==2.1.0 -pytest-xdist==3.3.0 -python-dateutil==2.8.2 -python-snappy==0.6.1 -pytz==2023.3 -PyYAML==6.0 -regex==2023.5.5 -requests==2.30.0 -requests-mock==1.10.0 -rsa==4.9 -scikit-learn==1.0.2 -scipy==1.7.3 -six==1.16.0 -sortedcontainers==2.4.0 -soupsieve==2.4.1 -SQLAlchemy==1.4.48 -sqlparse==0.4.4 -tenacity==8.2.2 -testcontainers==3.7.1 -threadpoolctl==3.1.0 -tomli==2.0.1 -tqdm==4.65.0 -typing_extensions==4.5.0 -uritemplate==4.1.1 -urllib3==1.26.15 -websocket-client==1.5.1 -wrapt==1.15.0 -zipp==3.15.0 -zstandard==0.21.0 diff --git a/sdks/python/container/py37/build.gradle b/sdks/python/container/py37/build.gradle deleted file mode 100644 index 547163a3514e1..0000000000000 --- a/sdks/python/container/py37/build.gradle +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * License); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -plugins { - id 'base' - id 'org.apache.beam.module' -} -applyDockerNature() -applyPythonNature() - -pythonVersion = '3.7' - -apply from: "../common.gradle" diff --git a/sdks/python/container/run_validatescontainer.sh b/sdks/python/container/run_validatescontainer.sh index 1dd17c59a647a..be16ff619ed64 100755 --- a/sdks/python/container/run_validatescontainer.sh +++ b/sdks/python/container/run_validatescontainer.sh @@ -24,8 +24,6 @@ # REGION -> Region name to use for Dataflow # # Execute from the root of the repository: -# test Python3.7 container: -# ./gradlew :sdks:python:test-suites:dataflow:py37:validatesContainer # test Python3.8 container: # ./gradlew :sdks:python:test-suites:dataflow:py38:validatesContainer # or test all supported python versions together: diff --git a/sdks/python/mypy.ini b/sdks/python/mypy.ini index a628036d6682f..46dea481f9314 100644 --- a/sdks/python/mypy.ini +++ b/sdks/python/mypy.ini @@ -16,7 +16,7 @@ # [mypy] -python_version = 3.7 +python_version = 3.8 ignore_missing_imports = true follow_imports = normal warn_no_return = true diff --git a/sdks/python/setup.py b/sdks/python/setup.py index a62e292085f34..8446107858947 100644 --- a/sdks/python/setup.py +++ b/sdks/python/setup.py @@ -147,7 +147,6 @@ def cythonize(*args, **kwargs): # Exclude 1.5.0 and 1.5.1 because of # https://github.com/pandas-dev/pandas/issues/45725 dataframe_dependency = [ - 'pandas<1.6.0;python_version=="3.7"', 'pandas>=1.4.3,!=1.5.0,!=1.5.1,<1.6;python_version>="3.8"', ] @@ -175,7 +174,7 @@ def get_portability_package_data(): return files -python_requires = '>=3.7' +python_requires = '>=3.8' if sys.version_info.major == 3 and sys.version_info.minor >= 12: warnings.warn( @@ -326,9 +325,7 @@ def get_portability_package_data(): 'interactive': [ 'facets-overview>=1.1.0,<2', 'google-cloud-dataproc>=5.0.0,<6', - # IPython>=8 is not compatible with Python<=3.7 - 'ipython>=7,<8;python_version<="3.7"', - 'ipython>=8,<9;python_version>"3.7"', + 'ipython>=8,<9', 'ipykernel>=6,<7', 'ipywidgets>=8,<9', # Skip version 6.1.13 due to @@ -363,7 +360,6 @@ def get_portability_package_data(): 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: Apache Software License', 'Operating System :: POSIX :: Linux', - 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', diff --git a/sdks/python/test-suites/dataflow/py37/build.gradle b/sdks/python/test-suites/dataflow/py37/build.gradle deleted file mode 100644 index 9f89c61e0a208..0000000000000 --- a/sdks/python/test-suites/dataflow/py37/build.gradle +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * License); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -apply plugin: org.apache.beam.gradle.BeamModulePlugin -applyPythonNature() - -// Required to setup a Python 3 virtualenv and task names. -pythonVersion = '3.7' -apply from: "../common.gradle" diff --git a/sdks/python/test-suites/direct/py37/build.gradle b/sdks/python/test-suites/direct/py37/build.gradle deleted file mode 100644 index bf99f72d429c6..0000000000000 --- a/sdks/python/test-suites/direct/py37/build.gradle +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * License); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -plugins { id 'org.apache.beam.module' } -applyPythonNature() - -// Required to setup a Python 3 virtualenv and task names. -pythonVersion = '3.7' -apply from: '../common.gradle' diff --git a/sdks/python/test-suites/gradle.properties b/sdks/python/test-suites/gradle.properties index 75bdf25b8d482..216bcffa7af3e 100644 --- a/sdks/python/test-suites/gradle.properties +++ b/sdks/python/test-suites/gradle.properties @@ -23,18 +23,18 @@ # dataflow test-suites # (TODO): https://github.com/apache/beam/issues/21971 # Add python 3.10 to dataflow test-suites -dataflow_precommit_it_task_py_versions=3.7,3.11 -dataflow_mongodbio_it_task_py_versions=3.7 -dataflow_chicago_taxi_example_task_py_versions=3.7 +dataflow_precommit_it_task_py_versions=3.8,3.11 +dataflow_mongodbio_it_task_py_versions=3.8 +dataflow_chicago_taxi_example_task_py_versions=3.8 # dataflow runner v1 batch and streaming tests # lowest and highest version supported by dataflow runner v1 -dataflow_validates_runner_batch_tests=3.7,3.9 -dataflow_validates_runner_streaming_tests=3.7,3.9 +dataflow_validates_runner_batch_tests=3.8,3.9 +dataflow_validates_runner_streaming_tests=3.8,3.9 # TODO: Enable following tests after making sure we have enough capacity. -dataflow_validates_runner_batch_tests_V2=3.7,3.11 -dataflow_validates_runner_streaming_tests_V2=3.7,3.11 +dataflow_validates_runner_batch_tests_V2=3.8,3.11 +dataflow_validates_runner_streaming_tests_V2=3.8,3.11 dataflow_examples_postcommit_py_versions=3.11 # TFX_BSL is not yet supported on Python 3.10. 
dataflow_cloudml_benchmark_tests_py_versions=3.9 @@ -43,14 +43,14 @@ direct_mongodbio_it_task_py_versions=3.11 # flink runner test-suites flink_validates_runner_precommit_py_versions=3.11 -flink_validates_runner_postcommit_py_versions=3.7,3.11 -flink_examples_postcommit_py_versions=3.7,3.11 +flink_validates_runner_postcommit_py_versions=3.8,3.11 +flink_examples_postcommit_py_versions=3.8,3.11 # samza runner test-suites -samza_validates_runner_postcommit_py_versions=3.7,3.11 +samza_validates_runner_postcommit_py_versions=3.8,3.11 # spark runner test-suites -spark_examples_postcommit_py_versions=3.7,3.11 +spark_examples_postcommit_py_versions=3.8,3.11 # cross language gcp io postcommit python test suites -cross_language_validates_gcp_py_versions=3.7,3.11 +cross_language_validates_gcp_py_versions=3.8,3.11 diff --git a/sdks/python/test-suites/portable/py37/build.gradle b/sdks/python/test-suites/portable/py37/build.gradle deleted file mode 100644 index f4141db2a2d8d..0000000000000 --- a/sdks/python/test-suites/portable/py37/build.gradle +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * License); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -apply plugin: org.apache.beam.gradle.BeamModulePlugin -applyPythonNature() - -addPortableWordCountTasks() - -// Required to setup a Python 3.7 virtualenv and task names. -pythonVersion = '3.7' -apply from: "../common.gradle" diff --git a/sdks/python/test-suites/tox/py37/build.gradle b/sdks/python/test-suites/tox/py37/build.gradle deleted file mode 100644 index 744ca67506292..0000000000000 --- a/sdks/python/test-suites/tox/py37/build.gradle +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * License); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Unit tests for Python 3.7 - */ - -plugins { id 'org.apache.beam.module' } -applyPythonNature() - -// Required to setup a Python 3 virtualenv and task names. 
-pythonVersion = '3.7' - -def posargs = project.findProperty("posargs") ?: "" - -task lint {} -check.dependsOn lint - -toxTask "lintPy37", "py37-lint", "${posargs}" -lint.dependsOn lintPy37 - -toxTask "mypyPy37", "py37-mypy", "${posargs}" -lint.dependsOn mypyPy37 - -apply from: "../common.gradle" - -// TODO(https://github.com/apache/beam/issues/20051): Remove this once tox uses isolated builds. -testPy37Cython.mustRunAfter testPython37, testPy37Cloud diff --git a/sdks/python/tox.ini b/sdks/python/tox.ini index e3ff948aff7e1..e44bcd225681c 100644 --- a/sdks/python/tox.ini +++ b/sdks/python/tox.ini @@ -17,7 +17,7 @@ [tox] # new environments will be excluded by default unless explicitly added to envlist. -envlist = py37,py38,py39,py310,py311,py37-{cloud,cython,lint,mypy,dask},py38-{cloud,cython,docs,cloudcoverage,dask},py39-{cloud,cython},py310-{cloud,cython,dask},py311-{cloud,cython,dask},whitespacelint +envlist = py38,py39,py310,py311,py38-{cloud,cython,docs,cloudcoverage,dask},py39-{cloud,cython},py310-{cloud,cython,dask},py311-{cloud,cython,dask},whitespacelint toxworkdir = {toxinidir}/target/{env:ENV_NAME:.tox} [pycodestyle] @@ -69,19 +69,19 @@ commands_post = bash {toxinidir}/scripts/run_tox_cleanup.sh commands = false {envname} is misconfigured -[testenv:py{37,38,39,310,311}] +[testenv:py{38,39,310,311}] commands = python apache_beam/examples/complete/autocomplete_test.py bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}" -[testenv:py{37,38,39,310,311}-win] +[testenv:py{38,39,310,311}-win] commands = python apache_beam/examples/complete/autocomplete_test.py bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}" install_command = {envbindir}/python.exe {envbindir}/pip.exe install --retries 10 {opts} {packages} list_dependencies_command = {envbindir}/python.exe {envbindir}/pip.exe freeze -[testenv:py{37,38,39,310,311}-cython] +[testenv:py{38,39,310,311}-cython] # cython tests are only expected to work in linux (2.x and 3.x) # If we want to add other platforms in the future, it should be: # `platform = linux2|darwin|...` @@ -94,12 +94,12 @@ commands = python apache_beam/examples/complete/autocomplete_test.py bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}" -[testenv:py{37,38,39,310,311}-cloud] +[testenv:py{38,39,310,311}-cloud] extras = test,gcp,interactive,dataframe,aws,azure commands = bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}" -[testenv:py{37,38,39,310,311}-dask] +[testenv:py{38,39,310,311}-dask] extras = test,dask commands = bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}" @@ -117,7 +117,7 @@ commands = ./codecov -F python -rm codecov -[testenv:py37-lint] +[testenv:py38-lint] # Don't set TMPDIR to avoid "AF_UNIX path too long" errors in pylint. setenv = # keep the version of pylint in sync with the 'rev' in .pre-commit-config.yaml @@ -139,7 +139,7 @@ deps = commands = time {toxinidir}/scripts/run_whitespacelint.sh -[testenv:py37-mypy] +[testenv:py38-mypy] deps = -r build-requirements.txt mypy==0.790 @@ -283,7 +283,7 @@ extras = test commands = bash {toxinidir}/scripts/pytest_validates_runner.sh {envname} {toxinidir}/apache_beam/runners/portability/spark_runner_test.py {posargs} -[testenv:py{37,38,39,310}-pyarrow-{3,4,5,6,7,8,9}] +[testenv:py{38,39,310}-pyarrow-{3,4,5,6,7,8,9}] deps = 3: pyarrow>=3,<4 4: pyarrow>=4,<5 @@ -300,7 +300,7 @@ commands = # Allow exit code 5 (no tests run) so that we can run this command safely on arbitrary subdirectories. 
/bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_pyarrow {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret' -[testenv:py{37,38,39,310,311}-pyarrow-{10,11}] +[testenv:py{38,39,310,311}-pyarrow-{10,11}] deps = 10: pyarrow>=10,<11 11: pyarrow>=11,<12 @@ -313,7 +313,7 @@ commands = /bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_pyarrow {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret' -[testenv:py{37,38,39,310,311}-pandas-{14,15}] +[testenv:py{38,39,310,311}-pandas-{14,15}] deps = 14: pandas>=1.4.3,<1.5.0 # Exclude 1.5.0 and 1.5.1 because of https://github.com/pandas-dev/pandas/issues/45725 @@ -324,7 +324,7 @@ commands = # Run all DataFrame API unit tests bash {toxinidir}/scripts/run_pytest.sh {envname} 'apache_beam/dataframe' -[testenv:py{37,38,39,310,311}-pytorch-{19,110,111,112,113}] +[testenv:py{38,39,310,311}-pytorch-{19,110,111,112,113}] deps = -r build-requirements.txt 19: torch>=1.9.0,<1.10.0 @@ -353,7 +353,7 @@ commands = /bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_pytorch {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret' # TODO(https://github.com/apache/beam/issues/25796) - uncomment onnx tox task in tox/py38/build.gradle once onnx supports protobuf 4.x.x -[testenv:py{37,38,39,310}-onnx-113] +[testenv:py{38,39,310}-onnx-113] # TODO(https://github.com/apache/beam/issues/25443) # apparently tox has problem when substitution key has single value. Change back to -onnx-{113,...} # when multiple onnx versions are tested. @@ -384,7 +384,7 @@ commands = # Allow exit code 5 (no tests run) so that we can run this command safely on arbitrary subdirectories. /bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_tf {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret' -[testenv:py{37,38,39,310}-xgboost-{160,170}] +[testenv:py{38,39,310}-xgboost-{160,170}] deps = -r build-requirements.txt 160: diff --git a/settings.gradle.kts b/settings.gradle.kts index ddc61e1406f9a..591c1b78833cd 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -256,32 +256,27 @@ include(":sdks:python") include(":sdks:python:apache_beam:testing:load_tests") include(":sdks:python:apache_beam:testing:benchmarks:nexmark") include(":sdks:python:container") -include(":sdks:python:container:py37") include(":sdks:python:container:py38") include(":sdks:python:container:py39") include(":sdks:python:container:py310") include(":sdks:python:container:py311") include(":sdks:python:expansion-service-container") include(":sdks:python:test-suites:dataflow") -include(":sdks:python:test-suites:dataflow:py37") include(":sdks:python:test-suites:dataflow:py38") include(":sdks:python:test-suites:dataflow:py39") include(":sdks:python:test-suites:dataflow:py310") include(":sdks:python:test-suites:dataflow:py311") include(":sdks:python:test-suites:direct") -include(":sdks:python:test-suites:direct:py37") include(":sdks:python:test-suites:direct:py38") include(":sdks:python:test-suites:direct:py39") include(":sdks:python:test-suites:direct:py310") include(":sdks:python:test-suites:direct:py311") include(":sdks:python:test-suites:direct:xlang") -include(":sdks:python:test-suites:portable:py37") include(":sdks:python:test-suites:portable:py38") include(":sdks:python:test-suites:portable:py39") include(":sdks:python:test-suites:portable:py310") include(":sdks:python:test-suites:portable:py311") include(":sdks:python:test-suites:tox:pycommon") 
-include(":sdks:python:test-suites:tox:py37") include(":sdks:python:test-suites:tox:py38") include(":sdks:python:test-suites:tox:py39") include(":sdks:python:test-suites:tox:py310") diff --git a/website/www/site/content/en/documentation/runtime/environments.md b/website/www/site/content/en/documentation/runtime/environments.md index 78c7dc32a49d3..554d471da98ec 100644 --- a/website/www/site/content/en/documentation/runtime/environments.md +++ b/website/www/site/content/en/documentation/runtime/environments.md @@ -115,10 +115,10 @@ This method requires building image artifacts from Beam source. For additional i ./gradlew :sdks:java:container:java17:docker ./gradlew :sdks:go:container:docker ./gradlew :sdks:python:container:py36:docker - ./gradlew :sdks:python:container:py37:docker ./gradlew :sdks:python:container:py38:docker ./gradlew :sdks:python:container:py39:docker ./gradlew :sdks:python:container:py310:docker + ./gradlew :sdks:python:container:py311:docker # Shortcut for building all Python SDKs ./gradlew :sdks:python:container buildAll diff --git a/website/www/site/content/en/get-started/quickstart-py.md b/website/www/site/content/en/get-started/quickstart-py.md index bd6fd3eaa0db0..57cb726ef7d20 100644 --- a/website/www/site/content/en/get-started/quickstart-py.md +++ b/website/www/site/content/en/get-started/quickstart-py.md @@ -23,7 +23,7 @@ If you're interested in contributing to the Apache Beam Python codebase, see the {{< toc >}} -The Python SDK supports Python 3.7, 3.8, 3.9 and 3.10. Beam 2.38.0 was the last release with support for Python 3.6. +The Python SDK supports Python 3.8, 3.9, 3.10 and 3.11. Beam 2.48.0 was the last release with support for Python 3.7. ## Set up your environment From e80eaac20247d1eeb147427f81de76569bacd22c Mon Sep 17 00:00:00 2001 From: Anand Inguva Date: Tue, 20 Jun 2023 17:54:00 -0400 Subject: [PATCH 2/8] Fix few places where py37 was removed --- .../setup-default-test-properties/test-properties.json | 4 ++-- sdks/python/tox.ini | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/actions/setup-default-test-properties/test-properties.json b/.github/actions/setup-default-test-properties/test-properties.json index 2bac60d4abbe5..c4ac53f7e873b 100644 --- a/.github/actions/setup-default-test-properties/test-properties.json +++ b/.github/actions/setup-default-test-properties/test-properties.json @@ -3,8 +3,8 @@ "ALL_SUPPORTED_VERSIONS": ["3.8", "3.9", "3.10", "3.11"], "LOWEST_SUPPORTED": ["3.8"], "HIGHEST_SUPPORTED": ["3.11"], - "ESSENTIAL_VERSIONS": ["3.11"], - "CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS": ["3.11"], + "ESSENTIAL_VERSIONS": ["3.8", "3.11"], + "CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS": ["3.8", "3.11"], "CROSS_LANGUAGE_VALIDATES_RUNNER_DATAFLOW_USING_SQL_PYTHON_VERSIONS": ["3.11"], "VALIDATES_CONTAINER_DATAFLOW_PYTHON_VERSIONS": ["3.8", "3.9", "3.10", "3.11" ] "LOAD_TEST_PYTHON_VERSION": "3.8", diff --git a/sdks/python/tox.ini b/sdks/python/tox.ini index e44bcd225681c..b2f784aada505 100644 --- a/sdks/python/tox.ini +++ b/sdks/python/tox.ini @@ -17,7 +17,7 @@ [tox] # new environments will be excluded by default unless explicitly added to envlist. 
-envlist = py38,py39,py310,py311,py38-{cloud,cython,docs,cloudcoverage,dask},py39-{cloud,cython},py310-{cloud,cython,dask},py311-{cloud,cython,dask},whitespacelint +envlist = py38,py39,py310,py311,py38-{cloud,cython,docs,lint,mypy,cloudcoverage,dask},py39-{cloud,cython},py310-{cloud,cython,dask},py311-{cloud,cython,dask},whitespacelint toxworkdir = {toxinidir}/target/{env:ENV_NAME:.tox} [pycodestyle] From ef271db872e05cfbf2c28e44bb31349ddcdf2851 Mon Sep 17 00:00:00 2001 From: Anand Inguva Date: Tue, 20 Jun 2023 17:57:16 -0400 Subject: [PATCH 3/8] Add notes to changes.MD --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 117274880d537..8ec9a89b78b39 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -72,7 +72,7 @@ ## Deprecations -* X behavior is deprecated and will be removed in X versions ([#X](https://github.com/apache/beam/issues/X)). +* Remove Python 3.7 support. ([#26447](https://github.com/apache/beam/issues/26447)) ## Bugfixes From a991134cccaf223c6f513aafaf8d8f87a8a88bdd Mon Sep 17 00:00:00 2001 From: Anand Inguva Date: Tue, 20 Jun 2023 19:00:11 -0400 Subject: [PATCH 4/8] fix pylint mypy --- .test-infra/jenkins/job_PerformanceTests_Python.groovy | 2 +- sdks/python/test-suites/tox/py38/build.gradle | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/.test-infra/jenkins/job_PerformanceTests_Python.groovy b/.test-infra/jenkins/job_PerformanceTests_Python.groovy index a2544a098cd4f..04c8fc9995307 100644 --- a/.test-infra/jenkins/job_PerformanceTests_Python.groovy +++ b/.test-infra/jenkins/job_PerformanceTests_Python.groovy @@ -30,7 +30,7 @@ def dataflowPipelineArgs = [ ] testConfigurations = [] -pythonVersions = ['37'] +pythonVersions = ['38'] for (pythonVersion in pythonVersions) { testConfigurations.add([ diff --git a/sdks/python/test-suites/tox/py38/build.gradle b/sdks/python/test-suites/tox/py38/build.gradle index 77f4511b53d00..840135142bdcb 100644 --- a/sdks/python/test-suites/tox/py38/build.gradle +++ b/sdks/python/test-suites/tox/py38/build.gradle @@ -154,3 +154,10 @@ task copyTsSource(type: Copy) { jest.dependsOn copyTsSource eslint.dependsOn copyTsSource copyTsSource.dependsOn cleanPython + + +toxTask "lintPy38", "py38-lint", "${posargs}" +lint.dependsOn lintPy38 + +toxTask "mypyPy38", "py38-mypy", "${posargs}" +lint.dependsOn mypyPy38 \ No newline at end of file From cf2366213a07023203135aa0d933f510fab2281a Mon Sep 17 00:00:00 2001 From: Anand Inguva Date: Tue, 20 Jun 2023 19:12:09 -0400 Subject: [PATCH 5/8] Add lint task to py38 --- sdks/python/test-suites/tox/py38/build.gradle | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/sdks/python/test-suites/tox/py38/build.gradle b/sdks/python/test-suites/tox/py38/build.gradle index 840135142bdcb..a96e1c5c9f6b3 100644 --- a/sdks/python/test-suites/tox/py38/build.gradle +++ b/sdks/python/test-suites/tox/py38/build.gradle @@ -29,13 +29,23 @@ pythonVersion = '3.8' toxTask "formatter", "py3-yapf-check" check.dependsOn formatter +// TODO(BEAM-12000): Move tasks that aren't specific to 3.8 to Py 3.9. +def posargs = project.findProperty("posargs") ?: "" + +task lint {} +check.dependsOn lint + +toxTask "lintPy38", "py38-lint", "${posargs}" +lint.dependsOn lintPy38 + +toxTask "mypyPy38", "py38-mypy", "${posargs}" +lint.dependsOn mypyPy38 + apply from: "../common.gradle" // TODO(https://github.com/apache/beam/issues/20051): Remove this once tox uses isolated builds. 
testPy38Cython.mustRunAfter testPython38, testPy38CloudCoverage -// TODO(BEAM-12000): Move tasks that aren't specific to 3.8 to Py 3.9. -def posargs = project.findProperty("posargs") ?: "" // PyCoverage Precommit runs test suites that evaluate test coverage and compatibility of // particular dependencies. It is exercised on a single Python version. @@ -154,10 +164,3 @@ task copyTsSource(type: Copy) { jest.dependsOn copyTsSource eslint.dependsOn copyTsSource copyTsSource.dependsOn cleanPython - - -toxTask "lintPy38", "py38-lint", "${posargs}" -lint.dependsOn lintPy38 - -toxTask "mypyPy38", "py38-mypy", "${posargs}" -lint.dependsOn mypyPy38 \ No newline at end of file From 55053689e31d5df665787e36b9f3449c397671ec Mon Sep 17 00:00:00 2001 From: Anand Inguva Date: Wed, 21 Jun 2023 01:18:33 -0400 Subject: [PATCH 6/8] Fix error: The u prefix for strings is no longer necessary in Python >=3.0 --- .../metrics/sync/jenkins/syncjenkins.py | 46 +- sdks/python/apache_beam/coders/coders_test.py | 6 +- .../apache_beam/coders/coders_test_common.py | 12 +- .../apache_beam/dataframe/transforms_test.py | 12 +- .../examples/cookbook/bigtableio_it_test.py | 2 +- .../examples/snippets/snippets_test.py | 4 +- .../apache_beam/examples/wordcount_test.py | 2 +- .../internal/cloudpickle_pickler_test.py | 2 +- .../apache_beam/internal/pickler_test.py | 2 +- .../io/external/xlang_parquetio_test.py | 2 +- .../gcp/big_query_query_to_table_it_test.py | 2 +- .../io/gcp/bigquery_read_it_test.py | 34 +- .../io/gcp/bigquery_write_it_test.py | 8 +- .../gcp/datastore/v1new/datastoreio_test.py | 2 +- .../clients/storage/storage_v1_client.py | 1050 ++++++++--------- .../clients/storage/storage_v1_messages.py | 36 +- sdks/python/apache_beam/io/gcp/pubsub_test.py | 4 +- .../apache_beam/transforms/display_test.py | 2 +- .../transforms/environments_test.py | 2 +- .../apache_beam/transforms/external_java.py | 2 +- .../apache_beam/transforms/external_test.py | 18 +- .../apache_beam/transforms/trigger_test.py | 14 +- .../transforms/validate_runner_xlang_test.py | 2 +- .../en/documentation/programming-guide.md | 2 +- 24 files changed, 634 insertions(+), 634 deletions(-) diff --git a/.test-infra/metrics/sync/jenkins/syncjenkins.py b/.test-infra/metrics/sync/jenkins/syncjenkins.py index d421094c0456d..32bbf1fff2e93 100644 --- a/.test-infra/metrics/sync/jenkins/syncjenkins.py +++ b/.test-infra/metrics/sync/jenkins/syncjenkins.py @@ -62,7 +62,7 @@ def fetchJobs(): url = ('https://ci-beam.apache.org/api/json' '?tree=jobs[name,url,lastCompletedBuild[id]]&depth=1') r = requests.get(url) - jobs = r.json()[u'jobs'] + jobs = r.json()['jobs'] result = map(lambda x: (x['name'], int(x['lastCompletedBuild']['id']) if x['lastCompletedBuild'] is not None @@ -122,31 +122,31 @@ def fetchBuildsForJob(jobUrl): f'estimatedDuration,fullDisplayName,actions[{durFields}]') url = f'{jobUrl}api/json?depth=1&tree=builds[{fields}]' r = requests.get(url) - return r.json()[u'builds'] + return r.json()['builds'] def buildRowValuesArray(jobName, build): timings = next((x - for x in build[u'actions'] - if (u'_class' in x) - and (x[u'_class'] == u'jenkins.metrics.impl.TimeInQueueAction')), + for x in build['actions'] + if ('_class' in x) + and (x['_class'] == 'jenkins.metrics.impl.TimeInQueueAction')), None) values = [jobName, - int(build[u'id']), - build[u'url'], - build[u'result'], - datetime.fromtimestamp(build[u'timestamp'] / 1000), - build[u'builtOn'], - build[u'duration'], - build[u'estimatedDuration'], - build[u'fullDisplayName'], - 
timings[u'blockedDurationMillis'] if timings is not None else -1, - timings[u'buildableDurationMillis'] if timings is not None else -1, - timings[u'buildingDurationMillis'] if timings is not None else -1, - timings[u'executingTimeMillis'] if timings is not None else -1, - timings[u'queuingDurationMillis'] if timings is not None else -1, - timings[u'totalDurationMillis'] if timings is not None else -1, - timings[u'waitingDurationMillis'] if timings is not None else -1] + int(build['id']), + build['url'], + build['result'], + datetime.fromtimestamp(build['timestamp'] / 1000), + build['builtOn'], + build['duration'], + build['estimatedDuration'], + build['fullDisplayName'], + timings['blockedDurationMillis'] if timings is not None else -1, + timings['buildableDurationMillis'] if timings is not None else -1, + timings['buildingDurationMillis'] if timings is not None else -1, + timings['executingTimeMillis'] if timings is not None else -1, + timings['queuingDurationMillis'] if timings is not None else -1, + timings['totalDurationMillis'] if timings is not None else -1, + timings['waitingDurationMillis'] if timings is not None else -1] return values @@ -168,16 +168,16 @@ def fetchNewData(): syncedJobId = syncedJobs[newJobName] if newJobName in syncedJobs else -1 if newJobLastBuildId > syncedJobId: builds = fetchBuildsForJob(newJobUrl) - builds = [x for x in builds if int(x[u'id']) > syncedJobId] + builds = [x for x in builds if int(x['id']) > syncedJobId] connection = initConnection() cursor = connection.cursor() for build in builds: - if build[u'building']: + if build['building']: continue; rowValues = buildRowValuesArray(newJobName, build) - print("inserting", newJobName, build[u'id']) + print("inserting", newJobName, build['id']) insertRow(cursor, rowValues) cursor.close() diff --git a/sdks/python/apache_beam/coders/coders_test.py b/sdks/python/apache_beam/coders/coders_test.py index 1d73c5977ea45..1143e9c5d8732 100644 --- a/sdks/python/apache_beam/coders/coders_test.py +++ b/sdks/python/apache_beam/coders/coders_test.py @@ -76,7 +76,7 @@ def test_proto_coder(self): ma = test_message.MessageA() mb = ma.field2.add() mb.field1 = True - ma.field1 = u'hello world' + ma.field1 = 'hello world' expected_coder = coders.ProtoCoder(ma.__class__) real_coder = coders_registry.get_coder(ma.__class__) self.assertEqual(expected_coder, real_coder) @@ -90,7 +90,7 @@ def test_deterministic_proto_coder(self): ma = test_message.MessageA() mb = ma.field2.add() mb.field1 = True - ma.field1 = u'hello world' + ma.field1 = 'hello world' expected_coder = coders.DeterministicProtoCoder(ma.__class__) real_coder = ( coders_registry.get_coder( @@ -130,7 +130,7 @@ class ProtoPlusCoderTest(unittest.TestCase): def test_proto_plus_coder(self): ma = ProtoPlusMessageA() ma.field2 = [ProtoPlusMessageB(field1=True)] - ma.field1 = u'hello world' + ma.field1 = 'hello world' expected_coder = coders.ProtoPlusCoder(ma.__class__) real_coder = coders_registry.get_coder(ma.__class__) self.assertTrue(issubclass(ma.__class__, proto.Message)) diff --git a/sdks/python/apache_beam/coders/coders_test_common.py b/sdks/python/apache_beam/coders/coders_test_common.py index 7adb06cb28701..8b6674aebec70 100644 --- a/sdks/python/apache_beam/coders/coders_test_common.py +++ b/sdks/python/apache_beam/coders/coders_test_common.py @@ -121,7 +121,7 @@ class CodersTest(unittest.TestCase): -1, 1.5, b'str\0str', - u'unicode\0\u0101', + 'unicode\0\u0101', (), (1, 2, 3), [], @@ -407,7 +407,7 @@ def test_tuple_coder(self): 
coders.TupleCoder((coders.PickleCoder(), coders.VarIntCoder())), coders.StrUtf8Coder(), coders.BooleanCoder())), ((1, 2), 'a', True), - ((-2, 5), u'a\u0101' * 100, False), ((300, 1), 'abc\0' * 5, True)) + ((-2, 5), 'a\u0101' * 100, False), ((300, 1), 'abc\0' * 5, True)) def test_tuple_sequence_coder(self): int_tuple_coder = coders.TupleSequenceCoder(coders.VarIntCoder()) @@ -420,7 +420,7 @@ def test_base64_pickle_coder(self): self.check_coder(coders.Base64PickleCoder(), 'a', 1, 1.5, (1, 2, 3)) def test_utf8_coder(self): - self.check_coder(coders.StrUtf8Coder(), 'a', u'ab\u00FF', u'\u0101\0') + self.check_coder(coders.StrUtf8Coder(), 'a', 'ab\u00FF', '\u0101\0') def test_iterable_coder(self): iterable_coder = coders.IterableCoder(coders.VarIntCoder()) @@ -604,10 +604,10 @@ def test_proto_coder(self): ma = test_message.MessageA() mab = ma.field2.add() mab.field1 = True - ma.field1 = u'hello world' + ma.field1 = 'hello world' mb = test_message.MessageA() - mb.field1 = u'beam' + mb.field1 = 'beam' proto_coder = coders.ProtoCoder(ma.__class__) self.check_coder(proto_coder, ma) @@ -666,7 +666,7 @@ def __iter__(self): # Test nested tuple observable. coder = coders.TupleCoder((coders.StrUtf8Coder(), iter_coder)) - value = (u'123', observ) + value = ('123', observ) self.assertEqual( coder.get_impl().get_estimated_size_and_observables(value)[1], [(observ, elem_coder.get_impl())]) diff --git a/sdks/python/apache_beam/dataframe/transforms_test.py b/sdks/python/apache_beam/dataframe/transforms_test.py index b824bc56c2f95..a143606cc9130 100644 --- a/sdks/python/apache_beam/dataframe/transforms_test.py +++ b/sdks/python/apache_beam/dataframe/transforms_test.py @@ -213,8 +213,8 @@ def test_batching_beam_row_input(self): with beam.Pipeline() as p: result = ( p - | beam.Create([(u'Falcon', 380.), (u'Falcon', 370.), (u'Parrot', 24.), - (u'Parrot', 26.)]) + | beam.Create([('Falcon', 380.), ('Falcon', 370.), ('Parrot', 24.), + ('Parrot', 26.)]) | beam.Map(lambda tpl: beam.Row(Animal=tpl[0], Speed=tpl[1])) | transforms.DataframeTransform( lambda df: df.groupby('Animal').mean(), include_indexes=True)) @@ -225,8 +225,8 @@ def test_batching_beam_row_to_dataframe(self): with beam.Pipeline() as p: df = convert.to_dataframe( p - | beam.Create([(u'Falcon', 380.), (u'Falcon', 370.), ( - u'Parrot', 24.), (u'Parrot', 26.)]) + | beam.Create([('Falcon', 380.), ('Falcon', 370.), ('Parrot', 24.), ( + 'Parrot', 26.)]) | beam.Map(lambda tpl: beam.Row(Animal=tpl[0], Speed=tpl[1]))) result = convert.to_pcollection( @@ -260,8 +260,8 @@ def test_unbatching_series(self): with beam.Pipeline() as p: result = ( p - | beam.Create([(u'Falcon', 380.), (u'Falcon', 370.), (u'Parrot', 24.), - (u'Parrot', 26.)]) + | beam.Create([('Falcon', 380.), ('Falcon', 370.), ('Parrot', 24.), + ('Parrot', 26.)]) | beam.Map(lambda tpl: beam.Row(Animal=tpl[0], Speed=tpl[1])) | transforms.DataframeTransform(lambda df: df.Animal)) diff --git a/sdks/python/apache_beam/examples/cookbook/bigtableio_it_test.py b/sdks/python/apache_beam/examples/cookbook/bigtableio_it_test.py index 55effaa11a7a1..98023fbc624c1 100644 --- a/sdks/python/apache_beam/examples/cookbook/bigtableio_it_test.py +++ b/sdks/python/apache_beam/examples/cookbook/bigtableio_it_test.py @@ -54,7 +54,7 @@ import google.cloud.bigtable.instance EXISTING_INSTANCES = [] # type: List[google.cloud.bigtable.instance.Instance] -LABEL_KEY = u'python-bigtable-beam' +LABEL_KEY = 'python-bigtable-beam' label_stamp = datetime.datetime.utcnow().replace(tzinfo=UTC) label_stamp_micros = 
_microseconds_from_datetime(label_stamp) LABELS = {LABEL_KEY: str(label_stamp_micros)} diff --git a/sdks/python/apache_beam/examples/snippets/snippets_test.py b/sdks/python/apache_beam/examples/snippets/snippets_test.py index 0188a2814665e..1e9d0b5121235 100644 --- a/sdks/python/apache_beam/examples/snippets/snippets_test.py +++ b/sdks/python/apache_beam/examples/snippets/snippets_test.py @@ -614,7 +614,7 @@ def test_model_pipelines(self): snippets.model_pipelines() self.assertEqual( self.get_output(result_path), - [str(s) for s in [(u'aa', 1), (u'bb', 2), (u'cc', 3)]]) + [str(s) for s in [('aa', 1), ('bb', 2), ('cc', 3)]]) def test_model_pcollection(self): temp_path = self.create_temp_file() @@ -863,7 +863,7 @@ def _inner(topic=None, subscription=None): input_topic = 'projects/fake-beam-test-project/topic/intopic' input_values = [ TimestampedValue(b'a a b', 1), - TimestampedValue(u'🤷 ¯\\_(ツ)_/¯ b b '.encode('utf-8'), 12), + TimestampedValue('🤷 ¯\\_(ツ)_/¯ b b '.encode('utf-8'), 12), TimestampedValue(b'a b c c c', 20) ] output_topic = 'projects/fake-beam-test-project/topic/outtopic' diff --git a/sdks/python/apache_beam/examples/wordcount_test.py b/sdks/python/apache_beam/examples/wordcount_test.py index 0b658ed5fbbb2..cea0ce368e555 100644 --- a/sdks/python/apache_beam/examples/wordcount_test.py +++ b/sdks/python/apache_beam/examples/wordcount_test.py @@ -38,7 +38,7 @@ class WordCountTest(unittest.TestCase): SAMPLE_TEXT = ( - u'a b c a b a\nacento gráfico\nJuly 30, 2018\n\n aa bb cc aa bb aa') + 'a b c a b a\nacento gráfico\nJuly 30, 2018\n\n aa bb cc aa bb aa') def test_basics(self): test_pipeline = TestPipeline(is_integration_test=True) diff --git a/sdks/python/apache_beam/internal/cloudpickle_pickler_test.py b/sdks/python/apache_beam/internal/cloudpickle_pickler_test.py index 2c12877aff4e4..8ae93d53fd1d3 100644 --- a/sdks/python/apache_beam/internal/cloudpickle_pickler_test.py +++ b/sdks/python/apache_beam/internal/cloudpickle_pickler_test.py @@ -33,7 +33,7 @@ class PicklerTest(unittest.TestCase): NO_MAPPINGPROXYTYPE = not hasattr(types, "MappingProxyType") def test_basics(self): - self.assertEqual([1, 'a', (u'z', )], loads(dumps([1, 'a', (u'z', )]))) + self.assertEqual([1, 'a', ('z', )], loads(dumps([1, 'a', ('z', )]))) fun = lambda x: 'xyz-%s' % x self.assertEqual('xyz-abc', loads(dumps(fun))('abc')) diff --git a/sdks/python/apache_beam/internal/pickler_test.py b/sdks/python/apache_beam/internal/pickler_test.py index a9151cd7e1116..824c4c59c0ce8 100644 --- a/sdks/python/apache_beam/internal/pickler_test.py +++ b/sdks/python/apache_beam/internal/pickler_test.py @@ -34,7 +34,7 @@ class PicklerTest(unittest.TestCase): NO_MAPPINGPROXYTYPE = not hasattr(types, "MappingProxyType") def test_basics(self): - self.assertEqual([1, 'a', (u'z', )], loads(dumps([1, 'a', (u'z', )]))) + self.assertEqual([1, 'a', ('z', )], loads(dumps([1, 'a', ('z', )]))) fun = lambda x: 'xyz-%s' % x self.assertEqual('xyz-abc', loads(dumps(fun))('abc')) diff --git a/sdks/python/apache_beam/io/external/xlang_parquetio_test.py b/sdks/python/apache_beam/io/external/xlang_parquetio_test.py index adcabd3e636b1..b4074d156ce7c 100644 --- a/sdks/python/apache_beam/io/external/xlang_parquetio_test.py +++ b/sdks/python/apache_beam/io/external/xlang_parquetio_test.py @@ -60,7 +60,7 @@ def test_xlang_parquetio_write(self): AvroRecord({"name": "ghi"})]) \ | beam.ExternalTransform( PARQUET_WRITE_URN, - ImplicitSchemaPayloadBuilder({'data': u'/tmp/test.parquet'}), + ImplicitSchemaPayloadBuilder({'data': '/tmp/test.parquet'}), address) 
except RuntimeError as e: if re.search(PARQUET_WRITE_URN, str(e)): diff --git a/sdks/python/apache_beam/io/gcp/big_query_query_to_table_it_test.py b/sdks/python/apache_beam/io/gcp/big_query_query_to_table_it_test.py index 501c2edee4067..e8cd888421972 100644 --- a/sdks/python/apache_beam/io/gcp/big_query_query_to_table_it_test.py +++ b/sdks/python/apache_beam/io/gcp/big_query_query_to_table_it_test.py @@ -83,7 +83,7 @@ NEW_TYPES_QUERY = ('SELECT bytes, date, time FROM [%s.%s]') DIALECT_OUTPUT_SCHEMA = ('{"fields": [{"name": "fruit","type": "STRING"}]}') DIALECT_OUTPUT_VERIFY_QUERY = ('SELECT fruit from `%s`;') -DIALECT_OUTPUT_EXPECTED = [(u'apple', ), (u'orange', )] +DIALECT_OUTPUT_EXPECTED = [('apple', ), ('orange', )] class BigQueryQueryToTableIT(unittest.TestCase): diff --git a/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py b/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py index 98a6d3831907a..248e0849cdd02 100644 --- a/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py +++ b/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py @@ -125,9 +125,9 @@ class ReadTests(BigQueryReadIntegrationTests): }, { 'number': 2, 'str': 'def' }, { - 'number': 3, 'str': u'你好' + 'number': 3, 'str': '你好' }, { - 'number': 4, 'str': u'привет' + 'number': 4, 'str': 'привет' }] @classmethod @@ -309,14 +309,14 @@ def test_table_schema_retrieve_with_direct_read(self): class ReadUsingStorageApiTests(BigQueryReadIntegrationTests): TABLE_DATA = [{ 'number': 1, - 'string': u'你好', + 'string': '你好', 'time': '12:44:31', 'datetime': '2018-12-31 12:44:31', 'rec': None }, { 'number': 4, - 'string': u'привет', + 'string': 'привет', 'time': '12:44:31', 'datetime': '2018-12-31 12:44:31', 'rec': { @@ -425,14 +425,14 @@ def test_iobase_source(self): EXPECTED_TABLE_DATA = [ { 'number': 1, - 'string': u'你好', + 'string': '你好', 'time': datetime.time(12, 44, 31), 'datetime': '2018-12-31T12:44:31', 'rec': None, }, { 'number': 4, - 'string': u'привет', + 'string': 'привет', 'time': datetime.time(12, 44, 31), 'datetime': '2018-12-31T12:44:31', 'rec': { @@ -455,14 +455,14 @@ def test_iobase_source_with_native_datetime(self): EXPECTED_TABLE_DATA = [ { 'number': 1, - 'string': u'你好', + 'string': '你好', 'time': datetime.time(12, 44, 31), 'datetime': datetime.datetime(2018, 12, 31, 12, 44, 31), 'rec': None, }, { 'number': 4, - 'string': u'привет', + 'string': 'привет', 'time': datetime.time(12, 44, 31), 'datetime': datetime.datetime(2018, 12, 31, 12, 44, 31), 'rec': { @@ -497,7 +497,7 @@ def test_iobase_source_with_column_selection(self): def test_iobase_source_with_row_restriction(self): EXPECTED_TABLE_DATA = [{ 'number': 1, - 'string': u'你好', + 'string': '你好', 'time': datetime.time(12, 44, 31), 'datetime': datetime.datetime(2018, 12, 31, 12, 44, 31), 'rec': None @@ -513,7 +513,7 @@ def test_iobase_source_with_row_restriction(self): @pytest.mark.it_postcommit def test_iobase_source_with_column_selection_and_row_restriction(self): - EXPECTED_TABLE_DATA = [{'string': u'привет'}] + EXPECTED_TABLE_DATA = [{'string': 'привет'}] with beam.Pipeline(argv=self.args) as p: result = ( p | 'Read with BigQuery Storage API' >> beam.io.ReadFromBigQuery( @@ -541,14 +541,14 @@ def test_iobase_source_with_query(self): EXPECTED_TABLE_DATA = [ { 'number': 1, - 'string': u'你好', + 'string': '你好', 'time': datetime.time(12, 44, 31), 'datetime': datetime.datetime(2018, 12, 31, 12, 44, 31), 'rec': None, }, { 'number': 4, - 'string': u'привет', + 'string': 'привет', 'time': datetime.time(12, 44, 31), 'datetime': datetime.datetime(2018, 12, 31, 
12, 44, 31), 'rec': { @@ -573,7 +573,7 @@ def test_iobase_source_with_query(self): @pytest.mark.it_postcommit def test_iobase_source_with_query_and_filters(self): - EXPECTED_TABLE_DATA = [{'string': u'привет'}] + EXPECTED_TABLE_DATA = [{'string': 'привет'}] query = StaticValueProvider(str, self.query) with beam.Pipeline(argv=self.args) as p: result = ( @@ -713,9 +713,9 @@ class ReadAllBQTests(BigQueryReadIntegrationTests): }, { 'number': 2, 'str': 'def' }, { - 'number': 3, 'str': u'你好' + 'number': 3, 'str': '你好' }, { - 'number': 4, 'str': u'привет' + 'number': 4, 'str': 'привет' }] TABLE_DATA_2 = [{ @@ -723,9 +723,9 @@ class ReadAllBQTests(BigQueryReadIntegrationTests): }, { 'number': 20, 'str': 'defg' }, { - 'number': 30, 'str': u'你好' + 'number': 30, 'str': '你好' }, { - 'number': 40, 'str': u'привет' + 'number': 40, 'str': 'привет' }] TABLE_DATA_3 = [{'number': 10, 'str': 'abcde', 'extra': 3}] diff --git a/sdks/python/apache_beam/io/gcp/bigquery_write_it_test.py b/sdks/python/apache_beam/io/gcp/bigquery_write_it_test.py index a307e06ac5b85..c73d3ff7e53ef 100644 --- a/sdks/python/apache_beam/io/gcp/bigquery_write_it_test.py +++ b/sdks/python/apache_beam/io/gcp/bigquery_write_it_test.py @@ -127,10 +127,10 @@ def test_big_query_write(self): 'number': 2, 'str': 'def' }, { - 'number': 3, 'str': u'你好' + 'number': 3, 'str': '你好' }, { - 'number': 4, 'str': u'привет' + 'number': 4, 'str': 'привет' }, ] table_schema = { @@ -153,10 +153,10 @@ def test_big_query_write(self): 'def', ), ( 3, - u'你好', + '你好', ), ( 4, - u'привет', + 'привет', )]) ] diff --git a/sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio_test.py b/sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio_test.py index 076a95178d83d..aac99cb8c1f0a 100644 --- a/sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio_test.py +++ b/sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio_test.py @@ -438,7 +438,7 @@ def test_DatastoreWriteLargeEntities(self): datastore_write_fn = WriteToDatastore._DatastoreWriteFn(self._PROJECT) datastore_write_fn.start_bundle() for entity in entities: - entity.set_properties({'large': u'A' * 100000}) + entity.set_properties({'large': 'A' * 100000}) datastore_write_fn.process(entity) datastore_write_fn.finish_bundle() diff --git a/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_client.py b/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_client.py index e5b7c0268ec8a..510970cc22495 100644 --- a/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_client.py +++ b/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_client.py @@ -29,22 +29,22 @@ class StorageV1(base_api.BaseApiClient): """Generated client library for service storage version v1.""" MESSAGES_MODULE = messages - BASE_URL = u'https://www.googleapis.com/storage/v1/' + BASE_URL = 'https://www.googleapis.com/storage/v1/' - _PACKAGE = u'storage' + _PACKAGE = 'storage' _SCOPES = [ - u'https://www.googleapis.com/auth/cloud-platform', - u'https://www.googleapis.com/auth/cloud-platform.read-only', - u'https://www.googleapis.com/auth/devstorage.full_control', - u'https://www.googleapis.com/auth/devstorage.read_only', - u'https://www.googleapis.com/auth/devstorage.read_write' + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/devstorage.full_control', + 'https://www.googleapis.com/auth/devstorage.read_only', + 'https://www.googleapis.com/auth/devstorage.read_write' ] - _VERSION 
= u'v1' + _VERSION = 'v1' _CLIENT_ID = '1042881264118.apps.googleusercontent.com' _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b' _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b' - _CLIENT_CLASS_NAME = u'StorageV1' - _URL_VERSION = u'v1' + _CLIENT_CLASS_NAME = 'StorageV1' + _URL_VERSION = 'v1' _API_KEY = None def __init__( @@ -90,7 +90,7 @@ def __init__( class BucketAccessControlsService(base_api.BaseApiService): """Service class for the bucketAccessControls resource.""" - _NAME = u'bucketAccessControls' + _NAME = 'bucketAccessControls' def __init__(self, client): super().__init__(client) @@ -109,15 +109,15 @@ def Delete(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'DELETE', - method_id=u'storage.bucketAccessControls.delete', - ordered_params=[u'bucket', u'entity'], - path_params=[u'bucket', u'entity'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/acl/{entity}', + http_method='DELETE', + method_id='storage.bucketAccessControls.delete', + ordered_params=['bucket', 'entity'], + path_params=['bucket', 'entity'], + query_params=['userProject'], + relative_path='b/{bucket}/acl/{entity}', request_field='', - request_type_name=u'StorageBucketAccessControlsDeleteRequest', - response_type_name=u'StorageBucketAccessControlsDeleteResponse', + request_type_name='StorageBucketAccessControlsDeleteRequest', + response_type_name='StorageBucketAccessControlsDeleteResponse', supports_download=False, ) @@ -134,15 +134,15 @@ def Get(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.bucketAccessControls.get', - ordered_params=[u'bucket', u'entity'], - path_params=[u'bucket', u'entity'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/acl/{entity}', + http_method='GET', + method_id='storage.bucketAccessControls.get', + ordered_params=['bucket', 'entity'], + path_params=['bucket', 'entity'], + query_params=['userProject'], + relative_path='b/{bucket}/acl/{entity}', request_field='', - request_type_name=u'StorageBucketAccessControlsGetRequest', - response_type_name=u'BucketAccessControl', + request_type_name='StorageBucketAccessControlsGetRequest', + response_type_name='BucketAccessControl', supports_download=False, ) @@ -159,15 +159,15 @@ def Insert(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Insert.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'POST', - method_id=u'storage.bucketAccessControls.insert', - ordered_params=[u'bucket'], - path_params=[u'bucket'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/acl', - request_field=u'bucketAccessControl', - request_type_name=u'StorageBucketAccessControlsInsertRequest', - response_type_name=u'BucketAccessControl', + http_method='POST', + method_id='storage.bucketAccessControls.insert', + ordered_params=['bucket'], + path_params=['bucket'], + query_params=['userProject'], + relative_path='b/{bucket}/acl', + request_field='bucketAccessControl', + request_type_name='StorageBucketAccessControlsInsertRequest', + response_type_name='BucketAccessControl', supports_download=False, ) @@ -184,15 +184,15 @@ def List(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', 
- method_id=u'storage.bucketAccessControls.list', - ordered_params=[u'bucket'], - path_params=[u'bucket'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/acl', + http_method='GET', + method_id='storage.bucketAccessControls.list', + ordered_params=['bucket'], + path_params=['bucket'], + query_params=['userProject'], + relative_path='b/{bucket}/acl', request_field='', - request_type_name=u'StorageBucketAccessControlsListRequest', - response_type_name=u'BucketAccessControls', + request_type_name='StorageBucketAccessControlsListRequest', + response_type_name='BucketAccessControls', supports_download=False, ) @@ -209,15 +209,15 @@ def Patch(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Patch.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'PATCH', - method_id=u'storage.bucketAccessControls.patch', - ordered_params=[u'bucket', u'entity'], - path_params=[u'bucket', u'entity'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/acl/{entity}', - request_field=u'bucketAccessControl', - request_type_name=u'StorageBucketAccessControlsPatchRequest', - response_type_name=u'BucketAccessControl', + http_method='PATCH', + method_id='storage.bucketAccessControls.patch', + ordered_params=['bucket', 'entity'], + path_params=['bucket', 'entity'], + query_params=['userProject'], + relative_path='b/{bucket}/acl/{entity}', + request_field='bucketAccessControl', + request_type_name='StorageBucketAccessControlsPatchRequest', + response_type_name='BucketAccessControl', supports_download=False, ) @@ -234,22 +234,22 @@ def Update(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Update.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'PUT', - method_id=u'storage.bucketAccessControls.update', - ordered_params=[u'bucket', u'entity'], - path_params=[u'bucket', u'entity'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/acl/{entity}', - request_field=u'bucketAccessControl', - request_type_name=u'StorageBucketAccessControlsUpdateRequest', - response_type_name=u'BucketAccessControl', + http_method='PUT', + method_id='storage.bucketAccessControls.update', + ordered_params=['bucket', 'entity'], + path_params=['bucket', 'entity'], + query_params=['userProject'], + relative_path='b/{bucket}/acl/{entity}', + request_field='bucketAccessControl', + request_type_name='StorageBucketAccessControlsUpdateRequest', + response_type_name='BucketAccessControl', supports_download=False, ) class BucketsService(base_api.BaseApiService): """Service class for the buckets resource.""" - _NAME = u'buckets' + _NAME = 'buckets' def __init__(self, client): super().__init__(client) @@ -268,16 +268,16 @@ def Delete(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'DELETE', - method_id=u'storage.buckets.delete', - ordered_params=[u'bucket'], - path_params=[u'bucket'], + http_method='DELETE', + method_id='storage.buckets.delete', + ordered_params=['bucket'], + path_params=['bucket'], query_params= - [u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'userProject'], - relative_path=u'b/{bucket}', + ['ifMetagenerationMatch', 'ifMetagenerationNotMatch', 'userProject'], + relative_path='b/{bucket}', request_field='', - request_type_name=u'StorageBucketsDeleteRequest', - response_type_name=u'StorageBucketsDeleteResponse', + 
request_type_name='StorageBucketsDeleteRequest', + response_type_name='StorageBucketsDeleteResponse', supports_download=False, ) @@ -294,20 +294,20 @@ def Get(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.buckets.get', - ordered_params=[u'bucket'], - path_params=[u'bucket'], + http_method='GET', + method_id='storage.buckets.get', + ordered_params=['bucket'], + path_params=['bucket'], query_params=[ - u'ifMetagenerationMatch', - u'ifMetagenerationNotMatch', - u'projection', - u'userProject' + 'ifMetagenerationMatch', + 'ifMetagenerationNotMatch', + 'projection', + 'userProject' ], - relative_path=u'b/{bucket}', + relative_path='b/{bucket}', request_field='', - request_type_name=u'StorageBucketsGetRequest', - response_type_name=u'Bucket', + request_type_name='StorageBucketsGetRequest', + response_type_name='Bucket', supports_download=False, ) @@ -324,15 +324,15 @@ def GetIamPolicy(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.buckets.getIamPolicy', - ordered_params=[u'bucket'], - path_params=[u'bucket'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/iam', + http_method='GET', + method_id='storage.buckets.getIamPolicy', + ordered_params=['bucket'], + path_params=['bucket'], + query_params=['userProject'], + relative_path='b/{bucket}/iam', request_field='', - request_type_name=u'StorageBucketsGetIamPolicyRequest', - response_type_name=u'Policy', + request_type_name='StorageBucketsGetIamPolicyRequest', + response_type_name='Policy', supports_download=False, ) @@ -349,21 +349,21 @@ def Insert(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Insert.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'POST', - method_id=u'storage.buckets.insert', - ordered_params=[u'project'], + http_method='POST', + method_id='storage.buckets.insert', + ordered_params=['project'], path_params=[], query_params=[ - u'predefinedAcl', - u'predefinedDefaultObjectAcl', - u'project', - u'projection', - u'userProject' + 'predefinedAcl', + 'predefinedDefaultObjectAcl', + 'project', + 'projection', + 'userProject' ], - relative_path=u'b', - request_field=u'bucket', - request_type_name=u'StorageBucketsInsertRequest', - response_type_name=u'Bucket', + relative_path='b', + request_field='bucket', + request_type_name='StorageBucketsInsertRequest', + response_type_name='Bucket', supports_download=False, ) @@ -380,22 +380,22 @@ def List(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.buckets.list', - ordered_params=[u'project'], + http_method='GET', + method_id='storage.buckets.list', + ordered_params=['project'], path_params=[], query_params=[ - u'maxResults', - u'pageToken', - u'prefix', - u'project', - u'projection', - u'userProject' + 'maxResults', + 'pageToken', + 'prefix', + 'project', + 'projection', + 'userProject' ], - relative_path=u'b', + relative_path='b', request_field='', - request_type_name=u'StorageBucketsListRequest', - response_type_name=u'Buckets', + request_type_name='StorageBucketsListRequest', + response_type_name='Buckets', supports_download=False, ) @@ -412,15 
+412,15 @@ def LockRetentionPolicy(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) LockRetentionPolicy.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'POST', - method_id=u'storage.buckets.lockRetentionPolicy', - ordered_params=[u'bucket', u'ifMetagenerationMatch'], - path_params=[u'bucket'], - query_params=[u'ifMetagenerationMatch', u'userProject'], - relative_path=u'b/{bucket}/lockRetentionPolicy', + http_method='POST', + method_id='storage.buckets.lockRetentionPolicy', + ordered_params=['bucket', 'ifMetagenerationMatch'], + path_params=['bucket'], + query_params=['ifMetagenerationMatch', 'userProject'], + relative_path='b/{bucket}/lockRetentionPolicy', request_field='', - request_type_name=u'StorageBucketsLockRetentionPolicyRequest', - response_type_name=u'Bucket', + request_type_name='StorageBucketsLockRetentionPolicyRequest', + response_type_name='Bucket', supports_download=False, ) @@ -437,22 +437,22 @@ def Patch(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Patch.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'PATCH', - method_id=u'storage.buckets.patch', - ordered_params=[u'bucket'], - path_params=[u'bucket'], + http_method='PATCH', + method_id='storage.buckets.patch', + ordered_params=['bucket'], + path_params=['bucket'], query_params=[ - u'ifMetagenerationMatch', - u'ifMetagenerationNotMatch', - u'predefinedAcl', - u'predefinedDefaultObjectAcl', - u'projection', - u'userProject' + 'ifMetagenerationMatch', + 'ifMetagenerationNotMatch', + 'predefinedAcl', + 'predefinedDefaultObjectAcl', + 'projection', + 'userProject' ], - relative_path=u'b/{bucket}', - request_field=u'bucketResource', - request_type_name=u'StorageBucketsPatchRequest', - response_type_name=u'Bucket', + relative_path='b/{bucket}', + request_field='bucketResource', + request_type_name='StorageBucketsPatchRequest', + response_type_name='Bucket', supports_download=False, ) @@ -469,15 +469,15 @@ def SetIamPolicy(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'PUT', - method_id=u'storage.buckets.setIamPolicy', - ordered_params=[u'bucket'], - path_params=[u'bucket'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/iam', - request_field=u'policy', - request_type_name=u'StorageBucketsSetIamPolicyRequest', - response_type_name=u'Policy', + http_method='PUT', + method_id='storage.buckets.setIamPolicy', + ordered_params=['bucket'], + path_params=['bucket'], + query_params=['userProject'], + relative_path='b/{bucket}/iam', + request_field='policy', + request_type_name='StorageBucketsSetIamPolicyRequest', + response_type_name='Policy', supports_download=False, ) @@ -494,15 +494,15 @@ def TestIamPermissions(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.buckets.testIamPermissions', - ordered_params=[u'bucket', u'permissions'], - path_params=[u'bucket'], - query_params=[u'permissions', u'userProject'], - relative_path=u'b/{bucket}/iam/testPermissions', + http_method='GET', + method_id='storage.buckets.testIamPermissions', + ordered_params=['bucket', 'permissions'], + path_params=['bucket'], + query_params=['permissions', 'userProject'], + 
relative_path='b/{bucket}/iam/testPermissions', request_field='', - request_type_name=u'StorageBucketsTestIamPermissionsRequest', - response_type_name=u'TestIamPermissionsResponse', + request_type_name='StorageBucketsTestIamPermissionsRequest', + response_type_name='TestIamPermissionsResponse', supports_download=False, ) @@ -519,29 +519,29 @@ def Update(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Update.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'PUT', - method_id=u'storage.buckets.update', - ordered_params=[u'bucket'], - path_params=[u'bucket'], + http_method='PUT', + method_id='storage.buckets.update', + ordered_params=['bucket'], + path_params=['bucket'], query_params=[ - u'ifMetagenerationMatch', - u'ifMetagenerationNotMatch', - u'predefinedAcl', - u'predefinedDefaultObjectAcl', - u'projection', - u'userProject' + 'ifMetagenerationMatch', + 'ifMetagenerationNotMatch', + 'predefinedAcl', + 'predefinedDefaultObjectAcl', + 'projection', + 'userProject' ], - relative_path=u'b/{bucket}', - request_field=u'bucketResource', - request_type_name=u'StorageBucketsUpdateRequest', - response_type_name=u'Bucket', + relative_path='b/{bucket}', + request_field='bucketResource', + request_type_name='StorageBucketsUpdateRequest', + response_type_name='Bucket', supports_download=False, ) class ChannelsService(base_api.BaseApiService): """Service class for the channels resource.""" - _NAME = u'channels' + _NAME = 'channels' def __init__(self, client): super().__init__(client) @@ -560,22 +560,22 @@ def Stop(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Stop.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'POST', - method_id=u'storage.channels.stop', + http_method='POST', + method_id='storage.channels.stop', ordered_params=[], path_params=[], query_params=[], - relative_path=u'channels/stop', + relative_path='channels/stop', request_field='', - request_type_name=u'Channel', - response_type_name=u'StorageChannelsStopResponse', + request_type_name='Channel', + response_type_name='StorageChannelsStopResponse', supports_download=False, ) class DefaultObjectAccessControlsService(base_api.BaseApiService): """Service class for the defaultObjectAccessControls resource.""" - _NAME = u'defaultObjectAccessControls' + _NAME = 'defaultObjectAccessControls' def __init__(self, client): super().__init__(client) @@ -594,15 +594,15 @@ def Delete(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'DELETE', - method_id=u'storage.defaultObjectAccessControls.delete', - ordered_params=[u'bucket', u'entity'], - path_params=[u'bucket', u'entity'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/defaultObjectAcl/{entity}', + http_method='DELETE', + method_id='storage.defaultObjectAccessControls.delete', + ordered_params=['bucket', 'entity'], + path_params=['bucket', 'entity'], + query_params=['userProject'], + relative_path='b/{bucket}/defaultObjectAcl/{entity}', request_field='', - request_type_name=u'StorageDefaultObjectAccessControlsDeleteRequest', - response_type_name=u'StorageDefaultObjectAccessControlsDeleteResponse', + request_type_name='StorageDefaultObjectAccessControlsDeleteRequest', + response_type_name='StorageDefaultObjectAccessControlsDeleteResponse', supports_download=False, ) @@ -619,15 +619,15 @@ def Get(self, request, 
global_params=None): return self._RunMethod(config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.defaultObjectAccessControls.get', - ordered_params=[u'bucket', u'entity'], - path_params=[u'bucket', u'entity'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/defaultObjectAcl/{entity}', + http_method='GET', + method_id='storage.defaultObjectAccessControls.get', + ordered_params=['bucket', 'entity'], + path_params=['bucket', 'entity'], + query_params=['userProject'], + relative_path='b/{bucket}/defaultObjectAcl/{entity}', request_field='', - request_type_name=u'StorageDefaultObjectAccessControlsGetRequest', - response_type_name=u'ObjectAccessControl', + request_type_name='StorageDefaultObjectAccessControlsGetRequest', + response_type_name='ObjectAccessControl', supports_download=False, ) @@ -644,15 +644,15 @@ def Insert(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Insert.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'POST', - method_id=u'storage.defaultObjectAccessControls.insert', - ordered_params=[u'bucket'], - path_params=[u'bucket'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/defaultObjectAcl', - request_field=u'objectAccessControl', - request_type_name=u'StorageDefaultObjectAccessControlsInsertRequest', - response_type_name=u'ObjectAccessControl', + http_method='POST', + method_id='storage.defaultObjectAccessControls.insert', + ordered_params=['bucket'], + path_params=['bucket'], + query_params=['userProject'], + relative_path='b/{bucket}/defaultObjectAcl', + request_field='objectAccessControl', + request_type_name='StorageDefaultObjectAccessControlsInsertRequest', + response_type_name='ObjectAccessControl', supports_download=False, ) @@ -669,16 +669,16 @@ def List(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.defaultObjectAccessControls.list', - ordered_params=[u'bucket'], - path_params=[u'bucket'], + http_method='GET', + method_id='storage.defaultObjectAccessControls.list', + ordered_params=['bucket'], + path_params=['bucket'], query_params= - [u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'userProject'], - relative_path=u'b/{bucket}/defaultObjectAcl', + ['ifMetagenerationMatch', 'ifMetagenerationNotMatch', 'userProject'], + relative_path='b/{bucket}/defaultObjectAcl', request_field='', - request_type_name=u'StorageDefaultObjectAccessControlsListRequest', - response_type_name=u'ObjectAccessControls', + request_type_name='StorageDefaultObjectAccessControlsListRequest', + response_type_name='ObjectAccessControls', supports_download=False, ) @@ -695,15 +695,15 @@ def Patch(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Patch.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'PATCH', - method_id=u'storage.defaultObjectAccessControls.patch', - ordered_params=[u'bucket', u'entity'], - path_params=[u'bucket', u'entity'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/defaultObjectAcl/{entity}', - request_field=u'objectAccessControl', - request_type_name=u'StorageDefaultObjectAccessControlsPatchRequest', - response_type_name=u'ObjectAccessControl', + http_method='PATCH', + method_id='storage.defaultObjectAccessControls.patch', + 
ordered_params=['bucket', 'entity'], + path_params=['bucket', 'entity'], + query_params=['userProject'], + relative_path='b/{bucket}/defaultObjectAcl/{entity}', + request_field='objectAccessControl', + request_type_name='StorageDefaultObjectAccessControlsPatchRequest', + response_type_name='ObjectAccessControl', supports_download=False, ) @@ -720,22 +720,22 @@ def Update(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Update.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'PUT', - method_id=u'storage.defaultObjectAccessControls.update', - ordered_params=[u'bucket', u'entity'], - path_params=[u'bucket', u'entity'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/defaultObjectAcl/{entity}', - request_field=u'objectAccessControl', - request_type_name=u'StorageDefaultObjectAccessControlsUpdateRequest', - response_type_name=u'ObjectAccessControl', + http_method='PUT', + method_id='storage.defaultObjectAccessControls.update', + ordered_params=['bucket', 'entity'], + path_params=['bucket', 'entity'], + query_params=['userProject'], + relative_path='b/{bucket}/defaultObjectAcl/{entity}', + request_field='objectAccessControl', + request_type_name='StorageDefaultObjectAccessControlsUpdateRequest', + response_type_name='ObjectAccessControl', supports_download=False, ) class NotificationsService(base_api.BaseApiService): """Service class for the notifications resource.""" - _NAME = u'notifications' + _NAME = 'notifications' def __init__(self, client): super().__init__(client) @@ -754,15 +754,15 @@ def Delete(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'DELETE', - method_id=u'storage.notifications.delete', - ordered_params=[u'bucket', u'notification'], - path_params=[u'bucket', u'notification'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/notificationConfigs/{notification}', + http_method='DELETE', + method_id='storage.notifications.delete', + ordered_params=['bucket', 'notification'], + path_params=['bucket', 'notification'], + query_params=['userProject'], + relative_path='b/{bucket}/notificationConfigs/{notification}', request_field='', - request_type_name=u'StorageNotificationsDeleteRequest', - response_type_name=u'StorageNotificationsDeleteResponse', + request_type_name='StorageNotificationsDeleteRequest', + response_type_name='StorageNotificationsDeleteResponse', supports_download=False, ) @@ -779,15 +779,15 @@ def Get(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.notifications.get', - ordered_params=[u'bucket', u'notification'], - path_params=[u'bucket', u'notification'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/notificationConfigs/{notification}', + http_method='GET', + method_id='storage.notifications.get', + ordered_params=['bucket', 'notification'], + path_params=['bucket', 'notification'], + query_params=['userProject'], + relative_path='b/{bucket}/notificationConfigs/{notification}', request_field='', - request_type_name=u'StorageNotificationsGetRequest', - response_type_name=u'Notification', + request_type_name='StorageNotificationsGetRequest', + response_type_name='Notification', supports_download=False, ) @@ -804,15 +804,15 @@ def Insert(self, request, global_params=None): return 
self._RunMethod(config, request, global_params=global_params) Insert.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'POST', - method_id=u'storage.notifications.insert', - ordered_params=[u'bucket'], - path_params=[u'bucket'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/notificationConfigs', - request_field=u'notification', - request_type_name=u'StorageNotificationsInsertRequest', - response_type_name=u'Notification', + http_method='POST', + method_id='storage.notifications.insert', + ordered_params=['bucket'], + path_params=['bucket'], + query_params=['userProject'], + relative_path='b/{bucket}/notificationConfigs', + request_field='notification', + request_type_name='StorageNotificationsInsertRequest', + response_type_name='Notification', supports_download=False, ) @@ -829,22 +829,22 @@ def List(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.notifications.list', - ordered_params=[u'bucket'], - path_params=[u'bucket'], - query_params=[u'userProject'], - relative_path=u'b/{bucket}/notificationConfigs', + http_method='GET', + method_id='storage.notifications.list', + ordered_params=['bucket'], + path_params=['bucket'], + query_params=['userProject'], + relative_path='b/{bucket}/notificationConfigs', request_field='', - request_type_name=u'StorageNotificationsListRequest', - response_type_name=u'Notifications', + request_type_name='StorageNotificationsListRequest', + response_type_name='Notifications', supports_download=False, ) class ObjectAccessControlsService(base_api.BaseApiService): """Service class for the objectAccessControls resource.""" - _NAME = u'objectAccessControls' + _NAME = 'objectAccessControls' def __init__(self, client): super().__init__(client) @@ -863,15 +863,15 @@ def Delete(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'DELETE', - method_id=u'storage.objectAccessControls.delete', - ordered_params=[u'bucket', u'object', u'entity'], - path_params=[u'bucket', u'entity', u'object'], - query_params=[u'generation', u'userProject'], - relative_path=u'b/{bucket}/o/{object}/acl/{entity}', + http_method='DELETE', + method_id='storage.objectAccessControls.delete', + ordered_params=['bucket', 'object', 'entity'], + path_params=['bucket', 'entity', 'object'], + query_params=['generation', 'userProject'], + relative_path='b/{bucket}/o/{object}/acl/{entity}', request_field='', - request_type_name=u'StorageObjectAccessControlsDeleteRequest', - response_type_name=u'StorageObjectAccessControlsDeleteResponse', + request_type_name='StorageObjectAccessControlsDeleteRequest', + response_type_name='StorageObjectAccessControlsDeleteResponse', supports_download=False, ) @@ -888,15 +888,15 @@ def Get(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.objectAccessControls.get', - ordered_params=[u'bucket', u'object', u'entity'], - path_params=[u'bucket', u'entity', u'object'], - query_params=[u'generation', u'userProject'], - relative_path=u'b/{bucket}/o/{object}/acl/{entity}', + http_method='GET', + method_id='storage.objectAccessControls.get', + ordered_params=['bucket', 'object', 'entity'], + path_params=['bucket', 'entity', 'object'], 
+ query_params=['generation', 'userProject'], + relative_path='b/{bucket}/o/{object}/acl/{entity}', request_field='', - request_type_name=u'StorageObjectAccessControlsGetRequest', - response_type_name=u'ObjectAccessControl', + request_type_name='StorageObjectAccessControlsGetRequest', + response_type_name='ObjectAccessControl', supports_download=False, ) @@ -913,15 +913,15 @@ def Insert(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Insert.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'POST', - method_id=u'storage.objectAccessControls.insert', - ordered_params=[u'bucket', u'object'], - path_params=[u'bucket', u'object'], - query_params=[u'generation', u'userProject'], - relative_path=u'b/{bucket}/o/{object}/acl', - request_field=u'objectAccessControl', - request_type_name=u'StorageObjectAccessControlsInsertRequest', - response_type_name=u'ObjectAccessControl', + http_method='POST', + method_id='storage.objectAccessControls.insert', + ordered_params=['bucket', 'object'], + path_params=['bucket', 'object'], + query_params=['generation', 'userProject'], + relative_path='b/{bucket}/o/{object}/acl', + request_field='objectAccessControl', + request_type_name='StorageObjectAccessControlsInsertRequest', + response_type_name='ObjectAccessControl', supports_download=False, ) @@ -938,15 +938,15 @@ def List(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.objectAccessControls.list', - ordered_params=[u'bucket', u'object'], - path_params=[u'bucket', u'object'], - query_params=[u'generation', u'userProject'], - relative_path=u'b/{bucket}/o/{object}/acl', + http_method='GET', + method_id='storage.objectAccessControls.list', + ordered_params=['bucket', 'object'], + path_params=['bucket', 'object'], + query_params=['generation', 'userProject'], + relative_path='b/{bucket}/o/{object}/acl', request_field='', - request_type_name=u'StorageObjectAccessControlsListRequest', - response_type_name=u'ObjectAccessControls', + request_type_name='StorageObjectAccessControlsListRequest', + response_type_name='ObjectAccessControls', supports_download=False, ) @@ -963,15 +963,15 @@ def Patch(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Patch.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'PATCH', - method_id=u'storage.objectAccessControls.patch', - ordered_params=[u'bucket', u'object', u'entity'], - path_params=[u'bucket', u'entity', u'object'], - query_params=[u'generation', u'userProject'], - relative_path=u'b/{bucket}/o/{object}/acl/{entity}', - request_field=u'objectAccessControl', - request_type_name=u'StorageObjectAccessControlsPatchRequest', - response_type_name=u'ObjectAccessControl', + http_method='PATCH', + method_id='storage.objectAccessControls.patch', + ordered_params=['bucket', 'object', 'entity'], + path_params=['bucket', 'entity', 'object'], + query_params=['generation', 'userProject'], + relative_path='b/{bucket}/o/{object}/acl/{entity}', + request_field='objectAccessControl', + request_type_name='StorageObjectAccessControlsPatchRequest', + response_type_name='ObjectAccessControl', supports_download=False, ) @@ -988,22 +988,22 @@ def Update(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Update.method_config = lambda: base_api.ApiMethodInfo( - 
http_method=u'PUT', - method_id=u'storage.objectAccessControls.update', - ordered_params=[u'bucket', u'object', u'entity'], - path_params=[u'bucket', u'entity', u'object'], - query_params=[u'generation', u'userProject'], - relative_path=u'b/{bucket}/o/{object}/acl/{entity}', - request_field=u'objectAccessControl', - request_type_name=u'StorageObjectAccessControlsUpdateRequest', - response_type_name=u'ObjectAccessControl', + http_method='PUT', + method_id='storage.objectAccessControls.update', + ordered_params=['bucket', 'object', 'entity'], + path_params=['bucket', 'entity', 'object'], + query_params=['generation', 'userProject'], + relative_path='b/{bucket}/o/{object}/acl/{entity}', + request_field='objectAccessControl', + request_type_name='StorageObjectAccessControlsUpdateRequest', + response_type_name='ObjectAccessControl', supports_download=False, ) class ObjectsService(base_api.BaseApiService): """Service class for the objects resource.""" - _NAME = u'objects' + _NAME = 'objects' def __init__(self, client): super().__init__(client) @@ -1012,9 +1012,9 @@ def __init__(self, client): accept=['*/*'], max_size=None, resumable_multipart=True, - resumable_path=u'/resumable/upload/storage/v1/b/{bucket}/o', + resumable_path='/resumable/upload/storage/v1/b/{bucket}/o', simple_multipart=True, - simple_path=u'/upload/storage/v1/b/{bucket}/o', + simple_path='/upload/storage/v1/b/{bucket}/o', ), } @@ -1031,21 +1031,21 @@ def Compose(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Compose.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'POST', - method_id=u'storage.objects.compose', - ordered_params=[u'destinationBucket', u'destinationObject'], - path_params=[u'destinationBucket', u'destinationObject'], + http_method='POST', + method_id='storage.objects.compose', + ordered_params=['destinationBucket', 'destinationObject'], + path_params=['destinationBucket', 'destinationObject'], query_params=[ - u'destinationPredefinedAcl', - u'ifGenerationMatch', - u'ifMetagenerationMatch', - u'kmsKeyName', - u'userProject' + 'destinationPredefinedAcl', + 'ifGenerationMatch', + 'ifMetagenerationMatch', + 'kmsKeyName', + 'userProject' ], - relative_path=u'b/{destinationBucket}/o/{destinationObject}/compose', - request_field=u'composeRequest', - request_type_name=u'StorageObjectsComposeRequest', - response_type_name=u'Object', + relative_path='b/{destinationBucket}/o/{destinationObject}/compose', + request_field='composeRequest', + request_type_name='StorageObjectsComposeRequest', + response_type_name='Object', supports_download=False, ) @@ -1062,39 +1062,39 @@ def Copy(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Copy.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'POST', - method_id=u'storage.objects.copy', + http_method='POST', + method_id='storage.objects.copy', ordered_params=[ - u'sourceBucket', - u'sourceObject', - u'destinationBucket', - u'destinationObject' + 'sourceBucket', + 'sourceObject', + 'destinationBucket', + 'destinationObject' ], path_params=[ - u'destinationBucket', - u'destinationObject', - u'sourceBucket', - u'sourceObject' + 'destinationBucket', + 'destinationObject', + 'sourceBucket', + 'sourceObject' ], query_params=[ - u'destinationPredefinedAcl', - u'ifGenerationMatch', - u'ifGenerationNotMatch', - u'ifMetagenerationMatch', - u'ifMetagenerationNotMatch', - u'ifSourceGenerationMatch', - u'ifSourceGenerationNotMatch', - 
u'ifSourceMetagenerationMatch', - u'ifSourceMetagenerationNotMatch', - u'projection', - u'sourceGeneration', - u'userProject' + 'destinationPredefinedAcl', + 'ifGenerationMatch', + 'ifGenerationNotMatch', + 'ifMetagenerationMatch', + 'ifMetagenerationNotMatch', + 'ifSourceGenerationMatch', + 'ifSourceGenerationNotMatch', + 'ifSourceMetagenerationMatch', + 'ifSourceMetagenerationNotMatch', + 'projection', + 'sourceGeneration', + 'userProject' ], relative_path= - u'b/{sourceBucket}/o/{sourceObject}/copyTo/b/{destinationBucket}/o/{destinationObject}', - request_field=u'object', - request_type_name=u'StorageObjectsCopyRequest', - response_type_name=u'Object', + 'b/{sourceBucket}/o/{sourceObject}/copyTo/b/{destinationBucket}/o/{destinationObject}', + request_field='object', + request_type_name='StorageObjectsCopyRequest', + response_type_name='Object', supports_download=False, ) @@ -1111,22 +1111,22 @@ def Delete(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'DELETE', - method_id=u'storage.objects.delete', - ordered_params=[u'bucket', u'object'], - path_params=[u'bucket', u'object'], + http_method='DELETE', + method_id='storage.objects.delete', + ordered_params=['bucket', 'object'], + path_params=['bucket', 'object'], query_params=[ - u'generation', - u'ifGenerationMatch', - u'ifGenerationNotMatch', - u'ifMetagenerationMatch', - u'ifMetagenerationNotMatch', - u'userProject' + 'generation', + 'ifGenerationMatch', + 'ifGenerationNotMatch', + 'ifMetagenerationMatch', + 'ifMetagenerationNotMatch', + 'userProject' ], - relative_path=u'b/{bucket}/o/{object}', + relative_path='b/{bucket}/o/{object}', request_field='', - request_type_name=u'StorageObjectsDeleteRequest', - response_type_name=u'StorageObjectsDeleteResponse', + request_type_name='StorageObjectsDeleteRequest', + response_type_name='StorageObjectsDeleteResponse', supports_download=False, ) @@ -1146,23 +1146,23 @@ def Get(self, request, global_params=None, download=None): config, request, global_params=global_params, download=download) Get.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.objects.get', - ordered_params=[u'bucket', u'object'], - path_params=[u'bucket', u'object'], + http_method='GET', + method_id='storage.objects.get', + ordered_params=['bucket', 'object'], + path_params=['bucket', 'object'], query_params=[ - u'generation', - u'ifGenerationMatch', - u'ifGenerationNotMatch', - u'ifMetagenerationMatch', - u'ifMetagenerationNotMatch', - u'projection', - u'userProject' + 'generation', + 'ifGenerationMatch', + 'ifGenerationNotMatch', + 'ifMetagenerationMatch', + 'ifMetagenerationNotMatch', + 'projection', + 'userProject' ], - relative_path=u'b/{bucket}/o/{object}', + relative_path='b/{bucket}/o/{object}', request_field='', - request_type_name=u'StorageObjectsGetRequest', - response_type_name=u'Object', + request_type_name='StorageObjectsGetRequest', + response_type_name='Object', supports_download=True, ) @@ -1179,15 +1179,15 @@ def GetIamPolicy(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.objects.getIamPolicy', - ordered_params=[u'bucket', u'object'], - path_params=[u'bucket', u'object'], - query_params=[u'generation', u'userProject'], - relative_path=u'b/{bucket}/o/{object}/iam', + 
http_method='GET', + method_id='storage.objects.getIamPolicy', + ordered_params=['bucket', 'object'], + path_params=['bucket', 'object'], + query_params=['generation', 'userProject'], + relative_path='b/{bucket}/o/{object}/iam', request_field='', - request_type_name=u'StorageObjectsGetIamPolicyRequest', - response_type_name=u'Policy', + request_type_name='StorageObjectsGetIamPolicyRequest', + response_type_name='Policy', supports_download=False, ) @@ -1212,26 +1212,26 @@ def Insert(self, request, global_params=None, upload=None): upload_config=upload_config) Insert.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'POST', - method_id=u'storage.objects.insert', - ordered_params=[u'bucket'], - path_params=[u'bucket'], + http_method='POST', + method_id='storage.objects.insert', + ordered_params=['bucket'], + path_params=['bucket'], query_params=[ - u'contentEncoding', - u'ifGenerationMatch', - u'ifGenerationNotMatch', - u'ifMetagenerationMatch', - u'ifMetagenerationNotMatch', - u'kmsKeyName', - u'name', - u'predefinedAcl', - u'projection', - u'userProject' + 'contentEncoding', + 'ifGenerationMatch', + 'ifGenerationNotMatch', + 'ifMetagenerationMatch', + 'ifMetagenerationNotMatch', + 'kmsKeyName', + 'name', + 'predefinedAcl', + 'projection', + 'userProject' ], - relative_path=u'b/{bucket}/o', - request_field=u'object', - request_type_name=u'StorageObjectsInsertRequest', - response_type_name=u'Object', + relative_path='b/{bucket}/o', + request_field='object', + request_type_name='StorageObjectsInsertRequest', + response_type_name='Object', supports_download=False, ) @@ -1248,24 +1248,24 @@ def List(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.objects.list', - ordered_params=[u'bucket'], - path_params=[u'bucket'], + http_method='GET', + method_id='storage.objects.list', + ordered_params=['bucket'], + path_params=['bucket'], query_params=[ - u'delimiter', - u'includeTrailingDelimiter', - u'maxResults', - u'pageToken', - u'prefix', - u'projection', - u'userProject', - u'versions' + 'delimiter', + 'includeTrailingDelimiter', + 'maxResults', + 'pageToken', + 'prefix', + 'projection', + 'userProject', + 'versions' ], - relative_path=u'b/{bucket}/o', + relative_path='b/{bucket}/o', request_field='', - request_type_name=u'StorageObjectsListRequest', - response_type_name=u'Objects', + request_type_name='StorageObjectsListRequest', + response_type_name='Objects', supports_download=False, ) @@ -1282,24 +1282,24 @@ def Patch(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Patch.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'PATCH', - method_id=u'storage.objects.patch', - ordered_params=[u'bucket', u'object'], - path_params=[u'bucket', u'object'], + http_method='PATCH', + method_id='storage.objects.patch', + ordered_params=['bucket', 'object'], + path_params=['bucket', 'object'], query_params=[ - u'generation', - u'ifGenerationMatch', - u'ifGenerationNotMatch', - u'ifMetagenerationMatch', - u'ifMetagenerationNotMatch', - u'predefinedAcl', - u'projection', - u'userProject' + 'generation', + 'ifGenerationMatch', + 'ifGenerationNotMatch', + 'ifMetagenerationMatch', + 'ifMetagenerationNotMatch', + 'predefinedAcl', + 'projection', + 'userProject' ], - relative_path=u'b/{bucket}/o/{object}', - request_field=u'objectResource', - 
request_type_name=u'StorageObjectsPatchRequest', - response_type_name=u'Object', + relative_path='b/{bucket}/o/{object}', + request_field='objectResource', + request_type_name='StorageObjectsPatchRequest', + response_type_name='Object', supports_download=False, ) @@ -1316,42 +1316,42 @@ def Rewrite(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Rewrite.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'POST', - method_id=u'storage.objects.rewrite', + http_method='POST', + method_id='storage.objects.rewrite', ordered_params=[ - u'sourceBucket', - u'sourceObject', - u'destinationBucket', - u'destinationObject' + 'sourceBucket', + 'sourceObject', + 'destinationBucket', + 'destinationObject' ], path_params=[ - u'destinationBucket', - u'destinationObject', - u'sourceBucket', - u'sourceObject' + 'destinationBucket', + 'destinationObject', + 'sourceBucket', + 'sourceObject' ], query_params=[ - u'destinationKmsKeyName', - u'destinationPredefinedAcl', - u'ifGenerationMatch', - u'ifGenerationNotMatch', - u'ifMetagenerationMatch', - u'ifMetagenerationNotMatch', - u'ifSourceGenerationMatch', - u'ifSourceGenerationNotMatch', - u'ifSourceMetagenerationMatch', - u'ifSourceMetagenerationNotMatch', - u'maxBytesRewrittenPerCall', - u'projection', - u'rewriteToken', - u'sourceGeneration', - u'userProject' + 'destinationKmsKeyName', + 'destinationPredefinedAcl', + 'ifGenerationMatch', + 'ifGenerationNotMatch', + 'ifMetagenerationMatch', + 'ifMetagenerationNotMatch', + 'ifSourceGenerationMatch', + 'ifSourceGenerationNotMatch', + 'ifSourceMetagenerationMatch', + 'ifSourceMetagenerationNotMatch', + 'maxBytesRewrittenPerCall', + 'projection', + 'rewriteToken', + 'sourceGeneration', + 'userProject' ], relative_path= - u'b/{sourceBucket}/o/{sourceObject}/rewriteTo/b/{destinationBucket}/o/{destinationObject}', - request_field=u'object', - request_type_name=u'StorageObjectsRewriteRequest', - response_type_name=u'RewriteResponse', + 'b/{sourceBucket}/o/{sourceObject}/rewriteTo/b/{destinationBucket}/o/{destinationObject}', + request_field='object', + request_type_name='StorageObjectsRewriteRequest', + response_type_name='RewriteResponse', supports_download=False, ) @@ -1368,15 +1368,15 @@ def SetIamPolicy(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'PUT', - method_id=u'storage.objects.setIamPolicy', - ordered_params=[u'bucket', u'object'], - path_params=[u'bucket', u'object'], - query_params=[u'generation', u'userProject'], - relative_path=u'b/{bucket}/o/{object}/iam', - request_field=u'policy', - request_type_name=u'StorageObjectsSetIamPolicyRequest', - response_type_name=u'Policy', + http_method='PUT', + method_id='storage.objects.setIamPolicy', + ordered_params=['bucket', 'object'], + path_params=['bucket', 'object'], + query_params=['generation', 'userProject'], + relative_path='b/{bucket}/o/{object}/iam', + request_field='policy', + request_type_name='StorageObjectsSetIamPolicyRequest', + response_type_name='Policy', supports_download=False, ) @@ -1393,15 +1393,15 @@ def TestIamPermissions(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.objects.testIamPermissions', - ordered_params=[u'bucket', u'object', u'permissions'], - 
path_params=[u'bucket', u'object'], - query_params=[u'generation', u'permissions', u'userProject'], - relative_path=u'b/{bucket}/o/{object}/iam/testPermissions', + http_method='GET', + method_id='storage.objects.testIamPermissions', + ordered_params=['bucket', 'object', 'permissions'], + path_params=['bucket', 'object'], + query_params=['generation', 'permissions', 'userProject'], + relative_path='b/{bucket}/o/{object}/iam/testPermissions', request_field='', - request_type_name=u'StorageObjectsTestIamPermissionsRequest', - response_type_name=u'TestIamPermissionsResponse', + request_type_name='StorageObjectsTestIamPermissionsRequest', + response_type_name='TestIamPermissionsResponse', supports_download=False, ) @@ -1418,24 +1418,24 @@ def Update(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Update.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'PUT', - method_id=u'storage.objects.update', - ordered_params=[u'bucket', u'object'], - path_params=[u'bucket', u'object'], + http_method='PUT', + method_id='storage.objects.update', + ordered_params=['bucket', 'object'], + path_params=['bucket', 'object'], query_params=[ - u'generation', - u'ifGenerationMatch', - u'ifGenerationNotMatch', - u'ifMetagenerationMatch', - u'ifMetagenerationNotMatch', - u'predefinedAcl', - u'projection', - u'userProject' + 'generation', + 'ifGenerationMatch', + 'ifGenerationNotMatch', + 'ifMetagenerationMatch', + 'ifMetagenerationNotMatch', + 'predefinedAcl', + 'projection', + 'userProject' ], - relative_path=u'b/{bucket}/o/{object}', - request_field=u'objectResource', - request_type_name=u'StorageObjectsUpdateRequest', - response_type_name=u'Object', + relative_path='b/{bucket}/o/{object}', + request_field='objectResource', + request_type_name='StorageObjectsUpdateRequest', + response_type_name='Object', supports_download=False, ) @@ -1452,31 +1452,31 @@ def WatchAll(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) WatchAll.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'POST', - method_id=u'storage.objects.watchAll', - ordered_params=[u'bucket'], - path_params=[u'bucket'], + http_method='POST', + method_id='storage.objects.watchAll', + ordered_params=['bucket'], + path_params=['bucket'], query_params=[ - u'delimiter', - u'includeTrailingDelimiter', - u'maxResults', - u'pageToken', - u'prefix', - u'projection', - u'userProject', - u'versions' + 'delimiter', + 'includeTrailingDelimiter', + 'maxResults', + 'pageToken', + 'prefix', + 'projection', + 'userProject', + 'versions' ], - relative_path=u'b/{bucket}/o/watch', - request_field=u'channel', - request_type_name=u'StorageObjectsWatchAllRequest', - response_type_name=u'Channel', + relative_path='b/{bucket}/o/watch', + request_field='channel', + request_type_name='StorageObjectsWatchAllRequest', + response_type_name='Channel', supports_download=False, ) class ProjectsServiceAccountService(base_api.BaseApiService): """Service class for the projects_serviceAccount resource.""" - _NAME = u'projects_serviceAccount' + _NAME = 'projects_serviceAccount' def __init__(self, client): super().__init__(client) @@ -1495,22 +1495,22 @@ def Get(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( - http_method=u'GET', - method_id=u'storage.projects.serviceAccount.get', - ordered_params=[u'projectId'], - path_params=[u'projectId'], - 
query_params=[u'userProject'], - relative_path=u'projects/{projectId}/serviceAccount', + http_method='GET', + method_id='storage.projects.serviceAccount.get', + ordered_params=['projectId'], + path_params=['projectId'], + query_params=['userProject'], + relative_path='projects/{projectId}/serviceAccount', request_field='', - request_type_name=u'StorageProjectsServiceAccountGetRequest', - response_type_name=u'ServiceAccount', + request_type_name='StorageProjectsServiceAccountGetRequest', + response_type_name='ServiceAccount', supports_download=False, ) class ProjectsService(base_api.BaseApiService): """Service class for the projects resource.""" - _NAME = u'projects' + _NAME = 'projects' def __init__(self, client): super().__init__(client) diff --git a/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_messages.py b/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_messages.py index caef0eb4b033a..65d8bd93258fa 100644 --- a/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_messages.py +++ b/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_messages.py @@ -348,7 +348,7 @@ class WebsiteValue(_messages.Message): encryption = _messages.MessageField('EncryptionValue', 6) etag = _messages.StringField(7) id = _messages.StringField(8) - kind = _messages.StringField(9, default=u'storage#bucket') + kind = _messages.StringField(9, default='storage#bucket') labels = _messages.MessageField('LabelsValue', 10) lifecycle = _messages.MessageField('LifecycleValue', 11) location = _messages.StringField(12) @@ -410,7 +410,7 @@ class ProjectTeamValue(_messages.Message): entityId = _messages.StringField(5) etag = _messages.StringField(6) id = _messages.StringField(7) - kind = _messages.StringField(8, default=u'storage#bucketAccessControl') + kind = _messages.StringField(8, default='storage#bucketAccessControl') projectTeam = _messages.MessageField('ProjectTeamValue', 9) role = _messages.StringField(10) selfLink = _messages.StringField(11) @@ -426,7 +426,7 @@ class BucketAccessControls(_messages.Message): """ items = _messages.MessageField('BucketAccessControl', 1, repeated=True) - kind = _messages.StringField(2, default=u'storage#bucketAccessControls') + kind = _messages.StringField(2, default='storage#bucketAccessControls') class Buckets(_messages.Message): @@ -442,7 +442,7 @@ class Buckets(_messages.Message): """ items = _messages.MessageField('Bucket', 1, repeated=True) - kind = _messages.StringField(2, default=u'storage#buckets') + kind = _messages.StringField(2, default='storage#buckets') nextPageToken = _messages.StringField(3) @@ -497,7 +497,7 @@ class AdditionalProperty(_messages.Message): address = _messages.StringField(1) expiration = _messages.IntegerField(2) id = _messages.StringField(3) - kind = _messages.StringField(4, default=u'api#channel') + kind = _messages.StringField(4, default='api#channel') params = _messages.MessageField('ParamsValue', 5) payload = _messages.BooleanField(6) resourceId = _messages.StringField(7) @@ -549,7 +549,7 @@ class ObjectPreconditionsValue(_messages.Message): objectPreconditions = _messages.MessageField('ObjectPreconditionsValue', 3) destination = _messages.MessageField('Object', 1) - kind = _messages.StringField(2, default=u'storage#composeRequest') + kind = _messages.StringField(2, default='storage#composeRequest') sourceObjects = _messages.MessageField( 'SourceObjectsValueListEntry', 3, repeated=True) @@ -610,9 +610,9 @@ class AdditionalProperty(_messages.Message): etag = _messages.StringField(2) 
event_types = _messages.StringField(3, repeated=True) id = _messages.StringField(4) - kind = _messages.StringField(5, default=u'storage#notification') + kind = _messages.StringField(5, default='storage#notification') object_name_prefix = _messages.StringField(6) - payload_format = _messages.StringField(7, default=u'JSON_API_V1') + payload_format = _messages.StringField(7, default='JSON_API_V1') selfLink = _messages.StringField(8) topic = _messages.StringField(9) @@ -627,7 +627,7 @@ class Notifications(_messages.Message): """ items = _messages.MessageField('Notification', 1, repeated=True) - kind = _messages.StringField(2, default=u'storage#notifications') + kind = _messages.StringField(2, default='storage#notifications') class Object(_messages.Message): @@ -776,7 +776,7 @@ class OwnerValue(_messages.Message): eventBasedHold = _messages.BooleanField(12) generation = _messages.IntegerField(13) id = _messages.StringField(14) - kind = _messages.StringField(15, default=u'storage#object') + kind = _messages.StringField(15, default='storage#object') kmsKeyName = _messages.StringField(16) md5Hash = _messages.StringField(17) mediaLink = _messages.StringField(18) @@ -842,7 +842,7 @@ class ProjectTeamValue(_messages.Message): etag = _messages.StringField(6) generation = _messages.IntegerField(7) id = _messages.StringField(8) - kind = _messages.StringField(9, default=u'storage#objectAccessControl') + kind = _messages.StringField(9, default='storage#objectAccessControl') object = _messages.StringField(10) projectTeam = _messages.MessageField('ProjectTeamValue', 11) role = _messages.StringField(12) @@ -859,7 +859,7 @@ class ObjectAccessControls(_messages.Message): """ items = _messages.MessageField('ObjectAccessControl', 1, repeated=True) - kind = _messages.StringField(2, default=u'storage#objectAccessControls') + kind = _messages.StringField(2, default='storage#objectAccessControls') class Objects(_messages.Message): @@ -877,7 +877,7 @@ class Objects(_messages.Message): """ items = _messages.MessageField('Object', 1, repeated=True) - kind = _messages.StringField(2, default=u'storage#objects') + kind = _messages.StringField(2, default='storage#objects') nextPageToken = _messages.StringField(3) prefixes = _messages.StringField(4, repeated=True) @@ -956,7 +956,7 @@ class BindingsValueListEntry(_messages.Message): bindings = _messages.MessageField('BindingsValueListEntry', 1, repeated=True) etag = _messages.BytesField(2) - kind = _messages.StringField(3, default=u'storage#policy') + kind = _messages.StringField(3, default='storage#policy') resourceId = _messages.StringField(4) @@ -980,7 +980,7 @@ class RewriteResponse(_messages.Message): """ done = _messages.BooleanField(1) - kind = _messages.StringField(2, default=u'storage#rewriteResponse') + kind = _messages.StringField(2, default='storage#rewriteResponse') objectSize = _messages.IntegerField(3) resource = _messages.MessageField('Object', 4) rewriteToken = _messages.StringField(5) @@ -997,7 +997,7 @@ class ServiceAccount(_messages.Message): """ email_address = _messages.StringField(1) - kind = _messages.StringField(2, default=u'storage#serviceAccount') + kind = _messages.StringField(2, default='storage#serviceAccount') class StandardQueryParameters(_messages.Message): @@ -1028,7 +1028,7 @@ class AltValueValuesEnum(_messages.Enum): """ json = 0 - alt = _messages.EnumField('AltValueValuesEnum', 1, default=u'json') + alt = _messages.EnumField('AltValueValuesEnum', 1, default='json') fields = _messages.StringField(2) key = _messages.StringField(3) 
oauth_token = _messages.StringField(4) @@ -2710,5 +2710,5 @@ class TestIamPermissionsResponse(_messages.Message): - storage.objects.update - Update object metadata. """ - kind = _messages.StringField(1, default=u'storage#testIamPermissionsResponse') + kind = _messages.StringField(1, default='storage#testIamPermissionsResponse') permissions = _messages.StringField(2, repeated=True) diff --git a/sdks/python/apache_beam/io/gcp/pubsub_test.py b/sdks/python/apache_beam/io/gcp/pubsub_test.py index 8e297511ce0d2..7b4a4d5c93b90 100644 --- a/sdks/python/apache_beam/io/gcp/pubsub_test.py +++ b/sdks/python/apache_beam/io/gcp/pubsub_test.py @@ -530,7 +530,7 @@ def test_read_messages_success(self, mock_pubsub): mock_pubsub.return_value.close.assert_has_calls([mock.call()]) def test_read_strings_success(self, mock_pubsub): - data = u'🤷 ¯\\_(ツ)_/¯' + data = '🤷 ¯\\_(ツ)_/¯' data_encoded = data.encode('utf-8') ack_id = 'ack_id' pull_response = test_utils.create_pull_response( @@ -552,7 +552,7 @@ def test_read_strings_success(self, mock_pubsub): mock_pubsub.return_value.close.assert_has_calls([mock.call()]) def test_read_data_success(self, mock_pubsub): - data_encoded = u'🤷 ¯\\_(ツ)_/¯'.encode('utf-8') + data_encoded = '🤷 ¯\\_(ツ)_/¯'.encode('utf-8') ack_id = 'ack_id' pull_response = test_utils.create_pull_response( [test_utils.PullResponseMessage(data_encoded, ack_id=ack_id)]) diff --git a/sdks/python/apache_beam/transforms/display_test.py b/sdks/python/apache_beam/transforms/display_test.py index a7605b09a3ad9..c91ad41e8d1c0 100644 --- a/sdks/python/apache_beam/transforms/display_test.py +++ b/sdks/python/apache_beam/transforms/display_test.py @@ -165,7 +165,7 @@ class MyDoFn(beam.DoFn): def display_data(self): return { 'unicode_string': 'my string', - 'unicode_literal_string': u'my literal string' + 'unicode_literal_string': 'my literal string' } fn = MyDoFn() diff --git a/sdks/python/apache_beam/transforms/environments_test.py b/sdks/python/apache_beam/transforms/environments_test.py index da55567a81065..25885553b2ff5 100644 --- a/sdks/python/apache_beam/transforms/environments_test.py +++ b/sdks/python/apache_beam/transforms/environments_test.py @@ -53,7 +53,7 @@ def test_environment_encoding(self): EmbeddedPythonGrpcEnvironment(), EmbeddedPythonGrpcEnvironment( state_cache_size=0, data_buffer_time_limit_ms=0), - SubprocessSDKEnvironment(command_string=u'foö')): + SubprocessSDKEnvironment(command_string='foö')): context = pipeline_context.PipelineContext() proto = environment.to_runner_api(context) reconstructed = Environment.from_runner_api(proto, context) diff --git a/sdks/python/apache_beam/transforms/external_java.py b/sdks/python/apache_beam/transforms/external_java.py index f0a963864c1d1..29fc2587542bd 100644 --- a/sdks/python/apache_beam/transforms/external_java.py +++ b/sdks/python/apache_beam/transforms/external_java.py @@ -139,7 +139,7 @@ def run_pipeline(pipeline_options, expansion_service, wait_until_finish=True): | beam.Map(str) | beam.ExternalTransform( TEST_FILTER_URN, - ImplicitSchemaPayloadBuilder({'data': u'middle'}), + ImplicitSchemaPayloadBuilder({'data': 'middle'}), expansion_service) | beam.ExternalTransform(TEST_COUNT_URN, None, expansion_service) | beam.Map(lambda kv: '%s: %s' % kv)) diff --git a/sdks/python/apache_beam/transforms/external_test.py b/sdks/python/apache_beam/transforms/external_test.py index e7e4de46eb255..d3bb02a7f94d7 100644 --- a/sdks/python/apache_beam/transforms/external_test.py +++ b/sdks/python/apache_beam/transforms/external_test.py @@ -71,10 +71,10 @@ class 
PayloadBase(object): values = { 'integer_example': 1, 'boolean': True, - 'string_example': u'thing', - 'list_of_strings': [u'foo', u'bar'], + 'string_example': 'thing', + 'list_of_strings': ['foo', 'bar'], 'mapping': { - u'key': 1.1 + 'key': 1.1 }, 'optional_integer': None, } @@ -182,7 +182,7 @@ def test_pipeline_generation(self): | beam.Create(['a', 'b']) | beam.ExternalTransform( 'beam:transforms:xlang:test:prefix', - ImplicitSchemaPayloadBuilder({'data': u'0'}), + ImplicitSchemaPayloadBuilder({'data': '0'}), expansion_service.ExpansionServiceServicer())) proto, _ = pipeline.to_runner_api(return_context=True) @@ -196,7 +196,7 @@ def test_pipeline_generation(self): self.assertNotEqual([], pipeline_from_proto.transforms_stack[0].parts[1].parts) self.assertEqual( - u'ExternalTransform(beam:transforms:xlang:test:prefix)/TestLabel', + 'ExternalTransform(beam:transforms:xlang:test:prefix)/TestLabel', pipeline_from_proto.transforms_stack[0].parts[1].parts[0].full_label) @unittest.skipIf(apiclient is None, 'GCP dependencies are not installed') @@ -222,7 +222,7 @@ def test_pipeline_generation_with_runner_overrides(self): 'projects/dummy-project/subscriptions/dummy-subscription') | beam.ExternalTransform( 'beam:transforms:xlang:test:prefix', - ImplicitSchemaPayloadBuilder({'data': u'0'}), + ImplicitSchemaPayloadBuilder({'data': '0'}), expansion_service.ExpansionServiceServicer())) pipeline_proto, _ = p.to_runner_api(return_context=True) @@ -294,7 +294,7 @@ def test_external_empty_spec_translation(self): pipeline = beam.Pipeline() external_transform = beam.ExternalTransform( 'beam:transforms:xlang:test:prefix', - ImplicitSchemaPayloadBuilder({'data': u'0'}), + ImplicitSchemaPayloadBuilder({'data': '0'}), expansion_service.ExpansionServiceServicer()) _ = (pipeline | beam.Create(['a', 'b']) | external_transform) pipeline.run().wait_until_finish() @@ -337,7 +337,7 @@ def test_external_transform_finder_non_leaf(self): | beam.Create(['a', 'b']) | beam.ExternalTransform( 'beam:transforms:xlang:test:prefix', - ImplicitSchemaPayloadBuilder({'data': u'0'}), + ImplicitSchemaPayloadBuilder({'data': '0'}), expansion_service.ExpansionServiceServicer()) | beam.Map(lambda x: x)) pipeline.run().wait_until_finish() @@ -351,7 +351,7 @@ def test_external_transform_finder_leaf(self): | beam.Create(['a', 'b']) | beam.ExternalTransform( 'beam:transforms:xlang:test:nooutput', - ImplicitSchemaPayloadBuilder({'data': u'0'}), + ImplicitSchemaPayloadBuilder({'data': '0'}), expansion_service.ExpansionServiceServicer())) pipeline.run().wait_until_finish() diff --git a/sdks/python/apache_beam/transforms/trigger_test.py b/sdks/python/apache_beam/transforms/trigger_test.py index c8beed42c6526..06e205df61ece 100644 --- a/sdks/python/apache_beam/transforms/trigger_test.py +++ b/sdks/python/apache_beam/transforms/trigger_test.py @@ -1114,15 +1114,15 @@ def _execute( if is_order_agnostic: reshuffle_seed = random.randrange(1 << 20) keys = [ - u'original', - u'reversed', - u'reshuffled(%s)' % reshuffle_seed, - u'one-element-bundles', - u'one-element-bundles-reversed', - u'two-element-bundles' + 'original', + 'reversed', + 'reshuffled(%s)' % reshuffle_seed, + 'one-element-bundles', + 'one-element-bundles-reversed', + 'two-element-bundles' ] else: - keys = [u'key1', u'key2'] + keys = ['key1', 'key2'] # Elements are encoded as a json strings to allow other languages to # decode elements while executing the test stream. 
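Note on the string-literal changes in this patch: dropping the u'' prefix is purely cosmetic on Python 3, where u'...' and '...' denote the same str type, so the lint cleanup in the hunks above and below does not change runtime behavior. A minimal sketch of that equivalence, assuming any Python 3.8+ interpreter (the variable names are illustrative only, not part of the patch):

# Python 3: the u prefix is accepted for backward compatibility but redundant.
prefixed = u'storage#object'
plain = 'storage#object'
assert prefixed == plain
assert type(prefixed) is type(plain) is str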
diff --git a/sdks/python/apache_beam/transforms/validate_runner_xlang_test.py b/sdks/python/apache_beam/transforms/validate_runner_xlang_test.py index 4de1d884072f8..8e8e79648250b 100644 --- a/sdks/python/apache_beam/transforms/validate_runner_xlang_test.py +++ b/sdks/python/apache_beam/transforms/validate_runner_xlang_test.py @@ -94,7 +94,7 @@ def run_prefix(self, pipeline): | beam.Create(['a', 'b']).with_output_types(str) | beam.ExternalTransform( TEST_PREFIX_URN, - ImplicitSchemaPayloadBuilder({'data': u'0'}), + ImplicitSchemaPayloadBuilder({'data': '0'}), self.expansion_service)) assert_that(res, equal_to(['0a', '0b'])) diff --git a/website/www/site/content/en/documentation/programming-guide.md b/website/www/site/content/en/documentation/programming-guide.md index 8a135b9817ae1..30bdb7247d494 100644 --- a/website/www/site/content/en/documentation/programming-guide.md +++ b/website/www/site/content/en/documentation/programming-guide.md @@ -7664,7 +7664,7 @@ When an SDK-specific wrapper isn't available, you will have to access the cross- | beam.Create(['a', 'b']).with_output_types(unicode) | beam.ExternalTransform( TEST_PREFIX_URN, - ImplicitSchemaPayloadBuilder({'data': u'0'}), + ImplicitSchemaPayloadBuilder({'data': '0'}), )) assert_that(res, equal_to(['0a', '0b'])) ``` From 72bbbd9b67b3e0ff39f4cee4bd14ceaff7ef0ae8 Mon Sep 17 00:00:00 2001 From: Anand Inguva Date: Wed, 21 Jun 2023 10:06:10 -0400 Subject: [PATCH 7/8] Fix few more lint errors --- .../python/apache_beam/testing/datatype_inference_test.py | 4 ++-- sdks/python/apache_beam/testing/extra_assertions_test.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sdks/python/apache_beam/testing/datatype_inference_test.py b/sdks/python/apache_beam/testing/datatype_inference_test.py index 445ebedb4c4ef..001752f8ab276 100644 --- a/sdks/python/apache_beam/testing/datatype_inference_test.py +++ b/sdks/python/apache_beam/testing/datatype_inference_test.py @@ -50,7 +50,7 @@ OrderedDict([ ("a", 1), ("b", 0.12345), - ("c", u"Hello World!!"), + ("c", "Hello World!!"), ("d", np.array([1, 2, 3], dtype=np.int64)), ("e", b"some bytes"), ]), @@ -61,7 +61,7 @@ ]), OrderedDict([ ("a", 100000), - ("c", u"XoXoX"), + ("c", "XoXoX"), ("d", np.array([4, 5, 6], dtype=np.int64)), ("e", b""), ]), diff --git a/sdks/python/apache_beam/testing/extra_assertions_test.py b/sdks/python/apache_beam/testing/extra_assertions_test.py index 9867c79897426..174fb54e2fa8f 100644 --- a/sdks/python/apache_beam/testing/extra_assertions_test.py +++ b/sdks/python/apache_beam/testing/extra_assertions_test.py @@ -27,8 +27,8 @@ class ExtraAssertionsMixinTest(ExtraAssertionsMixin, unittest.TestCase): def test_assert_array_count_equal_strings(self): - data1 = [u"±♠Ωℑ", u"hello", "world"] - data2 = ["hello", u"±♠Ωℑ", u"world"] + data1 = ["±♠Ωℑ", "hello", "world"] + data2 = ["hello", "±♠Ωℑ", "world"] self.assertUnhashableCountEqual(data1, data2) def test_assert_array_count_equal_mixed(self): @@ -37,7 +37,7 @@ def test_assert_array_count_equal_mixed(self): 'a': 1, 123: 1.234 }, ['d', 1], - u"±♠Ωℑ", + "±♠Ωℑ", np.zeros((3, 6)), (1, 2, 3, 'b'), 'def', @@ -55,7 +55,7 @@ def test_assert_array_count_equal_mixed(self): None, 'abc', 'def', - u"±♠Ωℑ", + "±♠Ωℑ", 100, (1, 2, 3, 'b'), np.zeros((3, 6)), From 7125f9d76cf9647819478eb1b08468cba38b4de5 Mon Sep 17 00:00:00 2001 From: Anand Inguva Date: Thu, 22 Jun 2023 12:33:25 -0400 Subject: [PATCH 8/8] Don't modify bq client files --- .../clients/storage/storage_v1_client.py | 1050 ++++++++--------- 
.../clients/storage/storage_v1_messages.py | 36 +- 2 files changed, 543 insertions(+), 543 deletions(-) diff --git a/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_client.py b/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_client.py index 510970cc22495..e5b7c0268ec8a 100644 --- a/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_client.py +++ b/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_client.py @@ -29,22 +29,22 @@ class StorageV1(base_api.BaseApiClient): """Generated client library for service storage version v1.""" MESSAGES_MODULE = messages - BASE_URL = 'https://www.googleapis.com/storage/v1/' + BASE_URL = u'https://www.googleapis.com/storage/v1/' - _PACKAGE = 'storage' + _PACKAGE = u'storage' _SCOPES = [ - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/devstorage.full_control', - 'https://www.googleapis.com/auth/devstorage.read_only', - 'https://www.googleapis.com/auth/devstorage.read_write' + u'https://www.googleapis.com/auth/cloud-platform', + u'https://www.googleapis.com/auth/cloud-platform.read-only', + u'https://www.googleapis.com/auth/devstorage.full_control', + u'https://www.googleapis.com/auth/devstorage.read_only', + u'https://www.googleapis.com/auth/devstorage.read_write' ] - _VERSION = 'v1' + _VERSION = u'v1' _CLIENT_ID = '1042881264118.apps.googleusercontent.com' _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b' _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b' - _CLIENT_CLASS_NAME = 'StorageV1' - _URL_VERSION = 'v1' + _CLIENT_CLASS_NAME = u'StorageV1' + _URL_VERSION = u'v1' _API_KEY = None def __init__( @@ -90,7 +90,7 @@ def __init__( class BucketAccessControlsService(base_api.BaseApiService): """Service class for the bucketAccessControls resource.""" - _NAME = 'bucketAccessControls' + _NAME = u'bucketAccessControls' def __init__(self, client): super().__init__(client) @@ -109,15 +109,15 @@ def Delete(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( - http_method='DELETE', - method_id='storage.bucketAccessControls.delete', - ordered_params=['bucket', 'entity'], - path_params=['bucket', 'entity'], - query_params=['userProject'], - relative_path='b/{bucket}/acl/{entity}', + http_method=u'DELETE', + method_id=u'storage.bucketAccessControls.delete', + ordered_params=[u'bucket', u'entity'], + path_params=[u'bucket', u'entity'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/acl/{entity}', request_field='', - request_type_name='StorageBucketAccessControlsDeleteRequest', - response_type_name='StorageBucketAccessControlsDeleteResponse', + request_type_name=u'StorageBucketAccessControlsDeleteRequest', + response_type_name=u'StorageBucketAccessControlsDeleteResponse', supports_download=False, ) @@ -134,15 +134,15 @@ def Get(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.bucketAccessControls.get', - ordered_params=['bucket', 'entity'], - path_params=['bucket', 'entity'], - query_params=['userProject'], - relative_path='b/{bucket}/acl/{entity}', + http_method=u'GET', + method_id=u'storage.bucketAccessControls.get', + ordered_params=[u'bucket', u'entity'], + path_params=[u'bucket', u'entity'], + query_params=[u'userProject'], + 
relative_path=u'b/{bucket}/acl/{entity}', request_field='', - request_type_name='StorageBucketAccessControlsGetRequest', - response_type_name='BucketAccessControl', + request_type_name=u'StorageBucketAccessControlsGetRequest', + response_type_name=u'BucketAccessControl', supports_download=False, ) @@ -159,15 +159,15 @@ def Insert(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Insert.method_config = lambda: base_api.ApiMethodInfo( - http_method='POST', - method_id='storage.bucketAccessControls.insert', - ordered_params=['bucket'], - path_params=['bucket'], - query_params=['userProject'], - relative_path='b/{bucket}/acl', - request_field='bucketAccessControl', - request_type_name='StorageBucketAccessControlsInsertRequest', - response_type_name='BucketAccessControl', + http_method=u'POST', + method_id=u'storage.bucketAccessControls.insert', + ordered_params=[u'bucket'], + path_params=[u'bucket'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/acl', + request_field=u'bucketAccessControl', + request_type_name=u'StorageBucketAccessControlsInsertRequest', + response_type_name=u'BucketAccessControl', supports_download=False, ) @@ -184,15 +184,15 @@ def List(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.bucketAccessControls.list', - ordered_params=['bucket'], - path_params=['bucket'], - query_params=['userProject'], - relative_path='b/{bucket}/acl', + http_method=u'GET', + method_id=u'storage.bucketAccessControls.list', + ordered_params=[u'bucket'], + path_params=[u'bucket'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/acl', request_field='', - request_type_name='StorageBucketAccessControlsListRequest', - response_type_name='BucketAccessControls', + request_type_name=u'StorageBucketAccessControlsListRequest', + response_type_name=u'BucketAccessControls', supports_download=False, ) @@ -209,15 +209,15 @@ def Patch(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Patch.method_config = lambda: base_api.ApiMethodInfo( - http_method='PATCH', - method_id='storage.bucketAccessControls.patch', - ordered_params=['bucket', 'entity'], - path_params=['bucket', 'entity'], - query_params=['userProject'], - relative_path='b/{bucket}/acl/{entity}', - request_field='bucketAccessControl', - request_type_name='StorageBucketAccessControlsPatchRequest', - response_type_name='BucketAccessControl', + http_method=u'PATCH', + method_id=u'storage.bucketAccessControls.patch', + ordered_params=[u'bucket', u'entity'], + path_params=[u'bucket', u'entity'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/acl/{entity}', + request_field=u'bucketAccessControl', + request_type_name=u'StorageBucketAccessControlsPatchRequest', + response_type_name=u'BucketAccessControl', supports_download=False, ) @@ -234,22 +234,22 @@ def Update(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Update.method_config = lambda: base_api.ApiMethodInfo( - http_method='PUT', - method_id='storage.bucketAccessControls.update', - ordered_params=['bucket', 'entity'], - path_params=['bucket', 'entity'], - query_params=['userProject'], - relative_path='b/{bucket}/acl/{entity}', - request_field='bucketAccessControl', - request_type_name='StorageBucketAccessControlsUpdateRequest', - 
response_type_name='BucketAccessControl', + http_method=u'PUT', + method_id=u'storage.bucketAccessControls.update', + ordered_params=[u'bucket', u'entity'], + path_params=[u'bucket', u'entity'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/acl/{entity}', + request_field=u'bucketAccessControl', + request_type_name=u'StorageBucketAccessControlsUpdateRequest', + response_type_name=u'BucketAccessControl', supports_download=False, ) class BucketsService(base_api.BaseApiService): """Service class for the buckets resource.""" - _NAME = 'buckets' + _NAME = u'buckets' def __init__(self, client): super().__init__(client) @@ -268,16 +268,16 @@ def Delete(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( - http_method='DELETE', - method_id='storage.buckets.delete', - ordered_params=['bucket'], - path_params=['bucket'], + http_method=u'DELETE', + method_id=u'storage.buckets.delete', + ordered_params=[u'bucket'], + path_params=[u'bucket'], query_params= - ['ifMetagenerationMatch', 'ifMetagenerationNotMatch', 'userProject'], - relative_path='b/{bucket}', + [u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'userProject'], + relative_path=u'b/{bucket}', request_field='', - request_type_name='StorageBucketsDeleteRequest', - response_type_name='StorageBucketsDeleteResponse', + request_type_name=u'StorageBucketsDeleteRequest', + response_type_name=u'StorageBucketsDeleteResponse', supports_download=False, ) @@ -294,20 +294,20 @@ def Get(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.buckets.get', - ordered_params=['bucket'], - path_params=['bucket'], + http_method=u'GET', + method_id=u'storage.buckets.get', + ordered_params=[u'bucket'], + path_params=[u'bucket'], query_params=[ - 'ifMetagenerationMatch', - 'ifMetagenerationNotMatch', - 'projection', - 'userProject' + u'ifMetagenerationMatch', + u'ifMetagenerationNotMatch', + u'projection', + u'userProject' ], - relative_path='b/{bucket}', + relative_path=u'b/{bucket}', request_field='', - request_type_name='StorageBucketsGetRequest', - response_type_name='Bucket', + request_type_name=u'StorageBucketsGetRequest', + response_type_name=u'Bucket', supports_download=False, ) @@ -324,15 +324,15 @@ def GetIamPolicy(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.buckets.getIamPolicy', - ordered_params=['bucket'], - path_params=['bucket'], - query_params=['userProject'], - relative_path='b/{bucket}/iam', + http_method=u'GET', + method_id=u'storage.buckets.getIamPolicy', + ordered_params=[u'bucket'], + path_params=[u'bucket'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/iam', request_field='', - request_type_name='StorageBucketsGetIamPolicyRequest', - response_type_name='Policy', + request_type_name=u'StorageBucketsGetIamPolicyRequest', + response_type_name=u'Policy', supports_download=False, ) @@ -349,21 +349,21 @@ def Insert(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Insert.method_config = lambda: base_api.ApiMethodInfo( - http_method='POST', - method_id='storage.buckets.insert', - ordered_params=['project'], + http_method=u'POST', + 
method_id=u'storage.buckets.insert', + ordered_params=[u'project'], path_params=[], query_params=[ - 'predefinedAcl', - 'predefinedDefaultObjectAcl', - 'project', - 'projection', - 'userProject' + u'predefinedAcl', + u'predefinedDefaultObjectAcl', + u'project', + u'projection', + u'userProject' ], - relative_path='b', - request_field='bucket', - request_type_name='StorageBucketsInsertRequest', - response_type_name='Bucket', + relative_path=u'b', + request_field=u'bucket', + request_type_name=u'StorageBucketsInsertRequest', + response_type_name=u'Bucket', supports_download=False, ) @@ -380,22 +380,22 @@ def List(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.buckets.list', - ordered_params=['project'], + http_method=u'GET', + method_id=u'storage.buckets.list', + ordered_params=[u'project'], path_params=[], query_params=[ - 'maxResults', - 'pageToken', - 'prefix', - 'project', - 'projection', - 'userProject' + u'maxResults', + u'pageToken', + u'prefix', + u'project', + u'projection', + u'userProject' ], - relative_path='b', + relative_path=u'b', request_field='', - request_type_name='StorageBucketsListRequest', - response_type_name='Buckets', + request_type_name=u'StorageBucketsListRequest', + response_type_name=u'Buckets', supports_download=False, ) @@ -412,15 +412,15 @@ def LockRetentionPolicy(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) LockRetentionPolicy.method_config = lambda: base_api.ApiMethodInfo( - http_method='POST', - method_id='storage.buckets.lockRetentionPolicy', - ordered_params=['bucket', 'ifMetagenerationMatch'], - path_params=['bucket'], - query_params=['ifMetagenerationMatch', 'userProject'], - relative_path='b/{bucket}/lockRetentionPolicy', + http_method=u'POST', + method_id=u'storage.buckets.lockRetentionPolicy', + ordered_params=[u'bucket', u'ifMetagenerationMatch'], + path_params=[u'bucket'], + query_params=[u'ifMetagenerationMatch', u'userProject'], + relative_path=u'b/{bucket}/lockRetentionPolicy', request_field='', - request_type_name='StorageBucketsLockRetentionPolicyRequest', - response_type_name='Bucket', + request_type_name=u'StorageBucketsLockRetentionPolicyRequest', + response_type_name=u'Bucket', supports_download=False, ) @@ -437,22 +437,22 @@ def Patch(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Patch.method_config = lambda: base_api.ApiMethodInfo( - http_method='PATCH', - method_id='storage.buckets.patch', - ordered_params=['bucket'], - path_params=['bucket'], + http_method=u'PATCH', + method_id=u'storage.buckets.patch', + ordered_params=[u'bucket'], + path_params=[u'bucket'], query_params=[ - 'ifMetagenerationMatch', - 'ifMetagenerationNotMatch', - 'predefinedAcl', - 'predefinedDefaultObjectAcl', - 'projection', - 'userProject' + u'ifMetagenerationMatch', + u'ifMetagenerationNotMatch', + u'predefinedAcl', + u'predefinedDefaultObjectAcl', + u'projection', + u'userProject' ], - relative_path='b/{bucket}', - request_field='bucketResource', - request_type_name='StorageBucketsPatchRequest', - response_type_name='Bucket', + relative_path=u'b/{bucket}', + request_field=u'bucketResource', + request_type_name=u'StorageBucketsPatchRequest', + response_type_name=u'Bucket', supports_download=False, ) @@ -469,15 +469,15 @@ def SetIamPolicy(self, request, global_params=None): return 
self._RunMethod(config, request, global_params=global_params) SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo( - http_method='PUT', - method_id='storage.buckets.setIamPolicy', - ordered_params=['bucket'], - path_params=['bucket'], - query_params=['userProject'], - relative_path='b/{bucket}/iam', - request_field='policy', - request_type_name='StorageBucketsSetIamPolicyRequest', - response_type_name='Policy', + http_method=u'PUT', + method_id=u'storage.buckets.setIamPolicy', + ordered_params=[u'bucket'], + path_params=[u'bucket'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/iam', + request_field=u'policy', + request_type_name=u'StorageBucketsSetIamPolicyRequest', + response_type_name=u'Policy', supports_download=False, ) @@ -494,15 +494,15 @@ def TestIamPermissions(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.buckets.testIamPermissions', - ordered_params=['bucket', 'permissions'], - path_params=['bucket'], - query_params=['permissions', 'userProject'], - relative_path='b/{bucket}/iam/testPermissions', + http_method=u'GET', + method_id=u'storage.buckets.testIamPermissions', + ordered_params=[u'bucket', u'permissions'], + path_params=[u'bucket'], + query_params=[u'permissions', u'userProject'], + relative_path=u'b/{bucket}/iam/testPermissions', request_field='', - request_type_name='StorageBucketsTestIamPermissionsRequest', - response_type_name='TestIamPermissionsResponse', + request_type_name=u'StorageBucketsTestIamPermissionsRequest', + response_type_name=u'TestIamPermissionsResponse', supports_download=False, ) @@ -519,29 +519,29 @@ def Update(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Update.method_config = lambda: base_api.ApiMethodInfo( - http_method='PUT', - method_id='storage.buckets.update', - ordered_params=['bucket'], - path_params=['bucket'], + http_method=u'PUT', + method_id=u'storage.buckets.update', + ordered_params=[u'bucket'], + path_params=[u'bucket'], query_params=[ - 'ifMetagenerationMatch', - 'ifMetagenerationNotMatch', - 'predefinedAcl', - 'predefinedDefaultObjectAcl', - 'projection', - 'userProject' + u'ifMetagenerationMatch', + u'ifMetagenerationNotMatch', + u'predefinedAcl', + u'predefinedDefaultObjectAcl', + u'projection', + u'userProject' ], - relative_path='b/{bucket}', - request_field='bucketResource', - request_type_name='StorageBucketsUpdateRequest', - response_type_name='Bucket', + relative_path=u'b/{bucket}', + request_field=u'bucketResource', + request_type_name=u'StorageBucketsUpdateRequest', + response_type_name=u'Bucket', supports_download=False, ) class ChannelsService(base_api.BaseApiService): """Service class for the channels resource.""" - _NAME = 'channels' + _NAME = u'channels' def __init__(self, client): super().__init__(client) @@ -560,22 +560,22 @@ def Stop(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Stop.method_config = lambda: base_api.ApiMethodInfo( - http_method='POST', - method_id='storage.channels.stop', + http_method=u'POST', + method_id=u'storage.channels.stop', ordered_params=[], path_params=[], query_params=[], - relative_path='channels/stop', + relative_path=u'channels/stop', request_field='', - request_type_name='Channel', - response_type_name='StorageChannelsStopResponse', + request_type_name=u'Channel', + 
response_type_name=u'StorageChannelsStopResponse', supports_download=False, ) class DefaultObjectAccessControlsService(base_api.BaseApiService): """Service class for the defaultObjectAccessControls resource.""" - _NAME = 'defaultObjectAccessControls' + _NAME = u'defaultObjectAccessControls' def __init__(self, client): super().__init__(client) @@ -594,15 +594,15 @@ def Delete(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( - http_method='DELETE', - method_id='storage.defaultObjectAccessControls.delete', - ordered_params=['bucket', 'entity'], - path_params=['bucket', 'entity'], - query_params=['userProject'], - relative_path='b/{bucket}/defaultObjectAcl/{entity}', + http_method=u'DELETE', + method_id=u'storage.defaultObjectAccessControls.delete', + ordered_params=[u'bucket', u'entity'], + path_params=[u'bucket', u'entity'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/defaultObjectAcl/{entity}', request_field='', - request_type_name='StorageDefaultObjectAccessControlsDeleteRequest', - response_type_name='StorageDefaultObjectAccessControlsDeleteResponse', + request_type_name=u'StorageDefaultObjectAccessControlsDeleteRequest', + response_type_name=u'StorageDefaultObjectAccessControlsDeleteResponse', supports_download=False, ) @@ -619,15 +619,15 @@ def Get(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.defaultObjectAccessControls.get', - ordered_params=['bucket', 'entity'], - path_params=['bucket', 'entity'], - query_params=['userProject'], - relative_path='b/{bucket}/defaultObjectAcl/{entity}', + http_method=u'GET', + method_id=u'storage.defaultObjectAccessControls.get', + ordered_params=[u'bucket', u'entity'], + path_params=[u'bucket', u'entity'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/defaultObjectAcl/{entity}', request_field='', - request_type_name='StorageDefaultObjectAccessControlsGetRequest', - response_type_name='ObjectAccessControl', + request_type_name=u'StorageDefaultObjectAccessControlsGetRequest', + response_type_name=u'ObjectAccessControl', supports_download=False, ) @@ -644,15 +644,15 @@ def Insert(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Insert.method_config = lambda: base_api.ApiMethodInfo( - http_method='POST', - method_id='storage.defaultObjectAccessControls.insert', - ordered_params=['bucket'], - path_params=['bucket'], - query_params=['userProject'], - relative_path='b/{bucket}/defaultObjectAcl', - request_field='objectAccessControl', - request_type_name='StorageDefaultObjectAccessControlsInsertRequest', - response_type_name='ObjectAccessControl', + http_method=u'POST', + method_id=u'storage.defaultObjectAccessControls.insert', + ordered_params=[u'bucket'], + path_params=[u'bucket'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/defaultObjectAcl', + request_field=u'objectAccessControl', + request_type_name=u'StorageDefaultObjectAccessControlsInsertRequest', + response_type_name=u'ObjectAccessControl', supports_download=False, ) @@ -669,16 +669,16 @@ def List(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.defaultObjectAccessControls.list', - 
ordered_params=['bucket'], - path_params=['bucket'], + http_method=u'GET', + method_id=u'storage.defaultObjectAccessControls.list', + ordered_params=[u'bucket'], + path_params=[u'bucket'], query_params= - ['ifMetagenerationMatch', 'ifMetagenerationNotMatch', 'userProject'], - relative_path='b/{bucket}/defaultObjectAcl', + [u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'userProject'], + relative_path=u'b/{bucket}/defaultObjectAcl', request_field='', - request_type_name='StorageDefaultObjectAccessControlsListRequest', - response_type_name='ObjectAccessControls', + request_type_name=u'StorageDefaultObjectAccessControlsListRequest', + response_type_name=u'ObjectAccessControls', supports_download=False, ) @@ -695,15 +695,15 @@ def Patch(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Patch.method_config = lambda: base_api.ApiMethodInfo( - http_method='PATCH', - method_id='storage.defaultObjectAccessControls.patch', - ordered_params=['bucket', 'entity'], - path_params=['bucket', 'entity'], - query_params=['userProject'], - relative_path='b/{bucket}/defaultObjectAcl/{entity}', - request_field='objectAccessControl', - request_type_name='StorageDefaultObjectAccessControlsPatchRequest', - response_type_name='ObjectAccessControl', + http_method=u'PATCH', + method_id=u'storage.defaultObjectAccessControls.patch', + ordered_params=[u'bucket', u'entity'], + path_params=[u'bucket', u'entity'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/defaultObjectAcl/{entity}', + request_field=u'objectAccessControl', + request_type_name=u'StorageDefaultObjectAccessControlsPatchRequest', + response_type_name=u'ObjectAccessControl', supports_download=False, ) @@ -720,22 +720,22 @@ def Update(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Update.method_config = lambda: base_api.ApiMethodInfo( - http_method='PUT', - method_id='storage.defaultObjectAccessControls.update', - ordered_params=['bucket', 'entity'], - path_params=['bucket', 'entity'], - query_params=['userProject'], - relative_path='b/{bucket}/defaultObjectAcl/{entity}', - request_field='objectAccessControl', - request_type_name='StorageDefaultObjectAccessControlsUpdateRequest', - response_type_name='ObjectAccessControl', + http_method=u'PUT', + method_id=u'storage.defaultObjectAccessControls.update', + ordered_params=[u'bucket', u'entity'], + path_params=[u'bucket', u'entity'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/defaultObjectAcl/{entity}', + request_field=u'objectAccessControl', + request_type_name=u'StorageDefaultObjectAccessControlsUpdateRequest', + response_type_name=u'ObjectAccessControl', supports_download=False, ) class NotificationsService(base_api.BaseApiService): """Service class for the notifications resource.""" - _NAME = 'notifications' + _NAME = u'notifications' def __init__(self, client): super().__init__(client) @@ -754,15 +754,15 @@ def Delete(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( - http_method='DELETE', - method_id='storage.notifications.delete', - ordered_params=['bucket', 'notification'], - path_params=['bucket', 'notification'], - query_params=['userProject'], - relative_path='b/{bucket}/notificationConfigs/{notification}', + http_method=u'DELETE', + method_id=u'storage.notifications.delete', + ordered_params=[u'bucket', u'notification'], + 
path_params=[u'bucket', u'notification'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/notificationConfigs/{notification}', request_field='', - request_type_name='StorageNotificationsDeleteRequest', - response_type_name='StorageNotificationsDeleteResponse', + request_type_name=u'StorageNotificationsDeleteRequest', + response_type_name=u'StorageNotificationsDeleteResponse', supports_download=False, ) @@ -779,15 +779,15 @@ def Get(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.notifications.get', - ordered_params=['bucket', 'notification'], - path_params=['bucket', 'notification'], - query_params=['userProject'], - relative_path='b/{bucket}/notificationConfigs/{notification}', + http_method=u'GET', + method_id=u'storage.notifications.get', + ordered_params=[u'bucket', u'notification'], + path_params=[u'bucket', u'notification'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/notificationConfigs/{notification}', request_field='', - request_type_name='StorageNotificationsGetRequest', - response_type_name='Notification', + request_type_name=u'StorageNotificationsGetRequest', + response_type_name=u'Notification', supports_download=False, ) @@ -804,15 +804,15 @@ def Insert(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Insert.method_config = lambda: base_api.ApiMethodInfo( - http_method='POST', - method_id='storage.notifications.insert', - ordered_params=['bucket'], - path_params=['bucket'], - query_params=['userProject'], - relative_path='b/{bucket}/notificationConfigs', - request_field='notification', - request_type_name='StorageNotificationsInsertRequest', - response_type_name='Notification', + http_method=u'POST', + method_id=u'storage.notifications.insert', + ordered_params=[u'bucket'], + path_params=[u'bucket'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/notificationConfigs', + request_field=u'notification', + request_type_name=u'StorageNotificationsInsertRequest', + response_type_name=u'Notification', supports_download=False, ) @@ -829,22 +829,22 @@ def List(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.notifications.list', - ordered_params=['bucket'], - path_params=['bucket'], - query_params=['userProject'], - relative_path='b/{bucket}/notificationConfigs', + http_method=u'GET', + method_id=u'storage.notifications.list', + ordered_params=[u'bucket'], + path_params=[u'bucket'], + query_params=[u'userProject'], + relative_path=u'b/{bucket}/notificationConfigs', request_field='', - request_type_name='StorageNotificationsListRequest', - response_type_name='Notifications', + request_type_name=u'StorageNotificationsListRequest', + response_type_name=u'Notifications', supports_download=False, ) class ObjectAccessControlsService(base_api.BaseApiService): """Service class for the objectAccessControls resource.""" - _NAME = 'objectAccessControls' + _NAME = u'objectAccessControls' def __init__(self, client): super().__init__(client) @@ -863,15 +863,15 @@ def Delete(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( - http_method='DELETE', - 
method_id='storage.objectAccessControls.delete', - ordered_params=['bucket', 'object', 'entity'], - path_params=['bucket', 'entity', 'object'], - query_params=['generation', 'userProject'], - relative_path='b/{bucket}/o/{object}/acl/{entity}', + http_method=u'DELETE', + method_id=u'storage.objectAccessControls.delete', + ordered_params=[u'bucket', u'object', u'entity'], + path_params=[u'bucket', u'entity', u'object'], + query_params=[u'generation', u'userProject'], + relative_path=u'b/{bucket}/o/{object}/acl/{entity}', request_field='', - request_type_name='StorageObjectAccessControlsDeleteRequest', - response_type_name='StorageObjectAccessControlsDeleteResponse', + request_type_name=u'StorageObjectAccessControlsDeleteRequest', + response_type_name=u'StorageObjectAccessControlsDeleteResponse', supports_download=False, ) @@ -888,15 +888,15 @@ def Get(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.objectAccessControls.get', - ordered_params=['bucket', 'object', 'entity'], - path_params=['bucket', 'entity', 'object'], - query_params=['generation', 'userProject'], - relative_path='b/{bucket}/o/{object}/acl/{entity}', + http_method=u'GET', + method_id=u'storage.objectAccessControls.get', + ordered_params=[u'bucket', u'object', u'entity'], + path_params=[u'bucket', u'entity', u'object'], + query_params=[u'generation', u'userProject'], + relative_path=u'b/{bucket}/o/{object}/acl/{entity}', request_field='', - request_type_name='StorageObjectAccessControlsGetRequest', - response_type_name='ObjectAccessControl', + request_type_name=u'StorageObjectAccessControlsGetRequest', + response_type_name=u'ObjectAccessControl', supports_download=False, ) @@ -913,15 +913,15 @@ def Insert(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Insert.method_config = lambda: base_api.ApiMethodInfo( - http_method='POST', - method_id='storage.objectAccessControls.insert', - ordered_params=['bucket', 'object'], - path_params=['bucket', 'object'], - query_params=['generation', 'userProject'], - relative_path='b/{bucket}/o/{object}/acl', - request_field='objectAccessControl', - request_type_name='StorageObjectAccessControlsInsertRequest', - response_type_name='ObjectAccessControl', + http_method=u'POST', + method_id=u'storage.objectAccessControls.insert', + ordered_params=[u'bucket', u'object'], + path_params=[u'bucket', u'object'], + query_params=[u'generation', u'userProject'], + relative_path=u'b/{bucket}/o/{object}/acl', + request_field=u'objectAccessControl', + request_type_name=u'StorageObjectAccessControlsInsertRequest', + response_type_name=u'ObjectAccessControl', supports_download=False, ) @@ -938,15 +938,15 @@ def List(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.objectAccessControls.list', - ordered_params=['bucket', 'object'], - path_params=['bucket', 'object'], - query_params=['generation', 'userProject'], - relative_path='b/{bucket}/o/{object}/acl', + http_method=u'GET', + method_id=u'storage.objectAccessControls.list', + ordered_params=[u'bucket', u'object'], + path_params=[u'bucket', u'object'], + query_params=[u'generation', u'userProject'], + relative_path=u'b/{bucket}/o/{object}/acl', request_field='', - 
request_type_name='StorageObjectAccessControlsListRequest', - response_type_name='ObjectAccessControls', + request_type_name=u'StorageObjectAccessControlsListRequest', + response_type_name=u'ObjectAccessControls', supports_download=False, ) @@ -963,15 +963,15 @@ def Patch(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Patch.method_config = lambda: base_api.ApiMethodInfo( - http_method='PATCH', - method_id='storage.objectAccessControls.patch', - ordered_params=['bucket', 'object', 'entity'], - path_params=['bucket', 'entity', 'object'], - query_params=['generation', 'userProject'], - relative_path='b/{bucket}/o/{object}/acl/{entity}', - request_field='objectAccessControl', - request_type_name='StorageObjectAccessControlsPatchRequest', - response_type_name='ObjectAccessControl', + http_method=u'PATCH', + method_id=u'storage.objectAccessControls.patch', + ordered_params=[u'bucket', u'object', u'entity'], + path_params=[u'bucket', u'entity', u'object'], + query_params=[u'generation', u'userProject'], + relative_path=u'b/{bucket}/o/{object}/acl/{entity}', + request_field=u'objectAccessControl', + request_type_name=u'StorageObjectAccessControlsPatchRequest', + response_type_name=u'ObjectAccessControl', supports_download=False, ) @@ -988,22 +988,22 @@ def Update(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Update.method_config = lambda: base_api.ApiMethodInfo( - http_method='PUT', - method_id='storage.objectAccessControls.update', - ordered_params=['bucket', 'object', 'entity'], - path_params=['bucket', 'entity', 'object'], - query_params=['generation', 'userProject'], - relative_path='b/{bucket}/o/{object}/acl/{entity}', - request_field='objectAccessControl', - request_type_name='StorageObjectAccessControlsUpdateRequest', - response_type_name='ObjectAccessControl', + http_method=u'PUT', + method_id=u'storage.objectAccessControls.update', + ordered_params=[u'bucket', u'object', u'entity'], + path_params=[u'bucket', u'entity', u'object'], + query_params=[u'generation', u'userProject'], + relative_path=u'b/{bucket}/o/{object}/acl/{entity}', + request_field=u'objectAccessControl', + request_type_name=u'StorageObjectAccessControlsUpdateRequest', + response_type_name=u'ObjectAccessControl', supports_download=False, ) class ObjectsService(base_api.BaseApiService): """Service class for the objects resource.""" - _NAME = 'objects' + _NAME = u'objects' def __init__(self, client): super().__init__(client) @@ -1012,9 +1012,9 @@ def __init__(self, client): accept=['*/*'], max_size=None, resumable_multipart=True, - resumable_path='/resumable/upload/storage/v1/b/{bucket}/o', + resumable_path=u'/resumable/upload/storage/v1/b/{bucket}/o', simple_multipart=True, - simple_path='/upload/storage/v1/b/{bucket}/o', + simple_path=u'/upload/storage/v1/b/{bucket}/o', ), } @@ -1031,21 +1031,21 @@ def Compose(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Compose.method_config = lambda: base_api.ApiMethodInfo( - http_method='POST', - method_id='storage.objects.compose', - ordered_params=['destinationBucket', 'destinationObject'], - path_params=['destinationBucket', 'destinationObject'], + http_method=u'POST', + method_id=u'storage.objects.compose', + ordered_params=[u'destinationBucket', u'destinationObject'], + path_params=[u'destinationBucket', u'destinationObject'], query_params=[ - 'destinationPredefinedAcl', - 'ifGenerationMatch', - 
'ifMetagenerationMatch', - 'kmsKeyName', - 'userProject' + u'destinationPredefinedAcl', + u'ifGenerationMatch', + u'ifMetagenerationMatch', + u'kmsKeyName', + u'userProject' ], - relative_path='b/{destinationBucket}/o/{destinationObject}/compose', - request_field='composeRequest', - request_type_name='StorageObjectsComposeRequest', - response_type_name='Object', + relative_path=u'b/{destinationBucket}/o/{destinationObject}/compose', + request_field=u'composeRequest', + request_type_name=u'StorageObjectsComposeRequest', + response_type_name=u'Object', supports_download=False, ) @@ -1062,39 +1062,39 @@ def Copy(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Copy.method_config = lambda: base_api.ApiMethodInfo( - http_method='POST', - method_id='storage.objects.copy', + http_method=u'POST', + method_id=u'storage.objects.copy', ordered_params=[ - 'sourceBucket', - 'sourceObject', - 'destinationBucket', - 'destinationObject' + u'sourceBucket', + u'sourceObject', + u'destinationBucket', + u'destinationObject' ], path_params=[ - 'destinationBucket', - 'destinationObject', - 'sourceBucket', - 'sourceObject' + u'destinationBucket', + u'destinationObject', + u'sourceBucket', + u'sourceObject' ], query_params=[ - 'destinationPredefinedAcl', - 'ifGenerationMatch', - 'ifGenerationNotMatch', - 'ifMetagenerationMatch', - 'ifMetagenerationNotMatch', - 'ifSourceGenerationMatch', - 'ifSourceGenerationNotMatch', - 'ifSourceMetagenerationMatch', - 'ifSourceMetagenerationNotMatch', - 'projection', - 'sourceGeneration', - 'userProject' + u'destinationPredefinedAcl', + u'ifGenerationMatch', + u'ifGenerationNotMatch', + u'ifMetagenerationMatch', + u'ifMetagenerationNotMatch', + u'ifSourceGenerationMatch', + u'ifSourceGenerationNotMatch', + u'ifSourceMetagenerationMatch', + u'ifSourceMetagenerationNotMatch', + u'projection', + u'sourceGeneration', + u'userProject' ], relative_path= - 'b/{sourceBucket}/o/{sourceObject}/copyTo/b/{destinationBucket}/o/{destinationObject}', - request_field='object', - request_type_name='StorageObjectsCopyRequest', - response_type_name='Object', + u'b/{sourceBucket}/o/{sourceObject}/copyTo/b/{destinationBucket}/o/{destinationObject}', + request_field=u'object', + request_type_name=u'StorageObjectsCopyRequest', + response_type_name=u'Object', supports_download=False, ) @@ -1111,22 +1111,22 @@ def Delete(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( - http_method='DELETE', - method_id='storage.objects.delete', - ordered_params=['bucket', 'object'], - path_params=['bucket', 'object'], + http_method=u'DELETE', + method_id=u'storage.objects.delete', + ordered_params=[u'bucket', u'object'], + path_params=[u'bucket', u'object'], query_params=[ - 'generation', - 'ifGenerationMatch', - 'ifGenerationNotMatch', - 'ifMetagenerationMatch', - 'ifMetagenerationNotMatch', - 'userProject' + u'generation', + u'ifGenerationMatch', + u'ifGenerationNotMatch', + u'ifMetagenerationMatch', + u'ifMetagenerationNotMatch', + u'userProject' ], - relative_path='b/{bucket}/o/{object}', + relative_path=u'b/{bucket}/o/{object}', request_field='', - request_type_name='StorageObjectsDeleteRequest', - response_type_name='StorageObjectsDeleteResponse', + request_type_name=u'StorageObjectsDeleteRequest', + response_type_name=u'StorageObjectsDeleteResponse', supports_download=False, ) @@ -1146,23 +1146,23 @@ def Get(self, request, global_params=None, 
download=None): config, request, global_params=global_params, download=download) Get.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.objects.get', - ordered_params=['bucket', 'object'], - path_params=['bucket', 'object'], + http_method=u'GET', + method_id=u'storage.objects.get', + ordered_params=[u'bucket', u'object'], + path_params=[u'bucket', u'object'], query_params=[ - 'generation', - 'ifGenerationMatch', - 'ifGenerationNotMatch', - 'ifMetagenerationMatch', - 'ifMetagenerationNotMatch', - 'projection', - 'userProject' + u'generation', + u'ifGenerationMatch', + u'ifGenerationNotMatch', + u'ifMetagenerationMatch', + u'ifMetagenerationNotMatch', + u'projection', + u'userProject' ], - relative_path='b/{bucket}/o/{object}', + relative_path=u'b/{bucket}/o/{object}', request_field='', - request_type_name='StorageObjectsGetRequest', - response_type_name='Object', + request_type_name=u'StorageObjectsGetRequest', + response_type_name=u'Object', supports_download=True, ) @@ -1179,15 +1179,15 @@ def GetIamPolicy(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.objects.getIamPolicy', - ordered_params=['bucket', 'object'], - path_params=['bucket', 'object'], - query_params=['generation', 'userProject'], - relative_path='b/{bucket}/o/{object}/iam', + http_method=u'GET', + method_id=u'storage.objects.getIamPolicy', + ordered_params=[u'bucket', u'object'], + path_params=[u'bucket', u'object'], + query_params=[u'generation', u'userProject'], + relative_path=u'b/{bucket}/o/{object}/iam', request_field='', - request_type_name='StorageObjectsGetIamPolicyRequest', - response_type_name='Policy', + request_type_name=u'StorageObjectsGetIamPolicyRequest', + response_type_name=u'Policy', supports_download=False, ) @@ -1212,26 +1212,26 @@ def Insert(self, request, global_params=None, upload=None): upload_config=upload_config) Insert.method_config = lambda: base_api.ApiMethodInfo( - http_method='POST', - method_id='storage.objects.insert', - ordered_params=['bucket'], - path_params=['bucket'], + http_method=u'POST', + method_id=u'storage.objects.insert', + ordered_params=[u'bucket'], + path_params=[u'bucket'], query_params=[ - 'contentEncoding', - 'ifGenerationMatch', - 'ifGenerationNotMatch', - 'ifMetagenerationMatch', - 'ifMetagenerationNotMatch', - 'kmsKeyName', - 'name', - 'predefinedAcl', - 'projection', - 'userProject' + u'contentEncoding', + u'ifGenerationMatch', + u'ifGenerationNotMatch', + u'ifMetagenerationMatch', + u'ifMetagenerationNotMatch', + u'kmsKeyName', + u'name', + u'predefinedAcl', + u'projection', + u'userProject' ], - relative_path='b/{bucket}/o', - request_field='object', - request_type_name='StorageObjectsInsertRequest', - response_type_name='Object', + relative_path=u'b/{bucket}/o', + request_field=u'object', + request_type_name=u'StorageObjectsInsertRequest', + response_type_name=u'Object', supports_download=False, ) @@ -1248,24 +1248,24 @@ def List(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.objects.list', - ordered_params=['bucket'], - path_params=['bucket'], + http_method=u'GET', + method_id=u'storage.objects.list', + ordered_params=[u'bucket'], + path_params=[u'bucket'], query_params=[ - 'delimiter', - 'includeTrailingDelimiter', - 
'maxResults', - 'pageToken', - 'prefix', - 'projection', - 'userProject', - 'versions' + u'delimiter', + u'includeTrailingDelimiter', + u'maxResults', + u'pageToken', + u'prefix', + u'projection', + u'userProject', + u'versions' ], - relative_path='b/{bucket}/o', + relative_path=u'b/{bucket}/o', request_field='', - request_type_name='StorageObjectsListRequest', - response_type_name='Objects', + request_type_name=u'StorageObjectsListRequest', + response_type_name=u'Objects', supports_download=False, ) @@ -1282,24 +1282,24 @@ def Patch(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Patch.method_config = lambda: base_api.ApiMethodInfo( - http_method='PATCH', - method_id='storage.objects.patch', - ordered_params=['bucket', 'object'], - path_params=['bucket', 'object'], + http_method=u'PATCH', + method_id=u'storage.objects.patch', + ordered_params=[u'bucket', u'object'], + path_params=[u'bucket', u'object'], query_params=[ - 'generation', - 'ifGenerationMatch', - 'ifGenerationNotMatch', - 'ifMetagenerationMatch', - 'ifMetagenerationNotMatch', - 'predefinedAcl', - 'projection', - 'userProject' + u'generation', + u'ifGenerationMatch', + u'ifGenerationNotMatch', + u'ifMetagenerationMatch', + u'ifMetagenerationNotMatch', + u'predefinedAcl', + u'projection', + u'userProject' ], - relative_path='b/{bucket}/o/{object}', - request_field='objectResource', - request_type_name='StorageObjectsPatchRequest', - response_type_name='Object', + relative_path=u'b/{bucket}/o/{object}', + request_field=u'objectResource', + request_type_name=u'StorageObjectsPatchRequest', + response_type_name=u'Object', supports_download=False, ) @@ -1316,42 +1316,42 @@ def Rewrite(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Rewrite.method_config = lambda: base_api.ApiMethodInfo( - http_method='POST', - method_id='storage.objects.rewrite', + http_method=u'POST', + method_id=u'storage.objects.rewrite', ordered_params=[ - 'sourceBucket', - 'sourceObject', - 'destinationBucket', - 'destinationObject' + u'sourceBucket', + u'sourceObject', + u'destinationBucket', + u'destinationObject' ], path_params=[ - 'destinationBucket', - 'destinationObject', - 'sourceBucket', - 'sourceObject' + u'destinationBucket', + u'destinationObject', + u'sourceBucket', + u'sourceObject' ], query_params=[ - 'destinationKmsKeyName', - 'destinationPredefinedAcl', - 'ifGenerationMatch', - 'ifGenerationNotMatch', - 'ifMetagenerationMatch', - 'ifMetagenerationNotMatch', - 'ifSourceGenerationMatch', - 'ifSourceGenerationNotMatch', - 'ifSourceMetagenerationMatch', - 'ifSourceMetagenerationNotMatch', - 'maxBytesRewrittenPerCall', - 'projection', - 'rewriteToken', - 'sourceGeneration', - 'userProject' + u'destinationKmsKeyName', + u'destinationPredefinedAcl', + u'ifGenerationMatch', + u'ifGenerationNotMatch', + u'ifMetagenerationMatch', + u'ifMetagenerationNotMatch', + u'ifSourceGenerationMatch', + u'ifSourceGenerationNotMatch', + u'ifSourceMetagenerationMatch', + u'ifSourceMetagenerationNotMatch', + u'maxBytesRewrittenPerCall', + u'projection', + u'rewriteToken', + u'sourceGeneration', + u'userProject' ], relative_path= - 'b/{sourceBucket}/o/{sourceObject}/rewriteTo/b/{destinationBucket}/o/{destinationObject}', - request_field='object', - request_type_name='StorageObjectsRewriteRequest', - response_type_name='RewriteResponse', + u'b/{sourceBucket}/o/{sourceObject}/rewriteTo/b/{destinationBucket}/o/{destinationObject}', + request_field=u'object', + 
request_type_name=u'StorageObjectsRewriteRequest', + response_type_name=u'RewriteResponse', supports_download=False, ) @@ -1368,15 +1368,15 @@ def SetIamPolicy(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo( - http_method='PUT', - method_id='storage.objects.setIamPolicy', - ordered_params=['bucket', 'object'], - path_params=['bucket', 'object'], - query_params=['generation', 'userProject'], - relative_path='b/{bucket}/o/{object}/iam', - request_field='policy', - request_type_name='StorageObjectsSetIamPolicyRequest', - response_type_name='Policy', + http_method=u'PUT', + method_id=u'storage.objects.setIamPolicy', + ordered_params=[u'bucket', u'object'], + path_params=[u'bucket', u'object'], + query_params=[u'generation', u'userProject'], + relative_path=u'b/{bucket}/o/{object}/iam', + request_field=u'policy', + request_type_name=u'StorageObjectsSetIamPolicyRequest', + response_type_name=u'Policy', supports_download=False, ) @@ -1393,15 +1393,15 @@ def TestIamPermissions(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.objects.testIamPermissions', - ordered_params=['bucket', 'object', 'permissions'], - path_params=['bucket', 'object'], - query_params=['generation', 'permissions', 'userProject'], - relative_path='b/{bucket}/o/{object}/iam/testPermissions', + http_method=u'GET', + method_id=u'storage.objects.testIamPermissions', + ordered_params=[u'bucket', u'object', u'permissions'], + path_params=[u'bucket', u'object'], + query_params=[u'generation', u'permissions', u'userProject'], + relative_path=u'b/{bucket}/o/{object}/iam/testPermissions', request_field='', - request_type_name='StorageObjectsTestIamPermissionsRequest', - response_type_name='TestIamPermissionsResponse', + request_type_name=u'StorageObjectsTestIamPermissionsRequest', + response_type_name=u'TestIamPermissionsResponse', supports_download=False, ) @@ -1418,24 +1418,24 @@ def Update(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Update.method_config = lambda: base_api.ApiMethodInfo( - http_method='PUT', - method_id='storage.objects.update', - ordered_params=['bucket', 'object'], - path_params=['bucket', 'object'], + http_method=u'PUT', + method_id=u'storage.objects.update', + ordered_params=[u'bucket', u'object'], + path_params=[u'bucket', u'object'], query_params=[ - 'generation', - 'ifGenerationMatch', - 'ifGenerationNotMatch', - 'ifMetagenerationMatch', - 'ifMetagenerationNotMatch', - 'predefinedAcl', - 'projection', - 'userProject' + u'generation', + u'ifGenerationMatch', + u'ifGenerationNotMatch', + u'ifMetagenerationMatch', + u'ifMetagenerationNotMatch', + u'predefinedAcl', + u'projection', + u'userProject' ], - relative_path='b/{bucket}/o/{object}', - request_field='objectResource', - request_type_name='StorageObjectsUpdateRequest', - response_type_name='Object', + relative_path=u'b/{bucket}/o/{object}', + request_field=u'objectResource', + request_type_name=u'StorageObjectsUpdateRequest', + response_type_name=u'Object', supports_download=False, ) @@ -1452,31 +1452,31 @@ def WatchAll(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) WatchAll.method_config = lambda: base_api.ApiMethodInfo( - http_method='POST', - 
method_id='storage.objects.watchAll', - ordered_params=['bucket'], - path_params=['bucket'], + http_method=u'POST', + method_id=u'storage.objects.watchAll', + ordered_params=[u'bucket'], + path_params=[u'bucket'], query_params=[ - 'delimiter', - 'includeTrailingDelimiter', - 'maxResults', - 'pageToken', - 'prefix', - 'projection', - 'userProject', - 'versions' + u'delimiter', + u'includeTrailingDelimiter', + u'maxResults', + u'pageToken', + u'prefix', + u'projection', + u'userProject', + u'versions' ], - relative_path='b/{bucket}/o/watch', - request_field='channel', - request_type_name='StorageObjectsWatchAllRequest', - response_type_name='Channel', + relative_path=u'b/{bucket}/o/watch', + request_field=u'channel', + request_type_name=u'StorageObjectsWatchAllRequest', + response_type_name=u'Channel', supports_download=False, ) class ProjectsServiceAccountService(base_api.BaseApiService): """Service class for the projects_serviceAccount resource.""" - _NAME = 'projects_serviceAccount' + _NAME = u'projects_serviceAccount' def __init__(self, client): super().__init__(client) @@ -1495,22 +1495,22 @@ def Get(self, request, global_params=None): return self._RunMethod(config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( - http_method='GET', - method_id='storage.projects.serviceAccount.get', - ordered_params=['projectId'], - path_params=['projectId'], - query_params=['userProject'], - relative_path='projects/{projectId}/serviceAccount', + http_method=u'GET', + method_id=u'storage.projects.serviceAccount.get', + ordered_params=[u'projectId'], + path_params=[u'projectId'], + query_params=[u'userProject'], + relative_path=u'projects/{projectId}/serviceAccount', request_field='', - request_type_name='StorageProjectsServiceAccountGetRequest', - response_type_name='ServiceAccount', + request_type_name=u'StorageProjectsServiceAccountGetRequest', + response_type_name=u'ServiceAccount', supports_download=False, ) class ProjectsService(base_api.BaseApiService): """Service class for the projects resource.""" - _NAME = 'projects' + _NAME = u'projects' def __init__(self, client): super().__init__(client) diff --git a/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_messages.py b/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_messages.py index 65d8bd93258fa..caef0eb4b033a 100644 --- a/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_messages.py +++ b/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_messages.py @@ -348,7 +348,7 @@ class WebsiteValue(_messages.Message): encryption = _messages.MessageField('EncryptionValue', 6) etag = _messages.StringField(7) id = _messages.StringField(8) - kind = _messages.StringField(9, default='storage#bucket') + kind = _messages.StringField(9, default=u'storage#bucket') labels = _messages.MessageField('LabelsValue', 10) lifecycle = _messages.MessageField('LifecycleValue', 11) location = _messages.StringField(12) @@ -410,7 +410,7 @@ class ProjectTeamValue(_messages.Message): entityId = _messages.StringField(5) etag = _messages.StringField(6) id = _messages.StringField(7) - kind = _messages.StringField(8, default='storage#bucketAccessControl') + kind = _messages.StringField(8, default=u'storage#bucketAccessControl') projectTeam = _messages.MessageField('ProjectTeamValue', 9) role = _messages.StringField(10) selfLink = _messages.StringField(11) @@ -426,7 +426,7 @@ class BucketAccessControls(_messages.Message): """ items = 
_messages.MessageField('BucketAccessControl', 1, repeated=True) - kind = _messages.StringField(2, default='storage#bucketAccessControls') + kind = _messages.StringField(2, default=u'storage#bucketAccessControls') class Buckets(_messages.Message): @@ -442,7 +442,7 @@ class Buckets(_messages.Message): """ items = _messages.MessageField('Bucket', 1, repeated=True) - kind = _messages.StringField(2, default='storage#buckets') + kind = _messages.StringField(2, default=u'storage#buckets') nextPageToken = _messages.StringField(3) @@ -497,7 +497,7 @@ class AdditionalProperty(_messages.Message): address = _messages.StringField(1) expiration = _messages.IntegerField(2) id = _messages.StringField(3) - kind = _messages.StringField(4, default='api#channel') + kind = _messages.StringField(4, default=u'api#channel') params = _messages.MessageField('ParamsValue', 5) payload = _messages.BooleanField(6) resourceId = _messages.StringField(7) @@ -549,7 +549,7 @@ class ObjectPreconditionsValue(_messages.Message): objectPreconditions = _messages.MessageField('ObjectPreconditionsValue', 3) destination = _messages.MessageField('Object', 1) - kind = _messages.StringField(2, default='storage#composeRequest') + kind = _messages.StringField(2, default=u'storage#composeRequest') sourceObjects = _messages.MessageField( 'SourceObjectsValueListEntry', 3, repeated=True) @@ -610,9 +610,9 @@ class AdditionalProperty(_messages.Message): etag = _messages.StringField(2) event_types = _messages.StringField(3, repeated=True) id = _messages.StringField(4) - kind = _messages.StringField(5, default='storage#notification') + kind = _messages.StringField(5, default=u'storage#notification') object_name_prefix = _messages.StringField(6) - payload_format = _messages.StringField(7, default='JSON_API_V1') + payload_format = _messages.StringField(7, default=u'JSON_API_V1') selfLink = _messages.StringField(8) topic = _messages.StringField(9) @@ -627,7 +627,7 @@ class Notifications(_messages.Message): """ items = _messages.MessageField('Notification', 1, repeated=True) - kind = _messages.StringField(2, default='storage#notifications') + kind = _messages.StringField(2, default=u'storage#notifications') class Object(_messages.Message): @@ -776,7 +776,7 @@ class OwnerValue(_messages.Message): eventBasedHold = _messages.BooleanField(12) generation = _messages.IntegerField(13) id = _messages.StringField(14) - kind = _messages.StringField(15, default='storage#object') + kind = _messages.StringField(15, default=u'storage#object') kmsKeyName = _messages.StringField(16) md5Hash = _messages.StringField(17) mediaLink = _messages.StringField(18) @@ -842,7 +842,7 @@ class ProjectTeamValue(_messages.Message): etag = _messages.StringField(6) generation = _messages.IntegerField(7) id = _messages.StringField(8) - kind = _messages.StringField(9, default='storage#objectAccessControl') + kind = _messages.StringField(9, default=u'storage#objectAccessControl') object = _messages.StringField(10) projectTeam = _messages.MessageField('ProjectTeamValue', 11) role = _messages.StringField(12) @@ -859,7 +859,7 @@ class ObjectAccessControls(_messages.Message): """ items = _messages.MessageField('ObjectAccessControl', 1, repeated=True) - kind = _messages.StringField(2, default='storage#objectAccessControls') + kind = _messages.StringField(2, default=u'storage#objectAccessControls') class Objects(_messages.Message): @@ -877,7 +877,7 @@ class Objects(_messages.Message): """ items = _messages.MessageField('Object', 1, repeated=True) - kind = _messages.StringField(2, 
default='storage#objects') + kind = _messages.StringField(2, default=u'storage#objects') nextPageToken = _messages.StringField(3) prefixes = _messages.StringField(4, repeated=True) @@ -956,7 +956,7 @@ class BindingsValueListEntry(_messages.Message): bindings = _messages.MessageField('BindingsValueListEntry', 1, repeated=True) etag = _messages.BytesField(2) - kind = _messages.StringField(3, default='storage#policy') + kind = _messages.StringField(3, default=u'storage#policy') resourceId = _messages.StringField(4) @@ -980,7 +980,7 @@ class RewriteResponse(_messages.Message): """ done = _messages.BooleanField(1) - kind = _messages.StringField(2, default='storage#rewriteResponse') + kind = _messages.StringField(2, default=u'storage#rewriteResponse') objectSize = _messages.IntegerField(3) resource = _messages.MessageField('Object', 4) rewriteToken = _messages.StringField(5) @@ -997,7 +997,7 @@ class ServiceAccount(_messages.Message): """ email_address = _messages.StringField(1) - kind = _messages.StringField(2, default='storage#serviceAccount') + kind = _messages.StringField(2, default=u'storage#serviceAccount') class StandardQueryParameters(_messages.Message): @@ -1028,7 +1028,7 @@ class AltValueValuesEnum(_messages.Enum): """ json = 0 - alt = _messages.EnumField('AltValueValuesEnum', 1, default='json') + alt = _messages.EnumField('AltValueValuesEnum', 1, default=u'json') fields = _messages.StringField(2) key = _messages.StringField(3) oauth_token = _messages.StringField(4) @@ -2710,5 +2710,5 @@ class TestIamPermissionsResponse(_messages.Message): - storage.objects.update - Update object metadata. """ - kind = _messages.StringField(1, default='storage#testIamPermissionsResponse') + kind = _messages.StringField(1, default=u'storage#testIamPermissionsResponse') permissions = _messages.StringField(2, repeated=True)
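
Note on the changes in these hunks: each -/+ pair differs only in the u'' prefix on a string literal. On Python 3 the u prefix is accepted purely for source compatibility (PEP 414) and yields the same str value, so the generated storage client behaves identically with or without it. A minimal sketch of that equivalence, reusing literals from the hunks above (illustrative only, not part of the patch):

    # Python 3: the u prefix is a no-op; both spellings produce identical str objects.
    assert u'storage#object' == 'storage#object'
    assert u'b/{bucket}/o/{object}' == 'b/{bucket}/o/{object}'
    assert type(u'storage.objects.get') is str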