From 99c731cd41d26304e0bcca811ab1139de1aef9f9 Mon Sep 17 00:00:00 2001 From: Jack McCluskey Date: Thu, 22 Aug 2024 11:11:35 -0400 Subject: [PATCH 1/3] Remove Python 3.8 Support --- .../test-properties.json | 18 +- .github/workflows/README.md | 50 ++-- .../beam_CloudML_Benchmarks_Dataflow.yml | 2 +- ...m_Inference_Python_Benchmarks_Dataflow.yml | 10 +- ..._LoadTests_Python_CoGBK_Dataflow_Batch.yml | 8 +- ...dTests_Python_CoGBK_Dataflow_Streaming.yml | 8 +- ...oadTests_Python_Combine_Dataflow_Batch.yml | 6 +- ...ests_Python_Combine_Dataflow_Streaming.yml | 6 +- ...m_LoadTests_Python_Combine_Flink_Batch.yml | 6 +- ...adTests_Python_Combine_Flink_Streaming.yml | 4 +- ...ests_Python_FnApiRunner_Microbenchmark.yml | 2 +- ...am_LoadTests_Python_GBK_Dataflow_Batch.yml | 10 +- ...oadTests_Python_GBK_Dataflow_Streaming.yml | 2 +- ...ts_Python_GBK_reiterate_Dataflow_Batch.yml | 4 +- ...ython_GBK_reiterate_Dataflow_Streaming.yml | 4 +- ..._LoadTests_Python_ParDo_Dataflow_Batch.yml | 8 +- ...dTests_Python_ParDo_Dataflow_Streaming.yml | 8 +- ...eam_LoadTests_Python_ParDo_Flink_Batch.yml | 6 +- ...LoadTests_Python_ParDo_Flink_Streaming.yml | 10 +- ...dTests_Python_SideInput_Dataflow_Batch.yml | 20 +- .../workflows/beam_LoadTests_Python_Smoke.yml | 4 +- ...erformanceTests_BiqQueryIO_Read_Python.yml | 2 +- ...nceTests_BiqQueryIO_Write_Python_Batch.yml | 2 +- ...manceTests_PubsubIOIT_Python_Streaming.yml | 2 +- ...ormanceTests_SpannerIO_Read_2GB_Python.yml | 2 +- ...Tests_SpannerIO_Write_2GB_Python_Batch.yml | 2 +- .../beam_PerformanceTests_TextIOIT_Python.yml | 2 +- ...rmanceTests_WordCountIT_PythonVersions.yml | 2 +- ..._PerformanceTests_xlang_KafkaIO_Python.yml | 2 +- .../beam_PostCommit_PortableJar_Flink.yml | 4 +- .../beam_PostCommit_PortableJar_Spark.yml | 4 +- .github/workflows/beam_PostCommit_Python.yml | 2 +- .../workflows/beam_PostCommit_Python_Arm.yml | 2 +- .../beam_PostCommit_Python_Dependency.yml | 2 +- ...beam_PostCommit_Python_Examples_Direct.yml | 2 +- 
.../beam_PostCommit_Python_Examples_Flink.yml | 2 +- .../beam_PostCommit_Python_Examples_Spark.yml | 2 +- .../beam_PostCommit_Python_Nexmark_Direct.yml | 2 +- ...mit_Python_ValidatesContainer_Dataflow.yml | 2 +- ...on_ValidatesContainer_Dataflow_With_RC.yml | 2 +- ...Commit_Python_ValidatesRunner_Dataflow.yml | 2 +- ...ostCommit_Python_ValidatesRunner_Flink.yml | 2 +- ...ostCommit_Python_ValidatesRunner_Samza.yml | 2 +- ...ostCommit_Python_ValidatesRunner_Spark.yml | 2 +- ...m_PostCommit_Python_Xlang_Gcp_Dataflow.yml | 2 +- ...eam_PostCommit_Python_Xlang_Gcp_Direct.yml | 2 +- ...am_PostCommit_Python_Xlang_IO_Dataflow.yml | 2 +- .../beam_PostCommit_Sickbay_Python.yml | 2 +- ...eam_PostCommit_TransformService_Direct.yml | 4 +- .../workflows/beam_PostCommit_XVR_Direct.yml | 8 +- .../workflows/beam_PostCommit_XVR_Flink.yml | 8 +- ...ostCommit_XVR_JavaUsingPython_Dataflow.yml | 4 +- ...ostCommit_XVR_PythonUsingJava_Dataflow.yml | 4 +- .../workflows/beam_PostCommit_XVR_Samza.yml | 8 +- .../workflows/beam_PostCommit_XVR_Spark3.yml | 8 +- .../beam_PreCommit_Portable_Python.yml | 4 +- .github/workflows/beam_PreCommit_Python.yml | 2 +- .../workflows/beam_PreCommit_PythonDocker.yml | 2 +- .../beam_PreCommit_Python_Coverage.yml | 2 +- .../beam_PreCommit_Python_Dataframes.yml | 2 +- .../beam_PreCommit_Python_Examples.yml | 2 +- .../beam_PreCommit_Python_Integration.yml | 2 +- .../workflows/beam_PreCommit_Python_ML.yml | 2 +- .../beam_PreCommit_Python_Runners.yml | 2 +- .../beam_PreCommit_Python_Transforms.yml | 2 +- ...m_PreCommit_Xlang_Generated_Transforms.yml | 2 +- .../beam_Publish_Beam_SDK_Snapshots.yml | 1 - .github/workflows/build_release_candidate.yml | 8 +- .github/workflows/build_wheels.yml | 6 +- .github/workflows/dask_runner_tests.yml | 3 +- ...ombine_Flink_Batch_2GB_10_byte_records.txt | 2 +- ...ython_Combine_Flink_Batch_2GB_Fanout_4.txt | 2 +- ...ython_Combine_Flink_Batch_2GB_Fanout_8.txt | 2 +- ...ne_Flink_Streaming_2GB_10_byte_records.txt | 2 +- 
...n_Combine_Flink_Streaming_2GB_Fanout_4.txt | 2 +- ...n_Combine_Flink_Streaming_2GB_Fanout_8.txt | 2 +- ...on_GBK_Flink_Batch_2GB_of_100B_records.txt | 2 +- ...hon_GBK_Flink_Batch_2GB_of_10B_records.txt | 2 +- ...4_times_with_2GB_10-byte_records_total.txt | 2 +- ...8_times_with_2GB_10-byte_records_total.txt | 2 +- ...nk_Batch_reiterate_4_times_10kB_values.txt | 2 +- .../python_ParDo_Flink_Batch_10_Counters.txt | 2 +- ...python_ParDo_Flink_Batch_10_Iterations.txt | 2 +- ...ython_ParDo_Flink_Batch_200_Iterations.txt | 2 +- ...hon_ParDo_Flink_Streaming_100_Counters.txt | 2 +- ...thon_ParDo_Flink_Streaming_10_Counters.txt | 2 +- ...on_ParDo_Flink_Streaming_10_Iterations.txt | 2 +- ...n_ParDo_Flink_Streaming_200_Iterations.txt | 2 +- ...hon_ParDo_Flink_Streaming_5_Iterations.txt | 2 +- .../playground_backend_precommit.yml | 2 +- .github/workflows/python_dependency_tests.yml | 1 - .github/workflows/python_tests.yml | 5 +- .github/workflows/run_perf_alert_tool.yml | 2 +- .github/workflows/run_rc_validation.yml | 14 +- .github/workflows/typescript_tests.yml | 4 +- .../workflows/update_python_dependencies.yml | 1 - .../jenkins/PythonTestProperties.groovy | 12 +- .test-infra/jenkins/build.gradle | 4 +- .test-infra/jenkins/metrics_report/tox.ini | 8 +- .test-infra/junitxml_report.py | 4 +- .../Python_WordCount_IT_Benchmarks.json | 2 +- build.gradle.kts | 29 +-- .../beam/gradle/BeamModulePlugin.groovy | 2 +- examples/multi-language/README.md | 2 +- gradle.properties | 2 +- local-env-setup.sh | 4 +- release/src/main/Dockerfile | 3 +- .../python_release_automation.sh | 2 +- .../python_release_automation_utils.sh | 12 +- release/src/main/scripts/run_rc_validation.sh | 2 +- .../runners/dataflow/DataflowRunnerTest.java | 8 +- sdks/python/apache_beam/__init__.py | 2 +- .../azure_integration_test.sh | 2 +- .../hdfs_integration_test.sh | 2 +- .../runners/dataflow/internal/apiclient.py | 2 +- .../dataflow/internal/apiclient_test.py | 8 +- sdks/python/container/build.gradle | 2 +- 
.../py38/base_image_requirements.txt | 163 ------------- sdks/python/container/py38/build.gradle | 28 --- .../container/run_generate_requirements.sh | 6 +- .../container/run_validatescontainer.sh | 8 +- .../expansion-service-container/Dockerfile | 4 +- .../expansion-service-container/build.gradle | 2 +- sdks/python/setup.py | 1 - .../tensor_rt.dockerfile | 4 +- .../test-suites/dataflow/py38/build.gradle | 24 -- .../test-suites/direct/py38/build.gradle | 24 -- sdks/python/test-suites/gradle.properties | 20 +- .../test-suites/portable/py38/build.gradle | 26 -- sdks/python/test-suites/tox/common.gradle | 2 +- sdks/python/test-suites/tox/py38/build.gradle | 224 ------------------ sdks/python/tox.ini | 38 +-- settings.gradle.kts | 5 - .../en/documentation/runtime/environments.md | 2 +- .../content/en/get-started/quickstart-py.md | 2 +- 135 files changed, 293 insertions(+), 811 deletions(-) delete mode 100644 sdks/python/container/py38/base_image_requirements.txt delete mode 100644 sdks/python/container/py38/build.gradle delete mode 100644 sdks/python/test-suites/dataflow/py38/build.gradle delete mode 100644 sdks/python/test-suites/direct/py38/build.gradle delete mode 100644 sdks/python/test-suites/portable/py38/build.gradle delete mode 100644 sdks/python/test-suites/tox/py38/build.gradle diff --git a/.github/actions/setup-default-test-properties/test-properties.json b/.github/actions/setup-default-test-properties/test-properties.json index 89ef25b21160..536be009526d 100644 --- a/.github/actions/setup-default-test-properties/test-properties.json +++ b/.github/actions/setup-default-test-properties/test-properties.json @@ -1,15 +1,15 @@ { "PythonTestProperties": { - "ALL_SUPPORTED_VERSIONS": ["3.8", "3.9", "3.10", "3.11"], - "LOWEST_SUPPORTED": ["3.8"], - "HIGHEST_SUPPORTED": ["3.11"], - "ESSENTIAL_VERSIONS": ["3.8", "3.11"], - "CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS": ["3.8", "3.11"], + "ALL_SUPPORTED_VERSIONS": ["3.9", "3.10", "3.11", "3.12"], + 
"LOWEST_SUPPORTED": ["3.9"], + "HIGHEST_SUPPORTED": ["3.12"], + "ESSENTIAL_VERSIONS": ["3.9", "3.12"], + "CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS": ["3.9", "3.12"], "CROSS_LANGUAGE_VALIDATES_RUNNER_DATAFLOW_USING_SQL_PYTHON_VERSIONS": ["3.11"], - "VALIDATES_CONTAINER_DATAFLOW_PYTHON_VERSIONS": ["3.8", "3.9", "3.10", "3.11" ], - "LOAD_TEST_PYTHON_VERSION": "3.8", - "CHICAGO_TAXI_EXAMPLE_FLINK_PYTHON_VERSION": "3.8", - "DEFAULT_INTERPRETER": "python3.8", + "VALIDATES_CONTAINER_DATAFLOW_PYTHON_VERSIONS": ["3.9", "3.10", "3.11","3.12"], + "LOAD_TEST_PYTHON_VERSION": "3.9", + "CHICAGO_TAXI_EXAMPLE_FLINK_PYTHON_VERSION": "3.9", + "DEFAULT_INTERPRETER": "python3.9", "TOX_ENV": ["Cloud", "Cython"] }, "JavaTestProperties": { diff --git a/.github/workflows/README.md b/.github/workflows/README.md index 0f3a42583c36..c305e433d1f4 100644 --- a/.github/workflows/README.md +++ b/.github/workflows/README.md @@ -77,7 +77,7 @@ jobs: matrix: job_name: ["beam_job_with_matrix"] job_phrase: ["Run Job With Matrix"] - python_version: ['3.8','3.9','3.10','3.11'] + python_version: ['3.9','3.10','3.11', '3.12'] if: | github.event_name == 'push' || github.event_name == 'pull_request_target' || @@ -194,7 +194,7 @@ You can do this by changing runs-on: [self-hosted, ubuntu-20.04, main] (self-hos # Workflows Please note that jobs with matrix need to have matrix element in the comment. 
Example: -```Run Python PreCommit (3.8)``` +```Run Python PreCommit (3.9)``` ### PreCommit Jobs @@ -251,20 +251,20 @@ PreCommit Jobs run in a schedule and also get triggered in a PR if relevant sour | [ PreCommit Java Thrift IO Direct ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Java_Thrift_IO_Direct.yml) | N/A |`Run Java_Thrift_IO_Direct PreCommit`| [![.github/workflows/beam_PreCommit_Java_Thrift_IO_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Java_Thrift_IO_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Java_Thrift_IO_Direct.yml?query=event%3Aschedule) | | [ PreCommit Java Tika IO Direct ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Java_Tika_IO_Direct.yml) | N/A |`Run Java_Tika_IO_Direct PreCommit`| [![.github/workflows/beam_PreCommit_Java_Tika_IO_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Java_Tika_IO_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Java_Tika_IO_Direct.yml?query=event%3Aschedule) | | [ PreCommit Kotlin Examples ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Kotlin_Examples.yml) | N/A | `Run Kotlin_Examples PreCommit` | [![.github/workflows/beam_PreCommit_Kotlin_Examples.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Kotlin_Examples.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Kotlin_Examples.yml?query=event%3Aschedule) | -| [ PreCommit Portable Python ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Portable_Python.yml) | ['3.8','3.11'] | `Run Portable_Python PreCommit` | 
[![.github/workflows/beam_PreCommit_Portable_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Portable_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Portable_Python.yml?query=event%3Aschedule) | -| [ PreCommit Python ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python.yml) | ['3.8','3.9','3.10','3.11'] | `Run Python PreCommit (matrix_element)` | [![.github/workflows/beam_PreCommit_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python.yml?query=event%3Aschedule) | +| [ PreCommit Portable Python ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Portable_Python.yml) | ['3.9','3.12'] | `Run Portable_Python PreCommit` | [![.github/workflows/beam_PreCommit_Portable_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Portable_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Portable_Python.yml?query=event%3Aschedule) | +| [ PreCommit Python ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python.yml) | ['3.9','3.10','3.11','3.12'] | `Run Python PreCommit (matrix_element)` | [![.github/workflows/beam_PreCommit_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python.yml?query=event%3Aschedule) | | [ PreCommit Python Coverage ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Coverage.yml) | N/A | `Run Python_Coverage PreCommit`| 
[![.github/workflows/beam_PreCommit_Python_Coverage.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Coverage.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Coverage.yml?query=event%3Aschedule) | -| [ PreCommit Python Dataframes ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Dataframes.yml) | ['3.8','3.9','3.10','3.11'] | `Run Python_Dataframes PreCommit (matrix_element)`| [![.github/workflows/beam_PreCommit_Python_Dataframes.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Dataframes.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Dataframes.yml?query=event%3Aschedule) | -| [ PreCommit Python Docker ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_PythonDocker.yml) | ['3.8','3.9','3.10','3.11'] | `Run PythonDocker PreCommit (matrix_element)`| [![.github/workflows/beam_PreCommit_PythonDocker.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_PythonDocker.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_PythonDocker.yml?query=event%3Aschedule) | +| [ PreCommit Python Dataframes ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Dataframes.yml) | ['3.9','3.10','3.11','3.12'] | `Run Python_Dataframes PreCommit (matrix_element)`| [![.github/workflows/beam_PreCommit_Python_Dataframes.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Dataframes.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Dataframes.yml?query=event%3Aschedule) | +| [ PreCommit Python Docker ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_PythonDocker.yml) | ['3.9','3.10','3.11','3.12'] | `Run PythonDocker PreCommit (matrix_element)`| 
[![.github/workflows/beam_PreCommit_PythonDocker.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_PythonDocker.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_PythonDocker.yml?query=event%3Aschedule) | | [ PreCommit Python Docs ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_PythonDocs.yml) | N/A | `Run PythonDocs PreCommit`| [![.github/workflows/beam_PreCommit_PythonDocs.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_PythonDocs.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_PythonDocs.yml?query=event%3Aschedule) | -| [ PreCommit Python Examples ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Examples.yml) | ['3.8','3.9','3.10','3.11'] | `Run Python_Examples PreCommit (matrix_element)` | [![.github/workflows/beam_PreCommit_Python_Examples.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Examples.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Examples.yml?query=event%3Aschedule) | +| [ PreCommit Python Examples ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Examples.yml) | ['3.9','3.10','3.11','3.12'] | `Run Python_Examples PreCommit (matrix_element)` | [![.github/workflows/beam_PreCommit_Python_Examples.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Examples.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Examples.yml?query=event%3Aschedule) | | [ PreCommit Python Formatter ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_PythonFormatter.yml) | N/A | `Run PythonFormatter PreCommit`| 
[![.github/workflows/beam_PreCommit_PythonFormatter.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_PythonFormatter.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_PythonFormatter.yml?query=event%3Aschedule) | -| [ PreCommit Python Integration](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Integration.yml) | ['3.8','3.11'] | `Run Python_Integration PreCommit (matrix_element)` | [![.github/workflows/beam_PreCommit_Python_Integration.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Integration.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Integration.yml?query=event%3Aschedule) | +| [ PreCommit Python Integration](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Integration.yml) | ['3.9','3.12'] | `Run Python_Integration PreCommit (matrix_element)` | [![.github/workflows/beam_PreCommit_Python_Integration.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Integration.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Integration.yml?query=event%3Aschedule) | | [ PreCommit Python Lint ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_PythonLint.yml) | N/A | `Run PythonLint PreCommit` | [![.github/workflows/beam_PreCommit_PythonLint.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_PythonLint.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_PythonLint.yml?query=event%3Aschedule) | -| [ PreCommit Python ML ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_ML.yml) | ['3.8','3.9','3.10','3.11'] | `Run Python_ML PreCommit (matrix_element)`| 
[![.github/workflows/beam_PreCommit_Python_ML.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_ML.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_ML.yml?query=event%3Aschedule) | +| [ PreCommit Python ML ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_ML.yml) | ['3.9','3.10','3.11','3.12'] | `Run Python_ML PreCommit (matrix_element)`| [![.github/workflows/beam_PreCommit_Python_ML.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_ML.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_ML.yml?query=event%3Aschedule) | | [ PreCommit Python PVR Flink ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_PVR_Flink.yml) | N/A | `Run Python_PVR_Flink PreCommit` | [![.github/workflows/beam_PreCommit_Python_PVR_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_PVR_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_PVR_Flink.yml?query=event%3Aschedule) | -| [ PreCommit Python Runners ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Runners.yml) | ['3.8','3.9','3.10','3.11'] | `Run Python_Runners PreCommit (matrix_element)`| [![.github/workflows/beam_PreCommit_Python_Runners.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Runners.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Runners.yml?query=event%3Aschedule) | -| [ PreCommit Python Transforms ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Transforms.yml) | ['3.8','3.9','3.10','3.11'] | `Run Python_Transforms PreCommit (matrix_element)`| 
[![.github/workflows/beam_PreCommit_Python_Transforms.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Transforms.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Transforms.yml?query=event%3Aschedule) | +| [ PreCommit Python Runners ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Runners.yml) | ['3.9','3.10','3.11','3.12'] | `Run Python_Runners PreCommit (matrix_element)`| [![.github/workflows/beam_PreCommit_Python_Runners.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Runners.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Runners.yml?query=event%3Aschedule) | +| [ PreCommit Python Transforms ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Transforms.yml) | ['3.9','3.10','3.11','3.12'] | `Run Python_Transforms PreCommit (matrix_element)`| [![.github/workflows/beam_PreCommit_Python_Transforms.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Transforms.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Python_Transforms.yml?query=event%3Aschedule) | | [ PreCommit RAT ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_RAT.yml) | N/A | `Run RAT PreCommit` | [![.github/workflows/beam_PreCommit_RAT.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_RAT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_RAT.yml?query=event%3Aschedule) | | [ PreCommit Spotless ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Spotless.yml) | N/A | `Run Spotless PreCommit` | 
[![.github/workflows/beam_PreCommit_Spotless.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Spotless.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_Spotless.yml?query=event%3Aschedule) | | [ PreCommit SQL ](https://github.com/apache/beam/actions/workflows/beam_PreCommit_SQL.yml) | N/A |`Run SQL PreCommit`| [![.github/workflows/beam_PreCommit_SQL.yml](https://github.com/apache/beam/actions/workflows/beam_PreCommit_SQL.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PreCommit_SQL.yml?query=event%3Aschedule) | @@ -353,25 +353,25 @@ PostCommit Jobs run in a schedule against master branch and generally do not get | [ PostCommit Javadoc ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Javadoc.yml) | N/A |`beam_PostCommit_Javadoc.json`| [![.github/workflows/beam_PostCommit_Javadoc.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Javadoc.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Javadoc.yml?query=event%3Aschedule) | | [ PostCommit PortableJar Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Flink.yml) | N/A |`beam_PostCommit_PortableJar_Flink.json`| [![.github/workflows/beam_PostCommit_PortableJar_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Flink.yml?query=event%3Aschedule) | | [ PostCommit PortableJar Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Spark.yml) | N/A |`beam_PostCommit_PortableJar_Spark.json`| 
[![.github/workflows/beam_PostCommit_PortableJar_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_PortableJar_Spark.yml?query=event%3Aschedule) | -| [ PostCommit Python ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python.yml) | ['3.8','3.9','3.10','3.11'] |`beam_PostCommit_Python.json`| [![.github/workflows/beam_PostCommit_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python.yml?query=event%3Aschedule) | -| [ PostCommit Python Arm](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Arm.yml) | ['3.8','3.9','3.10','3.11'] |`beam_PostCommit_Python_Arm.json`| [![.github/workflows/beam_PostCommit_Python_Arm.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Arm.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Arm.yml?query=event%3Aschedule) | +| [ PostCommit Python ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python.yml) | ['3.9','3.10','3.11','3.12'] |`beam_PostCommit_Python.json`| [![.github/workflows/beam_PostCommit_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python.yml?query=event%3Aschedule) | +| [ PostCommit Python Arm](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Arm.yml) | ['3.9','3.10','3.11','3.12'] |`beam_PostCommit_Python_Arm.json`| 
[![.github/workflows/beam_PostCommit_Python_Arm.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Arm.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Arm.yml?query=event%3Aschedule) | | [ PostCommit Python Dependency ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Dependency.yml) | N/A |`beam_PostCommit_Python_Dependency.json`| [![.github/workflows/beam_PostCommit_Python_Dependency.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Dependency.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Dependency.yml?query=event%3Aschedule) | | [ PostCommit Python Examples Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Dataflow.yml) | N/A |`beam_PostCommit_Python_Examples_Dataflow.json`| [![.github/workflows/beam_PostCommit_Python_Examples_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Dataflow.yml?query=event%3Aschedule) | -| [ PostCommit Python Examples Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Direct.yml) | ['3.8','3.9','3.10','3.11'] |`beam_PostCommit_Python_Examples_Direct.json`| [![.github/workflows/beam_PostCommit_Python_Examples_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Direct.yml?query=event%3Aschedule) | -| [ PostCommit Python Examples Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Flink.yml) | ['3.8','3.11'] |`beam_PostCommit_Python_Examples_Flink.json`| 
[![.github/workflows/beam_PostCommit_Python_Examples_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Flink.yml?query=event%3Aschedule) | -| [ PostCommit Python Examples Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Spark.yml) | ['3.8','3.11'] |`beam_PostCommit_Python_Examples_Spark.json`| [![.github/workflows/beam_PostCommit_Python_Examples_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Spark.yml?query=event%3Aschedule) | +| [ PostCommit Python Examples Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Direct.yml) | ['3.9','3.10','3.11','3.12'] |`beam_PostCommit_Python_Examples_Direct.json`| [![.github/workflows/beam_PostCommit_Python_Examples_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Direct.yml?query=event%3Aschedule) | +| [ PostCommit Python Examples Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Flink.yml) | ['3.9','3.12'] |`beam_PostCommit_Python_Examples_Flink.json`| [![.github/workflows/beam_PostCommit_Python_Examples_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Flink.yml?query=event%3Aschedule) | +| [ PostCommit Python Examples Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Spark.yml) | ['3.9','3.12'] |`beam_PostCommit_Python_Examples_Spark.json`| 
[![.github/workflows/beam_PostCommit_Python_Examples_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Examples_Spark.yml?query=event%3Aschedule) | | [ PostCommit Python MongoDBIO IT ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml) | N/A |`beam_PostCommit_Python_MongoDBIO_IT.json`| [![.github/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_MongoDBIO_IT.yml?query=event%3Aschedule) | | [ PostCommit Python Nexmark Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Nexmark_Direct.yml) | N/A |`beam_PostCommit_Python_Nexmark_Direct.json`| [![.github/workflows/beam_PostCommit_Python_Nexmark_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Nexmark_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Nexmark_Direct.yml?query=event%3Aschedule) | -| [ PostCommit Python ValidatesContainer Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml) | ['3.8','3.9','3.10','3.11'] |`beam_PostCommit_Python_ValidatesContainer_Dataflow.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml?query=event%3Aschedule) | -| [ PostCommit Python ValidatesContainer Dataflow With RC 
](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml) | ['3.8','3.9','3.10','3.11'] |`beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml?query=event%3Aschedule) | -| [ PostCommit Python ValidatesRunner Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml) | ['3.8','3.11'] |`beam_PostCommit_Python_ValidatesRunner_Dataflow.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml?query=event%3Aschedule) | -| [ PostCommit Python ValidatesRunner Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml) | ['3.8','3.11'] |`beam_PostCommit_Python_ValidatesRunner_Flink.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml?query=event%3Aschedule) | -| [ PostCommit Python ValidatesRunner Samza ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml) | ['3.8','3.11'] |`beam_PostCommit_Python_ValidatesRunner_Samza.json`| 
[![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml?query=event%3Aschedule) | -| [ PostCommit Python ValidatesRunner Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml) | ['3.8','3.9','3.11'] |`beam_PostCommit_Python_ValidatesRunner_Spark.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml?query=event%3Aschedule) | +| [ PostCommit Python ValidatesContainer Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml) | ['3.9','3.10','3.11','3.12'] |`beam_PostCommit_Python_ValidatesContainer_Dataflow.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml?query=event%3Aschedule) | +| [ PostCommit Python ValidatesContainer Dataflow With RC ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml) | ['3.9','3.10','3.11','3.12'] |`beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.json`| 
[![.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml?query=event%3Aschedule) | +| [ PostCommit Python ValidatesRunner Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml) | ['3.9','3.12'] |`beam_PostCommit_Python_ValidatesRunner_Dataflow.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml?query=event%3Aschedule) | +| [ PostCommit Python ValidatesRunner Flink ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml) | ['3.9','3.12'] |`beam_PostCommit_Python_ValidatesRunner_Flink.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml?query=event%3Aschedule) | +| [ PostCommit Python ValidatesRunner Samza ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml) | ['3.9','3.12'] |`beam_PostCommit_Python_ValidatesRunner_Samza.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml?query=event%3Aschedule) | +| [ 
PostCommit Python ValidatesRunner Spark ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml) | ['3.9','3.11'] |`beam_PostCommit_Python_ValidatesRunner_Spark.json`| [![.github/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml?query=event%3Aschedule) | | [ PostCommit Python Xlang Gcp Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml) | N/A |`beam_PostCommit_Python_Xlang_Gcp_Dataflow.json`| [![.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml?query=event%3Aschedule) | | [ PostCommit Python Xlang Gcp Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml) | N/A |`beam_PostCommit_Python_Xlang_Gcp_Direct.json`| [![.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml?query=event%3Aschedule) | | [ PostCommit Python Xlang IO Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml) | N/A |`beam_PostCommit_Python_Xlang_IO_Dataflow.json`| 
[![.github/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml?query=event%3Aschedule) | -| [ PostCommit Sickbay Python ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Sickbay_Python.yml) | ['3.8','3.9','3.10','3.11'] |`beam_PostCommit_Sickbay_Python.json`| [![.github/workflows/beam_PostCommit_Sickbay_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Sickbay_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Sickbay_Python.yml?query=event%3Aschedule) | +| [ PostCommit Sickbay Python ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Sickbay_Python.yml) | ['3.9','3.10','3.11','3.12'] |`beam_PostCommit_Sickbay_Python.json`| [![.github/workflows/beam_PostCommit_Sickbay_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Sickbay_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Sickbay_Python.yml?query=event%3Aschedule) | | [ PostCommit SQL ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_SQL.yml) | N/A |`beam_PostCommit_SQL.json`| [![.github/workflows/beam_PostCommit_SQL.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_SQL.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_SQL.yml?query=event%3Aschedule) | | [ PostCommit TransformService Direct ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_TransformService_Direct.yml) | N/A |`beam_PostCommit_TransformService_Direct.json`| 
[![.github/workflows/beam_PostCommit_TransformService_Direct.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_TransformService_Direct.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_TransformService_Direct.yml?query=event%3Aschedule) | [ PostCommit Website Test](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Website_Test.yml) | N/A |`beam_PostCommit_Website_Test.json`| [![.github/workflows/beam_PostCommit_Website_Test.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Website_Test.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_Website_Test.yml?query=event%3Aschedule) | @@ -383,7 +383,7 @@ PostCommit Jobs run in a schedule against master branch and generally do not get | [ PostCommit XVR PythonUsingJavaSQL Dataflow ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml) | N/A |`beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.json`| [![.github/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_PythonUsingJavaSQL_Dataflow.yml?query=event%3Aschedule) | | [ PostCommit XVR Samza ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Samza.yml) | N/A |`beam_PostCommit_XVR_Samza.json`| [![.github/workflows/beam_PostCommit_XVR_Samza.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Samza.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Samza.yml?query=event%3Aschedule) | | [ PostCommit XVR Spark3 ](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Spark3.yml) | N/A |`beam_PostCommit_XVR_Spark3.json`| 
[![.github/workflows/beam_PostCommit_XVR_Spark3.yml](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Spark3.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PostCommit_XVR_Spark3.yml?query=event%3Aschedule) | -| [ Python Validates Container Dataflow ARM ](https://github.com/apache/beam/actions/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml) | ['3.8','3.9','3.10','3.11'] |`beam_Python_ValidatesContainer_Dataflow_ARM.json`|[![.github/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml](https://github.com/apache/beam/actions/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml?query=event%3Aschedule) | +| [ Python Validates Container Dataflow ARM ](https://github.com/apache/beam/actions/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml) | ['3.9','3.10','3.11','3.12'] |`beam_Python_ValidatesContainer_Dataflow_ARM.json`|[![.github/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml](https://github.com/apache/beam/actions/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_Python_ValidatesContainer_Dataflow_ARM.yml?query=event%3Aschedule) | ### PerformanceTests and Benchmark Jobs @@ -421,7 +421,7 @@ PostCommit Jobs run in a schedule against master branch and generally do not get | [ PerformanceTests TextIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT.yml) | N/A | [![.github/workflows/beam_PerformanceTests_TextIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TextIOIT.yml?query=event%3Aschedule) | [ PerformanceTests TFRecordIOIT HDFS 
](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT_HDFS.yml) | N/A | [![.github/workflows/beam_PerformanceTests_TFRecordIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT_HDFS.yml?query=event%3Aschedule) | [ PerformanceTests TFRecordIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT.yml) | N/A | [![.github/workflows/beam_PerformanceTests_TFRecordIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_TFRecordIOIT.yml?query=event%3Aschedule) -| [ PerformanceTests WordCountIT PythonVersions ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml) | ['3.8'] | [![.github/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml?query=event%3Aschedule) +| [ PerformanceTests WordCountIT PythonVersions ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml) | ['3.9'] | [![.github/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml?query=event%3Aschedule) | [ PerformanceTests XmlIOIT HDFS ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT_HDFS.yml) | N/A | 
[![.github/workflows/beam_PerformanceTests_XmlIOIT_HDFS.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT_HDFS.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT_HDFS.yml?query=event%3Aschedule) | [ PerformanceTests XmlIOIT ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT.yml) | N/A | [![.github/workflows/beam_PerformanceTests_XmlIOIT.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_XmlIOIT.yml?query=event%3Aschedule) | [ PerformanceTests xlang KafkaIO Python ](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml) | N/A | [![.github/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml/badge.svg?event=schedule)](https://github.com/apache/beam/actions/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml?query=event%3Aschedule) diff --git a/.github/workflows/beam_CloudML_Benchmarks_Dataflow.yml b/.github/workflows/beam_CloudML_Benchmarks_Dataflow.yml index f3db177001ec..1448783a24c4 100644 --- a/.github/workflows/beam_CloudML_Benchmarks_Dataflow.yml +++ b/.github/workflows/beam_CloudML_Benchmarks_Dataflow.yml @@ -73,8 +73,8 @@ jobs: uses: ./.github/actions/setup-environment-action with: python-version: | - 3.8 3.9 + 3.10 - name: Prepare test arguments uses: ./.github/actions/test-arguments-action with: diff --git a/.github/workflows/beam_Inference_Python_Benchmarks_Dataflow.yml b/.github/workflows/beam_Inference_Python_Benchmarks_Dataflow.yml index c96e84e4d7bf..2145a23a80fd 100644 --- a/.github/workflows/beam_Inference_Python_Benchmarks_Dataflow.yml +++ b/.github/workflows/beam_Inference_Python_Benchmarks_Dataflow.yml @@ -95,7 +95,7 @@ jobs: arguments: | 
-PloadTest.mainClass=apache_beam.testing.benchmarks.inference.pytorch_image_classification_benchmarks \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ -PloadTest.requirementsTxtFile=apache_beam/ml/inference/torch_tests_requirements.txt \ '-PloadTest.args=${{ env.beam_Inference_Python_Benchmarks_Dataflow_test_arguments_1 }} --job_name=benchmark-tests-pytorch-imagenet-python-101-${{env.NOW_UTC}} --output=gs://temp-storage-for-end-to-end-tests/torch/result_resnet101-${{env.NOW_UTC}}.txt' \ - name: run Pytorch Imagenet Classification with Resnet 152 @@ -106,7 +106,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.benchmarks.inference.pytorch_image_classification_benchmarks \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ -PloadTest.requirementsTxtFile=apache_beam/ml/inference/torch_tests_requirements.txt \ '-PloadTest.args=${{ env.beam_Inference_Python_Benchmarks_Dataflow_test_arguments_2 }} --job_name=benchmark-tests-pytorch-imagenet-python-152-${{env.NOW_UTC}} --output=gs://temp-storage-for-end-to-end-tests/torch/result_resnet152-${{env.NOW_UTC}}.txt' \ - name: run Pytorch Language Modeling using Hugging face bert-base-uncased model @@ -117,7 +117,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.benchmarks.inference.pytorch_language_modeling_benchmarks \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ -PloadTest.requirementsTxtFile=apache_beam/ml/inference/torch_tests_requirements.txt \ '-PloadTest.args=${{ env.beam_Inference_Python_Benchmarks_Dataflow_test_arguments_3 }} --job_name=benchmark-tests-pytorch-language-modeling-bert-base-uncased-${{env.NOW_UTC}} --output=gs://temp-storage-for-end-to-end-tests/torch/result_bert_base_uncased-${{env.NOW_UTC}}.txt' \ - name: run Pytorch Langauge Modeling using Hugging Face bert-large-uncased model @@ -128,7 +128,7 @@ jobs: arguments: | 
-PloadTest.mainClass=apache_beam.testing.benchmarks.inference.pytorch_language_modeling_benchmarks \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ -PloadTest.requirementsTxtFile=apache_beam/ml/inference/torch_tests_requirements.txt \ '-PloadTest.args=${{ env.beam_Inference_Python_Benchmarks_Dataflow_test_arguments_4 }} --job_name=benchmark-tests-pytorch-language-modeling-bert-large-uncased-${{env.NOW_UTC}} --output=gs://temp-storage-for-end-to-end-tests/torch/result_bert_large_uncased-${{env.NOW_UTC}}.txt' \ - name: run Pytorch Imagenet Classification with Resnet 152 with Tesla T4 GPU @@ -139,6 +139,6 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.benchmarks.inference.pytorch_image_classification_benchmarks \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ -PloadTest.requirementsTxtFile=apache_beam/ml/inference/torch_tests_requirements.txt \ '-PloadTest.args=${{ env.beam_Inference_Python_Benchmarks_Dataflow_test_arguments_5 }} --job_name=benchmark-tests-pytorch-imagenet-python-gpu-${{env.NOW_UTC}} --output=gs://temp-storage-for-end-to-end-tests/torch/result_resnet152_gpu-${{env.NOW_UTC}}.txt' \ No newline at end of file diff --git a/.github/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Batch.yml b/.github/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Batch.yml index 87854bdb8f84..2bc4e50d4792 100644 --- a/.github/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Batch.yml +++ b/.github/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Batch.yml @@ -96,7 +96,7 @@ jobs: --info \ -PloadTest.mainClass=apache_beam.testing.load_tests.co_group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_CoGBK_Dataflow_Batch_test_arguments_1 }} --job_name=load-tests-python-dataflow-batch-cogbk-1-${{ steps.datetime.outputs.datetime }}' \ - name: run CoGBK 2GB of 100B records with multiple keys uses: 
./.github/actions/gradle-command-self-hosted-action @@ -105,7 +105,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.co_group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_CoGBK_Dataflow_Batch_test_arguments_2 }} --job_name=load-tests-python-dataflow-batch-cogbk-2-${{ steps.datetime.outputs.datetime }}' \ - name: run CoGBK reiterate 4 times 10kB values uses: ./.github/actions/gradle-command-self-hosted-action @@ -114,7 +114,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.co_group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_CoGBK_Dataflow_Batch_test_arguments_3 }} --job_name=load-tests-python-dataflow-batch-cogbk-3-${{ steps.datetime.outputs.datetime }}' \ - name: run CoGBK reiterate 4 times 2MB values uses: ./.github/actions/gradle-command-self-hosted-action @@ -123,5 +123,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.co_group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_CoGBK_Dataflow_Batch_test_arguments_4 }} --job_name=load-tests-python-dataflow-batch-cogbk-4-${{ steps.datetime.outputs.datetime }}' \ No newline at end of file diff --git a/.github/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Streaming.yml b/.github/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Streaming.yml index f9e62e9965a8..b8607fb71603 100644 --- a/.github/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Streaming.yml +++ b/.github/workflows/beam_LoadTests_Python_CoGBK_Dataflow_Streaming.yml @@ -95,7 +95,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.co_group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ 
env.beam_LoadTests_Python_CoGBK_Dataflow_Streaming_test_arguments_1 }} --job_name=load-tests-python-dataflow-streaming-cogbk-1-${{ steps.datetime.outputs.datetime }}' \ - name: run CoGBK 2GB of 100B records with multiple keys uses: ./.github/actions/gradle-command-self-hosted-action @@ -104,7 +104,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.co_group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_CoGBK_Dataflow_Streaming_test_arguments_2 }} --job_name=load-tests-python-dataflow-streaming-cogbk-2-${{ steps.datetime.outputs.datetime }}' \ - name: run CoGBK reiterate 4 times 10kB values uses: ./.github/actions/gradle-command-self-hosted-action @@ -113,7 +113,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.co_group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_CoGBK_Dataflow_Streaming_test_arguments_3 }} --job_name=load-tests-python-dataflow-streaming-cogbk-3-${{ steps.datetime.outputs.datetime }}' \ - name: run CoGBK reiterate 4 times 2MB values uses: ./.github/actions/gradle-command-self-hosted-action @@ -122,5 +122,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.co_group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_CoGBK_Dataflow_Streaming_test_arguments_4 }} --job_name=load-tests-python-dataflow-streaming-cogbk-4-${{ steps.datetime.outputs.datetime }}' \ No newline at end of file diff --git a/.github/workflows/beam_LoadTests_Python_Combine_Dataflow_Batch.yml b/.github/workflows/beam_LoadTests_Python_Combine_Dataflow_Batch.yml index 76e2a417c25c..866a5871962d 100644 --- a/.github/workflows/beam_LoadTests_Python_Combine_Dataflow_Batch.yml +++ b/.github/workflows/beam_LoadTests_Python_Combine_Dataflow_Batch.yml @@ -92,7 
+92,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.combine_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_Combine_Dataflow_Batch_test_arguments_1 }} --job_name=load-tests-python-dataflow-batch-combine-1-${{env.NOW_UTC}}' \ - name: run Combine Dataflow Batch Python Load Test 2 (fanout 4) uses: ./.github/actions/gradle-command-self-hosted-action @@ -101,7 +101,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.combine_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_Combine_Dataflow_Batch_test_arguments_2 }} --job_name=load-tests-python-dataflow-batch-combine-2-${{env.NOW_UTC}}' \ - name: run Combine Dataflow Batch Python Load Test 3 (fanout 8) uses: ./.github/actions/gradle-command-self-hosted-action @@ -110,5 +110,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.combine_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_Combine_Dataflow_Batch_test_arguments_3 }} --job_name=load-tests-python-dataflow-batch-combine-3-${{env.NOW_UTC}}' \ No newline at end of file diff --git a/.github/workflows/beam_LoadTests_Python_Combine_Dataflow_Streaming.yml b/.github/workflows/beam_LoadTests_Python_Combine_Dataflow_Streaming.yml index 306f71662eb4..e77db13ecb7f 100644 --- a/.github/workflows/beam_LoadTests_Python_Combine_Dataflow_Streaming.yml +++ b/.github/workflows/beam_LoadTests_Python_Combine_Dataflow_Streaming.yml @@ -92,7 +92,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.combine_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_Combine_Dataflow_Streaming_test_arguments_1 }} --job_name=load-tests-python-dataflow-streaming-combine-1-${{env.NOW_UTC}}' \ - 
name: run 2GB Fanout 4 test uses: ./.github/actions/gradle-command-self-hosted-action @@ -101,7 +101,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.combine_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_Combine_Dataflow_Streaming_test_arguments_2 }} --job_name=load-tests-python-dataflow-streaming-combine-4-${{env.NOW_UTC}}' \ - name: run 2GB Fanout 8 test uses: ./.github/actions/gradle-command-self-hosted-action @@ -110,5 +110,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.combine_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_Combine_Dataflow_Streaming_test_arguments_3 }} --job_name=load-tests-python-dataflow-streaming-combine-5-${{env.NOW_UTC}}' \ No newline at end of file diff --git a/.github/workflows/beam_LoadTests_Python_Combine_Flink_Batch.yml b/.github/workflows/beam_LoadTests_Python_Combine_Flink_Batch.yml index 230f6398e522..0f666a0b7db6 100644 --- a/.github/workflows/beam_LoadTests_Python_Combine_Flink_Batch.yml +++ b/.github/workflows/beam_LoadTests_Python_Combine_Flink_Batch.yml @@ -107,7 +107,7 @@ jobs: with: gradle-command: :sdks:python:apache_beam:testing:load_tests:run arguments: | - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ -PloadTest.mainClass=apache_beam.testing.load_tests.combine_test \ -Prunner=PortableRunner \ '-PloadTest.args=${{ env.beam_LoadTests_Python_Combine_Flink_Batch_test_arguments_1 }} --job_name=load-tests-python-flink-batch-combine-1-${{env.NOW_UTC}}' \ @@ -121,7 +121,7 @@ jobs: with: gradle-command: :sdks:python:apache_beam:testing:load_tests:run arguments: | - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ -PloadTest.mainClass=apache_beam.testing.load_tests.combine_test \ -Prunner=PortableRunner \ '-PloadTest.args=${{ env.beam_LoadTests_Python_Combine_Flink_Batch_test_arguments_2 }} 
--job_name=load-tests-python-flink-batch-combine-4-${{env.NOW_UTC}}' \ @@ -130,7 +130,7 @@ jobs: with: gradle-command: :sdks:python:apache_beam:testing:load_tests:run arguments: | - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ -PloadTest.mainClass=apache_beam.testing.load_tests.combine_test \ -Prunner=PortableRunner \ '-PloadTest.args=${{ env.beam_LoadTests_Python_Combine_Flink_Batch_test_arguments_3 }} --job_name=load-tests-python-flink-batch-combine-5-${{env.NOW_UTC}}' \ diff --git a/.github/workflows/beam_LoadTests_Python_Combine_Flink_Streaming.yml b/.github/workflows/beam_LoadTests_Python_Combine_Flink_Streaming.yml index 0ea12f49a5f1..6f491e6b9fa9 100644 --- a/.github/workflows/beam_LoadTests_Python_Combine_Flink_Streaming.yml +++ b/.github/workflows/beam_LoadTests_Python_Combine_Flink_Streaming.yml @@ -104,7 +104,7 @@ jobs: with: gradle-command: :sdks:python:apache_beam:testing:load_tests:run arguments: | - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ -PloadTest.mainClass=apache_beam.testing.load_tests.combine_test \ -Prunner=PortableRunner \ '-PloadTest.args=${{ env.beam_LoadTests_Python_Combine_Flink_Streaming_test_arguments_1 }} --job_name=load-tests-python-flink-streaming-combine-4-${{env.NOW_UTC}}' \ @@ -113,7 +113,7 @@ jobs: with: gradle-command: :sdks:python:apache_beam:testing:load_tests:run arguments: | - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ -PloadTest.mainClass=apache_beam.testing.load_tests.combine_test \ -Prunner=PortableRunner \ '-PloadTest.args=${{ env.beam_LoadTests_Python_Combine_Flink_Streaming_test_arguments_2 }} --job_name=load-tests-python-flink-streaming-combine-5-${{env.NOW_UTC}}' \ diff --git a/.github/workflows/beam_LoadTests_Python_FnApiRunner_Microbenchmark.yml b/.github/workflows/beam_LoadTests_Python_FnApiRunner_Microbenchmark.yml index da6b0beee92c..d69efb4636bb 100644 --- a/.github/workflows/beam_LoadTests_Python_FnApiRunner_Microbenchmark.yml +++ 
b/.github/workflows/beam_LoadTests_Python_FnApiRunner_Microbenchmark.yml @@ -87,5 +87,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.microbenchmarks_test \ -Prunner=DirectRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_FnApiRunner_Microbenchmark_test_arguments_1 }}' \ No newline at end of file diff --git a/.github/workflows/beam_LoadTests_Python_GBK_Dataflow_Batch.yml b/.github/workflows/beam_LoadTests_Python_GBK_Dataflow_Batch.yml index 6e1e6a09f200..d2924a081255 100644 --- a/.github/workflows/beam_LoadTests_Python_GBK_Dataflow_Batch.yml +++ b/.github/workflows/beam_LoadTests_Python_GBK_Dataflow_Batch.yml @@ -94,7 +94,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_GBK_Dataflow_Batch_test_arguments_1 }} --job_name=load-tests-python-dataflow-batch-gbk-1-${{env.NOW_UTC}}' \ - name: run 2GB of 100B records test uses: ./.github/actions/gradle-command-self-hosted-action @@ -103,7 +103,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_GBK_Dataflow_Batch_test_arguments_2 }} --job_name=load-tests-python-dataflow-batch-gbk-2-${{env.NOW_UTC}}' \ - name: run 2GB of 100kB records test uses: ./.github/actions/gradle-command-self-hosted-action @@ -112,7 +112,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_GBK_Dataflow_Batch_test_arguments_3 }} --job_name=load-tests-python-dataflow-batch-gbk-3-${{env.NOW_UTC}}' \ - name: run fanout 4 times with 2GB 10-byte records test uses: 
./.github/actions/gradle-command-self-hosted-action @@ -121,7 +121,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_GBK_Dataflow_Batch_test_arguments_4 }} --job_name=load-tests-python-dataflow-batch-gbk-4-${{env.NOW_UTC}}' \ - name: run fanout 8 times with 2GB 10-byte records total test uses: ./.github/actions/gradle-command-self-hosted-action @@ -130,5 +130,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_GBK_Dataflow_Batch_test_arguments_5 }} --job_name=load-tests-python-dataflow-batch-gbk-5-${{env.NOW_UTC}}' \ No newline at end of file diff --git a/.github/workflows/beam_LoadTests_Python_GBK_Dataflow_Streaming.yml b/.github/workflows/beam_LoadTests_Python_GBK_Dataflow_Streaming.yml index d8d3e35f17a8..70321f2414a0 100644 --- a/.github/workflows/beam_LoadTests_Python_GBK_Dataflow_Streaming.yml +++ b/.github/workflows/beam_LoadTests_Python_GBK_Dataflow_Streaming.yml @@ -90,7 +90,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_GBK_Dataflow_Streaming_test_arguments_1 }} --job_name=load-tests-python-dataflow-streaming-gbk-3-${{env.NOW_UTC}}' \ # // TODO(https://github.com/apache/beam/issues/20403). 
Skipping some cases because they are too slow: diff --git a/.github/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Batch.yml b/.github/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Batch.yml index 7f8e2f197359..f99d2a3f7387 100644 --- a/.github/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Batch.yml +++ b/.github/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Batch.yml @@ -91,7 +91,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_GBK_reiterate_Dataflow_Batch_test_arguments_1 }} --job_name=load-tests-python-dataflow-batch-gbk-6-${{env.NOW_UTC}}' \ - name: run reiterate 4 times 2MB values test uses: ./.github/actions/gradle-command-self-hosted-action @@ -100,5 +100,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_GBK_reiterate_Dataflow_Batch_test_arguments_2 }} --job_name=load-tests-python-dataflow-batch-gbk-7-${{env.NOW_UTC}}' \ No newline at end of file diff --git a/.github/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Streaming.yml b/.github/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Streaming.yml index 5b07d15337fa..d7e31f1edcac 100644 --- a/.github/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Streaming.yml +++ b/.github/workflows/beam_LoadTests_Python_GBK_reiterate_Dataflow_Streaming.yml @@ -91,7 +91,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_GBK_reiterate_Dataflow_Streaming_test_arguments_1 }} --job_name=load-tests-python-dataflow-streaming-gbk-6-${{env.NOW_UTC}}' \ - name: run reiterate 4 times 
2MB values test uses: ./.github/actions/gradle-command-self-hosted-action @@ -100,5 +100,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_GBK_reiterate_Dataflow_Streaming_test_arguments_2 }} --job_name=load-tests-python-dataflow-streaming-gbk-7-${{env.NOW_UTC}}' \ diff --git a/.github/workflows/beam_LoadTests_Python_ParDo_Dataflow_Batch.yml b/.github/workflows/beam_LoadTests_Python_ParDo_Dataflow_Batch.yml index 8dc8f031b7e6..b4f505648702 100644 --- a/.github/workflows/beam_LoadTests_Python_ParDo_Dataflow_Batch.yml +++ b/.github/workflows/beam_LoadTests_Python_ParDo_Dataflow_Batch.yml @@ -95,7 +95,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Dataflow_Batch_test_arguments_1 }} --job_name=load-tests-python-dataflow-batch-pardo-1-${{ steps.datetime.outputs.datetime }}' \ - name: run ParDo Dataflow Batch Python Load Test 2 (200 iterations) uses: ./.github/actions/gradle-command-self-hosted-action @@ -104,7 +104,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Dataflow_Batch_test_arguments_2 }} --job_name=load-tests-python-dataflow-batch-pardo-2-${{ steps.datetime.outputs.datetime }}' \ - name: run ParDo Dataflow Batch Python Load Test 3 (10 counters) uses: ./.github/actions/gradle-command-self-hosted-action @@ -113,7 +113,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Dataflow_Batch_test_arguments_3 }} 
--job_name=load-tests-python-dataflow-batch-pardo-3-${{ steps.datetime.outputs.datetime }}' \ - name: run ParDo Dataflow Batch Python Load Test 4 (100 counters) uses: ./.github/actions/gradle-command-self-hosted-action @@ -122,5 +122,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Dataflow_Batch_test_arguments_4 }} --job_name=load-tests-python-dataflow-batch-pardo-4-${{ steps.datetime.outputs.datetime }}' \ No newline at end of file diff --git a/.github/workflows/beam_LoadTests_Python_ParDo_Dataflow_Streaming.yml b/.github/workflows/beam_LoadTests_Python_ParDo_Dataflow_Streaming.yml index f0d0778d98ca..fabd893afaaf 100644 --- a/.github/workflows/beam_LoadTests_Python_ParDo_Dataflow_Streaming.yml +++ b/.github/workflows/beam_LoadTests_Python_ParDo_Dataflow_Streaming.yml @@ -95,7 +95,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Dataflow_Streaming_test_arguments_1 }} --job_name=load-tests-python-dataflow-streaming-pardo-1-${{ steps.datetime.outputs.datetime }}' \ - name: run ParDo Dataflow Streaming Python Load Test 2 (200 iterations) uses: ./.github/actions/gradle-command-self-hosted-action @@ -104,7 +104,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Dataflow_Streaming_test_arguments_2 }} --job_name=load-tests-python-dataflow-streaming-pardo-2-${{ steps.datetime.outputs.datetime }}' \ - name: run ParDo Dataflow Streaming Python Load Test 3 (10 counters) uses: ./.github/actions/gradle-command-self-hosted-action @@ -113,7 +113,7 @@ jobs: arguments: | 
-PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Dataflow_Streaming_test_arguments_3 }} --job_name=load-tests-python-dataflow-streaming-pardo-3-${{ steps.datetime.outputs.datetime }}' \ - name: run ParDo Dataflow Streaming Python Load Test 4 (100 counters) uses: ./.github/actions/gradle-command-self-hosted-action @@ -122,5 +122,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Dataflow_Streaming_test_arguments_4 }} --job_name=load-tests-python-dataflow-streaming-pardo-4-${{ steps.datetime.outputs.datetime }}' \ No newline at end of file diff --git a/.github/workflows/beam_LoadTests_Python_ParDo_Flink_Batch.yml b/.github/workflows/beam_LoadTests_Python_ParDo_Flink_Batch.yml index bae2f9f82ee1..b6c86e01c299 100644 --- a/.github/workflows/beam_LoadTests_Python_ParDo_Flink_Batch.yml +++ b/.github/workflows/beam_LoadTests_Python_ParDo_Flink_Batch.yml @@ -109,7 +109,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=PortableRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Flink_Batch_test_arguments_1 }} --job_name=load-tests-python-flink-batch-pardo-1-${{ steps.datetime.outputs.datetime }}' \ - name: run ParDo Flink Batch Python Load Test 2 (200 iterations) uses: ./.github/actions/gradle-command-self-hosted-action @@ -118,7 +118,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=PortableRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Flink_Batch_test_arguments_2 }} --job_name=load-tests-python-flink-batch-pardo-3-${{ steps.datetime.outputs.datetime }}' \ 
- name: run ParDo Flink Batch Python Load Test 3 (10 counters) uses: ./.github/actions/gradle-command-self-hosted-action @@ -127,5 +127,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=PortableRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Flink_Batch_test_arguments_3 }} --job_name=load-tests-python-flink-batch-pardo-4-${{ steps.datetime.outputs.datetime }}' \ No newline at end of file diff --git a/.github/workflows/beam_LoadTests_Python_ParDo_Flink_Streaming.yml b/.github/workflows/beam_LoadTests_Python_ParDo_Flink_Streaming.yml index 4485b7187f80..a6443c0df10b 100644 --- a/.github/workflows/beam_LoadTests_Python_ParDo_Flink_Streaming.yml +++ b/.github/workflows/beam_LoadTests_Python_ParDo_Flink_Streaming.yml @@ -111,7 +111,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=PortableRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Flink_Streaming_test_arguments_1 }} --job_name=load-tests-python-flink-streaming-pardo-1-${{ steps.datetime.outputs.datetime }}' \ - name: run ParDo Flink Streaming Python Load Test 2 (200 iterations) uses: ./.github/actions/gradle-command-self-hosted-action @@ -120,7 +120,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=PortableRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Flink_Streaming_test_arguments_2 }} --job_name=load-tests-python-flink-streaming-pardo-2-${{ steps.datetime.outputs.datetime }}' \ - name: run ParDo Flink Streaming Python Load Test 3 (10 counters) uses: ./.github/actions/gradle-command-self-hosted-action @@ -129,7 +129,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=PortableRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ 
'-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Flink_Streaming_test_arguments_3 }} --job_name=load-tests-python-flink-streaming-pardo-3-${{ steps.datetime.outputs.datetime }}' \ - name: run ParDo Flink Streaming Python Load Test 4 (100 counters) uses: ./.github/actions/gradle-command-self-hosted-action @@ -138,7 +138,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=PortableRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Flink_Streaming_test_arguments_4 }} --job_name=load-tests-python-flink-streaming-pardo-4-${{ steps.datetime.outputs.datetime }}' \ - name: run ParDo Flink Streaming Python Load Test 5 (5 iterations) uses: ./.github/actions/gradle-command-self-hosted-action @@ -147,7 +147,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.pardo_test \ -Prunner=PortableRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_ParDo_Flink_Streaming_test_arguments_5 }} --job_name=load-tests-python-flink-streaming-pardo-6-${{ steps.datetime.outputs.datetime }}' \ - name: Teardown Flink if: always() diff --git a/.github/workflows/beam_LoadTests_Python_SideInput_Dataflow_Batch.yml b/.github/workflows/beam_LoadTests_Python_SideInput_Dataflow_Batch.yml index b93474b2a21d..7917af0ff5d2 100644 --- a/.github/workflows/beam_LoadTests_Python_SideInput_Dataflow_Batch.yml +++ b/.github/workflows/beam_LoadTests_Python_SideInput_Dataflow_Batch.yml @@ -101,7 +101,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.sideinput_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_SideInput_Dataflow_Batch_test_arguments_1 }} --job_name=load-tests-python-dataflow-batch-sideinput-1-${{ steps.datetime.outputs.datetime }}' \ - name: run SideInput Dataflow Batch Python Load Test 2 
(1gb-1kb-10workers-1window-99key-percent-dict) uses: ./.github/actions/gradle-command-self-hosted-action @@ -110,7 +110,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.sideinput_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_SideInput_Dataflow_Batch_test_arguments_2 }} --job_name=load-tests-python-dataflow-batch-sideinput-2-${{ steps.datetime.outputs.datetime }}' \ - name: run SideInput Dataflow Batch Python Load Test 3 (10gb-1kb-10workers-1window-first-iterable) uses: ./.github/actions/gradle-command-self-hosted-action @@ -119,7 +119,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.sideinput_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_SideInput_Dataflow_Batch_test_arguments_3 }} --job_name=load-tests-python-dataflow-batch-sideinput-3-${{ steps.datetime.outputs.datetime }}' \ - name: run SideInput Dataflow Batch Python Load Test 4 (10gb-1kb-10workers-1window-iterable) uses: ./.github/actions/gradle-command-self-hosted-action @@ -128,7 +128,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.sideinput_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_SideInput_Dataflow_Batch_test_arguments_4 }} --job_name=load-tests-python-dataflow-batch-sideinput-4-${{ steps.datetime.outputs.datetime }}' \ - name: run SideInput Dataflow Batch Python Load Test 5 (1gb-1kb-10workers-1window-first-list) uses: ./.github/actions/gradle-command-self-hosted-action @@ -137,7 +137,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.sideinput_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_SideInput_Dataflow_Batch_test_arguments_5 }} 
--job_name=load-tests-python-dataflow-batch-sideinput-5-${{ steps.datetime.outputs.datetime }}' \ - name: run SideInput Dataflow Batch Python Load Test 6 (1gb-1kb-10workers-1window-list) uses: ./.github/actions/gradle-command-self-hosted-action @@ -146,7 +146,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.sideinput_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_SideInput_Dataflow_Batch_test_arguments_6 }} --job_name=load-tests-python-dataflow-batch-sideinput-6-${{ steps.datetime.outputs.datetime }}' \ - name: run SideInput Dataflow Batch Python Load Test 7 (1gb-1kb-10workers-1000window-1key-percent-dict) uses: ./.github/actions/gradle-command-self-hosted-action @@ -155,7 +155,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.sideinput_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_SideInput_Dataflow_Batch_test_arguments_7 }} --job_name=load-tests-python-dataflow-batch-sideinput-7-${{ steps.datetime.outputs.datetime }}' \ - name: run SideInput Dataflow Batch Python Load Test 8 (1gb-1kb-10workers-1000window-99key-percent-dict) uses: ./.github/actions/gradle-command-self-hosted-action @@ -164,7 +164,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.sideinput_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_SideInput_Dataflow_Batch_test_arguments_8 }} --job_name=load-tests-python-dataflow-batch-sideinput-8-${{ steps.datetime.outputs.datetime }}' \ - name: run SideInput Dataflow Batch Python Load Test 9 (10gb-1kb-10workers-1000window-first-iterable) uses: ./.github/actions/gradle-command-self-hosted-action @@ -173,7 +173,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.sideinput_test \ -Prunner=DataflowRunner \ - 
-PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_SideInput_Dataflow_Batch_test_arguments_9 }} --job_name=load-tests-python-dataflow-batch-sideinput-9-${{ steps.datetime.outputs.datetime }}' \ - name: run SideInput Dataflow Batch Python Load Test 10 (10gb-1kb-10workers-1000window-iterable) uses: ./.github/actions/gradle-command-self-hosted-action @@ -182,5 +182,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.sideinput_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_SideInput_Dataflow_Batch_test_arguments_10 }} --job_name=load-tests-python-dataflow-batch-sideinput-10-${{ steps.datetime.outputs.datetime }}' \ No newline at end of file diff --git a/.github/workflows/beam_LoadTests_Python_Smoke.yml b/.github/workflows/beam_LoadTests_Python_Smoke.yml index 39e8e4b56102..22dc63f19faa 100644 --- a/.github/workflows/beam_LoadTests_Python_Smoke.yml +++ b/.github/workflows/beam_LoadTests_Python_Smoke.yml @@ -90,7 +90,7 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.group_by_key_test \ -Prunner=DirectRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_Smoke_test_arguments_1 }} --job_name=load-tests-python-direct-batch-gbk-smoke-${{ steps.datetime.outputs.datetime }}' \ - name: run GroupByKey Python load test Dataflow uses: ./.github/actions/gradle-command-self-hosted-action @@ -99,5 +99,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.testing.load_tests.group_by_key_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_LoadTests_Python_Smoke_test_arguments_2 }} --job_name=load-tests-python-dataflow-batch-gbk-smoke-${{ steps.datetime.outputs.datetime }}' \ No newline at end of file diff --git a/.github/workflows/beam_PerformanceTests_BiqQueryIO_Read_Python.yml 
b/.github/workflows/beam_PerformanceTests_BiqQueryIO_Read_Python.yml index 6f8e5fc9bc8d..3673ca170555 100644 --- a/.github/workflows/beam_PerformanceTests_BiqQueryIO_Read_Python.yml +++ b/.github/workflows/beam_PerformanceTests_BiqQueryIO_Read_Python.yml @@ -89,6 +89,6 @@ jobs: gradle-command: :sdks:python:apache_beam:testing:load_tests:run arguments: | -PloadTest.mainClass=apache_beam.io.gcp.bigquery_read_perf_test \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ -Prunner=DataflowRunner \ '-PloadTest.args=${{env.beam_PerformanceTests_BiqQueryIO_Read_Python_test_arguments_1}}' \ No newline at end of file diff --git a/.github/workflows/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch.yml b/.github/workflows/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch.yml index 5d7b451071f1..f52eec3fa2c8 100644 --- a/.github/workflows/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch.yml +++ b/.github/workflows/beam_PerformanceTests_BiqQueryIO_Write_Python_Batch.yml @@ -89,6 +89,6 @@ jobs: gradle-command: :sdks:python:apache_beam:testing:load_tests:run arguments: | -PloadTest.mainClass=apache_beam.io.gcp.bigquery_write_perf_test \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ -Prunner=DataflowRunner \ '-PloadTest.args=${{env.beam_PerformanceTests_BiqQueryIO_Write_Python_Batch_test_arguments_1}}' \ No newline at end of file diff --git a/.github/workflows/beam_PerformanceTests_PubsubIOIT_Python_Streaming.yml b/.github/workflows/beam_PerformanceTests_PubsubIOIT_Python_Streaming.yml index 22a71967d3a4..e358f113a668 100644 --- a/.github/workflows/beam_PerformanceTests_PubsubIOIT_Python_Streaming.yml +++ b/.github/workflows/beam_PerformanceTests_PubsubIOIT_Python_Streaming.yml @@ -90,5 +90,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.io.gcp.pubsub_io_perf_test \ -Prunner=TestDataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_PerformanceTests_PubsubIOIT_Python_Streaming_test_arguments_1 }}' \ No newline at end of file 
diff --git a/.github/workflows/beam_PerformanceTests_SpannerIO_Read_2GB_Python.yml b/.github/workflows/beam_PerformanceTests_SpannerIO_Read_2GB_Python.yml index 2de75b2496a8..3448bbcf96c5 100644 --- a/.github/workflows/beam_PerformanceTests_SpannerIO_Read_2GB_Python.yml +++ b/.github/workflows/beam_PerformanceTests_SpannerIO_Read_2GB_Python.yml @@ -90,5 +90,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.io.gcp.experimental.spannerio_read_perf_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ -PloadTest.args='${{env.beam_PerformanceTests_SpannerIO_Read_2GB_Python_test_arguments_1}}' \ No newline at end of file diff --git a/.github/workflows/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch.yml b/.github/workflows/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch.yml index 00292aee45ac..ba9d05e46838 100644 --- a/.github/workflows/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch.yml +++ b/.github/workflows/beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch.yml @@ -90,5 +90,5 @@ jobs: arguments: | -PloadTest.mainClass=apache_beam.io.gcp.experimental.spannerio_write_perf_test \ -Prunner=DataflowRunner \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ -PloadTest.args='${{env.beam_PerformanceTests_SpannerIO_Write_2GB_Python_Batch_test_arguments_1}}' \ No newline at end of file diff --git a/.github/workflows/beam_PerformanceTests_TextIOIT_Python.yml b/.github/workflows/beam_PerformanceTests_TextIOIT_Python.yml index f9ca3d949ced..39bd0ab467d2 100644 --- a/.github/workflows/beam_PerformanceTests_TextIOIT_Python.yml +++ b/.github/workflows/beam_PerformanceTests_TextIOIT_Python.yml @@ -88,7 +88,7 @@ jobs: with: gradle-command: :sdks:python:apache_beam:testing:load_tests:run arguments: | - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ -PloadTest.mainClass=apache_beam.io.filebasedio_perf_test \ -Prunner=DataflowRunner \ '-PloadTest.args=${{env.beam_PerformanceTests_TextIOIT_Python_test_arguments_1}}' \ No newline at 
end of file diff --git a/.github/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml b/.github/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml index 21dfa13b25dc..e9ef9cd1716a 100644 --- a/.github/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml +++ b/.github/workflows/beam_PerformanceTests_WordCountIT_PythonVersions.yml @@ -64,7 +64,7 @@ jobs: job_name: ["beam_PerformanceTests_WordCountIT_PythonVersions"] job_phrase_1: [Run Python] job_phrase_2: [WordCountIT Performance Test] - python_version: ['3.8'] + python_version: ['3.9'] steps: - uses: actions/checkout@v4 - name: Setup repository diff --git a/.github/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml b/.github/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml index 8abc8a3199dd..00c62edc34ad 100644 --- a/.github/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml +++ b/.github/workflows/beam_PerformanceTests_xlang_KafkaIO_Python.yml @@ -118,5 +118,5 @@ jobs: arguments: | -Prunner=DataflowRunner \ -PloadTest.mainClass=apache_beam.io.external.xlang_kafkaio_perf_test \ - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ '-PloadTest.args=${{ env.beam_PerformanceTests_xlang_KafkaIO_Python_test_arguments_1 }}' \ No newline at end of file diff --git a/.github/workflows/beam_PostCommit_PortableJar_Flink.yml b/.github/workflows/beam_PostCommit_PortableJar_Flink.yml index 5347dc45642b..37bfe68d9b20 100644 --- a/.github/workflows/beam_PostCommit_PortableJar_Flink.yml +++ b/.github/workflows/beam_PostCommit_PortableJar_Flink.yml @@ -79,9 +79,9 @@ jobs: CLOUDSDK_CONFIG: ${{ env.KUBELET_GCLOUD_CONFIG_PATH}} uses: ./.github/actions/gradle-command-self-hosted-action with: - gradle-command: :sdks:python:test-suites:portable:py38:testPipelineJarFlinkRunner + gradle-command: :sdks:python:test-suites:portable:py39:testPipelineJarFlinkRunner arguments: | - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ - name: Archive Python Test Results uses: actions/upload-artifact@v4 if: 
failure() diff --git a/.github/workflows/beam_PostCommit_PortableJar_Spark.yml b/.github/workflows/beam_PostCommit_PortableJar_Spark.yml index 3778f017d1cc..ce7be60133d7 100644 --- a/.github/workflows/beam_PostCommit_PortableJar_Spark.yml +++ b/.github/workflows/beam_PostCommit_PortableJar_Spark.yml @@ -79,9 +79,9 @@ jobs: CLOUDSDK_CONFIG: ${{ env.KUBELET_GCLOUD_CONFIG_PATH}} uses: ./.github/actions/gradle-command-self-hosted-action with: - gradle-command: :sdks:python:test-suites:portable:py38:testPipelineJarSparkRunner + gradle-command: :sdks:python:test-suites:portable:py39:testPipelineJarSparkRunner arguments: | - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ - name: Archive Python Test Results uses: actions/upload-artifact@v4 if: failure() diff --git a/.github/workflows/beam_PostCommit_Python.yml b/.github/workflows/beam_PostCommit_Python.yml index 6705268143e9..4770515c75fb 100644 --- a/.github/workflows/beam_PostCommit_Python.yml +++ b/.github/workflows/beam_PostCommit_Python.yml @@ -60,7 +60,7 @@ jobs: matrix: job_name: [beam_PostCommit_Python] job_phrase: [Run Python PostCommit] - python_version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python_version: ['3.9', '3.10', '3.11', '3.12'] if: | github.event_name == 'workflow_dispatch' || github.event_name == 'pull_request_target' || diff --git a/.github/workflows/beam_PostCommit_Python_Arm.yml b/.github/workflows/beam_PostCommit_Python_Arm.yml index f9438c3c644d..48fb00b1bb9d 100644 --- a/.github/workflows/beam_PostCommit_Python_Arm.yml +++ b/.github/workflows/beam_PostCommit_Python_Arm.yml @@ -62,7 +62,7 @@ jobs: matrix: job_name: [beam_PostCommit_Python_Arm] job_phrase: [Run Python PostCommit Arm] - python_version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python_version: ['3.9', '3.10', '3.11', '3.12'] if: | github.event_name == 'workflow_dispatch' || github.event_name == 'pull_request_target' || diff --git a/.github/workflows/beam_PostCommit_Python_Dependency.yml 
b/.github/workflows/beam_PostCommit_Python_Dependency.yml index 04c22e4ab07d..6e7c4ddbd3eb 100644 --- a/.github/workflows/beam_PostCommit_Python_Dependency.yml +++ b/.github/workflows/beam_PostCommit_Python_Dependency.yml @@ -81,7 +81,7 @@ jobs: - name: Run postCommitPyDep uses: ./.github/actions/gradle-command-self-hosted-action with: - gradle-command: :sdks:python:test-suites:tox:py38:postCommitPyDep + gradle-command: :sdks:python:test-suites:tox:py39:postCommitPyDep arguments: -PuseWheelDistribution - name: Archive Python Test Results uses: actions/upload-artifact@v4 diff --git a/.github/workflows/beam_PostCommit_Python_Examples_Direct.yml b/.github/workflows/beam_PostCommit_Python_Examples_Direct.yml index 7d3bb65a20e1..a6bb49f4e444 100644 --- a/.github/workflows/beam_PostCommit_Python_Examples_Direct.yml +++ b/.github/workflows/beam_PostCommit_Python_Examples_Direct.yml @@ -63,7 +63,7 @@ jobs: matrix: job_name: ["beam_PostCommit_Python_Examples_Direct"] job_phrase: ["Run Python Examples_Direct"] - python_version: ['3.8','3.9','3.10','3.11','3.12'] + python_version: ['3.9','3.10','3.11','3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository diff --git a/.github/workflows/beam_PostCommit_Python_Examples_Flink.yml b/.github/workflows/beam_PostCommit_Python_Examples_Flink.yml index f88d7e205cc6..bda807eb147b 100644 --- a/.github/workflows/beam_PostCommit_Python_Examples_Flink.yml +++ b/.github/workflows/beam_PostCommit_Python_Examples_Flink.yml @@ -63,7 +63,7 @@ jobs: matrix: job_name: ["beam_PostCommit_Python_Examples_Flink"] job_phrase: ["Run Python Examples_Flink"] - python_version: ['3.8', '3.12'] + python_version: ['3.9', '3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository diff --git a/.github/workflows/beam_PostCommit_Python_Examples_Spark.yml b/.github/workflows/beam_PostCommit_Python_Examples_Spark.yml index 4300179421b5..d866d412507b 100644 --- a/.github/workflows/beam_PostCommit_Python_Examples_Spark.yml +++ 
b/.github/workflows/beam_PostCommit_Python_Examples_Spark.yml @@ -63,7 +63,7 @@ jobs: matrix: job_name: ["beam_PostCommit_Python_Examples_Spark"] job_phrase: ["Run Python Examples_Spark"] - python_version: ['3.8', '3.12'] + python_version: ['3.9', '3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository diff --git a/.github/workflows/beam_PostCommit_Python_Nexmark_Direct.yml b/.github/workflows/beam_PostCommit_Python_Nexmark_Direct.yml index 601cb99a44fb..3d47fb86889d 100644 --- a/.github/workflows/beam_PostCommit_Python_Nexmark_Direct.yml +++ b/.github/workflows/beam_PostCommit_Python_Nexmark_Direct.yml @@ -133,7 +133,7 @@ jobs: with: gradle-command: :sdks:python:apache_beam:testing:benchmarks:nexmark:run arguments: | - -PpythonVersion=3.8 \ + -PpythonVersion=3.9 \ "-Pnexmark.args=${{ env.GRADLE_PYTHON_COMMAND_ARGUMENTS }} \ --query=${{ matrix.query }} \ --input=gs://temp-storage-for-perf-tests/nexmark/eventFiles/beam_PostCommit_Python_Nexmark_Direct/query${{ matrix.query }}-\*" \ No newline at end of file diff --git a/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml b/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml index ec7a28d2db2c..bcd936324124 100644 --- a/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow.yml @@ -65,7 +65,7 @@ jobs: matrix: job_name: ["beam_PostCommit_Python_ValidatesContainer_Dataflow"] job_phrase: ["Run Python Dataflow ValidatesContainer"] - python_version: ['3.8','3.9','3.10','3.11','3.12'] + python_version: ['3.9','3.10','3.11','3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository diff --git a/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml b/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml index b90c150291dd..f2eba045722c 100644 --- 
a/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml +++ b/.github/workflows/beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC.yml @@ -63,7 +63,7 @@ jobs: matrix: job_name: ["beam_PostCommit_Python_ValidatesContainer_Dataflow_With_RC"] job_phrase: ["Run Python RC Dataflow ValidatesContainer"] - python_version: ['3.8','3.9','3.10','3.11','3.12'] + python_version: ['3.9','3.10','3.11','3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository diff --git a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml index 8df5d00287bc..1876950c7a93 100644 --- a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Dataflow.yml @@ -63,7 +63,7 @@ jobs: matrix: job_name: ["beam_PostCommit_Python_ValidatesRunner_Dataflow"] job_phrase: ["Run Python Dataflow ValidatesRunner"] - python_version: ['3.8', '3.12'] + python_version: ['3.9', '3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository diff --git a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml index b301402f4de2..f837c7476e12 100644 --- a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml +++ b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Flink.yml @@ -63,7 +63,7 @@ jobs: matrix: job_name: ["beam_PostCommit_Python_ValidatesRunner_Flink"] job_phrase: ["Run Python Flink ValidatesRunner"] - python_version: ['3.8', '3.12'] + python_version: ['3.9', '3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository diff --git a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml index 6c89b110ec7a..91c249adf338 100644 --- a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml +++ 
b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Samza.yml @@ -63,7 +63,7 @@ jobs: matrix: job_name: ["beam_PostCommit_Python_ValidatesRunner_Samza"] job_phrase: ["Run Python Samza ValidatesRunner"] - python_version: ['3.8', '3.12'] + python_version: ['3.9', '3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository diff --git a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml index 66d1ac6756c4..7e87aaff22cc 100644 --- a/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml +++ b/.github/workflows/beam_PostCommit_Python_ValidatesRunner_Spark.yml @@ -63,7 +63,7 @@ jobs: matrix: job_name: ["beam_PostCommit_Python_ValidatesRunner_Spark"] job_phrase: ["Run Python Spark ValidatesRunner"] - python_version: ['3.8', '3.12'] + python_version: ['3.9', '3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository diff --git a/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml b/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml index eb204ce50349..b3f37c6b39f0 100644 --- a/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Dataflow.yml @@ -74,7 +74,7 @@ jobs: uses: ./.github/actions/setup-environment-action with: python-version: | - 3.8 + 3.9 3.12 - name: run PostCommit Python Xlang Gcp Dataflow script uses: ./.github/actions/gradle-command-self-hosted-action diff --git a/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml b/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml index 63417231cb96..137d7bc13d2f 100644 --- a/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml +++ b/.github/workflows/beam_PostCommit_Python_Xlang_Gcp_Direct.yml @@ -74,7 +74,7 @@ jobs: uses: ./.github/actions/setup-environment-action with: python-version: | - 3.8 + 3.9 3.12 - name: run PostCommit Python Xlang Gcp Direct script uses: 
./.github/actions/gradle-command-self-hosted-action diff --git a/.github/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml b/.github/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml index ac90c2cd66b8..8fc0db189078 100644 --- a/.github/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_Python_Xlang_IO_Dataflow.yml @@ -74,7 +74,7 @@ jobs: uses: ./.github/actions/setup-environment-action with: python-version: | - 3.8 + 3.9 3.12 - name: run PostCommit Python Xlang IO Dataflow script uses: ./.github/actions/gradle-command-self-hosted-action diff --git a/.github/workflows/beam_PostCommit_Sickbay_Python.yml b/.github/workflows/beam_PostCommit_Sickbay_Python.yml index 60f3ded93c62..837c22b699f8 100644 --- a/.github/workflows/beam_PostCommit_Sickbay_Python.yml +++ b/.github/workflows/beam_PostCommit_Sickbay_Python.yml @@ -61,7 +61,7 @@ jobs: job_name: [beam_PostCommit_Sickbay_Python] job_phrase_1: [Run Python] job_phrase_2: [PostCommit Sickbay] - python_version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python_version: ['3.9', '3.10', '3.11', '3.12'] if: | github.event_name == 'workflow_dispatch' || github.event_name == 'pull_request_target' || diff --git a/.github/workflows/beam_PostCommit_TransformService_Direct.yml b/.github/workflows/beam_PostCommit_TransformService_Direct.yml index 966938e43e92..cb339eb9fb40 100644 --- a/.github/workflows/beam_PostCommit_TransformService_Direct.yml +++ b/.github/workflows/beam_PostCommit_TransformService_Direct.yml @@ -62,7 +62,7 @@ jobs: matrix: job_name: ["beam_PostCommit_TransformService_Direct"] job_phrase: ["Run TransformService_Direct PostCommit"] - python_version: ['3.8','3.12'] + python_version: ['3.9','3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository @@ -76,7 +76,7 @@ jobs: with: java-version: 11 python-version: | - 3.8 + 3.9 ${{ matrix.python_version }} - name: run TransformService Direct script uses: ./.github/actions/gradle-command-self-hosted-action 
diff --git a/.github/workflows/beam_PostCommit_XVR_Direct.yml b/.github/workflows/beam_PostCommit_XVR_Direct.yml index ec66cab88e9b..023ae4f8cd31 100644 --- a/.github/workflows/beam_PostCommit_XVR_Direct.yml +++ b/.github/workflows/beam_PostCommit_XVR_Direct.yml @@ -62,7 +62,7 @@ jobs: matrix: job_name: ["beam_PostCommit_XVR_Direct"] job_phrase: ["Run XVR_Direct PostCommit"] - python_version: ['3.8','3.12'] + python_version: ['3.9','3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository @@ -75,12 +75,12 @@ jobs: uses: ./.github/actions/setup-environment-action with: python-version: | - 3.8 + 3.9 ${{ matrix.python_version }} - name: run PostCommit XVR Direct script env: CLOUDSDK_CONFIG: ${{ env.KUBELET_GCLOUD_CONFIG_PATH}} - if: ${{ matrix.python_version != '3.8' }} + if: ${{ matrix.python_version != '3.9' }} uses: ./.github/actions/gradle-command-self-hosted-action with: gradle-command: :sdks:python:test-suites:direct:xlang:validatesCrossLanguageRunner @@ -90,7 +90,7 @@ jobs: - name: run PostCommit XVR Direct script env: CLOUDSDK_CONFIG: ${{ env.KUBELET_GCLOUD_CONFIG_PATH}} - if: ${{ matrix.python_version == '3.8' }} + if: ${{ matrix.python_version == '3.9' }} uses: ./.github/actions/gradle-command-self-hosted-action with: gradle-command: :sdks:python:test-suites:direct:xlang:validatesCrossLanguageRunner diff --git a/.github/workflows/beam_PostCommit_XVR_Flink.yml b/.github/workflows/beam_PostCommit_XVR_Flink.yml index d88b502988ef..5cde38d24244 100644 --- a/.github/workflows/beam_PostCommit_XVR_Flink.yml +++ b/.github/workflows/beam_PostCommit_XVR_Flink.yml @@ -63,7 +63,7 @@ jobs: matrix: job_name: ["beam_PostCommit_XVR_Flink"] job_phrase: ["Run XVR_Flink PostCommit"] - python_version: ['3.8','3.12'] + python_version: ['3.9','3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository @@ -76,12 +76,12 @@ jobs: uses: ./.github/actions/setup-environment-action with: python-version: | - 3.8 + 3.9 ${{ matrix.python_version }} - name: run PostCommit 
XVR Flink script env: CLOUDSDK_CONFIG: ${{ env.KUBELET_GCLOUD_CONFIG_PATH}} - if: ${{ matrix.python_version != '3.8' }} + if: ${{ matrix.python_version != '3.9' }} uses: ./.github/actions/gradle-command-self-hosted-action with: gradle-command: :runners:flink:${{ env.FlinkVersion }}:job-server:validatesCrossLanguageRunner @@ -91,7 +91,7 @@ jobs: - name: run PostCommit XVR Flink script env: CLOUDSDK_CONFIG: ${{ env.KUBELET_GCLOUD_CONFIG_PATH}} - if: ${{ matrix.python_version == '3.8' }} + if: ${{ matrix.python_version == '3.9' }} uses: ./.github/actions/gradle-command-self-hosted-action with: gradle-command: :runners:flink:${{ env.FlinkVersion }}:job-server:validatesCrossLanguageRunner diff --git a/.github/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml b/.github/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml index 779d5881ca7a..66770c9a1683 100644 --- a/.github/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_XVR_JavaUsingPython_Dataflow.yml @@ -62,7 +62,7 @@ jobs: matrix: job_name: ["beam_PostCommit_XVR_JavaUsingPython_Dataflow"] job_phrase: ["Run XVR_JavaUsingPython_Dataflow PostCommit"] - python_version: ['3.8','3.12'] + python_version: ['3.9','3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository @@ -75,7 +75,7 @@ jobs: uses: ./.github/actions/setup-environment-action with: python-version: | - 3.8 + 3.9 ${{ matrix.python_version }} - name: run PostCommit XVR JavaUsingPython Dataflow script uses: ./.github/actions/gradle-command-self-hosted-action diff --git a/.github/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml b/.github/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml index 14404e8a9a41..f1269a0ddd09 100644 --- a/.github/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml +++ b/.github/workflows/beam_PostCommit_XVR_PythonUsingJava_Dataflow.yml @@ -62,7 +62,7 @@ jobs: matrix: job_name: ["beam_PostCommit_XVR_PythonUsingJava_Dataflow"] job_phrase: 
["Run XVR_PythonUsingJava_Dataflow PostCommit"] - python_version: ['3.8','3.12'] + python_version: ['3.9','3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository @@ -75,7 +75,7 @@ jobs: uses: ./.github/actions/setup-environment-action with: python-version: | - 3.8 + 3.9 ${{ matrix.python_version }} - name: run PostCommit XVR PythonUsingJava Dataflow script uses: ./.github/actions/gradle-command-self-hosted-action diff --git a/.github/workflows/beam_PostCommit_XVR_Samza.yml b/.github/workflows/beam_PostCommit_XVR_Samza.yml index 2d854a3678e7..2d26c9131839 100644 --- a/.github/workflows/beam_PostCommit_XVR_Samza.yml +++ b/.github/workflows/beam_PostCommit_XVR_Samza.yml @@ -62,7 +62,7 @@ jobs: matrix: job_name: ["beam_PostCommit_XVR_Samza"] job_phrase: ["Run XVR_Samza PostCommit"] - python_version: ['3.8','3.12'] + python_version: ['3.9','3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository @@ -77,12 +77,12 @@ jobs: with: java-version: 8 python-version: | - 3.8 + 3.9 ${{ matrix.python_version }} - name: run PostCommit XVR Samza script env: CLOUDSDK_CONFIG: ${{ env.KUBELET_GCLOUD_CONFIG_PATH}} - if: ${{ matrix.python_version != '3.8' }} + if: ${{ matrix.python_version != '3.9' }} uses: ./.github/actions/gradle-command-self-hosted-action with: gradle-command: :runners:samza:job-server:validatesCrossLanguageRunner @@ -92,7 +92,7 @@ jobs: - name: run PostCommit XVR Samza script env: CLOUDSDK_CONFIG: ${{ env.KUBELET_GCLOUD_CONFIG_PATH}} - if: ${{ matrix.python_version == '3.8' }} + if: ${{ matrix.python_version == '3.9' }} uses: ./.github/actions/gradle-command-self-hosted-action with: gradle-command: :runners:samza:job-server:validatesCrossLanguageRunner diff --git a/.github/workflows/beam_PostCommit_XVR_Spark3.yml b/.github/workflows/beam_PostCommit_XVR_Spark3.yml index 83554ecfa84c..c1880e01292b 100644 --- a/.github/workflows/beam_PostCommit_XVR_Spark3.yml +++ b/.github/workflows/beam_PostCommit_XVR_Spark3.yml @@ -62,7 +62,7 @@ jobs: matrix: 
job_name: ["beam_PostCommit_XVR_Spark3"] job_phrase: ["Run XVR_Spark3 PostCommit"] - python_version: ['3.8','3.12'] + python_version: ['3.9','3.12'] steps: - uses: actions/checkout@v4 - name: Setup repository @@ -75,12 +75,12 @@ jobs: uses: ./.github/actions/setup-environment-action with: python-version: | - 3.8 + 3.9 ${{ matrix.python_version }} - name: run PostCommit XVR Spark3 script env: CLOUDSDK_CONFIG: ${{ env.KUBELET_GCLOUD_CONFIG_PATH}} - if: ${{ matrix.python_version != '3.8' }} + if: ${{ matrix.python_version != '3.9' }} uses: ./.github/actions/gradle-command-self-hosted-action with: gradle-command: :runners:spark:3:job-server:validatesCrossLanguageRunner @@ -90,7 +90,7 @@ jobs: - name: run PostCommit XVR Spark3 script env: CLOUDSDK_CONFIG: ${{ env.KUBELET_GCLOUD_CONFIG_PATH}} - if: ${{ matrix.python_version == '3.8' }} + if: ${{ matrix.python_version == '3.9' }} uses: ./.github/actions/gradle-command-self-hosted-action with: gradle-command: :runners:spark:3:job-server:validatesCrossLanguageRunner diff --git a/.github/workflows/beam_PreCommit_Portable_Python.yml b/.github/workflows/beam_PreCommit_Portable_Python.yml index e1e1e6033087..037df9a17c45 100644 --- a/.github/workflows/beam_PreCommit_Portable_Python.yml +++ b/.github/workflows/beam_PreCommit_Portable_Python.yml @@ -86,7 +86,7 @@ jobs: matrix: job_name: ['beam_PreCommit_Portable_Python'] job_phrase: ['Run Portable_Python PreCommit'] - python_version: ['3.8', '3.12'] + python_version: ['3.9', '3.12'] if: | github.event_name == 'push' || github.event_name == 'pull_request_target' || @@ -106,7 +106,7 @@ jobs: java-version: default python-version: | ${{ matrix.python_version }} - 3.8 + 3.9 - name: Set PY_VER_CLEAN id: set_py_ver_clean run: | diff --git a/.github/workflows/beam_PreCommit_Python.yml b/.github/workflows/beam_PreCommit_Python.yml index 2fbab55819f3..fb1c6c80873a 100644 --- a/.github/workflows/beam_PreCommit_Python.yml +++ b/.github/workflows/beam_PreCommit_Python.yml @@ -64,7 +64,7 @@ 
jobs: matrix: job_name: ['beam_PreCommit_Python'] job_phrase: ['Run Python PreCommit'] - python_version: ['3.8','3.9','3.10','3.11','3.12'] + python_version: ['3.9','3.10','3.11','3.12'] if: | github.event_name == 'push' || github.event_name == 'pull_request_target' || diff --git a/.github/workflows/beam_PreCommit_PythonDocker.yml b/.github/workflows/beam_PreCommit_PythonDocker.yml index 129429238b1f..63fc6d55e19a 100644 --- a/.github/workflows/beam_PreCommit_PythonDocker.yml +++ b/.github/workflows/beam_PreCommit_PythonDocker.yml @@ -64,7 +64,7 @@ jobs: matrix: job_name: ["beam_PreCommit_PythonDocker"] job_phrase: ["Run PythonDocker PreCommit"] - python_version: ['3.8','3.9','3.10','3.11','3.12'] + python_version: ['3.9','3.10','3.11','3.12'] if: | github.event_name == 'push' || github.event_name == 'pull_request_target' || diff --git a/.github/workflows/beam_PreCommit_Python_Coverage.yml b/.github/workflows/beam_PreCommit_Python_Coverage.yml index a22e3c338eed..0e295250817d 100644 --- a/.github/workflows/beam_PreCommit_Python_Coverage.yml +++ b/.github/workflows/beam_PreCommit_Python_Coverage.yml @@ -87,7 +87,7 @@ jobs: - name: Run preCommitPyCoverage uses: ./.github/actions/gradle-command-self-hosted-action with: - gradle-command: :sdks:python:test-suites:tox:py38:preCommitPyCoverage + gradle-command: :sdks:python:test-suites:tox:py39:preCommitPyCoverage - uses: codecov/codecov-action@v3 with: flags: python diff --git a/.github/workflows/beam_PreCommit_Python_Dataframes.yml b/.github/workflows/beam_PreCommit_Python_Dataframes.yml index f498dadae92d..f045842e061d 100644 --- a/.github/workflows/beam_PreCommit_Python_Dataframes.yml +++ b/.github/workflows/beam_PreCommit_Python_Dataframes.yml @@ -64,7 +64,7 @@ jobs: matrix: job_name: ['beam_PreCommit_Python_Dataframes'] job_phrase: ['Run Python_Dataframes PreCommit'] - python_version: ['3.8','3.9','3.10','3.11','3.12'] + python_version: ['3.9','3.10','3.11','3.12'] if: | github.event_name == 'push' || 
github.event_name == 'pull_request_target' || diff --git a/.github/workflows/beam_PreCommit_Python_Examples.yml b/.github/workflows/beam_PreCommit_Python_Examples.yml index 0fb404e7f55b..09d46217d6d6 100644 --- a/.github/workflows/beam_PreCommit_Python_Examples.yml +++ b/.github/workflows/beam_PreCommit_Python_Examples.yml @@ -64,7 +64,7 @@ jobs: matrix: job_name: ['beam_PreCommit_Python_Examples'] job_phrase: ['Run Python_Examples PreCommit'] - python_version: ['3.8','3.9','3.10','3.11','3.12'] + python_version: ['3.9','3.10','3.11','3.12'] if: | github.event_name == 'push' || github.event_name == 'pull_request_target' || diff --git a/.github/workflows/beam_PreCommit_Python_Integration.yml b/.github/workflows/beam_PreCommit_Python_Integration.yml index a2d80806d2bf..20aade431f6d 100644 --- a/.github/workflows/beam_PreCommit_Python_Integration.yml +++ b/.github/workflows/beam_PreCommit_Python_Integration.yml @@ -64,7 +64,7 @@ jobs: matrix: job_name: ['beam_PreCommit_Python_Integration'] job_phrase: ['Run Python_Integration PreCommit'] - python_version: ['3.8', '3.12'] + python_version: ['3.9', '3.12'] if: | github.event_name == 'push' || github.event_name == 'pull_request_target' || diff --git a/.github/workflows/beam_PreCommit_Python_ML.yml b/.github/workflows/beam_PreCommit_Python_ML.yml index c5e596e3b421..714eceef5f6b 100644 --- a/.github/workflows/beam_PreCommit_Python_ML.yml +++ b/.github/workflows/beam_PreCommit_Python_ML.yml @@ -64,7 +64,7 @@ jobs: matrix: job_name: ['beam_PreCommit_Python_ML'] job_phrase: ['Run Python_ML PreCommit'] - python_version: ['3.8','3.9','3.10','3.11','3.12'] + python_version: ['3.9','3.10','3.11','3.12'] if: | github.event_name == 'push' || github.event_name == 'pull_request_target' || diff --git a/.github/workflows/beam_PreCommit_Python_Runners.yml b/.github/workflows/beam_PreCommit_Python_Runners.yml index 66037cfaffdb..5db6e94be781 100644 --- a/.github/workflows/beam_PreCommit_Python_Runners.yml +++ 
b/.github/workflows/beam_PreCommit_Python_Runners.yml @@ -64,7 +64,7 @@ jobs: matrix: job_name: ['beam_PreCommit_Python_Runners'] job_phrase: ['Run Python_Runners PreCommit'] - python_version: ['3.8','3.9','3.10','3.11','3.12'] + python_version: ['3.9','3.10','3.11','3.12'] if: | github.event_name == 'push' || github.event_name == 'pull_request_target' || diff --git a/.github/workflows/beam_PreCommit_Python_Transforms.yml b/.github/workflows/beam_PreCommit_Python_Transforms.yml index caec491a7515..820ca3e26df6 100644 --- a/.github/workflows/beam_PreCommit_Python_Transforms.yml +++ b/.github/workflows/beam_PreCommit_Python_Transforms.yml @@ -64,7 +64,7 @@ jobs: matrix: job_name: ['beam_PreCommit_Python_Transforms'] job_phrase: ['Run Python_Transforms PreCommit'] - python_version: ['3.8','3.9','3.10','3.11','3.12'] + python_version: ['3.9','3.10','3.11','3.12'] if: | github.event_name == 'push' || github.event_name == 'pull_request_target' || diff --git a/.github/workflows/beam_PreCommit_Xlang_Generated_Transforms.yml b/.github/workflows/beam_PreCommit_Xlang_Generated_Transforms.yml index c7ba234bef4b..f79712ac2d76 100644 --- a/.github/workflows/beam_PreCommit_Xlang_Generated_Transforms.yml +++ b/.github/workflows/beam_PreCommit_Xlang_Generated_Transforms.yml @@ -82,7 +82,7 @@ jobs: matrix: job_name: ['beam_PreCommit_Xlang_Generated_Transforms'] job_phrase: ['Run Xlang_Generated_Transforms PreCommit'] - python_version: ['3.8'] + python_version: ['3.9'] if: | github.event_name == 'push' || github.event_name == 'workflow_dispatch' || diff --git a/.github/workflows/beam_Publish_Beam_SDK_Snapshots.yml b/.github/workflows/beam_Publish_Beam_SDK_Snapshots.yml index 72de1f15229f..c971ef36a081 100644 --- a/.github/workflows/beam_Publish_Beam_SDK_Snapshots.yml +++ b/.github/workflows/beam_Publish_Beam_SDK_Snapshots.yml @@ -66,7 +66,6 @@ jobs: - "java:container:java11" - "java:container:java17" - "java:container:java21" - - "python:container:py38" - "python:container:py39" - 
"python:container:py310" - "python:container:py311" diff --git a/.github/workflows/build_release_candidate.yml b/.github/workflows/build_release_candidate.yml index f944ce90c9f1..ec65ae99072a 100644 --- a/.github/workflows/build_release_candidate.yml +++ b/.github/workflows/build_release_candidate.yml @@ -260,10 +260,10 @@ jobs: with: distribution: 'temurin' java-version: '11' - - name: Install Python 3.8 + - name: Install Python 3.9 uses: actions/setup-python@v5 with: - python-version: '3.8' + python-version: '3.9' - run: echo $JAVA_HOME - run: echo "JAVA11_HOME=${JAVA_HOME}" >> "$GITHUB_OUTPUT" id: export-java11 @@ -310,10 +310,10 @@ jobs: path: beam-site token: ${{ github.event.inputs.REPO_TOKEN }} ref: release-docs - - name: Install Python 3.8 + - name: Install Python 3.9 uses: actions/setup-python@v5 with: - python-version: '3.8' + python-version: '3.9' - name: Install node uses: actions/setup-node@v4 with: diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 1275b38b9d23..64303698fb05 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -48,7 +48,7 @@ jobs: runs-on: [self-hosted, ubuntu-20.04, main] env: EVENT_NAME: ${{ github.event_name }} - PY_VERSIONS_FULL: "cp38-* cp39-* cp310-* cp311-* cp312-*" + PY_VERSIONS_FULL: "cp39-* cp310-* cp311-* cp312-*" outputs: gcp-variables-set: ${{ steps.check_gcp_variables.outputs.gcp-variables-set }} py-versions-full: ${{ steps.set-py-versions.outputs.py-versions-full }} @@ -91,7 +91,7 @@ jobs: - name: Install python uses: actions/setup-python@v5 with: - python-version: 3.8 + python-version: 3.9 - name: Get tag id: get_tag run: | @@ -252,7 +252,7 @@ jobs: - name: Install Python uses: actions/setup-python@v5 with: - python-version: 3.8 + python-version: 3.9 - uses: docker/setup-qemu-action@v1 if: ${{matrix.arch == 'aarch64'}} name: Set up QEMU diff --git a/.github/workflows/dask_runner_tests.yml b/.github/workflows/dask_runner_tests.yml index 
5f39852c228c..f87c70d8b720 100644 --- a/.github/workflows/dask_runner_tests.yml +++ b/.github/workflows/dask_runner_tests.yml @@ -43,7 +43,7 @@ jobs: - name: Install python uses: actions/setup-python@v5 with: - python-version: 3.8 + python-version: 3.9 - name: Build source working-directory: ./sdks/python run: pip install -U build && python -m build --sdist @@ -64,7 +64,6 @@ jobs: matrix: os: [ubuntu-latest, macos-latest, windows-latest] params: [ - {"py_ver": "3.8", "tox_env": "py38"}, {"py_ver": "3.9", "tox_env": "py39"}, {"py_ver": "3.10", "tox_env": "py310" }, ] diff --git a/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Batch_2GB_10_byte_records.txt b/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Batch_2GB_10_byte_records.txt index 8295d1c8aa86..57b1bbc854b6 100644 --- a/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Batch_2GB_10_byte_records.txt +++ b/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Batch_2GB_10_byte_records.txt @@ -22,6 +22,6 @@ --parallelism=5 --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --top_count=20 --runner=PortableRunner \ No newline at end of file diff --git a/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Batch_2GB_Fanout_4.txt b/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Batch_2GB_Fanout_4.txt index 82f8bcc7c0ae..4923929301dc 100644 --- a/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Batch_2GB_Fanout_4.txt +++ b/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Batch_2GB_Fanout_4.txt @@ -22,7 +22,7 @@ --parallelism=16 --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest 
+--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --fanout=4 --top_count=20 --runner=PortableRunner \ No newline at end of file diff --git a/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Batch_2GB_Fanout_8.txt b/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Batch_2GB_Fanout_8.txt index 45425b6bf153..8a089fee3516 100644 --- a/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Batch_2GB_Fanout_8.txt +++ b/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Batch_2GB_Fanout_8.txt @@ -22,7 +22,7 @@ --parallelism=16 --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --fanout=8 --top_count=20 --runner=PortableRunner \ No newline at end of file diff --git a/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Streaming_2GB_10_byte_records.txt b/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Streaming_2GB_10_byte_records.txt index 12ffc1790e46..5d1a0be9950e 100644 --- a/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Streaming_2GB_10_byte_records.txt +++ b/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Streaming_2GB_10_byte_records.txt @@ -22,7 +22,7 @@ --parallelism=5 --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --top_count=20 --streaming --use_stateful_load_generator diff --git a/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Streaming_2GB_Fanout_4.txt b/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Streaming_2GB_Fanout_4.txt index c7d5552a03bd..650236a9c500 100644 --- 
a/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Streaming_2GB_Fanout_4.txt +++ b/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Streaming_2GB_Fanout_4.txt @@ -22,7 +22,7 @@ --parallelism=16 --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --fanout=4 --top_count=20 --streaming diff --git a/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Streaming_2GB_Fanout_8.txt b/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Streaming_2GB_Fanout_8.txt index bffdeab2cb11..4208571fef62 100644 --- a/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Streaming_2GB_Fanout_8.txt +++ b/.github/workflows/load-tests-pipeline-options/python_Combine_Flink_Streaming_2GB_Fanout_8.txt @@ -22,7 +22,7 @@ --parallelism=16 --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --fanout=8 --top_count=20 --streaming diff --git a/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_2GB_of_100B_records.txt b/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_2GB_of_100B_records.txt index 4cb5bfb0d988..f4f5e7de8369 100644 --- a/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_2GB_of_100B_records.txt +++ b/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_2GB_of_100B_records.txt @@ -24,5 +24,5 @@ --parallelism=5 --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --runner=PortableRunner \ No newline at end of file diff --git 
a/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_2GB_of_10B_records.txt b/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_2GB_of_10B_records.txt index 2427e21cde45..40db0b6d40bc 100644 --- a/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_2GB_of_10B_records.txt +++ b/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_2GB_of_10B_records.txt @@ -24,5 +24,5 @@ --parallelism=5 --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --runner=PortableRunner \ No newline at end of file diff --git a/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_fanout_4_times_with_2GB_10-byte_records_total.txt b/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_fanout_4_times_with_2GB_10-byte_records_total.txt index bf9085141eab..df27dc7c4470 100644 --- a/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_fanout_4_times_with_2GB_10-byte_records_total.txt +++ b/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_fanout_4_times_with_2GB_10-byte_records_total.txt @@ -24,5 +24,5 @@ --parallelism=16 --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --runner=PortableRunner \ No newline at end of file diff --git a/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_fanout_8_times_with_2GB_10-byte_records_total.txt b/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_fanout_8_times_with_2GB_10-byte_records_total.txt index a59f873eb775..6b87f61eed8a 100644 --- 
a/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_fanout_8_times_with_2GB_10-byte_records_total.txt +++ b/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_fanout_8_times_with_2GB_10-byte_records_total.txt @@ -24,5 +24,5 @@ --parallelism=16 --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --runner=PortableRunner \ No newline at end of file diff --git a/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_reiterate_4_times_10kB_values.txt b/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_reiterate_4_times_10kB_values.txt index 0e5d00b96151..621777663be0 100644 --- a/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_reiterate_4_times_10kB_values.txt +++ b/.github/workflows/load-tests-pipeline-options/python_GBK_Flink_Batch_reiterate_4_times_10kB_values.txt @@ -24,5 +24,5 @@ --parallelism=5 --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --runner=PortableRunner \ No newline at end of file diff --git a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Batch_10_Counters.txt b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Batch_10_Counters.txt index 4d8bda8ac2f8..fe451559e625 100644 --- a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Batch_10_Counters.txt +++ b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Batch_10_Counters.txt @@ -25,5 +25,5 @@ --parallelism=5 --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest 
+--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --runner=PortableRunner \ No newline at end of file diff --git a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Batch_10_Iterations.txt b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Batch_10_Iterations.txt index e84cee2f50cf..dd5addb65d14 100644 --- a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Batch_10_Iterations.txt +++ b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Batch_10_Iterations.txt @@ -25,5 +25,5 @@ --parallelism=5 --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --runner=PortableRunner \ No newline at end of file diff --git a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Batch_200_Iterations.txt b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Batch_200_Iterations.txt index 4d8bda8ac2f8..fe451559e625 100644 --- a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Batch_200_Iterations.txt +++ b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Batch_200_Iterations.txt @@ -25,5 +25,5 @@ --parallelism=5 --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --runner=PortableRunner \ No newline at end of file diff --git a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_100_Counters.txt b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_100_Counters.txt index b17e2cecc2c8..308deb3ecf4d 100644 --- a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_100_Counters.txt +++ 
b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_100_Counters.txt @@ -26,6 +26,6 @@ --streaming --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --use_stateful_load_generator --runner=PortableRunner \ No newline at end of file diff --git a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_10_Counters.txt b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_10_Counters.txt index 957bc6c086d8..78ecc1fd98dd 100644 --- a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_10_Counters.txt +++ b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_10_Counters.txt @@ -26,6 +26,6 @@ --streaming --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --use_stateful_load_generator --runner=PortableRunner \ No newline at end of file diff --git a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_10_Iterations.txt b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_10_Iterations.txt index baa34ec455b5..04a1213d4039 100644 --- a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_10_Iterations.txt +++ b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_10_Iterations.txt @@ -27,6 +27,6 @@ --stateful --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --use_stateful_load_generator --runner=PortableRunner \ No newline at end of file diff --git 
a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_200_Iterations.txt b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_200_Iterations.txt index 44483a6e51cc..a2f7d7600da8 100644 --- a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_200_Iterations.txt +++ b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_200_Iterations.txt @@ -26,6 +26,6 @@ --streaming --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --use_stateful_load_generator --runner=PortableRunner \ No newline at end of file diff --git a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_5_Iterations.txt b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_5_Iterations.txt index 571b33fb7a49..f49be6c70582 100644 --- a/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_5_Iterations.txt +++ b/.github/workflows/load-tests-pipeline-options/python_ParDo_Flink_Streaming_5_Iterations.txt @@ -30,6 +30,6 @@ --shutdown_sources_after_idle_ms=300000 --job_endpoint=localhost:8099 --environment_type=DOCKER ---environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest +--environment_config=gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest --use_stateful_load_generator --runner=PortableRunner \ No newline at end of file diff --git a/.github/workflows/playground_backend_precommit.yml b/.github/workflows/playground_backend_precommit.yml index 79517e705c27..9ba6cf20534f 100644 --- a/.github/workflows/playground_backend_precommit.yml +++ b/.github/workflows/playground_backend_precommit.yml @@ -33,7 +33,7 @@ jobs: runs-on: ubuntu-latest env: DATASTORE_EMULATOR_VERSION: '423.0.0' - PYTHON_VERSION: '3.8' + PYTHON_VERSION: '3.9' JAVA_VERSION: '11' 
steps: - name: Check out the repo diff --git a/.github/workflows/python_dependency_tests.yml b/.github/workflows/python_dependency_tests.yml index fed1056b90b2..2eaa9e4ce5aa 100644 --- a/.github/workflows/python_dependency_tests.yml +++ b/.github/workflows/python_dependency_tests.yml @@ -26,7 +26,6 @@ jobs: matrix: os: [ubuntu-latest] params: [ - {"py_ver": "3.8", "py_env": "py38"}, {"py_ver": "3.9", "py_env": "py39"}, {"py_ver": "3.10", "py_env": "py310" }, { "py_ver": "3.11", "py_env": "py311" }, diff --git a/.github/workflows/python_tests.yml b/.github/workflows/python_tests.yml index a65b26645533..35dbbbb6a9d5 100644 --- a/.github/workflows/python_tests.yml +++ b/.github/workflows/python_tests.yml @@ -98,7 +98,6 @@ jobs: matrix: os: [macos-latest, windows-latest] params: [ - {"py_ver": "3.8", "tox_env": "py38"}, {"py_ver": "3.9", "tox_env": "py39"}, {"py_ver": "3.10", "tox_env": "py310" }, { "py_ver": "3.11", "tox_env": "py311" }, @@ -135,7 +134,7 @@ jobs: fail-fast: false matrix: os: [[self-hosted, ubuntu-20.04, main], macos-latest, windows-latest] - python: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python: ["3.9", "3.10", "3.11", "3.12"] steps: - name: Checkout code uses: actions/checkout@v4 @@ -162,7 +161,7 @@ jobs: fail-fast: false matrix: os: [[self-hosted, ubuntu-20.04, main], macos-latest, windows-latest] - python: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python: ["3.9", "3.10", "3.11", "3.12"] steps: - name: Checkout code uses: actions/checkout@v4 diff --git a/.github/workflows/run_perf_alert_tool.yml b/.github/workflows/run_perf_alert_tool.yml index 4bb5df41dcfb..a6aae616efec 100644 --- a/.github/workflows/run_perf_alert_tool.yml +++ b/.github/workflows/run_perf_alert_tool.yml @@ -39,7 +39,7 @@ jobs: - name: Install python uses: actions/setup-python@v5 with: - python-version: 3.8 + python-version: 3.9 - name: Install Apache Beam working-directory: ./sdks/python run: pip install -e .[gcp,test] diff --git a/.github/workflows/run_rc_validation.yml 
b/.github/workflows/run_rc_validation.yml index 15979a9e1acd..801a72d37130 100644 --- a/.github/workflows/run_rc_validation.yml +++ b/.github/workflows/run_rc_validation.yml @@ -106,7 +106,7 @@ jobs: if: ${{github.event.inputs.RUN_SQL_TAXI_WITH_DATAFLOW == 'true'}} strategy: matrix: - py_version: [3.8] + py_version: [3.9] steps: - name: Checkout code uses: actions/checkout@v4 @@ -171,7 +171,7 @@ jobs: if: ${{github.event.inputs.RUN_PYTHON_CROSS_VALIDATION == 'true'}} strategy: matrix: - py_version: [3.8] + py_version: [3.9] steps: - name: Checkout code uses: actions/checkout@v4 @@ -286,7 +286,7 @@ jobs: - name: Install Python uses: actions/setup-python@v5 with: - python-version: '3.8' + python-version: '3.9' - name: Setting python env uses: ./.github/actions/common-rc-validation @@ -351,7 +351,7 @@ jobs: if: ${{github.event.inputs.RUN_DIRECT_RUNNER_TESTS == 'true' }} strategy: matrix: - py_version: [3.8] + py_version: [3.9] needs: generate_shared_pubsub steps: - name: Checkout code @@ -399,7 +399,7 @@ jobs: if: ${{github.event.inputs.RUN_DATAFLOW_RUNNER_TESTS=='true'}} strategy: matrix: - py_version: [3.8] + py_version: [3.9] needs: [generate_shared_pubsub] steps: - name: Checkout code @@ -452,7 +452,7 @@ jobs: if: ${{github.event.inputs.RUN_DIRECT_RUNNER_TESTS == 'true' }} strategy: matrix: - py_version: [3.8] + py_version: [3.9] needs: [generate_shared_pubsub] steps: - name: Checkout code @@ -501,7 +501,7 @@ jobs: if: ${{github.event.inputs.RUN_DATAFLOW_RUNNER_TESTS=='true'}} strategy: matrix: - py_version: [3.8] + py_version: [3.9] needs: [generate_shared_pubsub] steps: - name: Checkout code diff --git a/.github/workflows/typescript_tests.yml b/.github/workflows/typescript_tests.yml index 1b45ea67b5c6..a4e4c2926f84 100644 --- a/.github/workflows/typescript_tests.yml +++ b/.github/workflows/typescript_tests.yml @@ -85,7 +85,7 @@ jobs: - name: Install Python uses: actions/setup-python@v5 with: - python-version: 3.8 + python-version: 3.9 - name: Setup Beam Python 
working-directory: ./sdks/python run: | @@ -140,7 +140,7 @@ jobs: - name: Install python uses: actions/setup-python@v5 with: - python-version: 3.8 + python-version: 3.9 - name: Setup Beam Python working-directory: ./sdks/python run: | diff --git a/.github/workflows/update_python_dependencies.yml b/.github/workflows/update_python_dependencies.yml index a91aff39f29a..0ab52e97b9f0 100644 --- a/.github/workflows/update_python_dependencies.yml +++ b/.github/workflows/update_python_dependencies.yml @@ -56,7 +56,6 @@ jobs: uses: ./.github/actions/setup-environment-action with: python-version: | - 3.8 3.9 3.10 3.11 diff --git a/.test-infra/jenkins/PythonTestProperties.groovy b/.test-infra/jenkins/PythonTestProperties.groovy index 98257a6e1c28..7e8e4ad3d8fd 100644 --- a/.test-infra/jenkins/PythonTestProperties.groovy +++ b/.test-infra/jenkins/PythonTestProperties.groovy @@ -20,10 +20,10 @@ class PythonTestProperties { // Indicates all supported Python versions. // This must be sorted in ascending order. final static List ALL_SUPPORTED_VERSIONS = [ - '3.8', '3.9', '3.10', - '3.11' + '3.11', + '3.12' ] final static List SUPPORTED_CONTAINER_TASKS = ALL_SUPPORTED_VERSIONS.collect { "py${it.replace('.', '')}" @@ -37,10 +37,10 @@ class PythonTestProperties { final static List CROSS_LANGUAGE_VALIDATES_RUNNER_PYTHON_VERSIONS = ESSENTIAL_VERSIONS final static List CROSS_LANGUAGE_VALIDATES_RUNNER_DATAFLOW_USING_SQL_PYTHON_VERSIONS = [HIGHEST_SUPPORTED] final static List VALIDATES_CONTAINER_DATAFLOW_PYTHON_VERSIONS = ALL_SUPPORTED_VERSIONS - final static String LOAD_TEST_PYTHON_VERSION = '3.8' - final static String RUN_INFERENCE_TEST_PYTHON_VERSION = '3.8' - final static String CHICAGO_TAXI_EXAMPLE_FLINK_PYTHON_VERSION = '3.8' + final static String LOAD_TEST_PYTHON_VERSION = '3.9' + final static String RUN_INFERENCE_TEST_PYTHON_VERSION = '3.9' + final static String CHICAGO_TAXI_EXAMPLE_FLINK_PYTHON_VERSION = '3.9' // Use for various shell scripts triggered by Jenkins. 
// Gradle scripts should use project.ext.pythonVersion defined by PythonNature/BeamModulePlugin. - final static String DEFAULT_INTERPRETER = 'python3.8' + final static String DEFAULT_INTERPRETER = 'python3.9' } diff --git a/.test-infra/jenkins/build.gradle b/.test-infra/jenkins/build.gradle index 37c9c4d8d6ae..1cc469116009 100644 --- a/.test-infra/jenkins/build.gradle +++ b/.test-infra/jenkins/build.gradle @@ -39,11 +39,11 @@ task generateMetricsReport { doLast { exec { executable 'sh' - args '-c', ". ${envdir}/bin/activate && tox -e py38-test -c ${toxConfigFilePath}" + args '-c', ". ${envdir}/bin/activate && tox -e py39-test -c ${toxConfigFilePath}" } exec { executable 'sh' - args '-c', ". ${envdir}/bin/activate && tox -e py38-generate-report -c ${toxConfigFilePath} -- --influx-db=${influxDb} --influx-host=${influxHost} --influx-port=${influxPort} --output-file=${generateMetricsReportPath}" + args '-c', ". ${envdir}/bin/activate && tox -e py39-generate-report -c ${toxConfigFilePath} -- --influx-db=${influxDb} --influx-host=${influxHost} --influx-port=${influxPort} --output-file=${generateMetricsReportPath}" } logger.info('Create metrics report file {}', generateMetricsReportPath) } diff --git a/.test-infra/jenkins/metrics_report/tox.ini b/.test-infra/jenkins/metrics_report/tox.ini index 026db5dc4860..7794771b6d7a 100644 --- a/.test-infra/jenkins/metrics_report/tox.ini +++ b/.test-infra/jenkins/metrics_report/tox.ini @@ -14,10 +14,10 @@ ; See the License for the specific language governing permissions and ; limitations under the License. ; -; TODO(https://github.com/apache/beam/issues/20209): Don't hardcode Py3.8 version. +; TODO(https://github.com/apache/beam/issues/20209): Don't hardcode Py3.9 version. 
[tox] skipsdist = True -envlist = py38-test,py38-generate-report +envlist = py39-test,py39-generate-report [testenv] commands_pre = @@ -25,12 +25,12 @@ commands_pre = pip --version pip check -[testenv:py38-test] +[testenv:py39-test] deps = -r requirements.txt passenv = WORKSPACE,INFLUXDB_USER,INFLUXDB_USER_PASSWORD commands = python -m unittest dashboards_parser.py -[testenv:py38-generate-report] +[testenv:py39-generate-report] deps = -r requirements.txt passenv = WORKSPACE,INFLUXDB_USER,INFLUXDB_USER_PASSWORD,GITHUB_WORKSPACE commands = python report_generator.py {posargs} diff --git a/.test-infra/junitxml_report.py b/.test-infra/junitxml_report.py index 3c386d03fe97..945d30cc2735 100644 --- a/.test-infra/junitxml_report.py +++ b/.test-infra/junitxml_report.py @@ -20,8 +20,8 @@ Example usage, comparing nosetests and pytest test collection: $ cd sdks/python $ rm *.xml -$ tox --recreate -e py38-gcp -$ tox --recreate -e py38-gcp-pytest +$ tox --recreate -e py39-gcp +$ tox --recreate -e py39-gcp-pytest $ python3 ../../.test-infra/junitxml_report.py nosetests*.xml | sort -u > nosetests.out $ python3 ../../.test-infra/junitxml_report.py pytest*.xml | sort -u > pytest.out $ diff -u nosetests.out pytest.out | less diff --git a/.test-infra/metrics/grafana/dashboards/perftests_metrics/Python_WordCount_IT_Benchmarks.json b/.test-infra/metrics/grafana/dashboards/perftests_metrics/Python_WordCount_IT_Benchmarks.json index 02e707b68bdf..519ebdb225db 100644 --- a/.test-infra/metrics/grafana/dashboards/perftests_metrics/Python_WordCount_IT_Benchmarks.json +++ b/.test-infra/metrics/grafana/dashboards/perftests_metrics/Python_WordCount_IT_Benchmarks.json @@ -224,7 +224,7 @@ "timeFrom": null, "timeRegions": [], "timeShift": null, - "title": "WordCountIT Batch 1Gb Files - py38", + "title": "WordCountIT Batch 1Gb Files - py39", "tooltip": { "shared": true, "sort": 0, diff --git a/build.gradle.kts b/build.gradle.kts index e6295384b753..c9fc62af6e5f 100644 --- a/build.gradle.kts +++ 
b/build.gradle.kts @@ -470,7 +470,6 @@ tasks.register("playgroundPreCommit") { tasks.register("pythonPreCommit") { dependsOn(":sdks:python:test-suites:tox:pycommon:preCommitPyCommon") - dependsOn(":sdks:python:test-suites:tox:py38:preCommitPy38") dependsOn(":sdks:python:test-suites:tox:py39:preCommitPy39") dependsOn(":sdks:python:test-suites:tox:py310:preCommitPy310") dependsOn(":sdks:python:test-suites:tox:py311:preCommitPy311") @@ -487,7 +486,6 @@ tasks.register("pythonDocsPreCommit") { } tasks.register("pythonDockerBuildPreCommit") { - dependsOn(":sdks:python:container:py38:docker") dependsOn(":sdks:python:container:py39:docker") dependsOn(":sdks:python:container:py310:docker") dependsOn(":sdks:python:container:py311:docker") @@ -502,20 +500,6 @@ tasks.register("pythonFormatterPreCommit") { dependsOn("sdks:python:test-suites:tox:pycommon:formatter") } -tasks.register("python38PostCommit") { - dependsOn(":sdks:python:test-suites:dataflow:py38:postCommitIT") - dependsOn(":sdks:python:test-suites:direct:py38:postCommitIT") - dependsOn(":sdks:python:test-suites:direct:py38:hdfsIntegrationTest") - dependsOn(":sdks:python:test-suites:direct:py38:azureIntegrationTest") - dependsOn(":sdks:python:test-suites:portable:py38:postCommitPy38") - // TODO: https://github.com/apache/beam/issues/22651 - // The default container uses Python 3.8. The goal here is to - // duild Docker images for TensorRT tests during run time for python versions - // other than 3.8 and add these tests in other python postcommit suites. 
- dependsOn(":sdks:python:test-suites:dataflow:py38:inferencePostCommitIT") - dependsOn(":sdks:python:test-suites:direct:py38:inferencePostCommitIT") -} - tasks.register("python39PostCommit") { dependsOn(":sdks:python:test-suites:dataflow:py39:postCommitIT") dependsOn(":sdks:python:test-suites:direct:py39:postCommitIT") @@ -546,12 +530,11 @@ tasks.register("python312PostCommit") { } tasks.register("portablePythonPreCommit") { - dependsOn(":sdks:python:test-suites:portable:py38:preCommitPy38") + dependsOn(":sdks:python:test-suites:portable:py39:preCommitPy39") dependsOn(":sdks:python:test-suites:portable:py312:preCommitPy312") } tasks.register("pythonSparkPostCommit") { - dependsOn(":sdks:python:test-suites:portable:py38:sparkValidatesRunner") dependsOn(":sdks:python:test-suites:portable:py39:sparkValidatesRunner") dependsOn(":sdks:python:test-suites:portable:py312:sparkValidatesRunner") } @@ -576,15 +559,15 @@ tasks.register("javaExamplesDataflowPrecommit") { tasks.register("whitespacePreCommit") { // TODO(https://github.com/apache/beam/issues/20209): Find a better way to specify the tasks without hardcoding py version. - dependsOn(":sdks:python:test-suites:tox:py38:archiveFilesToLint") - dependsOn(":sdks:python:test-suites:tox:py38:unpackFilesToLint") - dependsOn(":sdks:python:test-suites:tox:py38:whitespacelint") + dependsOn(":sdks:python:test-suites:tox:py39:archiveFilesToLint") + dependsOn(":sdks:python:test-suites:tox:py39:unpackFilesToLint") + dependsOn(":sdks:python:test-suites:tox:py39:whitespacelint") } tasks.register("typescriptPreCommit") { // TODO(https://github.com/apache/beam/issues/20209): Find a better way to specify the tasks without hardcoding py version. 
- dependsOn(":sdks:python:test-suites:tox:py38:eslint") - dependsOn(":sdks:python:test-suites:tox:py38:jest") + dependsOn(":sdks:python:test-suites:tox:py39:eslint") + dependsOn(":sdks:python:test-suites:tox:py39:jest") } tasks.register("pushAllRunnersDockerImages") { diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy index 9e5ffa8a1c42..9a581c3a0680 100644 --- a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy +++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy @@ -3144,10 +3144,10 @@ class BeamModulePlugin implements Plugin { mustRunAfter = [ ":runners:flink:${project.ext.latestFlinkVersion}:job-server:shadowJar", ':runners:spark:3:job-server:shadowJar', - ':sdks:python:container:py38:docker', ':sdks:python:container:py39:docker', ':sdks:python:container:py310:docker', ':sdks:python:container:py311:docker', + ':sdks:python:container:py312:docker', ] doLast { // TODO: Figure out GCS credentials and use real GCS input and output. 
diff --git a/examples/multi-language/README.md b/examples/multi-language/README.md index 4912eb14da39..90f5ae41943e 100644 --- a/examples/multi-language/README.md +++ b/examples/multi-language/README.md @@ -149,7 +149,7 @@ python -m apache_beam.runners.portability.expansion_service_main -p --ful ``` export DOCKER_ROOT= -./gradlew :sdks:python:container:py38:docker -Pdocker-repository-root=$DOCKER_ROOT -Pdocker-tag=latest +./gradlew :sdks:python:container:py39:docker -Pdocker-repository-root=$DOCKER_ROOT -Pdocker-tag=latest -docker push $DOCKER_ROOT/beam_python3.8_sdk:latest +docker push $DOCKER_ROOT/beam_python3.9_sdk:latest diff --git a/gradle.properties b/gradle.properties index 084d7d8999b3..20a478cd6aab 100644 --- a/gradle.properties +++ b/gradle.properties @@ -41,4 +41,4 @@ docker_image_default_repo_prefix=beam_ # supported flink versions flink_versions=1.15,1.16,1.17,1.18 # supported python versions -python_versions=3.8,3.9,3.10,3.11,3.12 +python_versions=3.9,3.10,3.11,3.12 diff --git a/local-env-setup.sh b/local-env-setup.sh index f13dc88432a6..ba30813b2bcc 100755 --- a/local-env-setup.sh +++ b/local-env-setup.sh @@ -55,7 +55,7 @@ if [ "$kernelname" = "Linux" ]; then exit fi - for ver in 3.8 3.9 3.10 3.11 3.12 3; do + for ver in 3.9 3.10 3.11 3.12 3; do apt install --yes python$ver-venv done @@ -89,7 +89,7 @@ elif [ "$kernelname" = "Darwin" ]; then echo "Installing openjdk@8" brew install openjdk@8 fi - for ver in 3.8 3.9 3.10 3.11 3.12; do + for ver in 3.9 3.10 3.11 3.12; do if brew ls --versions python@$ver > /dev/null; then echo "python@$ver already installed. 
Skipping" brew info python@$ver diff --git a/release/src/main/Dockerfile b/release/src/main/Dockerfile index 8f981fddfcd8..8d0f2229adfa 100644 --- a/release/src/main/Dockerfile +++ b/release/src/main/Dockerfile @@ -42,12 +42,11 @@ RUN curl https://pyenv.run | bash && \ echo 'command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"' >> /root/.bashrc && \ echo ''eval "$(pyenv init -)"'' >> /root/.bashrc && \ source /root/.bashrc && \ - pyenv install 3.8.9 && \ pyenv install 3.9.4 && \ pyenv install 3.10.7 && \ pyenv install 3.11.3 && \ pyenv install 3.12.3 && \ - pyenv global 3.8.9 3.9.4 3.10.7 3.11.3 3.12.3 + pyenv global 3.9.4 3.10.7 3.11.3 3.12.3 # Install a Go version >= 1.16 so we can bootstrap higher # Go versions diff --git a/release/src/main/python-release/python_release_automation.sh b/release/src/main/python-release/python_release_automation.sh index 2f6986885a96..248bdd9b65ac 100755 --- a/release/src/main/python-release/python_release_automation.sh +++ b/release/src/main/python-release/python_release_automation.sh @@ -19,7 +19,7 @@ source release/src/main/python-release/run_release_candidate_python_quickstart.sh source release/src/main/python-release/run_release_candidate_python_mobile_gaming.sh -for version in 3.8 3.9 3.10 3.11 3.12 +for version in 3.9 3.10 3.11 3.12 do run_release_candidate_python_quickstart "tar" "python${version}" run_release_candidate_python_mobile_gaming "tar" "python${version}" diff --git a/release/src/main/python-release/python_release_automation_utils.sh b/release/src/main/python-release/python_release_automation_utils.sh index 337ece8ba643..de77038ca684 100644 --- a/release/src/main/python-release/python_release_automation_utils.sh +++ b/release/src/main/python-release/python_release_automation_utils.sh @@ -82,14 +82,14 @@ function get_version() { ####################################### function download_files() { if [[ $1 = *"wheel"* ]]; then - if [[ $2 == "python3.7" ]]; then - 
BEAM_PYTHON_SDK_WHL="apache_beam-$VERSION*-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" - elif [[ $2 == "python3.8" ]]; then - BEAM_PYTHON_SDK_WHL="apache_beam-$VERSION*-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" - elif [[ $2 == "python3.9" ]]; then - BEAM_PYTHON_SDK_WHL="apache_beam-$VERSION*-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + if [[ $2 == "python3.9" ]]; then + BEAM_PYTHON_SDK_WHL="apache_beam-$VERSION*-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" elif [[ $2 == "python3.10" ]]; then BEAM_PYTHON_SDK_WHL="apache_beam-$VERSION*-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + elif [[ $2 == "python3.11" ]]; then + BEAM_PYTHON_SDK_WHL="apache_beam-$VERSION*-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + elif [[ $2 == "python3.12" ]]; then + BEAM_PYTHON_SDK_WHL="apache_beam-$VERSION*-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" else echo "Unable to determine a Beam wheel for interpreter version $2." exit 1 diff --git a/release/src/main/scripts/run_rc_validation.sh b/release/src/main/scripts/run_rc_validation.sh index 91bfa9e2f8bb..9c93ed4ef4d4 100755 --- a/release/src/main/scripts/run_rc_validation.sh +++ b/release/src/main/scripts/run_rc_validation.sh @@ -99,7 +99,7 @@ HUB_VERSION=2.12.0 HUB_ARTIFACTS_NAME=hub-linux-amd64-${HUB_VERSION} BACKUP_BASHRC=.bashrc_backup_$(date +"%Y%m%d%H%M%S") BACKUP_M2=settings_backup_$(date +"%Y%m%d%H%M%S").xml -declare -a PYTHON_VERSIONS_TO_VALIDATE=("python3.8") +declare -a PYTHON_VERSIONS_TO_VALIDATE=("python3.9") echo "" echo "====================Checking Environment & Variables=================" echo "PLEASE update RC_VALIDATE_CONFIGS in file script.config first." 
diff --git a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java index 37c20c61ad8e..01ceac9da585 100644 --- a/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java +++ b/runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowRunnerTest.java @@ -1255,8 +1255,8 @@ public void testNoStagingLocationAndNoTempLocationFails() { @Test public void testApplySdkEnvironmentOverrides() throws IOException { DataflowPipelineOptions options = buildPipelineOptions(); - String dockerHubPythonContainerUrl = "apache/beam_python3.8_sdk:latest"; - String gcrPythonContainerUrl = "gcr.io/apache-beam-testing/beam-sdk/beam_python3.8_sdk:latest"; + String dockerHubPythonContainerUrl = "apache/beam_python3.9_sdk:latest"; + String gcrPythonContainerUrl = "gcr.io/apache-beam-testing/beam-sdk/beam_python3.9_sdk:latest"; options.setSdkHarnessContainerImageOverrides(".*python.*," + gcrPythonContainerUrl); DataflowRunner runner = DataflowRunner.fromOptions(options); RunnerApi.Pipeline pipeline = @@ -1297,8 +1297,8 @@ public void testApplySdkEnvironmentOverrides() throws IOException { @Test public void testApplySdkEnvironmentOverridesByDefault() throws IOException { DataflowPipelineOptions options = buildPipelineOptions(); - String dockerHubPythonContainerUrl = "apache/beam_python3.8_sdk:latest"; - String gcrPythonContainerUrl = "gcr.io/cloud-dataflow/v1beta3/beam_python3.8_sdk:latest"; + String dockerHubPythonContainerUrl = "apache/beam_python3.9_sdk:latest"; + String gcrPythonContainerUrl = "gcr.io/cloud-dataflow/v1beta3/beam_python3.9_sdk:latest"; DataflowRunner runner = DataflowRunner.fromOptions(options); RunnerApi.Pipeline pipeline = RunnerApi.Pipeline.newBuilder() diff --git a/sdks/python/apache_beam/__init__.py 
b/sdks/python/apache_beam/__init__.py index 27c2b293fbd0..af88934b0e71 100644 --- a/sdks/python/apache_beam/__init__.py +++ b/sdks/python/apache_beam/__init__.py @@ -70,7 +70,7 @@ import warnings if sys.version_info.major == 3: - if sys.version_info.minor <= 7 or sys.version_info.minor >= 13: + if sys.version_info.minor <= 8 or sys.version_info.minor >= 13: warnings.warn( 'This version of Apache Beam has not been sufficiently tested on ' 'Python %s.%s. You may encounter bugs or missing features.' % diff --git a/sdks/python/apache_beam/io/azure/integration_test/azure_integration_test.sh b/sdks/python/apache_beam/io/azure/integration_test/azure_integration_test.sh index cfac5421093d..e951aa9dea8a 100755 --- a/sdks/python/apache_beam/io/azure/integration_test/azure_integration_test.sh +++ b/sdks/python/apache_beam/io/azure/integration_test/azure_integration_test.sh @@ -23,7 +23,7 @@ if [[ $# != 1 ]]; then printf "Usage: \n$> ./apache_beam/io/azure/integration_test/azure_integration_test.sh " printf "\n\tpython_version: [required] Python version used for container build and run tests." - printf " Use 'python:3.8' for Python3.8." + printf " Use 'python:3.9' for Python3.9." exit 1 fi diff --git a/sdks/python/apache_beam/io/hdfs_integration_test/hdfs_integration_test.sh b/sdks/python/apache_beam/io/hdfs_integration_test/hdfs_integration_test.sh index 98cf4f74e4ab..b6ff18018a38 100755 --- a/sdks/python/apache_beam/io/hdfs_integration_test/hdfs_integration_test.sh +++ b/sdks/python/apache_beam/io/hdfs_integration_test/hdfs_integration_test.sh @@ -23,7 +23,7 @@ if [[ $# != 1 ]]; then printf "Usage: \n$> ./apache_beam/io/hdfs_integration_test/hdfs_integration_test.sh " printf "\n\tpython_version: [required] Python version used for container build and run tests." - printf " Use 'python:3.8' for Python3.8." + printf " Use 'python:3.9' for Python3.9." 
exit 1 fi diff --git a/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py b/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py index 20cae582f320..97996bd6cbb2 100644 --- a/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py +++ b/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py @@ -82,7 +82,7 @@ _LOGGER = logging.getLogger(__name__) -_PYTHON_VERSIONS_SUPPORTED_BY_DATAFLOW = ['3.8', '3.9', '3.10', '3.11', '3.12'] +_PYTHON_VERSIONS_SUPPORTED_BY_DATAFLOW = ['3.9', '3.10', '3.11', '3.12'] class Environment(object): diff --git a/sdks/python/apache_beam/runners/dataflow/internal/apiclient_test.py b/sdks/python/apache_beam/runners/dataflow/internal/apiclient_test.py index 8331d9cf3919..3f0b8b04ba7f 100644 --- a/sdks/python/apache_beam/runners/dataflow/internal/apiclient_test.py +++ b/sdks/python/apache_beam/runners/dataflow/internal/apiclient_test.py @@ -938,7 +938,7 @@ def test_experiment_use_multiple_sdk_containers(self): @mock.patch( 'apache_beam.runners.dataflow.internal.apiclient.sys.version_info', - (3, 8)) + (3, 9)) def test_get_python_sdk_name(self): pipeline_options = PipelineOptions([ '--project', @@ -957,7 +957,7 @@ def test_get_python_sdk_name(self): 1, FAKE_PIPELINE_URL) self.assertEqual( - 'Apache Beam Python 3.8 SDK', environment._get_python_sdk_name()) + 'Apache Beam Python 3.9 SDK', environment._get_python_sdk_name()) @mock.patch( 'apache_beam.runners.dataflow.internal.apiclient.sys.version_info', @@ -998,12 +998,12 @@ def test_interpreter_version_check_passes_with_experiment(self): @mock.patch( 'apache_beam.runners.dataflow.internal.apiclient.sys.version_info', - (3, 8, 2)) + (3, 9, 2)) @mock.patch( 'apache_beam.runners.dataflow.internal.apiclient.' 
'beam_version.__version__', '2.2.0') - def test_interpreter_version_check_passes_py38(self): + def test_interpreter_version_check_passes_py39(self): pipeline_options = PipelineOptions([]) apiclient._verify_interpreter_version_is_supported(pipeline_options) diff --git a/sdks/python/container/build.gradle b/sdks/python/container/build.gradle index f07b6f743fa4..14c08a3a539b 100644 --- a/sdks/python/container/build.gradle +++ b/sdks/python/container/build.gradle @@ -20,7 +20,7 @@ plugins { id 'org.apache.beam.module' } applyGoNature() description = "Apache Beam :: SDKs :: Python :: Container" -int min_python_version=8 +int min_python_version=9 int max_python_version=12 configurations { diff --git a/sdks/python/container/py38/base_image_requirements.txt b/sdks/python/container/py38/base_image_requirements.txt deleted file mode 100644 index 263b10289386..000000000000 --- a/sdks/python/container/py38/base_image_requirements.txt +++ /dev/null @@ -1,163 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Autogenerated requirements file for Apache Beam py38 container image. -# Run ./gradlew :sdks:python:container:generatePythonRequirementsAll to update. 
-# Do not edit manually, adjust ../base_image_requirements_manual.txt or -# Apache Beam's setup.py instead, and regenerate the list. -# You will need Python interpreters for all versions supported by Beam, see: -# https://s.apache.org/beam-python-dev-wiki -# Reach out to a committer if you need help. - -annotated-types==0.7.0 -async-timeout==4.0.3 -attrs==24.2.0 -backports.zoneinfo==0.2.1 -beautifulsoup4==4.12.3 -bs4==0.0.2 -build==1.2.1 -cachetools==5.4.0 -certifi==2024.7.4 -cffi==1.17.0 -charset-normalizer==3.3.2 -click==8.1.7 -cloudpickle==2.2.1 -cramjam==2.8.3 -crcmod==1.7 -cryptography==43.0.0 -Cython==3.0.10 -deprecation==2.1.0 -dill==0.3.1.1 -dnspython==2.6.1 -docker==7.1.0 -docopt==0.6.2 -docstring_parser==0.16 -exceptiongroup==1.2.2 -execnet==2.1.1 -fastavro==1.9.5 -fasteners==0.19 -freezegun==1.5.1 -future==1.0.0 -google-api-core==2.19.1 -google-api-python-client==2.140.0 -google-apitools==0.5.31 -google-auth==2.33.0 -google-auth-httplib2==0.2.0 -google-cloud-aiplatform==1.61.0 -google-cloud-bigquery==3.25.0 -google-cloud-bigquery-storage==2.25.0 -google-cloud-bigtable==2.25.0 -google-cloud-core==2.4.1 -google-cloud-datastore==2.20.0 -google-cloud-dlp==3.21.0 -google-cloud-language==2.14.0 -google-cloud-profiler==4.1.0 -google-cloud-pubsub==2.23.0 -google-cloud-pubsublite==1.11.1 -google-cloud-recommendations-ai==0.10.12 -google-cloud-resource-manager==1.12.5 -google-cloud-spanner==3.48.0 -google-cloud-storage==2.18.2 -google-cloud-videointelligence==2.13.5 -google-cloud-vision==3.7.4 -google-crc32c==1.5.0 -google-resumable-media==2.7.2 -googleapis-common-protos==1.63.2 -greenlet==3.0.3 -grpc-google-iam-v1==0.13.1 -grpc-interceptor==0.15.4 -grpcio==1.65.4 -grpcio-status==1.62.3 -guppy3==3.1.4.post1 -hdfs==2.7.3 -httplib2==0.22.0 -hypothesis==6.110.1 -idna==3.7 -importlib_metadata==8.2.0 -importlib_resources==6.4.0 -iniconfig==2.0.0 -Jinja2==3.0.3 -joblib==1.4.2 -Js2Py==0.74 -jsonpickle==3.2.2 -jsonschema==4.23.0 -jsonschema-specifications==2023.12.1 
-MarkupSafe==2.1.5 -mmh3==4.1.0 -mock==5.1.0 -nltk==3.9.1 -nose==1.3.7 -numpy==1.24.4 -oauth2client==4.1.3 -objsize==0.7.0 -orjson==3.10.7 -overrides==7.7.0 -packaging==24.1 -pandas==2.0.3 -parameterized==0.9.0 -pkgutil_resolve_name==1.3.10 -pluggy==1.5.0 -proto-plus==1.24.0 -protobuf==4.25.4 -psycopg2-binary==2.9.9 -pyarrow==16.1.0 -pyarrow-hotfix==0.6 -pyasn1==0.6.0 -pyasn1_modules==0.4.0 -pycparser==2.22 -pydantic==2.8.2 -pydantic_core==2.20.1 -pydot==1.4.2 -PyHamcrest==2.1.0 -pyjsparser==2.7.1 -pymongo==4.8.0 -PyMySQL==1.1.1 -pyparsing==3.1.2 -pyproject_hooks==1.1.0 -pytest==7.4.4 -pytest-timeout==2.3.1 -pytest-xdist==3.6.1 -python-dateutil==2.9.0.post0 -python-snappy==0.7.2 -pytz==2024.1 -PyYAML==6.0.2 -redis==5.0.8 -referencing==0.35.1 -regex==2024.7.24 -requests==2.32.0 -requests-mock==1.12.1 -rpds-py==0.20.0 -rsa==4.9 -scikit-learn==1.3.2 -scipy==1.10.1 -shapely==2.0.5 -six==1.16.0 -sortedcontainers==2.4.0 -soupsieve==2.5 -SQLAlchemy==2.0.32 -sqlparse==0.5.1 -tenacity==8.5.0 -testcontainers==3.7.1 -threadpoolctl==3.5.0 -tomli==2.0.1 -tqdm==4.66.5 -typing_extensions==4.12.2 -tzdata==2024.1 -tzlocal==5.2 -uritemplate==4.1.1 -urllib3==2.2.2 -wrapt==1.16.0 -zipp==3.19.2 -zstandard==0.23.0 diff --git a/sdks/python/container/py38/build.gradle b/sdks/python/container/py38/build.gradle deleted file mode 100644 index 304895a83718..000000000000 --- a/sdks/python/container/py38/build.gradle +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * License); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -plugins { - id 'base' - id 'org.apache.beam.module' -} -applyDockerNature() -applyPythonNature() - -pythonVersion = '3.8' - -apply from: "../common.gradle" diff --git a/sdks/python/container/run_generate_requirements.sh b/sdks/python/container/run_generate_requirements.sh index 6c160bc6ac9e..11b1c42ac23d 100755 --- a/sdks/python/container/run_generate_requirements.sh +++ b/sdks/python/container/run_generate_requirements.sh @@ -25,14 +25,14 @@ # It is recommended to run this script via gradle commands such as: # ./gradlew :sdks:python:container:generatePythonRequirementsAll -# ./gradlew :sdks:python:container:py38:generatePythonRequirements +# ./gradlew :sdks:python:container:py39:generatePythonRequirements # You will need Python interpreters for all versions supported by Beam, see: # https://s.apache.org/beam-python-dev-wiki if [[ $# -lt 2 ]]; then - printf "Example usage: \n$> ./sdks/python/container/run_generate_requirements.sh 3.8 " - printf "\n\where 3.8 is the Python major.minor version." + printf "Example usage: \n$> ./sdks/python/container/run_generate_requirements.sh 3.9 " + printf "\n\where 3.9 is the Python major.minor version." 
exit 1 fi diff --git a/sdks/python/container/run_validatescontainer.sh b/sdks/python/container/run_validatescontainer.sh index 5ee3342a1efa..ee10388f5e18 100755 --- a/sdks/python/container/run_validatescontainer.sh +++ b/sdks/python/container/run_validatescontainer.sh @@ -24,13 +24,13 @@ # REGION -> Region name to use for Dataflow # # Execute from the root of the repository: -# test Python3.8 x86 container: -# ./gradlew :sdks:python:test-suites:dataflow:py38:validatesContainer +# test Python3.9 x86 container: +# ./gradlew :sdks:python:test-suites:dataflow:py39:validatesContainer # or test all supported python versions x86 containers together: # ./gradlew :sdks:python:test-suites:dataflow:validatesContainer # -# Note: ARM test suites only run on github actions. For example, to test Python3.8 ARM containers, -# commenting `Run Python ValidatesContainer Dataflow ARM (3.8)` will trigger the test. +# Note: ARM test suites only run on github actions. For example, to test Python3.9 ARM containers, +# commenting `Run Python ValidatesContainer Dataflow ARM (3.9)` will trigger the test. echo "This script must be executed in the root of beam project. Please set GCS_LOCATION, PROJECT and REGION as desired." diff --git a/sdks/python/expansion-service-container/Dockerfile b/sdks/python/expansion-service-container/Dockerfile index d3cd4a4afad3..4e82165f594c 100644 --- a/sdks/python/expansion-service-container/Dockerfile +++ b/sdks/python/expansion-service-container/Dockerfile @@ -17,8 +17,8 @@ ############################################################################### # We just need to support one Python version supported by Beam. -# Picking the current default Beam Python version which is Python 3.8. -FROM python:3.8-bookworm as expansion-service +# Picking the current default Beam Python version which is Python 3.9. 
+FROM python:3.9-bookworm as expansion-service LABEL Author "Apache Beam " ARG TARGETOS ARG TARGETARCH diff --git a/sdks/python/expansion-service-container/build.gradle b/sdks/python/expansion-service-container/build.gradle index 3edcaee35b4a..4e46f060e59f 100644 --- a/sdks/python/expansion-service-container/build.gradle +++ b/sdks/python/expansion-service-container/build.gradle @@ -40,7 +40,7 @@ task copyDockerfileDependencies(type: Copy) { } task copyRequirementsFile(type: Copy) { - from project(':sdks:python:container:py38').fileTree("./") + from project(':sdks:python:container:py39').fileTree("./") include 'base_image_requirements.txt' rename 'base_image_requirements.txt', 'requirements.txt' setDuplicatesStrategy(DuplicatesStrategy.INCLUDE) diff --git a/sdks/python/setup.py b/sdks/python/setup.py index da9d79193207..6ba5c11fd0b7 100644 --- a/sdks/python/setup.py +++ b/sdks/python/setup.py @@ -522,7 +522,6 @@ def get_portability_package_data(): 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: Apache Software License', 'Operating System :: POSIX :: Linux', - 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', diff --git a/sdks/python/test-suites/containers/tensorrt_runinference/tensor_rt.dockerfile b/sdks/python/test-suites/containers/tensorrt_runinference/tensor_rt.dockerfile index 6d99a6393fa9..c51099264e8c 100644 --- a/sdks/python/test-suites/containers/tensorrt_runinference/tensor_rt.dockerfile +++ b/sdks/python/test-suites/containers/tensorrt_runinference/tensor_rt.dockerfile @@ -22,7 +22,7 @@ ENV PATH="/usr/src/tensorrt/bin:${PATH}" WORKDIR /workspace -COPY --from=apache/beam_python3.8_sdk:latest /opt/apache/beam /opt/apache/beam +COPY --from=apache/beam_python3.9_sdk:latest /opt/apache/beam /opt/apache/beam RUN pip install --upgrade pip \ && pip install torch>=1.7.1 \ @@ -32,4 +32,4 @@ RUN pip install --upgrade pip \ && pip 
install cuda-python ENTRYPOINT [ "/opt/apache/beam/boot" ] -RUN apt-get update && apt-get install -y python3.8-venv +RUN apt-get update && apt-get install -y python3.9-venv diff --git a/sdks/python/test-suites/dataflow/py38/build.gradle b/sdks/python/test-suites/dataflow/py38/build.gradle deleted file mode 100644 index b3c3a5bfb8a6..000000000000 --- a/sdks/python/test-suites/dataflow/py38/build.gradle +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * License); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -apply plugin: org.apache.beam.gradle.BeamModulePlugin -applyPythonNature() - -// Required to setup a Python 3 virtualenv and task names. -pythonVersion = '3.8' -apply from: "../common.gradle" diff --git a/sdks/python/test-suites/direct/py38/build.gradle b/sdks/python/test-suites/direct/py38/build.gradle deleted file mode 100644 index edf86a7bf5a8..000000000000 --- a/sdks/python/test-suites/direct/py38/build.gradle +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * License); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -plugins { id 'org.apache.beam.module' } -applyPythonNature() - -// Required to setup a Python 3 virtualenv and task names. -pythonVersion = '3.8' -apply from: '../common.gradle' diff --git a/sdks/python/test-suites/gradle.properties b/sdks/python/test-suites/gradle.properties index 3d16ee63f12a..f8c04e0f5609 100644 --- a/sdks/python/test-suites/gradle.properties +++ b/sdks/python/test-suites/gradle.properties @@ -23,13 +23,13 @@ # dataflow test-suites # (TODO): https://github.com/apache/beam/issues/21971 # Add python 3.10 to dataflow test-suites -dataflow_precommit_it_task_py_versions=3.8,3.12 -dataflow_mongodbio_it_task_py_versions=3.8 -dataflow_chicago_taxi_example_task_py_versions=3.8 +dataflow_precommit_it_task_py_versions=3.9,3.12 +dataflow_mongodbio_it_task_py_versions=3.9 +dataflow_chicago_taxi_example_task_py_versions=3.9 # TODO: Enable following tests after making sure we have enough capacity. -dataflow_validates_runner_batch_tests=3.8,3.12 -dataflow_validates_runner_streaming_tests=3.8,3.12 +dataflow_validates_runner_batch_tests=3.9,3.12 +dataflow_validates_runner_streaming_tests=3.9,3.12 dataflow_examples_postcommit_py_versions=3.12 # TFX_BSL is not yet supported on Python 3.10. 
dataflow_cloudml_benchmark_tests_py_versions=3.9 @@ -38,14 +38,14 @@ direct_mongodbio_it_task_py_versions=3.12 # flink runner test-suites flink_validates_runner_precommit_py_versions=3.12 -flink_validates_runner_postcommit_py_versions=3.8,3.12 -flink_examples_postcommit_py_versions=3.8,3.12 +flink_validates_runner_postcommit_py_versions=3.9,3.12 +flink_examples_postcommit_py_versions=3.9,3.12 # samza runner test-suites -samza_validates_runner_postcommit_py_versions=3.8,3.12 +samza_validates_runner_postcommit_py_versions=3.9,3.12 # spark runner test-suites -spark_examples_postcommit_py_versions=3.8,3.12 +spark_examples_postcommit_py_versions=3.9,3.12 # cross language postcommit python test suites -cross_language_validates_py_versions=3.8,3.12 +cross_language_validates_py_versions=3.9,3.12 diff --git a/sdks/python/test-suites/portable/py38/build.gradle b/sdks/python/test-suites/portable/py38/build.gradle deleted file mode 100644 index e15443fa935f..000000000000 --- a/sdks/python/test-suites/portable/py38/build.gradle +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * License); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -apply plugin: org.apache.beam.gradle.BeamModulePlugin -applyPythonNature() - -addPortableWordCountTasks() - -// Required to setup a Python 3.8 virtualenv and task names. -pythonVersion = '3.8' -apply from: "../common.gradle" diff --git a/sdks/python/test-suites/tox/common.gradle b/sdks/python/test-suites/tox/common.gradle index df42a2c384c2..c5ab61030ef1 100644 --- a/sdks/python/test-suites/tox/common.gradle +++ b/sdks/python/test-suites/tox/common.gradle @@ -33,7 +33,7 @@ test.dependsOn "testPy${pythonVersionSuffix}ML" // test.dependsOn "testPy${pythonVersionSuffix}Dask" project.tasks.register("preCommitPy${pythonVersionSuffix}") { - // Since codecoverage reports will always be generated for py38, + // Since codecoverage reports will always be generated for py39, // all tests will be exercised. // dependsOn = ["testPy${pythonVersionSuffix}Cloud", "testPython${pythonVersionSuffix}"] dependsOn = ["testPy${pythonVersionSuffix}Cloud", "testPython${pythonVersionSuffix}"] diff --git a/sdks/python/test-suites/tox/py38/build.gradle b/sdks/python/test-suites/tox/py38/build.gradle deleted file mode 100644 index 2ca82d3d9268..000000000000 --- a/sdks/python/test-suites/tox/py38/build.gradle +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * License); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -/** - * Unit tests for Python 3.8 - */ - -plugins { id 'org.apache.beam.module' } -applyPythonNature() - -// Required to setup a Python 3 virtualenv and task names. -pythonVersion = '3.8' - -def posargs = project.findProperty("posargs") ?: "" - -apply from: "../common.gradle" - -toxTask "testPy38CloudCoverage", "py38-cloudcoverage", "${posargs}" -test.dependsOn "testPy38CloudCoverage" -project.tasks.register("preCommitPyCoverage") { - dependsOn = ["testPy38CloudCoverage"] -} - -// Dep Postcommit runs test suites that evaluate compatibility of particular -// dependencies. It is exercised on a single Python version. -// -// Should still leave at least one version in PreCommit unless the marked tests -// are also exercised by existing PreCommit -// e.g. pyarrow and pandas also run on PreCommit Dataframe and Coverage -project.tasks.register("postCommitPyDep") {} - -// Create a test task for supported major versions of pyarrow -// We should have a test for the lowest supported version and -// For versions that we would like to prioritize for testing, -// for example versions released in a timeframe of last 1-2 years. 
- -toxTask "testPy38pyarrow-3", "py38-pyarrow-3", "${posargs}" -test.dependsOn "testPy38pyarrow-3" -postCommitPyDep.dependsOn "testPy38pyarrow-3" - -toxTask "testPy38pyarrow-9", "py38-pyarrow-9", "${posargs}" -test.dependsOn "testPy38pyarrow-9" -postCommitPyDep.dependsOn "testPy38pyarrow-9" - -toxTask "testPy38pyarrow-10", "py38-pyarrow-10", "${posargs}" -test.dependsOn "testPy38pyarrow-10" -postCommitPyDep.dependsOn "testPy38pyarrow-10" - -toxTask "testPy38pyarrow-11", "py38-pyarrow-11", "${posargs}" -test.dependsOn "testPy38pyarrow-11" -postCommitPyDep.dependsOn "testPy38pyarrow-11" - -toxTask "testPy38pyarrow-12", "py38-pyarrow-12", "${posargs}" -test.dependsOn "testPy38pyarrow-12" -postCommitPyDep.dependsOn "testPy38pyarrow-12" - -toxTask "testPy38pyarrow-13", "py38-pyarrow-13", "${posargs}" -test.dependsOn "testPy38pyarrow-13" -postCommitPyDep.dependsOn "testPy38pyarrow-13" - -toxTask "testPy38pyarrow-14", "py38-pyarrow-14", "${posargs}" -test.dependsOn "testPy38pyarrow-14" -postCommitPyDep.dependsOn "testPy38pyarrow-14" - -toxTask "testPy38pyarrow-15", "py38-pyarrow-15", "${posargs}" -test.dependsOn "testPy38pyarrow-15" -postCommitPyDep.dependsOn "testPy38pyarrow-15" - -toxTask "testPy38pyarrow-16", "py38-pyarrow-16", "${posargs}" -test.dependsOn "testPy38pyarrow-16" -postCommitPyDep.dependsOn "testPy38pyarrow-16" - -// Create a test task for each supported minor version of pandas -toxTask "testPy38pandas-14", "py38-pandas-14", "${posargs}" -test.dependsOn "testPy38pandas-14" -postCommitPyDep.dependsOn "testPy38pandas-14" - -toxTask "testPy38pandas-15", "py38-pandas-15", "${posargs}" -test.dependsOn "testPy38pandas-15" -postCommitPyDep.dependsOn "testPy38pandas-15" - -toxTask "testPy38pandas-20", "py38-pandas-20", "${posargs}" -test.dependsOn "testPy38pandas-20" -postCommitPyDep.dependsOn "testPy38pandas-20" - -// TODO(https://github.com/apache/beam/issues/31192): Add below suites -// after dependency compat tests suite switches to Python 3.9 or we add -// 
Python 2.2 support. - -// toxTask "testPy39pandas-21", "py39-pandas-21", "${posargs}" -// test.dependsOn "testPy39pandas-21" -// postCommitPyDep.dependsOn "testPy39pandas-21" - -// toxTask "testPy39pandas-22", "py39-pandas-22", "${posargs}" -// test.dependsOn "testPy39pandas-22" -// postCommitPyDep.dependsOn "testPy39pandas-22" - -// TODO(https://github.com/apache/beam/issues/30908): Revise what are we testing - -// Create a test task for each minor version of pytorch -toxTask "testPy38pytorch-19", "py38-pytorch-19", "${posargs}" -test.dependsOn "testPy38pytorch-19" -postCommitPyDep.dependsOn "testPy38pytorch-19" - -toxTask "testPy38pytorch-110", "py38-pytorch-110", "${posargs}" -test.dependsOn "testPy38pytorch-110" -postCommitPyDep.dependsOn "testPy38pytorch-110" - -toxTask "testPy38pytorch-111", "py38-pytorch-111", "${posargs}" -test.dependsOn "testPy38pytorch-111" -postCommitPyDep.dependsOn "testPy38pytorch-111" - -toxTask "testPy38pytorch-112", "py38-pytorch-112", "${posargs}" -test.dependsOn "testPy38pytorch-112" -postCommitPyDep.dependsOn "testPy38pytorch-112" - -toxTask "testPy38pytorch-113", "py38-pytorch-113", "${posargs}" -test.dependsOn "testPy38pytorch-113" -postCommitPyDep.dependsOn "testPy38pytorch-113" - -// run on precommit -toxTask "testPy38pytorch-200", "py38-pytorch-200", "${posargs}" -test.dependsOn "testPy38pytorch-200" -postCommitPyDep.dependsOn "testPy38pytorch-200" - -toxTask "testPy38tft-113", "py38-tft-113", "${posargs}" -test.dependsOn "testPy38tft-113" -postCommitPyDep.dependsOn "testPy38tft-113" - -// TODO(https://github.com/apache/beam/issues/25796) - uncomment onnx tox task once onnx supports protobuf 4.x.x -// Create a test task for each minor version of onnx -// toxTask "testPy38onnx-113", "py38-onnx-113", "${posargs}" -// test.dependsOn "testPy38onnx-113" -// postCommitPyDep.dependsOn "testPy38onnx-113" - -// Create a test task for each minor version of tensorflow -toxTask "testPy38tensorflow-212", "py38-tensorflow-212", 
"${posargs}" -test.dependsOn "testPy38tensorflow-212" -postCommitPyDep.dependsOn "testPy38tensorflow-212" - -// Create a test task for each minor version of transformers -toxTask "testPy38transformers-428", "py38-transformers-428", "${posargs}" -test.dependsOn "testPy38transformers-428" -postCommitPyDep.dependsOn "testPy38transformers-428" - -toxTask "testPy38transformers-429", "py38-transformers-429", "${posargs}" -test.dependsOn "testPy38transformers-429" -postCommitPyDep.dependsOn "testPy38transformers-429" - -toxTask "testPy38transformers-430", "py38-transformers-430", "${posargs}" -test.dependsOn "testPy38transformers-430" -postCommitPyDep.dependsOn "testPy38transformers-430" - -toxTask "testPy38embeddingsMLTransform", "py38-embeddings", "${posargs}" -test.dependsOn "testPy38embeddingsMLTransform" -postCommitPyDep.dependsOn "testPy38embeddingsMLTransform" - -// Part of MLTransform embeddings test suite but requires tensorflow hub, which we need to test on -// mutliple versions so keeping this suite separate. 
-toxTask "testPy38TensorflowHubEmbeddings-014", "py38-TFHubEmbeddings-014", "${posargs}" -test.dependsOn "testPy38TensorflowHubEmbeddings-014" -postCommitPyDep.dependsOn "testPy38TensorflowHubEmbeddings-014" - -toxTask "testPy38TensorflowHubEmbeddings-015", "py38-TFHubEmbeddings-015", "${posargs}" -test.dependsOn "testPy38TensorflowHubEmbeddings-015" -postCommitPyDep.dependsOn "testPy38TensorflowHubEmbeddings-015" - -toxTask "whitespacelint", "whitespacelint", "${posargs}" - -task archiveFilesToLint(type: Zip) { - archiveFileName = "files-to-whitespacelint.zip" - destinationDirectory = file("$buildDir/dist") - - from ("$rootProject.projectDir") { - include "**/*.md" - include "**/build.gradle" - include '**/build.gradle.kts' - exclude '**/build/**' // intermediate build directory - exclude 'website/www/site/themes/docsy/**' // fork to google/docsy - exclude "**/node_modules/*" - exclude "**/.gogradle/*" - } -} - -task unpackFilesToLint(type: Copy) { - from zipTree("$buildDir/dist/files-to-whitespacelint.zip") - into "$buildDir/files-to-whitespacelint" -} - -whitespacelint.dependsOn archiveFilesToLint, unpackFilesToLint -unpackFilesToLint.dependsOn archiveFilesToLint -archiveFilesToLint.dependsOn cleanPython - -toxTask "jest", "jest", "${posargs}" - -toxTask "eslint", "eslint", "${posargs}" - -task copyTsSource(type: Copy) { - from ("$rootProject.projectDir") { - include "sdks/python/apache_beam/runners/interactive/extensions/**/*" - exclude "sdks/python/apache_beam/runners/interactive/extensions/**/lib/*" - exclude "sdks/python/apache_beam/runners/interactive/extensions/**/node_modules/*" - } - into "$buildDir/ts" -} - -jest.dependsOn copyTsSource -eslint.dependsOn copyTsSource -copyTsSource.dependsOn cleanPython diff --git a/sdks/python/tox.ini b/sdks/python/tox.ini index aa0200f75005..f95b21306fa9 100644 --- a/sdks/python/tox.ini +++ b/sdks/python/tox.ini @@ -17,7 +17,7 @@ [tox] # new environments will be excluded by default unless explicitly added to envlist. 
-envlist = py38,py39,py310,py311,py312,py38-{cloud,cloudcoverage,dask},py39-{cloud},py310-{cloud,dask},py311-{cloud,dask},py312-{cloud,dask},docs,lint,mypy,whitespacelint +envlist = py39,py310,py311,py312,py39-{cloud},py310-{cloud,dask},py311-{cloud,dask},py312-{cloud,dask},docs,lint,mypy,whitespacelint toxworkdir = {toxinidir}/target/{env:ENV_NAME:.tox} [pycodestyle] @@ -67,26 +67,26 @@ commands_post = commands = false {envname} is misconfigured -[testenv:py{38,39,310,311,312}] +[testenv:py{39,310,311,312}] commands = python apache_beam/examples/complete/autocomplete_test.py bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}" -[testenv:py{38,39,310,311,312}-win] +[testenv:py{39,310,311,312}-win] commands = python apache_beam/examples/complete/autocomplete_test.py bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}" install_command = {envbindir}/python.exe {envbindir}/pip.exe install --retries 10 {opts} {packages} list_dependencies_command = {envbindir}/python.exe {envbindir}/pip.exe freeze -[testenv:py{38,39,310,311,312}-cloud] +[testenv:py{39,310,311,312}-cloud] ; extras = test,gcp,interactive,dataframe,aws,azure extras = test,gcp,interactive,dataframe,aws,azure commands = python apache_beam/examples/complete/autocomplete_test.py bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}" -[testenv:py{38,39,310,311,312}-ml] +[testenv:py{39,310,311,312}-ml] # Don't set TMPDIR to avoid "AF_UNIX path too long" errors in certain tests. setenv = extras = test,gcp,dataframe,ml_test @@ -98,7 +98,7 @@ extras = test,dask commands = bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}" -[testenv:py38-cloudcoverage] +[testenv:py39-cloudcoverage] deps = pytest-cov==3.0.0 # Don't set TMPDIR to avoid "AF_UNIX path too long" errors in certain tests. 
@@ -271,7 +271,7 @@ commands = bash {toxinidir}/scripts/pytest_validates_runner.sh {envname} {toxinidir}/apache_beam/runners/portability/spark_runner_test.py {posargs} -[testenv:py{38,39}-pyarrow-{3,9,10,11,12,13,14,15,16}] +[testenv:py{39,310}-pyarrow-{3,9,10,11,12,13,14,15,16}] deps = # As a courtesy to users, test against the oldest allowed version of Pyarrow. # We'd have to increase the pyarrow lower bound when Python 3.9 is deprecated. @@ -296,7 +296,7 @@ commands = /bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_pyarrow {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret' -[testenv:py{38,39}-pandas-{14,15,20}] +[testenv:py{39,310}-pandas-{14,15,20}] deps = 14: pandas>=1.4.3,<1.5.0 # Exclude 1.5.0 and 1.5.1 because of https://github.com/pandas-dev/pandas/issues/45725 @@ -309,7 +309,7 @@ commands = # Run all DataFrame API unit tests bash {toxinidir}/scripts/run_pytest.sh {envname} 'apache_beam/dataframe' -[testenv:py{38,39}-tft-{113,114}] +[testenv:py{39,310}-tft-{113,114}] deps = # Help pip resolve conflict with typing-extensions due to an old version of tensorflow https://github.com/apache/beam/issues/30852 113: pydantic<2.0 @@ -317,7 +317,7 @@ deps = commands = bash {toxinidir}/scripts/run_pytest.sh {envname} 'apache_beam/ml/transforms apache_beam/examples/snippets/transforms/elementwise/mltransform_test.py' -[testenv:py{38,39}-pytorch-{19,110,111,112,113}] +[testenv:py{39,310}-pytorch-{19,110,111,112,113}] deps = 19: torch>=1.9.0,<1.10.0 110: torch>=1.10.0,<1.11.0 @@ -334,7 +334,7 @@ commands = # Allow exit code 5 (no tests run) so that we can run this command safely on arbitrary subdirectories. 
/bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_pytorch {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret' -[testenv:py{38,39}-pytorch-200] +[testenv:py{39,310}-pytorch-200] deps = 200: torch>=2.0.0,<2.1.0 @@ -349,8 +349,8 @@ commands = # Allow exit code 5 (no tests run) so that we can run this command safely on arbitrary subdirectories. /bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_pytorch {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret' -# TODO(https://github.com/apache/beam/issues/25796) - uncomment onnx tox task in tox/py38/build.gradle once onnx supports protobuf 4.x.x -[testenv:py{38,39}-onnx-113] +# TODO(https://github.com/apache/beam/issues/25796) - uncomment onnx tox task in tox/py39/build.gradle once onnx supports protobuf 4.x.x +[testenv:py{39,310}-onnx-113] # TODO(https://github.com/apache/beam/issues/25443) # apparently tox has problem when substitution key has single value. Change back to -onnx-{113,...} # when multiple onnx versions are tested. @@ -369,7 +369,7 @@ commands = # Run all ONNX unit tests pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_onnx {posargs} -[testenv:py{38,39}-tensorflow-212] +[testenv:py{39,310}-tensorflow-212] deps = 212: tensorflow>=2.12rc1,<2.13 # Help pip resolve conflict with typing-extensions for old version of TF https://github.com/apache/beam/issues/30852 @@ -382,7 +382,7 @@ commands = # Allow exit code 5 (no tests run) so that we can run this command safely on arbitrary subdirectories. /bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_tf {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret' -[testenv:py{38,39}-xgboost-{160,170}] +[testenv:py{39,310}-xgboost-{160,170}] deps = 160: xgboost>=1.6.0,<1.7.0 @@ -398,7 +398,7 @@ commands = # Allow exit code 5 (no tests run) so that we can run this command safely on arbitrary subdirectories. 
/bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_xgboost {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret' -[testenv:py{38,39}-transformers-{428,429,430}] +[testenv:py{39,310}-transformers-{428,429,430}] deps = 428: transformers>=4.28.0,<4.29.0 429: transformers>=4.29.0,<4.30.0 @@ -415,7 +415,7 @@ commands = # Allow exit code 5 (no tests run) so that we can run this command safely on arbitrary subdirectories. /bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_transformers {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret' -[testenv:py{38,311}-vertex-ai] +[testenv:py{39,311}-vertex-ai] deps = tensorflow==2.12.0 extras = test,gcp @@ -428,7 +428,7 @@ commands = /bin/sh -c 'pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_vertex_ai {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret' -[testenv:py{38,39}-embeddings] +[testenv:py{39,310}-embeddings] deps = sentence-transformers==2.2.2 passenv = HF_INFERENCE_TOKEN @@ -441,7 +441,7 @@ commands = /bin/sh -c 'pytest apache_beam/ml/transforms/embeddings -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 {posargs}; ret=$?; [ $ret = 5 ] && exit 0 || exit $ret' -[testenv:py{38,39}-TFHubEmbeddings-{014,015}] +[testenv:py{39,310}-TFHubEmbeddings-{014,015}] deps = 014: tensorflow-hub>=0.14.0,<0.15.0 # Help pip resolve conflict with typing-extensions due to an old version of tensorboard https://github.com/apache/beam/issues/30852 diff --git a/settings.gradle.kts b/settings.gradle.kts index 1a32a8f111c4..4ab7c31d9ea9 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -284,32 +284,27 @@ include(":sdks:python") include(":sdks:python:apache_beam:testing:load_tests") include(":sdks:python:apache_beam:testing:benchmarks:nexmark") include(":sdks:python:container") -include(":sdks:python:container:py38") include(":sdks:python:container:py39") include(":sdks:python:container:py310") 
include(":sdks:python:container:py311") include(":sdks:python:container:py312") include(":sdks:python:expansion-service-container") include(":sdks:python:test-suites:dataflow") -include(":sdks:python:test-suites:dataflow:py38") include(":sdks:python:test-suites:dataflow:py39") include(":sdks:python:test-suites:dataflow:py310") include(":sdks:python:test-suites:dataflow:py311") include(":sdks:python:test-suites:dataflow:py312") include(":sdks:python:test-suites:direct") -include(":sdks:python:test-suites:direct:py38") include(":sdks:python:test-suites:direct:py39") include(":sdks:python:test-suites:direct:py310") include(":sdks:python:test-suites:direct:py311") include(":sdks:python:test-suites:direct:py312") include(":sdks:python:test-suites:direct:xlang") -include(":sdks:python:test-suites:portable:py38") include(":sdks:python:test-suites:portable:py39") include(":sdks:python:test-suites:portable:py310") include(":sdks:python:test-suites:portable:py311") include(":sdks:python:test-suites:portable:py312") include(":sdks:python:test-suites:tox:pycommon") -include(":sdks:python:test-suites:tox:py38") include(":sdks:python:test-suites:tox:py39") include(":sdks:python:test-suites:tox:py310") include(":sdks:python:test-suites:tox:py311") diff --git a/website/www/site/content/en/documentation/runtime/environments.md b/website/www/site/content/en/documentation/runtime/environments.md index d9a42db29e24..0b4ec0f6f7b4 100644 --- a/website/www/site/content/en/documentation/runtime/environments.md +++ b/website/www/site/content/en/documentation/runtime/environments.md @@ -115,10 +115,10 @@ This method requires building image artifacts from Beam source. 
For additional i ./gradlew :sdks:java:container:java11:docker ./gradlew :sdks:java:container:java17:docker ./gradlew :sdks:go:container:docker - ./gradlew :sdks:python:container:py38:docker ./gradlew :sdks:python:container:py39:docker ./gradlew :sdks:python:container:py310:docker ./gradlew :sdks:python:container:py311:docker + ./gradlew :sdks:python:container:py312:docker # Shortcut for building all Python SDKs ./gradlew :sdks:python:container:buildAll diff --git a/website/www/site/content/en/get-started/quickstart-py.md b/website/www/site/content/en/get-started/quickstart-py.md index 3428f5346e02..e64bbc277b05 100644 --- a/website/www/site/content/en/get-started/quickstart-py.md +++ b/website/www/site/content/en/get-started/quickstart-py.md @@ -23,7 +23,7 @@ If you're interested in contributing to the Apache Beam Python codebase, see the {{< toc >}} -The Python SDK supports Python 3.8, 3.9, 3.10 and 3.11. Beam 2.48.0 was the last release with support for Python 3.7. +The Python SDK supports Python 3.9, 3.10, 3.11, and 3.12. Beam 2.59.0 was the last release with support for Python 3.8. 
## Set up your environment From e16806de2e844fb831ff00ab16804d1570901d94 Mon Sep 17 00:00:00 2001 From: Jack McCluskey Date: Thu, 22 Aug 2024 11:31:00 -0400 Subject: [PATCH 2/3] Provide py38 gradle tasks for 39 --- sdks/python/test-suites/tox/py39/build.gradle | 193 ++++++++++++++++++ 1 file changed, 193 insertions(+) diff --git a/sdks/python/test-suites/tox/py39/build.gradle b/sdks/python/test-suites/tox/py39/build.gradle index 5bb73b60a5d2..9fde83c1dcde 100644 --- a/sdks/python/test-suites/tox/py39/build.gradle +++ b/sdks/python/test-suites/tox/py39/build.gradle @@ -27,3 +27,196 @@ applyPythonNature() pythonVersion = '3.9' apply from: "../common.gradle" + +toxTask "testPy39CloudCoverage", "py39-cloudcoverage", "${posargs}" +test.dependsOn "testPy39CloudCoverage" +project.tasks.register("preCommitPyCoverage") { + dependsOn = ["testPy39CloudCoverage"] +} + +// Dep Postcommit runs test suites that evaluate compatibility of particular +// dependencies. It is exercised on a single Python version. +// +// Should still leave at least one version in PreCommit unless the marked tests +// are also exercised by existing PreCommit +// e.g. pyarrow and pandas also run on PreCommit Dataframe and Coverage +project.tasks.register("postCommitPyDep") {} + +// Create a test task for supported major versions of pyarrow +// We should have a test for the lowest supported version and +// For versions that we would like to prioritize for testing, +// for example versions released in a timeframe of last 1-2 years. 
+ +toxTask "testPy39pyarrow-3", "py39-pyarrow-3", "${posargs}" +test.dependsOn "testPy39pyarrow-3" +postCommitPyDep.dependsOn "testPy39pyarrow-3" + +toxTask "testPy39pyarrow-9", "py39-pyarrow-9", "${posargs}" +test.dependsOn "testPy39pyarrow-9" +postCommitPyDep.dependsOn "testPy39pyarrow-9" + +toxTask "testPy39pyarrow-10", "py39-pyarrow-10", "${posargs}" +test.dependsOn "testPy39pyarrow-10" +postCommitPyDep.dependsOn "testPy39pyarrow-10" + +toxTask "testPy39pyarrow-11", "py39-pyarrow-11", "${posargs}" +test.dependsOn "testPy39pyarrow-11" +postCommitPyDep.dependsOn "testPy39pyarrow-11" + +toxTask "testPy39pyarrow-12", "py39-pyarrow-12", "${posargs}" +test.dependsOn "testPy39pyarrow-12" +postCommitPyDep.dependsOn "testPy39pyarrow-12" + +toxTask "testPy39pyarrow-13", "py39-pyarrow-13", "${posargs}" +test.dependsOn "testPy39pyarrow-13" +postCommitPyDep.dependsOn "testPy39pyarrow-13" + +toxTask "testPy39pyarrow-14", "py39-pyarrow-14", "${posargs}" +test.dependsOn "testPy39pyarrow-14" +postCommitPyDep.dependsOn "testPy39pyarrow-14" + +toxTask "testPy39pyarrow-15", "py39-pyarrow-15", "${posargs}" +test.dependsOn "testPy39pyarrow-15" +postCommitPyDep.dependsOn "testPy39pyarrow-15" + +toxTask "testPy39pyarrow-16", "py39-pyarrow-16", "${posargs}" +test.dependsOn "testPy39pyarrow-16" +postCommitPyDep.dependsOn "testPy39pyarrow-16" + +// Create a test task for each supported minor version of pandas +toxTask "testPy39pandas-14", "py39-pandas-14", "${posargs}" +test.dependsOn "testPy39pandas-14" +postCommitPyDep.dependsOn "testPy39pandas-14" + +toxTask "testPy39pandas-15", "py39-pandas-15", "${posargs}" +test.dependsOn "testPy39pandas-15" +postCommitPyDep.dependsOn "testPy39pandas-15" + +toxTask "testPy39pandas-20", "py39-pandas-20", "${posargs}" +test.dependsOn "testPy39pandas-20" +postCommitPyDep.dependsOn "testPy39pandas-20" + +// TODO(https://github.com/apache/beam/issues/31192): Add below suites +// after dependency compat tests suite switches to Python 3.9 or we add +// 
pandas 2.2 support. + +// toxTask "testPy39pandas-21", "py39-pandas-21", "${posargs}" +// test.dependsOn "testPy39pandas-21" +// postCommitPyDep.dependsOn "testPy39pandas-21" + +// toxTask "testPy39pandas-22", "py39-pandas-22", "${posargs}" +// test.dependsOn "testPy39pandas-22" +// postCommitPyDep.dependsOn "testPy39pandas-22" + +// TODO(https://github.com/apache/beam/issues/30908): Revise what are we testing + +// Create a test task for each minor version of pytorch +toxTask "testPy39pytorch-19", "py39-pytorch-19", "${posargs}" +test.dependsOn "testPy39pytorch-19" +postCommitPyDep.dependsOn "testPy39pytorch-19" + +toxTask "testPy39pytorch-110", "py39-pytorch-110", "${posargs}" +test.dependsOn "testPy39pytorch-110" +postCommitPyDep.dependsOn "testPy39pytorch-110" + +toxTask "testPy39pytorch-111", "py39-pytorch-111", "${posargs}" +test.dependsOn "testPy39pytorch-111" +postCommitPyDep.dependsOn "testPy39pytorch-111" + +toxTask "testPy39pytorch-112", "py39-pytorch-112", "${posargs}" +test.dependsOn "testPy39pytorch-112" +postCommitPyDep.dependsOn "testPy39pytorch-112" + +toxTask "testPy39pytorch-113", "py39-pytorch-113", "${posargs}" +test.dependsOn "testPy39pytorch-113" +postCommitPyDep.dependsOn "testPy39pytorch-113" + +// run on precommit +toxTask "testPy39pytorch-200", "py39-pytorch-200", "${posargs}" +test.dependsOn "testPy39pytorch-200" +postCommitPyDep.dependsOn "testPy39pytorch-200" + +toxTask "testPy39tft-113", "py39-tft-113", "${posargs}" +test.dependsOn "testPy39tft-113" +postCommitPyDep.dependsOn "testPy39tft-113" + +// TODO(https://github.com/apache/beam/issues/25796) - uncomment onnx tox task once onnx supports protobuf 4.x.x +// Create a test task for each minor version of onnx +// toxTask "testPy39onnx-113", "py39-onnx-113", "${posargs}" +// test.dependsOn "testPy39onnx-113" +// postCommitPyDep.dependsOn "testPy39onnx-113" + +// Create a test task for each minor version of tensorflow +toxTask "testPy39tensorflow-212", "py39-tensorflow-212", 
"${posargs}" +test.dependsOn "testPy39tensorflow-212" +postCommitPyDep.dependsOn "testPy39tensorflow-212" + +// Create a test task for each minor version of transformers +toxTask "testPy39transformers-428", "py39-transformers-428", "${posargs}" +test.dependsOn "testPy39transformers-428" +postCommitPyDep.dependsOn "testPy39transformers-428" + +toxTask "testPy39transformers-429", "py39-transformers-429", "${posargs}" +test.dependsOn "testPy39transformers-429" +postCommitPyDep.dependsOn "testPy39transformers-429" + +toxTask "testPy39transformers-430", "py39-transformers-430", "${posargs}" +test.dependsOn "testPy39transformers-430" +postCommitPyDep.dependsOn "testPy39transformers-430" + +toxTask "testPy39embeddingsMLTransform", "py39-embeddings", "${posargs}" +test.dependsOn "testPy39embeddingsMLTransform" +postCommitPyDep.dependsOn "testPy39embeddingsMLTransform" + +// Part of MLTransform embeddings test suite but requires tensorflow hub, which we need to test on +// multiple versions so keeping this suite separate. 
+toxTask "testPy39TensorflowHubEmbeddings-014", "py39-TFHubEmbeddings-014", "${posargs}" +test.dependsOn "testPy39TensorflowHubEmbeddings-014" +postCommitPyDep.dependsOn "testPy39TensorflowHubEmbeddings-014" + +toxTask "testPy39TensorflowHubEmbeddings-015", "py39-TFHubEmbeddings-015", "${posargs}" +test.dependsOn "testPy39TensorflowHubEmbeddings-015" +postCommitPyDep.dependsOn "testPy39TensorflowHubEmbeddings-015" + +toxTask "whitespacelint", "whitespacelint", "${posargs}" + +task archiveFilesToLint(type: Zip) { + archiveFileName = "files-to-whitespacelint.zip" + destinationDirectory = file("$buildDir/dist") + + from ("$rootProject.projectDir") { + include "**/*.md" + include "**/build.gradle" + include '**/build.gradle.kts' + exclude '**/build/**' // intermediate build directory + exclude 'website/www/site/themes/docsy/**' // fork to google/docsy + exclude "**/node_modules/*" + exclude "**/.gogradle/*" + } +} + +task unpackFilesToLint(type: Copy) { + from zipTree("$buildDir/dist/files-to-whitespacelint.zip") + into "$buildDir/files-to-whitespacelint" +} + +whitespacelint.dependsOn archiveFilesToLint, unpackFilesToLint +unpackFilesToLint.dependsOn archiveFilesToLint +archiveFilesToLint.dependsOn cleanPython + +toxTask "jest", "jest", "${posargs}" + +toxTask "eslint", "eslint", "${posargs}" + +task copyTsSource(type: Copy) { + from ("$rootProject.projectDir") { + include "sdks/python/apache_beam/runners/interactive/extensions/**/*" + exclude "sdks/python/apache_beam/runners/interactive/extensions/**/lib/*" + exclude "sdks/python/apache_beam/runners/interactive/extensions/**/node_modules/*" + } + into "$buildDir/ts" +} + +jest.dependsOn copyTsSource +eslint.dependsOn copyTsSource +copyTsSource.dependsOn cleanPython \ No newline at end of file From 78ae8ffd4cb6b429ef1957b5813acd23e6e46f11 Mon Sep 17 00:00:00 2001 From: Jack McCluskey Date: Thu, 22 Aug 2024 11:43:06 -0400 Subject: [PATCH 3/3] Try to fix whitespace check --- sdks/python/test-suites/tox/py39/build.gradle 
| 2 ++ 1 file changed, 2 insertions(+) diff --git a/sdks/python/test-suites/tox/py39/build.gradle b/sdks/python/test-suites/tox/py39/build.gradle index 9fde83c1dcde..e66a1a48a2f0 100644 --- a/sdks/python/test-suites/tox/py39/build.gradle +++ b/sdks/python/test-suites/tox/py39/build.gradle @@ -26,6 +26,8 @@ applyPythonNature() // Required to setup a Python 3 virtualenv and task names. pythonVersion = '3.9' +def posargs = project.findProperty("posargs") ?: "" + apply from: "../common.gradle" toxTask "testPy39CloudCoverage", "py39-cloudcoverage", "${posargs}"