#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
name: Build / Cache base image
on:
  # Run jobs when a commit is merged
  push:
    branches:
      - 'master'
      - 'branch-*'
    paths:
      - 'dev/infra/Dockerfile'
      - 'dev/spark-test-image/docs/Dockerfile'
      - 'dev/spark-test-image/lint/Dockerfile'
      - 'dev/spark-test-image/sparkr/Dockerfile'
      - 'dev/spark-test-image/python-309/Dockerfile'
      - 'dev/spark-test-image/python-310/Dockerfile'
      - 'dev/spark-test-image/python-312/Dockerfile'
      - 'dev/spark-test-image/python-313/Dockerfile'
      - '.github/workflows/build_infra_images_cache.yml'
  # Create infra image when cutting branches/tags
  create:
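  # Note: 'paths' filtering applies only to the 'push' trigger; a 'create'
  # event (any new branch or tag) always runs this workflow.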
jobs:
  main:
    if: github.repository == 'apache/spark'
    runs-on: ubuntu-latest
    permissions:
      packages: write
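      # 'packages: write' lets the job's GITHUB_TOKEN push images to ghcr.io.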
    steps:
      - name: Checkout Spark repository
        uses: actions/checkout@v4
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v6
        with:
          context: ./dev/infra/
          push: true
          tags: ghcr.io/apache/spark/apache-spark-github-action-image-cache:${{ github.ref_name }}-static
          cache-from: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-cache:${{ github.ref_name }}
          cache-to: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-cache:${{ github.ref_name }},mode=max
      - name: Image digest
        run: echo ${{ steps.docker_build.outputs.digest }}
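      # Each build pushes a ':<ref>-static' image tag and writes a BuildKit
      # registry cache to the un-suffixed ':<ref>' tag; mode=max also exports
      # intermediate layers so later builds can reuse every stage. The
      # hashFiles() guards below skip images whose Dockerfile does not exist
      # on the branch being built (e.g. older release branches).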
      - name: Build and push (Documentation)
        if: hashFiles('dev/spark-test-image/docs/Dockerfile') != ''
        id: docker_build_docs
        uses: docker/build-push-action@v6
        with:
          context: ./dev/spark-test-image/docs/
          push: true
          tags: ghcr.io/apache/spark/apache-spark-github-action-image-docs-cache:${{ github.ref_name }}-static
          cache-from: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-docs-cache:${{ github.ref_name }}
          cache-to: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-docs-cache:${{ github.ref_name }},mode=max
      - name: Image digest (Documentation)
        if: hashFiles('dev/spark-test-image/docs/Dockerfile') != ''
        run: echo ${{ steps.docker_build_docs.outputs.digest }}
      - name: Build and push (Linter)
        if: hashFiles('dev/spark-test-image/lint/Dockerfile') != ''
        id: docker_build_lint
        uses: docker/build-push-action@v6
        with:
          context: ./dev/spark-test-image/lint/
          push: true
          tags: ghcr.io/apache/spark/apache-spark-github-action-image-lint-cache:${{ github.ref_name }}-static
          cache-from: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-lint-cache:${{ github.ref_name }}
          cache-to: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-lint-cache:${{ github.ref_name }},mode=max
      - name: Image digest (Linter)
        if: hashFiles('dev/spark-test-image/lint/Dockerfile') != ''
        run: echo ${{ steps.docker_build_lint.outputs.digest }}
      - name: Build and push (SparkR)
        if: hashFiles('dev/spark-test-image/sparkr/Dockerfile') != ''
        id: docker_build_sparkr
        uses: docker/build-push-action@v6
        with:
          context: ./dev/spark-test-image/sparkr/
          push: true
          tags: ghcr.io/apache/spark/apache-spark-github-action-image-sparkr-cache:${{ github.ref_name }}-static
          cache-from: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-sparkr-cache:${{ github.ref_name }}
          cache-to: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-sparkr-cache:${{ github.ref_name }},mode=max
      - name: Image digest (SparkR)
        if: hashFiles('dev/spark-test-image/sparkr/Dockerfile') != ''
        run: echo ${{ steps.docker_build_sparkr.outputs.digest }}
      - name: Build and push (PySpark with Python 3.9)
        if: hashFiles('dev/spark-test-image/python-309/Dockerfile') != ''
        id: docker_build_pyspark_python_309
        uses: docker/build-push-action@v6
        with:
          context: ./dev/spark-test-image/python-309/
          push: true
          tags: ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-309-cache:${{ github.ref_name }}-static
          cache-from: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-309-cache:${{ github.ref_name }}
          cache-to: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-309-cache:${{ github.ref_name }},mode=max
      - name: Image digest (PySpark with Python 3.9)
        if: hashFiles('dev/spark-test-image/python-309/Dockerfile') != ''
        run: echo ${{ steps.docker_build_pyspark_python_309.outputs.digest }}
      - name: Build and push (PySpark with Python 3.10)
        if: hashFiles('dev/spark-test-image/python-310/Dockerfile') != ''
        id: docker_build_pyspark_python_310
        uses: docker/build-push-action@v6
        with:
          context: ./dev/spark-test-image/python-310/
          push: true
          tags: ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-310-cache:${{ github.ref_name }}-static
          cache-from: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-310-cache:${{ github.ref_name }}
          cache-to: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-310-cache:${{ github.ref_name }},mode=max
      - name: Image digest (PySpark with Python 3.10)
        if: hashFiles('dev/spark-test-image/python-310/Dockerfile') != ''
        run: echo ${{ steps.docker_build_pyspark_python_310.outputs.digest }}
      - name: Build and push (PySpark with Python 3.12)
        if: hashFiles('dev/spark-test-image/python-312/Dockerfile') != ''
        id: docker_build_pyspark_python_312
        uses: docker/build-push-action@v6
        with:
          context: ./dev/spark-test-image/python-312/
          push: true
          tags: ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-312-cache:${{ github.ref_name }}-static
          cache-from: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-312-cache:${{ github.ref_name }}
          cache-to: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-312-cache:${{ github.ref_name }},mode=max
      - name: Image digest (PySpark with Python 3.12)
        if: hashFiles('dev/spark-test-image/python-312/Dockerfile') != ''
        run: echo ${{ steps.docker_build_pyspark_python_312.outputs.digest }}
      - name: Build and push (PySpark with Python 3.13)
        if: hashFiles('dev/spark-test-image/python-313/Dockerfile') != ''
        id: docker_build_pyspark_python_313
        uses: docker/build-push-action@v6
        with:
          context: ./dev/spark-test-image/python-313/
          push: true
          tags: ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-313-cache:${{ github.ref_name }}-static
          cache-from: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-313-cache:${{ github.ref_name }}
          cache-to: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-313-cache:${{ github.ref_name }},mode=max
      - name: Image digest (PySpark with Python 3.13)
        if: hashFiles('dev/spark-test-image/python-313/Dockerfile') != ''
        run: echo ${{ steps.docker_build_pyspark_python_313.outputs.digest }}
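      # Adding another image variant repeats the pattern above with only the
      # directory and tag changed (the new Dockerfile path must also be added
      # to the 'paths' trigger list). A hypothetical sketch for a future
      # Python 3.14 image (the 'python-314' directory and tag are
      # illustrative, not part of this workflow):
      #
      # - name: Build and push (PySpark with Python 3.14)
      #   if: hashFiles('dev/spark-test-image/python-314/Dockerfile') != ''
      #   id: docker_build_pyspark_python_314
      #   uses: docker/build-push-action@v6
      #   with:
      #     context: ./dev/spark-test-image/python-314/
      #     push: true
      #     tags: ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-314-cache:${{ github.ref_name }}-static
      #     cache-from: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-314-cache:${{ github.ref_name }}
      #     cache-to: type=registry,ref=ghcr.io/apache/spark/apache-spark-github-action-image-pyspark-python-314-cache:${{ github.ref_name }},mode=max
      # - name: Image digest (PySpark with Python 3.14)
      #   if: hashFiles('dev/spark-test-image/python-314/Dockerfile') != ''
      #   run: echo ${{ steps.docker_build_pyspark_python_314.outputs.digest }}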