[8.x](backport #2833) Overwrite notice to github.com/gocsaf/csaf/v3 (… #934

Workflow file for this run

name: EKS-CI
on:
  # On demand execution of workflow will run all suites.
  workflow_dispatch:
    inputs:
      test-targets:
        required: true
        description: "Specify test markers to run"
        default: '["eks"]'
  # On post-merge execution running eks tests
  push:
    branches:
      - main
      - "[0-9]+.[0-9]+"
      - "8.x"
env:
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_TEST_ACC }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_TEST_ACC }}
  GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  CONTAINER_SUFFIX: ${{ github.run_id }}
  TEST_TARGETS_DEFAULT: '["pre_merge"]'
  AWS_REGION: eu-west-2
  REPORTS_DIR: tests/allure/results/
  KUBE_NAMESPACE: kube-system
  CI_ELASTIC_AGENT_DOCKER_TAG: "8.14.0-SNAPSHOT"
  CI_ELASTIC_AGENT_DOCKER_IMAGE: "391946104644.dkr.ecr.eu-west-2.amazonaws.com/elastic-agent"
# run only a single job at a time
concurrency: EKS-Functional-Tests
jobs:
  Setup:
    runs-on: ubuntu-22.04
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
    timeout-minutes: 40
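    # The test matrix defaults to TEST_TARGETS_DEFAULT and is overridden by the
    # test-targets input when the workflow is started manually; the resolved value
    # is exposed as the job output `matrix`.
    # Example manual run via the GitHub CLI (the workflow file name below is an
    # assumption, adjust it to the actual path under .github/workflows/):
    #   gh workflow run eks-ci.yml -f test-targets='["eks"]'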
    steps:
      - id: set-default-matrix
        env:
          TEST_TARGETS: ${{ env.TEST_TARGETS_DEFAULT }}
        run: |
          echo "TEST_MATRIX=$TEST_TARGETS" >> $GITHUB_ENV
      - id: Dispatch
        if: github.event_name == 'workflow_dispatch'
        env:
          TEST_TARGETS: ${{ github.event.inputs.test-targets }}
        run: echo "TEST_MATRIX=${TEST_TARGETS}" >> $GITHUB_ENV
      - id: set-matrix
        run: echo "matrix=${{ env.TEST_MATRIX }}" >> $GITHUB_OUTPUT
  Build:
    name: Build
    runs-on: ubuntu-22.04
    timeout-minutes: 30
    steps:
      # Disk cleanup
      - name: Free Disk Space (Ubuntu)
        uses: jlumbroso/free-disk-space@main
        with:
          tool-cache: false
          android: true
          dotnet: true
          haskell: true
          large-packages: false
          docker-images: true
          swap-storage: true
      - name: Check out the repo
        uses: actions/checkout@v4
      - name: Setup GO (with caching)
        uses: magnetikonline/action-golang-cache@v5
        with:
          go-version-file: .go-version
      - name: build cloudbeat binary
        uses: magefile/mage-action@v3
        with:
          version: latest
          args: build
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Cache Build dependencies
        uses: actions/cache@v4
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.workflow }}
          restore-keys: |
            ${{ runner.os }}-buildx-
      - name: Build cloudbeat-docker
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ./deploy/Dockerfile
          push: false
          tags: cloudbeat:latest
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache-new
          outputs: type=docker,dest=/tmp/cloudbeat-${{ env.CONTAINER_SUFFIX }}.tar
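      # Repackage the upstream elastic-agent snapshot image so it carries the
      # cloudbeat build from this run (that is what build.sh appears to do; see
      # the script for details). The resulting tag matches the image that
      # load-to-ecr publishes later in this job.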
      - name: Build elastic-agent
        env:
          BASE_IMAGE: docker.elastic.co/beats/elastic-agent:${{ env.CI_ELASTIC_AGENT_DOCKER_TAG }}
          GOOS: linux
          GOARCH: amd64
        run: ./scripts/packaging/docker/elastic-agent/build.sh -t ${{ env.CI_ELASTIC_AGENT_DOCKER_IMAGE }}:${{ env.CI_ELASTIC_AGENT_DOCKER_TAG }}
        shell: bash
      - name: Build pytest-docker
        uses: docker/build-push-action@v6
        with:
          context: ./tests/.
          push: false
          tags: cloudbeat-test:latest
          cache-from: type=local,mode=max,src=/tmp/.buildx-cache
          cache-to: type=local,mode=max,dest=/tmp/.buildx-cache-new
          outputs: type=docker,dest=/tmp/pytest-${{ env.CONTAINER_SUFFIX }}.tar
      - name: Cache docker images
        uses: actions/cache@v4
        with:
          path: /tmp/*.tar
          key: ${{ runner.os }}-dockers-cache-${{ env.CONTAINER_SUFFIX }}
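      # The image tarballs are cached under a key that embeds the run id
      # (CONTAINER_SUFFIX), so the cache entry is unique per run and can be
      # restored by other jobs or reruns of this same run if needed.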
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ env.AWS_REGION }}
      - name: Login to Amazon ECR
        id: login-ecr
        uses: aws-actions/amazon-ecr-login@v2
      - name: Load images cloudbeat and cloudbeat-test to ECR
        id: load-to-ecr
        env:
          ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
        run: |
          ./.ci/scripts/ecr-images.sh ${{ env.CONTAINER_SUFFIX }} ${{ env.ECR_REGISTRY }} ${{ env.CI_ELASTIC_AGENT_DOCKER_TAG }}
        shell: bash
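      # docker/build-push-action wrote its cache to .buildx-cache-new to keep the
      # local cache from growing without bound; gh-cache.sh is expected to rotate
      # it, roughly (hypothetical sketch, see the script for the real logic):
      #   rm -rf /tmp/.buildx-cache
      #   mv /tmp/.buildx-cache-new /tmp/.buildx-cache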
      - name: Move cache
        run: |
          ./.ci/scripts/gh-cache.sh
        shell: bash
  Test_Matrix:
    needs: ["Setup", "Build"]
    strategy:
      fail-fast: false
      matrix:
        include:
          - test-target: eks
            range: ""
            values_file: tests/test_environments/values/ci-eks-config-1.yml
            k8s_context: "test-eks-config-1"
            label: "EKS functional tests: config 1"
          - test-target: eks
            range: ""
            values_file: tests/test_environments/values/ci-eks-config-2.yml
            k8s_context: "test-eks-config-2"
            label: "EKS functional tests: config 2"
    name: ${{ matrix.label }}
    runs-on: ubuntu-20.04
    timeout-minutes: 90
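    # Each matrix entry targets an existing EKS cluster (k8s_context): point
    # kubectl at it, deploy the test Helm chart with the matching values file,
    # run the marked test suite, then collect the allure results.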
    steps:
      - name: Check out the repo
        uses: actions/checkout@v4
      # Setup all required tools
      - name: Init Hermit
        run: ./bin/hermit env -r >> $GITHUB_ENV
      - name: Get kubeconfig and select context
        if: success()
        run: |
          aws eks update-kubeconfig --name ${{ matrix.k8s_context }} --region ${{ env.AWS_REGION }} --alias ${{ matrix.k8s_context }}
          # Double quotes are needed so the command substitution actually runs;
          # -w 0 keeps the base64 output on one line, as required for $GITHUB_ENV.
          echo "KUBE_CONFIG_DATA=$(cat ~/.kube/config | base64 -w 0)" >> $GITHUB_ENV
          kubectl config use-context ${{ matrix.k8s_context }}
          kubectl config set-context --current --namespace=${{ env.KUBE_NAMESPACE }}
      - name: Deploy tests Helm chart
        id: deploy_helm
        if: success()
        run: |
          just deploy-tests-helm ${{ matrix.test-target }} ${{ matrix.values_file }} ${{ matrix.range }}
      - name: Execute tests
        id: run_tests
        if: success()
        run: |
          just run-tests ${{ matrix.test-target }} ${{ matrix.k8s_context }}
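      # Allure results are written by a results pod that runs alongside the test
      # pod: find the pod scheduled on the same node as test-pod-v1, copy its
      # /reports directory to REPORTS_DIR, then clear it for the next run.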
      - name: Copy test results
        id: copy_results
        if: always()
        run: |
          NODE=$(kubectl get po test-pod-v1 -o jsonpath='{.spec.nodeName}')
          RESULTS_POD=$(kubectl get po -l k8s-app=eks-results --field-selector spec.nodeName=$NODE -o jsonpath='{.items[*].metadata.name}')
          kubectl cp $RESULTS_POD:/reports ${{ env.REPORTS_DIR }}
          kubectl exec $RESULTS_POD -- sh -c 'rm -rf /reports/*'
      - name: Upload Test Results
        if: always()
        uses: actions/upload-artifact@v3
        with:
          name: allure_results-${{ matrix.test-target }}
          path: ${{ env.REPORTS_DIR }}
      - name: Purge tests environment
        id: purge_helm
        if: always()
        run: |
          just purge-tests
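  # Runs even when a test leg fails (if: always()) so partial results are still
  # published; artifacts from both matrix legs are merged into a single allure
  # results directory before the report is generated.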
  publish_results:
    name: Publish Results
    needs: Test_Matrix
    if: always()
    runs-on: ubuntu-20.04
    steps:
      - name: Download Artifacts
        uses: actions/download-artifact@v3
        with:
          path: artifacts
      - name: Extract Artifacts
        run: |
          mkdir -p tests/allure/results
          find artifacts/ -type f -print0 | xargs -0 mv -t tests/allure/results
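      # andrcuns/allure-publish-action generates the allure report and uploads it
      # to the given S3 bucket/prefix; the updatePr, summary and summaryTableType
      # inputs control where and how the summary is rendered (see the action's
      # documentation for the exact semantics of each option).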
      - name: Publish allure report
        if: always()
        uses: andrcuns/allure-publish-action@v2.9.0
        env:
          GITHUB_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          AWS_REGION: eu-west-1
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
        with:
          storageType: s3
          resultsGlob: "tests/allure/results"
          updatePr: actions
          collapseSummary: false
          summary: suites
          summaryTableType: markdown
          copyLatest: true
          bucket: csp-allure-reports
          prefix: allure_reports/cloudbeat/${{ github.ref_name }}
          ignoreMissingResults: true