Skip to content

Migrate federation build to ghcr #4273

Migrate federation build to ghcr

Migrate federation build to ghcr #4273

Workflow file for this run

---
name: tests

env:
  # Ginkgo v1 CLI version used by the bucket-scanner test suite.
  GINKGO_VERSION: v1.15.2

# Run on every push except development and merge-queue (q/*) branches.
on:
  push:
    branches-ignore:
      - development/**
      - q/*/**

jobs:
  # Build the CI helper images (kafka, syntheticbucketd) and push them to
  # GitHub Container Registry so the `tests` job can pull them as services.
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      # Fixed typo: step was previously named "Set up Docker Buildk".
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ github.token }}
      - name: Build and push kafka
        uses: docker/build-push-action@v5
        with:
          push: true
          context: .github/dockerfiles/kafka
          tags: ghcr.io/${{ github.repository }}/ci-kafka:${{ github.sha }}
          # Reuse GitHub Actions layer cache between runs, scoped per image.
          cache-from: type=gha,scope=ci-kafka
          cache-to: type=gha,mode=max,scope=ci-kafka
      - name: Build and push syntheticbucketd
        uses: docker/build-push-action@v5
        with:
          push: true
          context: .
          file: .github/dockerfiles/syntheticbucketd/Dockerfile
          tags: ghcr.io/${{ github.repository }}/syntheticbucketd:${{ github.sha }}
          cache-from: type=gha,scope=syntheticbucketd
          cache-to: type=gha,mode=max,scope=syntheticbucketd

  tests:
    needs: build
    runs-on: ubuntu-latest
    services:
      redis:
        image: redis:alpine
        ports:
          - "6379:6379"
      syntheticbucketd:
        image: ghcr.io/${{ github.repository }}/syntheticbucketd:${{ github.sha }}
        # Same registry credentials as the kafka service below: both images
        # are pushed to ghcr.io by the build job and may be private.
        credentials:
          username: ${{ github.repository_owner }}
          password: ${{ github.token }}
        ports:
          - "9001:9001"
      kafka:
        image: ghcr.io/${{ github.repository }}/ci-kafka:${{ github.sha }}
        credentials:
          username: ${{ github.repository_owner }}
          password: ${{ github.token }}
        ports:
          # 2181 = zookeeper, 9092 = kafka broker.
          - "2181:2181"
          - "9092:9092"
        env:
          ADVERTISED_HOST: "localhost"
          ADVERTISED_PORT: "9092"
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Install build dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y build-essential
      - uses: actions/setup-go@v5
        with:
          go-version: '1.16.2'
          cache-dependency-path: ./bucket-scanner/go.sum
      - uses: actions/setup-node@v4
        with:
          node-version: '16'
          cache: yarn
      - name: Install node dependencies
        run: yarn install --frozen-lockfile --network-concurrency 1
      - name: Install ginkgo
        # `go install pkg@version` is the supported way to install a binary
        # since Go 1.16; `go get` for this purpose is deprecated and fails
        # on newer Go versions.
        run: go install github.com/onsi/ginkgo/ginkgo@${GINKGO_VERSION}
      - name: Lint markdown
        run: yarn run --silent lint_md
      - name: Lint Javascript
        run: yarn run --silent lint
      - name: Run unit tests
        run: yarn test
      - name: Run bucket scanner unit tests
        run: ginkgo -r --randomizeAllSpecs --randomizeSuites --failOnPending --cover --trace --race --progress -nodes 1 -stream -timeout 5m -slowSpecThreshold 60
        working-directory: bucket-scanner
      - name: Run backbeat routes test
        run: .github/scripts/run_server_tests.bash ft_test:api:routes
      - name: Run backbeat retry tests with account authentication
        run: .github/scripts/run_server_tests.bash ft_test:api:retry
        env:
          CI_AUTH_TYPE: "account"
          BACKBEAT_CONFIG_FILE: "tests/config.json"
      - name: Run backbeat retry tests with role authentication
        run: .github/scripts/run_server_tests.bash ft_test:api:retry
        env:
          CI_AUTH_TYPE: "role"
          BACKBEAT_CONFIG_FILE: "tests/config.roleAuth.json"
      - name: Run backbeat lib feature tests
        run: yarn run ft_test:lib
      - name: Run backbeat replication feature tests
        run: yarn run ft_test:replication
      - name: run feature lifecycle tests
        run: .github/scripts/run_ft_tests.bash ft_test:lifecycle
        env:
          EXPIRE_ONE_DAY_EARLIER: "true"
          TRANSITION_ONE_DAY_EARLIER: "true"
          BACKBEAT_CONFIG_FILE: "tests/config.json"
      - name: run backbeat notification feature tests
        run: yarn run ft_test:notification
      - name: run ballooning tests for lifecycle conductor
        run: yarn mocha tests/performance/lifecycle/conductor-with-bucketd-check-memory-balloon.js
        env:
          # Constrain heap long-lived heap size to 90MB, so that pushing 200K messages
          # will crash if they end up in memory all at the same time (circuit breaking
          # ineffective) while waiting to be committed to the kafka topic.
          NODE_OPTIONS: '--max-old-space-size=90'