name: helm-chart-integration-test
'on':
  workflow_dispatch:
    inputs:
      tf_script_dir:
        description: Path to the directory where the terraform scripts are stored
        required: false
        default: "datasqrl-examples/finance-credit-card-chatbot/mock_tf_build_deploy"
      sqrl_image_version:
        description: Version of the SQRL compiler image (datasqrl/cmd)
        required: false
        default: "v0.5.2"
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
env:
  REPOSITORY_NAME: datasqrl-examples
  EXAMPLE_NAME: finance-credit-card-chatbot
  DEPLOYMENT_PACKAGE_NAME: package-analytics-no-chat-helm-profile
  ARTIFACTORY_S3_BUCKET_NAME: "sqrl-examples-artifactory-dev"
  # docker
  DOCKER_IMAGE: "public.ecr.aws/j5u7a3j2/datasqrl/cloud:latest"
  DOCKER_COMMAND: "docker run --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --env AWS_SESSION_TOKEN --env AWS_REGION --env BACKEND_S3_BUCKET --env BACKEND_S3_KEY -v ~/.ssh:/root/.ssh -v $(pwd):/mnt/host -v /var/run/docker.sock:/var/run/docker.sock public.ecr.aws/j5u7a3j2/datasqrl/cloud:latest"
  TEST_K8S_NAMESPACE: "github-action-test-ns-1"
  HELM_CHART_VERSION: "0.1.0"
  HELM_PROFILE_VERSION: "TODO"
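# Two jobs: build-artifacts compiles the SQRL package and builds the Docker images;
# deploy installs the Helm charts into the test namespace on EKS and then tears them down.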
jobs:
  build-artifacts:
    name: "Build Artifacts"
    runs-on: ubuntu-latest
    permissions:
      id-token: write
      contents: read
      actions: read
    env:
      ENV: DEV
    steps:
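      # Assume the dev AWS role via GitHub's OIDC token (enabled by id-token: write above);
      # no long-lived AWS keys are stored in the repository.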
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: ${{ secrets.AWS_DEV_GITHUB_ACTION_ROLE }}
          aws-region: us-east-1
      - name: Check out code
        uses: actions/checkout@v3
      - name: Download helm profile
        working-directory: ${{ env.EXAMPLE_NAME }}
        run: |
          echo "TODO: download helm profile from a remote repository"
        shell: bash
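      # Compile the example with the datasqrl/cmd image at the requested version. The
      # compiler writes the deployment assets referenced later in this workflow (Flink SQL,
      # database schema, server config/model, compose file) into build/deploy.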
      - name: SQRL Compile
        working-directory: ${{ env.EXAMPLE_NAME }}
        run: |
          echo "${{ github.workspace }}"
          docker run --rm -v $PWD:/build datasqrl/cmd:"${{ inputs.sqrl_image_version }}" compile -c "${{ env.DEPLOYMENT_PACKAGE_NAME }}.json"
          ls -la build/deploy
        shell: bash
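      # build/deploy is expected to contain a docker-compose file generated by the compile
      # step; build its images locally on the runner.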
      - name: Build Docker Images
        working-directory: "${{ env.EXAMPLE_NAME }}/build/deploy"
        run: |
          docker compose build
        shell: bash
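  # NOTE: deploy runs on a separate runner, so the build/deploy output and the images
  # produced by build-artifacts are not automatically available here; they would need to
  # be regenerated, pushed, or fetched (see the disabled Fetch Artifacts step below).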
  deploy:
    name: "Deploy Helm Charts"
    needs: build-artifacts
    runs-on: ubuntu-latest
    permissions:
      id-token: write
      contents: read
      actions: read
    env:
      ENV: DEV
    steps:
      - name: Check out code
        uses: actions/checkout@v3
      - uses: azure/setup-kubectl@v4
        with:
          version: "v1.29.1"
        id: install
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: ${{ secrets.AWS_DEV_GITHUB_ACTION_ROLE }}
          aws-region: ${{ secrets.AWS_REGION }}
      - name: Log in to ECR and update kubeconfig for EKS
        run: |
          aws ecr get-login-password --region us-east-1 | helm registry login --username AWS --password-stdin 286928876767.dkr.ecr.us-east-1.amazonaws.com
          aws eks update-kubeconfig --region ${{ secrets.AWS_REGION }} --name datasqrl-cloud
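      # Install the namespace-dependencies chart and the SQRL pipeline chart from the
      # private ECR OCI registry; the pipeline chart is fed the compiler outputs from
      # build/deploy via --set-file. The deployment_id is hard-coded here.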
      - name: Install Helm charts
        working-directory: "${{ env.EXAMPLE_NAME }}/build/deploy"
        run: |
          helm install test-dependencies oci://286928876767.dkr.ecr.us-east-1.amazonaws.com/namespace-dependencies --version "${{ env.HELM_CHART_VERSION }}" --namespace "${{ env.TEST_K8S_NAMESPACE }}"
          helm install test-pipeline1 oci://286928876767.dkr.ecr.us-east-1.amazonaws.com/sqrlpipeline --version ${{ env.HELM_CHART_VERSION }} -f values.yaml --set-file flink_sql=./files/flink.sql --set-file database_schema_sql=./files/database-schema.sql --set-file server_config_json=./files/server-config.json --set-file server_model_json=./files/server-model.json --namespace "${{ env.TEST_K8S_NAMESPACE }}" --set deployment_id=53856403-840b-4334-8b31-836c7c482c1e
          kubectl get pods -n "${{ env.TEST_K8S_NAMESPACE }}"
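      # Tear down the test releases so the namespace is clean for the next run.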
      - name: Destroy
        run: |
          helm uninstall test-pipeline1 -n "${{ env.TEST_K8S_NAMESPACE }}"
          helm uninstall test-dependencies -n "${{ env.TEST_K8S_NAMESPACE }}"
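      # The remaining steps are kept but disabled. They sketch an alternative flow: fetch
      # prebuilt artifacts from S3, set up SSH access to the sqrl-cloud repository, pull the
      # datasqrl/cloud image, and run the Python integration tests against the deployed stack.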
      # - name: Fetch Artifacts
      #   working-directory: ${{ env.EXAMPLE_NAME }}
      #   run: |
      #     aws s3 cp s3://${ARTIFACTORY_S3_BUCKET_NAME}/${REPOSITORY_NAME}/${EXAMPLE_NAME}/${DEPLOYMENT_PACKAGE_NAME}/database-schema.sql ./mock_tf_build_deploy
      #     aws s3 cp s3://${ARTIFACTORY_S3_BUCKET_NAME}/${REPOSITORY_NAME}/${EXAMPLE_NAME}/${DEPLOYMENT_PACKAGE_NAME}/FlinkJob.jar ./mock_tf_build_deploy/
      #     # load local data
      #     cp -r ./creditcard-local ./mock_tf_build_deploy
      #     ls -la ./mock_tf_build_deploy
      #   shell: bash
      # - name: Set up SSH to sqrl-cloud repo
      #   working-directory: "${{ env.EXAMPLE_NAME }}"
      #   id: ssh_setup
      #   run: |
      #     mkdir -p ~/.ssh
      #     echo "$SSH_PRIVATE_KEY" > ~/.ssh/id_rsa
      #     chmod 600 ~/.ssh/id_rsa
      #     ssh-keyscan github.com >> ~/.ssh/known_hosts
      #   env:
      #     SSH_PRIVATE_KEY: ${{ secrets.SSH_KEY_SQRL_CLOUD }}
      # - name: Pull datasqrl/cloud Image
      #   id: pull-datasqrl-cloud
      #   run: |
      #     docker pull ${{ env.DOCKER_IMAGE }}
      #   shell: bash
      # - name: Integration Testing
      #   id: test
      #   run: |
      #     git clone git@github.com:DataSQRL/sqrl-cloud.git
      #     cd sqrl-cloud
      #     pip install poetry
      #     poetry install
      #     poetry run pytest tests/integration/test_service_availability.py --alb ${{ env.server_alb_dns_name }}
      #   shell: bash