Python Overwatch Workflow #524

Workflow file for this run

# This workflow deploys Azure Functions Python test apps across multiple regions and SKUs, runs k6 load tests against them, and publishes status badges.
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
name: Python Overwatch Workflow
on:
workflow_dispatch:
inputs:
language:
description: 'Runtime Language'
required: true
default: 'python'
language_version:
description: 'Runtime Language Version'
required: true
default: '3.8'
process_results:
description: 'Whether to process results and update badges'
required: true
default: 'true'
cleanup:
description: 'Whether to clean up the test resource groups'
default: 'true'
schedule:
# Runs every 8 hours.
# * is a special character in YAML so you have to quote this string
- cron: "10 0/8 * * *"
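# With this expression the schedule fires at minute 10 of hours 0, 8 and 16 UTC,
# i.e. at 00:10, 08:10 and 16:10 every day.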
env:
PROCESS_COUNT: "FUNCTIONS_WORKER_PROCESS_COUNT"
THREAD_COUNT: "PYTHON_THREADPOOL_THREAD_COUNT"
UNDER: "_"
PASS: "passed"
PASS_COLOR: "brightgreen"
FAILED: "failed"
FAIL_COLOR: "red"
PARTIAL_PASS_COLOR: "yellow"
BADGE_FOLDER_NAME: "badges"
DEFAULT_DEPLOYMENT_LANGUAGE: "python"
DEFAULT_DEPLOYMENT_LANGUAGE_VERSION: "3.8"
LANGUAGE: ${{ github.event.inputs.language }}
LANGUAGE_VERSION: ${{ github.event.inputs.language_version }}
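# LANGUAGE and LANGUAGE_VERSION are empty on scheduled runs (no workflow_dispatch inputs),
# so the steps below use expressions such as ${{ env.LANGUAGE || env.DEFAULT_DEPLOYMENT_LANGUAGE }}
# to fall back to the defaults above.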
OS: "Linux"
RunOutputFolder: "k6runs"
K6_TEST_SCRIPT: "https://raw.githubusercontent.com/vrdmr/AzFunctionsPythonPerformance/master/k6-configuration/generic-test-script.js"
storage_account_key: ${{ secrets.OGFSTORAGEACCOUNTKEY }}
TEST_SUITE_BLOB_LINK: "https://ogfworkflowartifacts.blob.core.windows.net/ogf-testing-code-blobs/PythonPerformanceTestSuite.zip"
NUM_OF_USERS: 20
# Badges Information
BADGES_STORAGE_ACCOUNT_NAME: "ogfworkflowartifacts"
BADGES_OVERALL_STORAGE_CONTAINER_NAME: "overwatch-python-ogf-testing-overall-badges"
BADGES_SCENARIO_STORAGE_CONTAINER_NAME: "overwatch-python-ogf-testing-scenario-badges"
BADGES_TIMEUTC_STORAGE_CONTAINER_NAME: "overwatch-python-ogf-testing-timeutc-badges"
BADGES_STORAGE_CONTAINER_NAME: "overwatch-python-ogf-testing-raw-logs"
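# The containers above hold, respectively: overall pass/fail badges, per-scenario badges,
# last-run timestamp/run-number badges, and (presumably) raw test logs; the badge containers
# are written to by the "Process test results" step below.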
jobs:
test:
env:
RESOURCE_GROUP_NAME: "ogf_test_run_${{ matrix.sku }}_${{ matrix.region }}"
FUNCTION_APP_NAME: "ogfpy-${{ matrix.sku }}-${{ matrix.region }}-${{ github.run_number }}"
STORAGE_ACCOUNT_NAME: "ogf${{ matrix.sku }}${{ matrix.region }}${{ github.run_number }}"
FUNCTION_APP_PLAN_NAME: "ogf-epplan-${{ matrix.sku }}-${{ matrix.region }}-${{ github.run_number }}"
runs-on: ubuntu-latest
permissions: read-all
timeout-minutes: 30
strategy:
# V. IMPORTANT: Copy this strategy section to the dependent
# cleanup job as well, if you are making changes.
# Make sure they are the same.
fail-fast: false
matrix:
region: [centraluseuap, westcentralus, westus2, northcentralus, westeurope, eastus, southeastasia, uksouth, westus, southcentralus, northeurope, japaneast]
sku: [cons, ep1, p1v2]
# https://docs.microsoft.com/en-us/azure/azure-functions/functions-premium-plan#region-max-scale-out
exclude:
- region: westcentralus
sku: cons
# V. IMPORTANT: Copy this strategy section to the dependent cleanup job as well,
# if you are making changes. Make sure they are the same.
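# The matrix expands to 12 regions x 3 SKUs = 36 combinations, minus the one
# exclusion above (cons in westcentralus), i.e. 35 parallel test jobs per run.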
steps:
- name: Install node and npm
uses: actions/setup-node@v2
with:
node-version: '16'
check-latest: true
- name: Azure Login
uses: azure/login@v1
with:
creds: ${{ secrets.AZURE_CREDENTIALS }}
- name: Install Core Tools and K6
run: |
npm i -g azure-functions-core-tools@3 --unsafe-perm true
# Make sure $GITHUB_WORKSPACE is the working directory
# before downloading k6 and running the tests.
cd $GITHUB_WORKSPACE
curl https://github.com/loadimpact/k6/releases/download/v0.29.0/k6-v0.29.0-linux64.tar.gz -L | \
tar xvz --strip-components 1
chmod 755 ./k6
./k6 version
- name: Setup Azure RG and Storage Account For Functions App
uses: Azure/cli@1.0.4
with:
azcliversion: 2.16.0
inlineScript: |
echo "GITHUB_WORKSPACE: " $GITHUB_WORKSPACE
az group create --location ${{ matrix.region }} --name $RESOURCE_GROUP_NAME \
--tags "created on= $(date)" "Use=OGFTest" > /dev/null
sleep 3 # Give the resource group a moment to propagate once it is created.
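# For the two longest region names the storage account name is rebuilt with an
# abbreviation (ncus/scus), presumably to keep it within Azure's 24-character,
# alphanumeric-only storage account name limit. The override only applies inside
# this script, which is why the later steps repeat it.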
if [ ${{ matrix.region }} = 'northcentralus' ]
then
STORAGE_ACCOUNT_NAME="ogf${{ matrix.sku }}ncus${{ github.run_number }}"
fi
if [ ${{ matrix.region }} = 'southcentralus' ]
then
STORAGE_ACCOUNT_NAME="ogf${{ matrix.sku }}scus${{ github.run_number }}"
fi
az storage account create --resource-group $RESOURCE_GROUP_NAME \
--name $STORAGE_ACCOUNT_NAME --location ${{ matrix.region }} \
--tags "created on= $(date)" "Use=OGFTest" --sku Standard_LRS
sleep 3 # Adding some breathing space for Storage Account creation.
- name: Create new Consumption Azure Function App
if: ${{ matrix.sku == 'cons' }}
uses: Azure/cli@1.0.4
with:
azcliversion: 2.16.0
inlineScript: |
if [ ${{ matrix.region }} = 'northcentralus' ]
then
STORAGE_ACCOUNT_NAME="ogf${{ matrix.sku }}ncus${{ github.run_number }}"
fi
if [ ${{ matrix.region }} = 'southcentralus' ]
then
STORAGE_ACCOUNT_NAME="ogf${{ matrix.sku }}scus${{ github.run_number }}"
fi
az functionapp create --consumption-plan-location ${{ matrix.region }} \
--name $FUNCTION_APP_NAME --os-type $OS \
--functions-version 3 --runtime ${{ env.LANGUAGE || env.DEFAULT_DEPLOYMENT_LANGUAGE }} \
--resource-group $RESOURCE_GROUP_NAME \
--runtime-version ${{ env.LANGUAGE_VERSION || env.DEFAULT_DEPLOYMENT_LANGUAGE_VERSION }} \
--storage-account $STORAGE_ACCOUNT_NAME \
--tags "created on= $(date)" "Use=OGFTest"
sleep 5 # Adding some breathing space for Function App creation.
- name: Create new EP1 Azure Function App
if: ${{ matrix.sku == 'ep1' }}
run: |
if [ ${{ matrix.region }} = 'northcentralus' ]
then
STORAGE_ACCOUNT_NAME="ogf${{ matrix.sku }}ncus${{ github.run_number }}"
fi
if [ ${{ matrix.region }} = 'southcentralus' ]
then
STORAGE_ACCOUNT_NAME="ogf${{ matrix.sku }}scus${{ github.run_number }}"
fi
az functionapp plan create --resource-group $RESOURCE_GROUP_NAME \
--name $FUNCTION_APP_PLAN_NAME --is-linux true --max-burst 1 \
--min-instances 1 --location ${{ matrix.region }} --sku EP1 \
--tags "created on= $(date)" "Use=OGFTest"
sleep 2
az functionapp create --resource-group $RESOURCE_GROUP_NAME --plan $FUNCTION_APP_PLAN_NAME \
--name $FUNCTION_APP_NAME --functions-version 3 --runtime ${{ env.LANGUAGE || env.DEFAULT_DEPLOYMENT_LANGUAGE }} \
--runtime-version ${{ env.LANGUAGE_VERSION || env.DEFAULT_DEPLOYMENT_LANGUAGE_VERSION }} --storage-account $STORAGE_ACCOUNT_NAME \
--tags "created on= $(date)" "Use=OGFTest"
sleep 2
- name: Create new AppService plan Azure Function App
if: ${{ matrix.sku == 'p1v2' }}
run: |
if [ ${{ matrix.region }} = 'northcentralus' ]
then
STORAGE_ACCOUNT_NAME="ogf${{ matrix.sku }}ncus${{ github.run_number }}"
fi
if [ ${{ matrix.region }} = 'southcentralus' ]
then
STORAGE_ACCOUNT_NAME="ogf${{ matrix.sku }}scus${{ github.run_number }}"
fi
az functionapp plan create --resource-group $RESOURCE_GROUP_NAME \
--name $FUNCTION_APP_PLAN_NAME --is-linux true --number-of-workers 1 \
--location ${{ matrix.region }} --sku P1v2 \
--tags "created on= $(date)" "Use=OGFTest"
sleep 3
az functionapp create --resource-group $RESOURCE_GROUP_NAME --plan $FUNCTION_APP_PLAN_NAME \
--name $FUNCTION_APP_NAME --functions-version 3 --runtime ${{ env.LANGUAGE || env.DEFAULT_DEPLOYMENT_LANGUAGE }} \
--runtime-version ${{ env.LANGUAGE_VERSION || env.DEFAULT_DEPLOYMENT_LANGUAGE_VERSION }} --storage-account $STORAGE_ACCOUNT_NAME \
--tags "created on= $(date)" "Use=OGFTest"
sleep 3
- name: Deploy Tests
shell: bash -l {0}
run: |
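# ENABLE_ORYX_BUILD and SCM_DO_BUILD_DURING_DEPLOYMENT turn on the remote (Oryx)
# build during publish; $THREAD_COUNT expands to PYTHON_THREADPOOL_THREAD_COUNT
# (see the workflow env block), pinning the Python worker's thread pool to 4 threads.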
az webapp config appsettings set --name $FUNCTION_APP_NAME \
--resource-group $RESOURCE_GROUP_NAME \
--settings ENABLE_ORYX_BUILD=true $THREAD_COUNT=4 SCM_DO_BUILD_DURING_DEPLOYMENT=1
sleep 15 # Give the app settings update time to propagate
# so that the deployment below goes through and the logs show up.
cd $GITHUB_WORKSPACE
git clone https://github.com/vrdmr/AzFunctionsPythonPerformance.git
cd AzFunctionsPythonPerformance
func init --worker-runtime ${{ env.LANGUAGE || env.DEFAULT_DEPLOYMENT_LANGUAGE }}
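# Retry loop: try "func azure functionapp publish" up to 5 times, waiting one
# minute between attempts, and fail the job if it still has not succeeded.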
try=1
is_it_deployed=1
while [ $is_it_deployed -eq 1 ]; do
func azure functionapp publish $FUNCTION_APP_NAME --${{ env.LANGUAGE || env.DEFAULT_DEPLOYMENT_LANGUAGE }}
if [ $? == 0 ]; then
is_it_deployed=0
else
echo -e "\e[31mFailed. Waiting 1 minute and trying again...\e[0m"
sleep 1m
try=$(($try + 1))
fi
if [ $try -ge 5 ]; then
echo -e "\e[31mToo many failures (attempted 5 times); giving up\e[0m"
exit 1
fi
done
# Sometimes the code isn't deployed correctly. Attempt deploying again.
func azure functionapp publish $FUNCTION_APP_NAME --${{ env.LANGUAGE || env.DEFAULT_DEPLOYMENT_LANGUAGE }}
o=$(func azure functionapp list-functions $FUNCTION_APP_NAME | grep Invoke || true)
echo $o
- name: Run Tests
run: |
sleep 10
echo "========================================="
echo "Checkout the functions"
o=$(func azure functionapp list-functions $FUNCTION_APP_NAME | grep Invoke || true)
echo $o
echo "========================================="
cd $GITHUB_WORKSPACE
mkdir $RunOutputFolder
echo "========================================="
for api_endpoint in $(func azure functionapp list-functions $FUNCTION_APP_NAME | grep Invoke | cut -d':' -f2,3); do
echo "========================================="
function=$(echo $api_endpoint | cut -d'/' -f5)
hostname=$(echo $api_endpoint | cut -d'/' -f3)
echo "*** Hitting the api endpoint: $api_endpoint ***"
curl --silent $api_endpoint # Invoke it to prime it
echo ""
echo "K6 now testing the Hostname: $hostname and Function: $function"
echo ""
# OR with true to force a zero exit code, since k6 exits non-zero when thresholds (e.g. the error rate limit) are crossed.
FUNCTION=$function NUM_USERS=$NUM_OF_USERS TEST_TIME=10s PROTOCOL=https HOSTNAME=$hostname \
./k6 run --summary-export=$RunOutputFolder/$function $K6_TEST_SCRIPT \
--quiet --no-usage-report --discard-response-bodies || true
sleep 2
done
- name: Process test results
if: ${{ github.event.inputs.process_results == '' || github.event.inputs.process_results == 'true' }}
env:
SKU: ${{ matrix.sku }}
REGION: ${{ matrix.region }}
run: |
cd $GITHUB_WORKSPACE
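# get_host_version_installed calls the ARM "host/default/properties/status" endpoint
# for the function app (using a token from "az account get-access-token") and returns
# properties.version, i.e. the Functions host version, which becomes the badge value.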
get_host_version_installed() {
az account get-access-token > access.json
local sub=`node -e "const fs = require('fs'); const data = fs.readFileSync('access.json'); const access = JSON.parse(data); console.log(access.subscription)"`
local at=`node -e "const fs = require('fs'); const data = fs.readFileSync('access.json'); const access = JSON.parse(data); console.log(access.accessToken)"`
rm access.json
local url="https://management.azure.com/subscriptions/$sub/resourceGroups/$RESOURCE_GROUP_NAME/providers/Microsoft.Web/sites/$FUNCTION_APP_NAME/host/default/properties/status?api-version=2018-11-01"
local authtoken="Authorization: Bearer $at"
curl --silent --location -H "Content-Type: application/json" -H "$authtoken" -X GET $url > hostinfo.json
local hostversion=`node -e "const fs = require('fs'); const data = fs.readFileSync('hostinfo.json'); const hostinfo = JSON.parse(data); console.log(hostinfo.properties.version)"`
rm hostinfo.json
echo "$hostversion"
}
HOST_VERSION=$(get_host_version_installed)
generate_badge() {
# Parameters:
# text $1
# success state (pass/fail) $2
# color $3
# badge filename $4
# folder to write in $5
# badge_format - functionname_language_version-(passed|failed)
# eg: https://img.shields.io/badge/asynchttptriggernoop_python_3.8-passed-brightgreen.svg
curl --silent https://img.shields.io/badge/$1-$2-$3.svg > $5/$4.svg
echo "$5/$4.svg"
}
upload_badge() {
# args: badge_filename blob_name storage_account_name container_name
if [ -z "$1" ]; then
echo "badge_filename argument is empty; skipping upload"
else
az storage blob upload --no-progress \
--account-name $3 --account-key $storage_account_key \
--container-name $4 --content-cache-control "no-cache" \
--file $1 --name $2
fi
}
process_results() {
cd $GITHUB_WORKSPACE/$RunOutputFolder
mkdir -p $BADGE_FOLDER_NAME
local -i count=0
local badge_filename=""
for functionsummaryfile in $(ls -p | grep -v /); do
local badge_id="$functionsummaryfile"
local LANG=${{ env.LANGUAGE || env.DEFAULT_DEPLOYMENT_LANGUAGE }}
local LANG_VER=${{ env.LANGUAGE_VERSION || env.DEFAULT_DEPLOYMENT_LANGUAGE_VERSION }}
local blob_name="$SKU$UNDER$REGION$UNDER$functionsummaryfile$UNDER$LANG$UNDER$LANG_VER"
if [ "$(node -e "const fs = require('fs'); const data = fs.readFileSync('$functionsummaryfile'); const summary = JSON.parse(data); console.log(summary.root_group.checks['status is 200'].fails)")" -eq "0" ]; then
echo "$functionsummaryfile ran successfully | Generating a new badge https://img.shields.io/badge/$badge_id-$HOST_VERSION-$PASS_COLOR.svg and will save to $BADGE_FOLDER_NAME/$functionsummaryfile.svg"
badge_filename=$(generate_badge $badge_id $HOST_VERSION $PASS_COLOR $functionsummaryfile $BADGE_FOLDER_NAME)
else
echo "$functionsummaryfile failed | Generating a new badge https://img.shields.io/badge/$badge_id-$HOST_VERSION-$FAIL_COLOR.svg and will save to $BADGE_FOLDER_NAME/$functionsummaryfile.svg"
count+=1
badge_filename=$(generate_badge $badge_id $HOST_VERSION $FAIL_COLOR $functionsummaryfile $BADGE_FOLDER_NAME)
fi
upload_badge $badge_filename $blob_name $BADGES_STORAGE_ACCOUNT_NAME $BADGES_SCENARIO_STORAGE_CONTAINER_NAME
done
echo "count: $count"
local overall="Overall"
local badge_filename=""
local badge_id="$overall"
local blob_name="$SKU$UNDER$REGION$UNDER$overall"
if [ $count -eq "0" ]; then
# SKU Region Region Host_version
echo "$overall passed | Generating a new badge https://img.shields.io/badge/$badge_id-$HOST_VERSION-$PASS_COLOR.svg and will save to $BADGE_FOLDER_NAME/$overall.svg"
badge_filename=$(generate_badge $badge_id $HOST_VERSION $PASS_COLOR $overall $BADGE_FOLDER_NAME)
elif [ $count -lt 4 ]; then
echo "$overall partially failed | Generating a new badge https://img.shields.io/badge/$badge_id-$HOST_VERSION-$PARTIAL_PASS_COLOR.svg and will save to $BADGE_FOLDER_NAME/$overall.svg"
badge_filename=$(generate_badge $badge_id $HOST_VERSION $PARTIAL_PASS_COLOR $overall $BADGE_FOLDER_NAME)
else
echo "$overall failed | Generating a new badge https://img.shields.io/badge/$badge_id-$HOST_VERSION-$FAIL_COLOR.svg and will save to $BADGE_FOLDER_NAME/$overall.svg"
badge_filename=$(generate_badge $badge_id $HOST_VERSION $FAIL_COLOR $overall $BADGE_FOLDER_NAME)
fi
upload_badge $badge_filename $blob_name $BADGES_STORAGE_ACCOUNT_NAME $BADGES_OVERALL_STORAGE_CONTAINER_NAME
local test_date=$(date -u | sed 's/ /\%20/g') # URL-encode the date for shields.io
local message="TestTimeUTC"
local blob_name="$SKU$UNDER$REGION$UNDER$message"
local badge_id="$message"
badge_filename=$(generate_badge $badge_id $test_date black $overall $BADGE_FOLDER_NAME)
upload_badge $badge_filename $blob_name $BADGES_STORAGE_ACCOUNT_NAME $BADGES_TIMEUTC_STORAGE_CONTAINER_NAME
local test_date=$(date -u | sed 's/ /\%20/g') # URL-encode the date for shields.io
local message="LastSuccessfulRunNumber"
local blob_name="$SKU$UNDER$REGION$UNDER$message"
local badge_id="$message"
badge_filename=$(generate_badge $badge_id ${{ github.run_number }} white $overall $BADGE_FOLDER_NAME)
upload_badge $badge_filename $blob_name $BADGES_STORAGE_ACCOUNT_NAME $BADGES_TIMEUTC_STORAGE_CONTAINER_NAME
}
process_results
cd $GITHUB_WORKSPACE/$RunOutputFolder
archivename=$(date +"Run_${{ github.run_number }}-$SKU-$REGION-%Y%m%d-%H%M").tar.gz
tar -czvf $GITHUB_WORKSPACE/$archivename .
echo $GITHUB_WORKSPACE
ls -la $GITHUB_WORKSPACE
- name: Upload a Build Artifact
uses: actions/upload-artifact@v2.2.1
with:
# Artifact name
name: RawTestRunArchive
# A file, directory or wildcard pattern that describes what to upload
path: |
${{ github.workspace }}/*.tar.gz
# The desired behavior if no files are found using the provided path.
if-no-files-found: warn
# Duration after which artifact will expire in days. 0 means using default retention.
retention-days: 30
cleanup:
env:
RESOURCE_GROUP_NAME: "ogf_test_run_${{ matrix.sku }}_${{ matrix.region }}"
runs-on: ubuntu-latest
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
region: [centraluseuap, westcentralus, westus2, northcentralus, westeurope, eastus, southeastasia, uksouth, westus, southcentralus, northeurope, japaneast]
sku: [cons, ep1, p1v2]
# https://docs.microsoft.com/en-us/azure/azure-functions/functions-premium-plan#region-max-scale-out
exclude:
- region: westcentralus
sku: cons
if: always()
needs: test
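# "if: always()" together with "needs: test" makes this matrix run after the test
# job for every region/SKU combination, even when the tests fail, so the resource
# groups are always deleted.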
steps:
- name: Azure Login
uses: azure/login@v1
with:
creds: ${{ secrets.AZURE_CREDENTIALS }}
- name: Cleaning up the resource group
run: |
cleanup() {
az group delete --name $RESOURCE_GROUP_NAME --yes
}
cleanup