chore(deps): update all dependencies #38
name: build-test-release
on:
  workflow_call:
    inputs:
      marker:
        required: false
        description: 'Parallel run marker'
        type: string
        default: >-
          [""]
      k8s-environment:
        required: false
        description: Specifies which environment to use for k8s testing. ["production", "staging"]
        type: string
        default: "production"
      k8s-manifests-branch:
        required: false
        description: "branch for k8s manifests to run the tests on"
        type: string
        default: "main"
    secrets:
      GH_TOKEN_ADMIN:
        description: Github admin token
        required: true
      SEMGREP_PUBLISH_TOKEN:
        description: Semgrep token
        required: true
      AWS_ACCESS_KEY_ID:
        description: AWS access key id
        required: true
      AWS_DEFAULT_REGION:
        description: AWS default region
        required: true
      AWS_SECRET_ACCESS_KEY:
        description: AWS secret access key
        required: true
      OTHER_TA_REQUIRED_CONFIGS:
        description: other required configs
        required: true
      FOSSA_API_KEY:
        description: API token for FOSSA app
        required: true
      SA_GH_USER_NAME:
        description: GPG signature username
        required: true
      SA_GH_USER_EMAIL:
        description: GPG signature user email
        required: true
      SA_GPG_PRIVATE_KEY:
        description: GPG signature private key
        required: true
      SA_GPG_PASSPHRASE:
        description: GPG signature passphrase
        required: true
      SPL_COM_USER:
        description: username to splunk.com
        required: true
      SPL_COM_PASSWORD:
        description: password to splunk.com
        required: true
permissions:
  contents: read
  packages: read
concurrency:
  group: ${{ github.head_ref || github.run_id }}
  cancel-in-progress: true
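# Note: with this concurrency group, a new run for the same PR branch cancels
# the in-progress one; non-PR runs have an empty head_ref and fall back to the
# unique run_id, so they are never grouped together.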
jobs:
  setup-workflow:
    runs-on: ubuntu-latest
    outputs:
      skip-workflow: ${{ steps.skip-workflow.outputs.skip-workflow }}
      delay-destroy-ko: ${{ steps.delay-destroy-setup.outputs.delay-destroy-ko }}
      delay-destroy-ui: ${{ steps.delay-destroy-setup.outputs.delay-destroy-ui }}
      delay-destroy-modinput_functional: ${{ steps.delay-destroy-setup.outputs.delay-destroy-modinput_functional }}
      delay-destroy-scripted_inputs: ${{ steps.delay-destroy-setup.outputs.delay-destroy-scripted_inputs }}
      delay-destroy-requirement_test: ${{ steps.delay-destroy-setup.outputs.delay-destroy-requirement_test }}
      execute-ko: ${{ steps.delay-destroy-setup.outputs.execute-ko }}
      execute-ui: ${{ steps.delay-destroy-setup.outputs.execute-ui }}
      execute-modinput_functional: ${{ steps.delay-destroy-setup.outputs.execute-modinput_functional }}
      execute-scripted_inputs: ${{ steps.delay-destroy-setup.outputs.execute-scripted_inputs }}
      execute-requirement_test: ${{ steps.delay-destroy-setup.outputs.execute-requirement_test }}
      execute-knowledge-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_knowledge_labeled }}
      execute-ui-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_ui_labeled }}
      execute-modinput-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_modinput_functional_labeled }}
      execute-scripted_inputs-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_scripted_inputs_labeled }}
      execute-requirement-labeled: ${{ steps.configure-tests-on-labels.outputs.execute_requirement_test_labeled }}
      s3_bucket_k8s: ${{ steps.k8s-environment.outputs.s3_bucket }}
      argo_server_domain_k8s: ${{ steps.k8s-environment.outputs.argo_server_domain }}
      argo_token_secret_id_k8s: ${{ steps.k8s-environment.outputs.argo_token_secret_id }}
    steps:
      - name: set k8s environment
        id: k8s-environment
        run: |
          if [[ ${{ inputs.k8s-environment }} == 'staging' ]]; then
            echo "setting up argo variables for staging"
            {
              echo "s3_bucket=ta-staging-artifacts"
              echo "argo_server_domain=argo.staging.wfe.splgdi.com"
              echo "argo_token_secret_id=ta-staging-github-workflow-automation-token"
            } >> "$GITHUB_OUTPUT"
          else
            echo "setting up argo variables for production"
            {
              echo "s3_bucket=ta-production-artifacts"
              echo "argo_server_domain=argo.wfe.splgdi.com"
              echo "argo_token_secret_id=ta-github-workflow-automation-token"
            } >> "$GITHUB_OUTPUT"
          fi
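      # Note: values written to "$GITHUB_OUTPUT" above become step outputs,
      # which the job-level outputs mapping at the top of this job re-exports;
      # downstream jobs then read, for example,
      # ${{ needs.setup-workflow.outputs.s3_bucket_k8s }}.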
      - name: skip workflow if description is empty for labeled pr
        id: skip-workflow
        env:
          PR_BODY: ${{ github.event.pull_request.body }}
        run: |
          set +e
          TESTSET="knowledge ui modinput_functional scripted_inputs requirement_test"
          echo "testset=$TESTSET" >> "$GITHUB_OUTPUT"
          SKIP_WORKFLOW="No"
          if [[ '${{ github.event.action }}' == 'labeled' && '${{ github.event.label.name }}' == 'preserve_infra' ]]; then
            echo "$PR_BODY" >> body.txt
            SKIP_WORKFLOW="Yes"
            tests=$(grep -i "^preserve:" body.txt | { grep -v grep || true; })
            for test_type in $TESTSET; do
              if [[ $tests =~ $test_type ]]; then
                SKIP_WORKFLOW="No"
              fi
            done
          fi
          echo "skip-workflow=$SKIP_WORKFLOW" >> "$GITHUB_OUTPUT"
          if [ "$SKIP_WORKFLOW" == "Yes" ]; then
            echo "No description is provided with preserve infra label"
          fi
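      # Note: the step above expects the PR body to opt tests in with a
      # "preserve:" line. A minimal sketch of such a PR description:
      #
      #   preserve: ui, modinput_functional
      #
      # If the preserve_infra label is applied without any such line, the
      # whole workflow is skipped.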
      - name: setup for delay destroy
        id: delay-destroy-setup
        shell: bash
        env:
          PR_BODY: ${{ github.event.pull_request.body }}
        run: |
          set +e
          TESTSET="${{ steps.skip-workflow.outputs.testset }}"
          for test_type in $TESTSET; do
            eval DELAY_DESTROY_$test_type="No"
            eval EXECUTE_$test_type="No"
          done
          if [[ '${{ github.event.label.name }}' == 'preserve_infra' ]]; then
            echo "$PR_BODY" >> body.txt
            tests=$(grep -i "^preserve:" body.txt | { grep -v grep || true; })
            for test_type in $TESTSET; do
              if [[ $tests =~ $test_type ]]; then
                eval EXECUTE_$test_type="Yes"
                eval DELAY_DESTROY_$test_type="Yes"
              fi
            done
          fi
          {
            echo "delay-destroy-ko=$DELAY_DESTROY_knowledge"
            echo "delay-destroy-ui=$DELAY_DESTROY_ui"
            echo "delay-destroy-modinput_functional=$DELAY_DESTROY_modinput_functional"
            echo "delay-destroy-scripted_inputs=$DELAY_DESTROY_scripted_inputs"
            echo "delay-destroy-requirement_test=$DELAY_DESTROY_requirement_test"
            echo "execute-ko=$EXECUTE_knowledge"
            echo "execute-ui=$EXECUTE_ui"
            echo "execute-modinput_functional=$EXECUTE_modinput_functional"
            echo "execute-scripted_inputs=$EXECUTE_scripted_inputs"
            echo "execute-requirement_test=$EXECUTE_requirement_test"
          } >> "$GITHUB_OUTPUT"
      - name: configure tests based on labels
        id: configure-tests-on-labels
        run: |
          set +e
          declare -A EXECUTE_LABELED
          TESTSET=("execute_knowledge" "execute_ui" "execute_modinput_functional" "execute_scripted_inputs" "execute_requirement_test")
          for test_type in "${TESTSET[@]}"; do
            EXECUTE_LABELED["$test_type"]="false"
          done
          case "${{ github.event_name }}" in
            "pull_request")
              labels=$(echo '${{ toJSON(github.event.pull_request.labels) }}' | jq -r '.[] | .name')
              if ${{ github.base_ref == 'main' }} && ${{ contains(github.event.pull_request.labels.*.name, 'use_labels') }}; then
                for test_type in "${TESTSET[@]}"; do
                  if [[ "$labels" =~ $test_type ]]; then
                    EXECUTE_LABELED["$test_type"]="true"
                  fi
                done
              elif ${{ github.base_ref == 'main' }} || ${{ contains(github.event.pull_request.labels.*.name, 'execute_all_tests') }}; then
                for test_type in "${TESTSET[@]}"; do
                  EXECUTE_LABELED["$test_type"]="true"
                done
              else
                for test_type in "${TESTSET[@]}"; do
                  if [[ "$labels" =~ $test_type ]]; then
                    EXECUTE_LABELED["$test_type"]="true"
                  fi
                done
              fi
              ;;
            "push")
              if ${{ github.ref_name == 'main' }} || ${{ github.ref_name == 'develop' }} || ${{ github.ref_type == 'tag' }}; then
                for test_type in "${TESTSET[@]}"; do
                  EXECUTE_LABELED["$test_type"]="true"
                done
              fi
              ;;
            "schedule")
              for test_type in "${TESTSET[@]}"; do
                EXECUTE_LABELED["$test_type"]="true"
              done
              ;;
            *)
              echo "No tests were labeled for execution!"
              ;;
          esac
          echo "Tests to execute based on labels:"
          for test_type in "${TESTSET[@]}"; do
            echo "$test_type""_labeled=${EXECUTE_LABELED["$test_type"]}" >> "$GITHUB_OUTPUT"
            echo "$test_type""_labeled: ${EXECUTE_LABELED["$test_type"]}"
          done
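      # Note: the ${{ ... }} expressions embedded in the shell above render to
      # the literal strings "true"/"false" before the script runs, so a line
      # like `if ${{ github.base_ref == 'main' }} && ...` executes the shell
      # builtins true/false. For example, a PR into main carrying the labels
      # use_labels + execute_ui would enable only the UI tests.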
  validate-pr-title:
    name: Validate PR title
    needs:
      - setup-workflow
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' && github.event_name == 'pull_request' }}
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: read
      pull-requests: read
      statuses: write
    steps:
      - uses: amannn/action-semantic-pull-request@v5.4.0
        with:
          wip: true
          validateSingleCommit: true
        env:
          GITHUB_TOKEN: ${{ github.token }}
  meta:
    runs-on: ubuntu-latest
    needs:
      - setup-workflow
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' }}
    outputs:
      sc4s: ghcr.io/${{ github.repository }}/container:${{ fromJSON(steps.docker_action_meta.outputs.json).labels['org.opencontainers.image.version'] }}
      container_tags: ${{ steps.docker_action_meta.outputs.tags }}
      container_labels: ${{ steps.docker_action_meta.outputs.labels }}
      container_buildtime: ${{ fromJSON(steps.docker_action_meta.outputs.json).labels['org.opencontainers.image.created'] }}
      container_version: ${{ fromJSON(steps.docker_action_meta.outputs.json).labels['org.opencontainers.image.version'] }}
      container_revision: ${{ fromJSON(steps.docker_action_meta.outputs.json).labels['org.opencontainers.image.revision'] }}
      container_base: ${{ fromJSON(steps.docker_action_meta.outputs.json).tags[0] }}
      matrix_supportedSplunk: ${{ steps.matrix.outputs.supportedSplunk }}
      matrix_latestSplunk: ${{ steps.matrix.outputs.latestSplunk }}
      matrix_combinedSplunkversion: ${{ steps.combined_Splunkmatrix.outputs.combinedSplunkversions }}
      matrix_supportedSC4S: ${{ steps.matrix.outputs.supportedSC4S }}
      matrix_supportedModinputFunctionalVendors: ${{ steps.matrix.outputs.supportedModinputFunctionalVendors }}
      matrix_supportedUIVendors: ${{ steps.matrix.outputs.supportedUIVendors }}
      python39_splunk: ${{ steps.python39_splunk.outputs.splunk }}
      python39_sc4s: ${{ steps.python39_splunk.outputs.sc4s }}
    permissions:
      contents: write
      packages: read
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: false
          persist-credentials: false
      - name: Semantic Release
        id: version
        uses: splunk/semantic-release-action@v1.3
        with:
          dry_run: true
          git_committer_name: ${{ secrets.SA_GH_USER_NAME }}
          git_committer_email: ${{ secrets.SA_GH_USER_EMAIL }}
          gpg_private_key: ${{ secrets.SA_GPG_PRIVATE_KEY }}
          passphrase: ${{ secrets.SA_GPG_PASSPHRASE }}
        env:
          GITHUB_TOKEN: ${{ github.token }}
      - name: Docker meta
        id: docker_action_meta
        uses: docker/metadata-action@v5.5.1
        with:
          images: ghcr.io/${{ github.repository }}/container
          tags: |
            type=sha,format=long
            type=sha
            type=semver,pattern={{version}},value=${{ steps.version.outputs.new_release_version }}
            type=semver,pattern={{major}},value=${{ steps.version.outputs.new_release_version }}
            type=semver,pattern={{major}}.{{minor}},value=${{ steps.version.outputs.new_release_version }}
            type=ref,event=branch
            type=ref,event=pr
      - name: matrix
        id: matrix
        uses: splunk/addonfactory-test-matrix-action@v1.13
      - name: python39_Splunk
        id: python39_splunk
        run: |
          echo "splunk={\"version\":\"unreleased-python3_9-7027496d63d8\", \"build\":\"7027496d63d8\", \"islatest\":false, \"isoldest\":false}" >> "$GITHUB_OUTPUT"
          echo "sc4s={\"version\":\"2.49.5\", \"docker_registry\":\"ghcr.io/splunk/splunk-connect-for-syslog/container2\"}" >> "$GITHUB_OUTPUT"
      - name: combined_Splunkmatrix
        id: combined_Splunkmatrix
        run: |
          combinedSplunkversions=$(echo '${{ steps.matrix.outputs.supportedSplunk }}' | jq --argjson A '${{ steps.python39_splunk.outputs.splunk }}' '. + [$A]')
          echo "combinedSplunkversions=$(echo "$combinedSplunkversions" | jq -c .)" >> "$GITHUB_OUTPUT"
  fossa-scan:
    runs-on: ubuntu-latest
    needs:
      - setup-workflow
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' }}
    steps:
      - uses: actions/checkout@v4
      - name: run fossa analyze and create report
        run: |
          curl -H 'Cache-Control: no-cache' https://raw.githubusercontent.com/fossas/fossa-cli/master/install-latest.sh | bash
          fossa analyze --debug
          fossa report attribution --format text --timeout 600 > /tmp/THIRDPARTY
        env:
          FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }}
      - name: upload THIRDPARTY file
        uses: actions/upload-artifact@v4
        with:
          name: THIRDPARTY
          path: /tmp/THIRDPARTY
  fossa-test:
    continue-on-error: true
    runs-on: ubuntu-latest
    needs:
      - fossa-scan
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' }}
    steps:
      - uses: actions/checkout@v4
      - name: run fossa test
        run: |
          curl -H 'Cache-Control: no-cache' https://raw.githubusercontent.com/fossas/fossa-cli/master/install-latest.sh | bash
          fossa test --debug
        env:
          FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }}
  compliance-copyrights:
    name: compliance-copyrights
    runs-on: ubuntu-latest
    needs:
      - setup-workflow
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: REUSE Compliance Check
        uses: fsfe/reuse-action@v1.1
  lint:
    runs-on: ubuntu-latest
    needs:
      - setup-workflow
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.7"
      - uses: pre-commit/action@v3.0.1
  review_secrets:
    name: security-detect-secrets
    runs-on: ubuntu-latest
    needs:
      - setup-workflow
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' }}
    steps:
      - name: Checkout
        if: github.event_name != 'pull_request'
        uses: actions/checkout@v4
        with:
          submodules: false
          fetch-depth: "0"
      - name: Checkout for PR
        if: github.event_name == 'pull_request'
        uses: actions/checkout@v4
        with:
          submodules: false
          fetch-depth: "0"
          ref: ${{ github.head_ref }}
      - name: Trufflehog Actions Scan
        uses: edplato/trufflehog-actions-scan@v0.9l-beta
        with:
          scanArguments: "--max_depth 5 -x .github/workflows/exclude-patterns.txt --allow .github/workflows/trufflehog-false-positive.json"
  semgrep:
    runs-on: ubuntu-latest
    name: security-sast-semgrep
    needs:
      - setup-workflow
    container:
      image: returntocorp/semgrep
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' }}
    steps:
      - uses: actions/checkout@v4
      - name: Semgrep
        id: semgrep
        run: semgrep ci
        env:
          SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_PUBLISH_TOKEN }}
  test-inventory:
    runs-on: ubuntu-latest
    needs: setup-workflow
    if: ${{ needs.setup-workflow.outputs.skip-workflow != 'Yes' }}
    # Map a step output to a job output
    outputs:
      unit: ${{ steps.testset.outputs.unit }}
      knowledge: ${{ steps.testset.outputs.knowledge }}
      ui: ${{ steps.testset.outputs.ui }}
      modinput_functional: ${{ steps.testset.outputs.modinput_functional }}
      requirement_test: ${{ steps.testset.outputs.requirement_test }}
      scripted_inputs: ${{ steps.testset.outputs.scripted_inputs }}
      ucc_modinput_functional: ${{ steps.modinput-version.outputs.ucc_modinput_tests }}
    steps:
      - uses: actions/checkout@v4
      - id: testset
        name: Check available test types
        run: |
          find tests -maxdepth 1 -mindepth 1 -type d | sed 's|^tests/||g' | while read -r TESTSET; do echo "$TESTSET=true" >> "$GITHUB_OUTPUT"; echo "$TESTSET::true"; done
      - id: modinput-version
        name: Check modinput tests version
        run: |
          CENTAURS_MODINPUT_TESTS_CHECK_DIR="tests/modinput_functional/centaurs"
          ucc_modinput_tests="true"
          if [ -d "$CENTAURS_MODINPUT_TESTS_CHECK_DIR" ]; then
            ucc_modinput_tests="false"
          fi
          echo "ucc_modinput_tests=$ucc_modinput_tests" >> "$GITHUB_OUTPUT"
  run-unit-tests:
    name: test-unit-python3-${{ matrix.python-version }}
    if: ${{ needs.test-inventory.outputs.unit == 'true' }}
    runs-on: ubuntu-latest
    needs:
      - test-inventory
    strategy:
      fail-fast: false
      matrix:
        python-version:
          - "3.7"
    permissions:
      actions: read
      deployments: read
      contents: read
      packages: read
      statuses: read
      checks: write
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup addon
        run: |
          if [ -f "poetry.lock" ]
          then
            mkdir -p package/lib || true
            pip install poetry==1.5.1 poetry-plugin-export==1.4.0
            poetry lock --check
            poetry export --without-hashes -o package/lib/requirements.txt
            poetry export --without-hashes --dev -o requirements_dev.txt
          fi
          if [ ! -f requirements_dev.txt ]; then echo "no requirements"; exit 0; fi
          git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf https://github.com
          git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf ssh://git@github.com
          pip install -r requirements_dev.txt
      - name: Create directories
        run: |
          mkdir -p /opt/splunk/var/log/splunk
          chmod -R 777 /opt/splunk/var/log/splunk
      - name: Copy pytest ini
        run: cp tests/unit/pytest-ci.ini pytest.ini
      - name: Run Pytest with coverage
        run: pytest --cov=./ --cov-report=xml --junitxml=test-results/junit.xml tests/unit
      - uses: actions/upload-artifact@v4
        if: success() || failure()
        with:
          name: test-results-unit-python_${{ matrix.python-version }}
          path: test-results/*
  run-unit-tests-3_9:
    name: test-unit-python3-${{ matrix.python-version }}
    if: ${{ needs.test-inventory.outputs.unit == 'true' }}
    runs-on: ubuntu-latest
    needs:
      - test-inventory
    strategy:
      fail-fast: false
      matrix:
        python-version:
          - "3.9"
    permissions:
      actions: read
      deployments: read
      contents: read
      packages: read
      statuses: read
      checks: write
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup addon
        run: |
          if [ -f "poetry.lock" ]
          then
            mkdir -p package/lib || true
            pip install poetry==1.5.1 poetry-plugin-export==1.4.0
            poetry lock --check
            poetry export --without-hashes -o package/lib/requirements.txt
            poetry export --without-hashes --dev -o requirements_dev.txt
          fi
          if [ ! -f requirements_dev.txt ]; then echo "no requirements"; exit 0; fi
          git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf https://github.com
          git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf ssh://git@github.com
          pip install -r requirements_dev.txt
      - name: Create directories
        run: |
          mkdir -p /opt/splunk/var/log/splunk
          chmod -R 777 /opt/splunk/var/log/splunk
      - name: Copy pytest ini
        run: cp tests/unit/pytest-ci.ini pytest.ini
      - name: Run Pytest with coverage
        run: pytest --cov=./ --cov-report=xml --junitxml=test-results/junit.xml tests/unit
      - uses: actions/upload-artifact@v4
        if: success() || failure()
        with:
          name: test-results-unit-python_${{ matrix.python-version }}
          path: test-results/*
  build:
    runs-on: ubuntu-latest
    needs:
      - setup-workflow
      - test-inventory
      - meta
      - compliance-copyrights
      - lint
      - review_secrets
      - semgrep
      - run-unit-tests
      - fossa-scan
    if: ${{ !cancelled() && (needs.run-unit-tests.result == 'success' || needs.run-unit-tests.result == 'skipped') }}
    outputs:
      buildname: ${{ steps.buildupload.outputs.name }}
    permissions:
      contents: write
      packages: read
    steps:
      - uses: actions/checkout@v4
        with:
          # Very important: semantic-release won't trigger a tagged
          # build if this is not set to false
          persist-credentials: false
      - name: Setup python
        uses: actions/setup-python@v5
        with:
          python-version: 3.7
      - name: create requirements file for pip
        run: |
          if [ -f "poetry.lock" ]
          then
            echo "poetry.lock found"
            sudo pip3 install poetry==1.5.1 poetry-plugin-export==1.4.0
            poetry export --without-hashes -o requirements.txt
            if [ "$(grep -cve '^\s*$' requirements.txt)" -ne 0 ]
            then
              echo "Prod dependencies were found, creating package/lib folder"
              mkdir -p package/lib || true
              mv requirements.txt package/lib
            else
              echo "No prod dependencies were found"
              rm requirements.txt
            fi
            poetry export --without-hashes --dev -o requirements_dev.txt
            cat requirements_dev.txt
          fi
      - name: Get pip cache dir
        id: pip-cache
        run: |
          echo "dir=$(pip cache dir)" >> "$GITHUB_OUTPUT"
      - name: Run Check there are libraries to scan
        id: checklibs
        run: if [ -f requirements_dev.txt ]; then echo "ENABLED=true" >> "$GITHUB_OUTPUT"; fi
      - name: pip cache
        if: ${{ steps.checklibs.outputs.ENABLED == 'true' }}
        uses: actions/cache@v4
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: ${{ runner.os }}-pip-${{ hashFiles('requirements_dev.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install deps
        if: ${{ steps.checklibs.outputs.ENABLED == 'true' }}
        run: |
          git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf https://github.com
          git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf ssh://git@github.com
          pip install -r requirements_dev.txt
      - name: Semantic Release Get Next
        id: semantic
        if: github.event_name != 'pull_request'
        uses: splunk/semantic-release-action@v1.3
        with:
          dry_run: true
          git_committer_name: ${{ secrets.SA_GH_USER_NAME }}
          git_committer_email: ${{ secrets.SA_GH_USER_EMAIL }}
          gpg_private_key: ${{ secrets.SA_GPG_PRIVATE_KEY }}
          passphrase: ${{ secrets.SA_GPG_PASSPHRASE }}
        env:
          GITHUB_TOKEN: ${{ github.token }}
      - name: Determine the version to build
        id: BuildVersion
        run: |
          INPUT_SEMVER="${{ steps.semantic.outputs.new_release_version }}"
          echo "Initial semver ${INPUT_SEMVER}"
          INPUT_PRNUMBER="${{ github.event.number }}"
          SEMVER_REGEX='^v?[0-9]+\.[0-9]+\.[0-9]+$'
          BETA_REGEX='^v?[0-9]+\.[0-9]+\.[0-9]+-beta\.[0-9]+$'
          echo "working with version $INPUT_SEMVER"
          if [[ $INPUT_SEMVER =~ $SEMVER_REGEX ]]
          then
            echo "using provided semver"
            VERSION=$INPUT_SEMVER
          elif [[ $INPUT_SEMVER =~ $BETA_REGEX ]]
          then
            VERSION=$(echo "$INPUT_SEMVER" | awk '{gsub("-beta\.", "-B");print}')
          else
            if [[ $GITHUB_EVENT_NAME != 'pull_request' ]]
            then
              echo "this is not a release build and not a PR: use run ID"
              VERSION=0.0.${GITHUB_RUN_ID}
            else
              echo "this is not a release build and is a PR: use PR number + run ID"
              VERSION=0.${INPUT_PRNUMBER}.${GITHUB_RUN_ID}
            fi
          fi
          FINALVERSION="${VERSION//v}"
          echo "Version to build is ${FINALVERSION}"
          echo "VERSION=${FINALVERSION}" >> "$GITHUB_OUTPUT"
      - name: Download THIRDPARTY
        if: github.event_name != 'pull_request' && github.event_name != 'schedule'
        uses: actions/download-artifact@v4
        with:
          name: THIRDPARTY
      - name: Download THIRDPARTY (Optional for PR and schedule)
        if: github.event_name == 'pull_request' || github.event_name == 'schedule'
        continue-on-error: true
        uses: actions/download-artifact@v4
        with:
          name: THIRDPARTY
      - name: Update Notices
        run: |
          cp -f THIRDPARTY package/THIRDPARTY || echo "THIRDPARTY file not found (allowed for PR and schedule)"
      - name: Build Package
        id: uccgen
        uses: splunk/addonfactory-ucc-generator-action@v2
        with:
          version: ${{ steps.BuildVersion.outputs.VERSION }}
      - name: Slim Package
        id: slim
        if: always()
        run: |
          pip install splunk-packaging-toolkit
          INPUT_SOURCE=${{ steps.uccgen.outputs.OUTPUT }}
          SOURCE_REGEX='^.*/$'
          if [[ $INPUT_SOURCE =~ $SOURCE_REGEX ]]; then
            echo "Removing trailing / from INPUT_SOURCE (slim is picky)"
            INPUT_SOURCE=$(echo "$INPUT_SOURCE" | sed 's/\(.*\)\//\1/')
          fi
          mkdir -p build/package/splunkbase
          slim package -o build/package/splunkbase "${INPUT_SOURCE}"
          for f in build/package/splunkbase/*.tar.gz; do
            n=$(echo "${f}" | awk '{gsub("-[0-9]+.[0-9]+.[0-9]+-[a-f0-9]+-?", "");print}' | sed 's/.tar.gz/.spl/')
            mv "${f}" "${n}"
          done
          PACKAGE=$(ls build/package/splunkbase/*)
          slim validate "${PACKAGE}"
          chmod -R +r build
          echo "OUTPUT=$PACKAGE" >> "$GITHUB_OUTPUT"
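      # Note: the rename loop above strips the "-<semver>-<commit-sha>" suffix
      # that slim puts into the archive name and swaps .tar.gz for .spl, e.g.
      # (illustrative) Splunk_TA_example-1.2.3-abc123f.tar.gz ->
      # Splunk_TA_example.spl.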
      - name: artifact-openapi
        uses: actions/upload-artifact@v4
        with:
          name: artifact-openapi
          path: ${{ github.workspace }}/${{ steps.uccgen.outputs.OUTPUT }}/appserver/static/openapi.json
        if: ${{ !cancelled() && needs.test-inventory.outputs.ucc_modinput_functional == 'true' && needs.test-inventory.outputs.modinput_functional == 'true' }}
      - name: artifact-splunk-base
        uses: actions/upload-artifact@v4
        with:
          name: package-splunkbase
          path: ${{ steps.slim.outputs.OUTPUT }}
        if: ${{ !cancelled() }}
      - name: upload-build-to-s3
        id: buildupload
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
        run: |
          echo "name=$(basename "${{ steps.slim.outputs.OUTPUT }}")" >> "$GITHUB_OUTPUT"
          basename "${{ steps.slim.outputs.OUTPUT }}"
          aws s3 cp "${{ steps.slim.outputs.OUTPUT }}" "s3://${{ needs.setup-workflow.outputs.s3_bucket_k8s }}/ta-apps/"
  build-3_9:
    runs-on: ubuntu-latest
    needs:
      - setup-workflow
      - test-inventory
      - meta
      - compliance-copyrights
      - lint
      - review_secrets
      - semgrep
      - run-unit-tests-3_9
      - fossa-scan
    if: |
      always() &&
      (needs.run-unit-tests-3_9.result == 'success' || needs.run-unit-tests-3_9.result == 'skipped')
    permissions:
      contents: write
      packages: read
    steps:
      - uses: actions/checkout@v4
        with:
          # Very important: semantic-release won't trigger a tagged
          # build if this is not set to false
          persist-credentials: false
      - name: Setup python
        uses: actions/setup-python@v5
        with:
          python-version: 3.9
      - name: create requirements file for pip
        run: |
          if [ -f "poetry.lock" ]
          then
            echo "poetry.lock found"
            sudo pip3 install poetry==1.5.1 poetry-plugin-export==1.4.0
            poetry export --without-hashes -o requirements.txt
            if [ "$(grep -cve '^\s*$' requirements.txt)" -ne 0 ]
            then
              echo "Prod dependencies were found, creating package/lib folder"
              mkdir -p package/lib || true
              mv requirements.txt package/lib
            else
              echo "No prod dependencies were found"
              rm requirements.txt
            fi
            poetry export --without-hashes --dev -o requirements_dev.txt
            cat requirements_dev.txt
          fi
      - id: pip-cache
        run: |
          echo "dir=$(pip cache dir)" >> "$GITHUB_OUTPUT"
      - name: pip cache
        uses: actions/cache@v4
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: ${{ runner.os }}-pip-python3_9-${{ hashFiles('requirements_dev.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-python3_9
      - run: |
          git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf https://github.com
          git config --global --add url."https://${{ secrets.GH_TOKEN_ADMIN }}@github.com".insteadOf ssh://git@github.com
          pip install -r requirements_dev.txt
      - id: semantic
        if: github.event_name != 'pull_request'
        uses: splunk/semantic-release-action@v1.3
        with:
          dry_run: true
          git_committer_name: ${{ secrets.SA_GH_USER_NAME }}
          git_committer_email: ${{ secrets.SA_GH_USER_EMAIL }}
          gpg_private_key: ${{ secrets.SA_GPG_PRIVATE_KEY }}
          passphrase: ${{ secrets.SA_GPG_PASSPHRASE }}
        env:
          GITHUB_TOKEN: ${{ github.token }}
      - id: BuildVersion
        run: |
          INPUT_SEMVER="${{ steps.semantic.outputs.new_release_version }}"
          echo "Initial semver ${INPUT_SEMVER}"
          INPUT_PRNUMBER="${{ github.event.number }}"
          SEMVER_REGEX='^v?[0-9]+\.[0-9]+\.[0-9]+$'
          BETA_REGEX='^v?[0-9]+\.[0-9]+\.[0-9]+-beta\.[0-9]+$'
          echo "working with version $INPUT_SEMVER"
          if [[ $INPUT_SEMVER =~ $SEMVER_REGEX ]]
          then
            echo "using provided semver"
            VERSION=$INPUT_SEMVER
          elif [[ $INPUT_SEMVER =~ $BETA_REGEX ]]
          then
            VERSION=$(echo "$INPUT_SEMVER" | awk '{gsub("-beta\.", "-B");print}')
          else
            if [[ $GITHUB_EVENT_NAME != 'pull_request' ]]
            then
              echo "this is not a release build and not a PR: use run ID"
              VERSION=0.0.${GITHUB_RUN_ID}
            else
              echo "this is not a release build and is a PR: use PR number + run ID"
              VERSION=0.${INPUT_PRNUMBER}.${GITHUB_RUN_ID}
            fi
          fi
          FINALVERSION="${VERSION//v}"
          echo "Version to build is $FINALVERSION"
          echo "VERSION=$FINALVERSION" >> "$GITHUB_OUTPUT"
      - id: uccgen
        uses: splunk/addonfactory-ucc-generator-action@v2
        with:
          version: ${{ steps.BuildVersion.outputs.VERSION }}
  run-requirements-unit-tests:
    runs-on: ubuntu-latest
    needs:
      - build
      - test-inventory
    if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.requirement_test == 'true' }}
    permissions:
      actions: read
      deployments: read
      contents: read
      packages: read
      statuses: read
      checks: write
    steps:
      - uses: actions/checkout@v4
      - name: Install Python 3
        uses: actions/setup-python@v5
        with:
          python-version: 3.7
      - name: run-tests
        uses: splunk/addonfactory-workflow-requirement-files-unit-tests@v1.4
        with:
          input-files: tests/requirement_test/logs
      - name: Archive production artifacts
        if: ${{ !cancelled() }}
        uses: actions/upload-artifact@v4
        with:
          name: test-results
          path: |
            test_*.txt
  appinspect:
    name: quality-appinspect-${{ matrix.tags }}
    needs: build
    if: ${{ !cancelled() && needs.build.result == 'success' }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        tags:
          - "cloud"
          - "appapproval"
          - "deprecated_feature"
          - "developer_guidance"
          - "future"
          - "self-service"
          - "splunk_appinspect"
          - "manual"
    steps:
      - uses: actions/checkout@v4
      - uses: actions/download-artifact@v4
        with:
          name: package-splunkbase
          path: build/package/
      - name: Scan
        uses: splunk/appinspect-cli-action@v2.6
        with:
          app_path: build/package/
          included_tags: ${{ matrix.tags }}
          result_file: appinspect_result_${{ matrix.tags }}.json
      - name: upload-appinspect-report
        if: ${{ !cancelled() }}
        uses: actions/upload-artifact@v4
        with:
          name: appinspect_${{ matrix.tags }}_checks.json
          path: appinspect_result_${{ matrix.tags }}.json
      - name: upload-markdown
        if: matrix.tags == 'manual'
        uses: actions/upload-artifact@v4
        with:
          name: check_markdown
          path: |
            *_markdown.txt
  appinspect-api:
    name: appinspect api ${{ matrix.tags }}
    needs: build
    if: |
      !cancelled() &&
      needs.build.result == 'success' &&
      ( github.base_ref == 'main' || github.ref_name == 'main' )
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        tags:
          - "cloud"
    steps:
      - uses: actions/checkout@v4
      - uses: actions/download-artifact@v4
        with:
          name: package-splunkbase
          path: build/package
      - name: AppInspect API
        uses: splunk/appinspect-api-action@v3.0
        with:
          username: ${{ secrets.SPL_COM_USER }}
          password: ${{ secrets.SPL_COM_PASSWORD }}
          app_path: build/package/
          included_tags: ${{ matrix.tags }}
      - uses: actions/upload-artifact@v4
        if: always()
        with:
          name: appinspect-api-html-report-${{ matrix.tags }}
          path: AppInspect_response.html
  setup:
    needs:
      - setup-workflow
      - build
      - test-inventory
    if: ${{ !cancelled() && needs.build.result == 'success' }}
    runs-on: ubuntu-latest
    outputs:
      argo-server: ${{ steps.test-setup.outputs.argo-server }}
      argo-http1: ${{ steps.test-setup.outputs.argo-http1 }}
      argo-secure: ${{ steps.test-setup.outputs.argo-secure }}
      spl-host-suffix: ${{ steps.test-setup.outputs.spl-host-suffix }}
      argo-href: ""
      argo-base-href: ${{ steps.test-setup.outputs.argo-base-href }}
      argo-workflow-tmpl-name: ${{ steps.test-setup.outputs.argo-workflow-tmpl-name }}
      argo-cancel-workflow-tmpl-name: ${{ steps.test-setup.outputs.argo-cancel-workflow-tmpl-name }}
      k8s-manifests-branch: ${{ steps.test-setup.outputs.k8s-manifests-branch }}
      argo-namespace: ${{ steps.test-setup.outputs.argo-namespace }}
      addon-name: ${{ steps.test-setup.outputs.addon-name }}
      job-name: ${{ steps.test-setup.outputs.job-name }}
      labels: ${{ steps.test-setup.outputs.labels }}
      addon-upload-path: ${{ steps.test-setup.outputs.addon-upload-path }}
      directory-path: ${{ steps.test-setup.outputs.directory-path }}
      s3-bucket: ${{ steps.test-setup.outputs.s3-bucket }}
    env:
      BUILD_NAME: ${{ needs.build.outputs.buildname }}
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
          token: ${{ secrets.GH_TOKEN_ADMIN }}
      - name: setup for test
        id: test-setup
        shell: bash
        run: |
          sudo apt-get install -y crudini
          ADDON_NAME=$(crudini --get package/default/app.conf id name | tr '[:lower:]' '[:upper:]')
          if [[ -n $(echo "${ADDON_NAME}" | awk -F 'SPLUNK_TA_' '{print $2}') ]]
          then
            ADDON_NAME=$(echo "${ADDON_NAME}" | awk -F 'SPLUNK_TA_' '{print $2}')
          elif [[ -n $(echo "${ADDON_NAME}" | awk -F '_FOR_SPLUNK' '{print $1}') ]]
          then
            ADDON_NAME=$(echo "${ADDON_NAME}" | awk -F '_FOR_SPLUNK' '{print $1}')
          fi
          JOB_NAME=$(echo "$ADDON_NAME" | tail -c 16)-$(echo "${GITHUB_SHA}" | tail -c 8)-TEST-TYPE-${GITHUB_RUN_ID}
          JOB_NAME=${JOB_NAME//[_.]/-}
          LABELS="addon-name=${ADDON_NAME}"
          ADDON_UPLOAD_PATH="s3://${{ needs.setup-workflow.outputs.s3_bucket_k8s }}/ta-apps/${{ needs.build.outputs.buildname }}"
          {
            echo "argo-server=${{ needs.setup-workflow.outputs.argo_server_domain_k8s }}:443"
            echo "argo-http1=true"
            echo "argo-secure=true"
            echo "argo-base-href=\'\'"
            echo "argo-namespace=workflows"
            echo "argo-workflow-tmpl-name=ta-workflow"
            echo "argo-cancel-workflow-tmpl-name=cancel-workflow"
            echo "directory-path=/tmp"
            echo "s3-bucket=${{ needs.setup-workflow.outputs.s3_bucket_k8s }}"
            echo "addon-name=\"$ADDON_NAME\""
            echo "job-name=wf-$JOB_NAME"
            echo "labels=$LABELS"
            echo "addon-upload-path=$ADDON_UPLOAD_PATH"
            echo "spl-host-suffix=wfe.splgdi.com"
            echo "k8s-manifests-branch=${{ inputs.k8s-manifests-branch }}"
          } >> "$GITHUB_OUTPUT"
      - uses: actions/download-artifact@v4
        if: ${{ needs.test-inventory.outputs.ucc_modinput_functional == 'true' && needs.test-inventory.outputs.modinput_functional == 'true' }}
        id: download-openapi
        with:
          name: artifact-openapi
          path: ${{ github.workspace }}
      - name: Setup python
        if: steps.download-openapi.conclusion != 'skipped'
        uses: actions/setup-python@v5
        with:
          python-version: 3.7
      - name: modinput-test-prerequisites
        if: steps.download-openapi.conclusion != 'skipped'
        shell: bash
        env:
          PYTHON_KEYRING_BACKEND: keyring.backends.null.Keyring
        run: |
          sudo pip3 install poetry==1.5.1
          export POETRY_REPOSITORIES_SPLUNK_ADD_ON_UCC_MODINPUT_TEST_URL=https://github.com/splunk/addonfactory-ucc-test.git
          export POETRY_HTTP_BASIC_SPLUNK_ADD_ON_UCC_MODINPUT_TEST_USERNAME=${{ secrets.SA_GH_USER_NAME }}
          export POETRY_HTTP_BASIC_SPLUNK_ADD_ON_UCC_MODINPUT_TEST_PASSWORD=${{ secrets.GH_TOKEN_ADMIN }}
          poetry install --only modinput
          poetry run ucc-test-modinput -o ${{ steps.download-openapi.outputs.download-path }}/openapi.json -t ${{ steps.download-openapi.outputs.download-path }}/tmp/
      - name: upload-swagger-artifacts-to-s3
        if: steps.download-openapi.conclusion != 'skipped'
        id: swaggerupload
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
        run: |
          swagger_name=swagger_$(basename "$BUILD_NAME" .spl)
          aws s3 sync "${{ steps.download-openapi.outputs.download-path }}/tmp/restapi_client/" "s3://${{ needs.setup-workflow.outputs.s3_bucket_k8s }}/ta-apps/$swagger_name/" --exclude "*" --include "README.md" --include "*swagger_client*" --only-show-errors
  run-knowledge-tests:
    if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.knowledge == 'true' && (needs.setup-workflow.outputs.execute-ko == 'Yes' || needs.setup-workflow.outputs.execute-knowledge-labeled == 'true') }}
    needs:
      - build
      - test-inventory
      - setup
      - meta
      - setup-workflow
    runs-on: ubuntu-latest
    continue-on-error: ${{ matrix.python39 }}
    strategy:
      fail-fast: false
      matrix:
        splunk: ${{ fromJson(needs.meta.outputs.matrix_supportedSplunk) }}
        sc4s: ${{ fromJson(needs.meta.outputs.matrix_supportedSC4S) }}
        python39: [false]
        include:
          - splunk: ${{ fromJson(needs.meta.outputs.python39_splunk) }}
            sc4s: ${{ fromJson(needs.meta.outputs.python39_sc4s) }}
            python39: true
    container:
      image: ghcr.io/splunk/workflow-engine-base:2.0.12
    env:
      ARGO_SERVER: ${{ needs.setup.outputs.argo-server }}
      ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }}
      ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }}
      ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }}
      ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }}
      SPLUNK_VERSION_BASE: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }}
      TEST_TYPE: "knowledge"
      TEST_ARGS: ""
    permissions:
      actions: read
      deployments: read
      contents: read
      packages: read
      statuses: read
      checks: write
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: configure git # This step configures git to omit "dubious git ownership error" in later test-reporter stage
        id: configure-git
        run: |
          git --version
          git_path="$(pwd)"
          echo "$git_path"
          git config --global --add safe.directory "$git_path"
      - name: capture start time
        id: capture-start-time
        run: |
          echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT"
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
      - name: Read secrets from AWS Secrets Manager into environment variables
        id: get-argo-token
        run: |
          ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id "${{ needs.setup-workflow.outputs.argo_token_secret_id_k8s }}" | jq -r '.SecretString')
          echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT"
      - name: create job name
        id: create-job-name
        shell: bash
        run: |
          RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4)
          JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING}
          JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}}
          JOB_NAME=${JOB_NAME//[_.]/-}
          JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]')
          echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT"
      - name: Splunk instance details
        id: splunk-instance-details
        if: ${{ needs.setup-workflow.outputs.delay-destroy-ko == 'Yes' }}
        shell: bash
        run: |
          BOLD="\033[1m"
          NORMAL="\033[0m"
          echo "Splunk Web UI will be available at https://${{ steps.create-job-name.outputs.job-name }}.${{ needs.setup.outputs.spl-host-suffix }}:8000 after test execution starts"
          echo -e "Splunk username is${BOLD} admin${NORMAL}"
          echo "Splunk password is available in SecretServer shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD"
      - name: run-tests
        id: run-tests
        timeout-minutes: 340
        continue-on-error: true
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        uses: splunk/wfe-test-runner-action@v1.6
        with:
          splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }}
          test-type: ${{ env.TEST_TYPE }}
          test-args: ""
          job-name: ${{ steps.create-job-name.outputs.job-name }}
          labels: ${{ needs.setup.outputs.labels }}
          workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }}
          workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }}
          delay-destroy: ${{ needs.setup-workflow.outputs.delay-destroy-ko }}
          addon-url: ${{ needs.setup.outputs.addon-upload-path }}
          addon-name: ${{ needs.setup.outputs.addon-name }}
          sc4s-version: ${{ matrix.sc4s.version }}
          sc4s-docker-registry: ${{ matrix.sc4s.docker_registry }}
          k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }}
      - name: Read secrets from AWS Secrets Manager again into environment variables in case of credential rotation
        id: update-argo-token
        if: ${{ !cancelled() }}
        run: |
          ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id "${{ needs.setup-workflow.outputs.argo_token_secret_id_k8s }}" | jq -r '.SecretString')
          echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT"
      - name: calculate timeout
        id: calculate-timeout
        run: |
          start_time=${{ steps.capture-start-time.outputs.start_time }}
          current_time=$(date +%s)
          remaining_time_minutes=$(( 350-((current_time-start_time)/60) ))
          echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT"
      - name: Check if pod was deleted
        id: is-pod-deleted
        timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }}
        if: ${{ !cancelled() }}
        shell: bash
        env:
          ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }}
        run: |
          set -o xtrace
          if argo watch ${{ steps.run-tests.outputs.workflow-name }} -n workflows | grep "pod deleted"; then
            echo "retry-workflow=true" >> "$GITHUB_OUTPUT"
          fi
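      # Note: recovery flow for the steps below — if the watch above saw
      # "pod deleted", the Argo workflow is resubmitted once; otherwise the
      # original workflow is simply awaited. A cancelled or timed-out watch
      # instead triggers the cancel-workflow template.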
      - name: Cancel workflow
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        if: ${{ cancelled() || steps.is-pod-deleted.outcome != 'success' }}
        run: |
          cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }})
          cancel_workflow_name=$(echo "$cancel_response" | jq -r '.metadata.name')
          cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows)
          if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then
            echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"
          else
            echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop"
            exit 1
          fi
      - name: Retrying workflow
        id: retry-wf
        shell: bash
        env:
          ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }}
        if: ${{ !cancelled() }}
        run: |
          set -o xtrace
          set +e
          if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]]
          then
            WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name)
            echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT"
            argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..."
          else
            echo "No retry required"
            argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows
            argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon"
          fi
      - name: check if workflow completed
        env:
          ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }}
        shell: bash
        if: ${{ !cancelled() }}
        run: |
          set +e
          # shellcheck disable=SC2157
          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
            WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }}
          else
            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
          fi
          ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
          echo "Status of workflow:" "$ARGO_STATUS"
          while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ]
          do
            echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete."
            argo wait "${WORKFLOW_NAME}" -n workflows || true
            ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
          done
      - name: pull artifacts from s3 bucket
        if: ${{ !cancelled() }}
        run: |
          echo "pulling artifacts"
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/
          tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }}
      - name: pull logs from s3 bucket
        if: ${{ !cancelled() }}
        run: |
          # shellcheck disable=SC2157
          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
            WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }}
          else
            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
          fi
          echo "pulling logs"
          mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive
      - uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
        with:
          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests artifacts
          path: |
            ${{ needs.setup.outputs.directory-path }}/test-results
      - uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
        with:
          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests logs
          path: |
            ${{ needs.setup.outputs.directory-path }}/argo-logs
      - name: Upload cim-compliance-report for ${{ matrix.splunk.version }}
        uses: actions/upload-artifact@v4
        if: ${{ matrix.splunk.islatest == true }}
        with:
          name: cim-compliance-report
          path: |
            ${{ needs.setup.outputs.directory-path }}/test-results/cim-compliance-report.md
      - name: Upload cim-field-report for ${{ matrix.splunk.version }}
        uses: actions/upload-artifact@v4
        if: ${{ matrix.splunk.islatest == true }}
        with:
          name: cim-field-report
          path: |
            ${{ needs.setup.outputs.directory-path }}/test-results/cim_field_report.json
      - name: Test Report
        id: test_report
        uses: dorny/test-reporter@v1.9.0
        if: ${{ !cancelled() }}
        with:
          name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} test report
          path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml"
          reporter: java-junit
      - name: pull diag from s3 bucket
        if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }}
        run: |
          echo "pulling diag"
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/diag-${{ steps.create-job-name.outputs.job-name }}/diag-${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/
      - uses: actions/upload-artifact@v4
        if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }}
        with:
          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests diag
          path: |
            ${{ needs.setup.outputs.directory-path }}/diag*
run-requirement-tests: | ||
if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.requirement_test == 'true' && (needs.setup-workflow.outputs.execute-requirement_test == 'Yes' || needs.setup-workflow.outputs.execute-requirement-labeled == 'true') }} | ||
needs: | ||
- build | ||
- test-inventory | ||
- setup | ||
- meta | ||
- setup-workflow | ||
runs-on: ubuntu-latest | ||
continue-on-error: ${{ matrix.python39 }} | ||
strategy: | ||
fail-fast: false | ||
matrix: | ||
splunk: ${{ fromJson(needs.meta.outputs.matrix_latestSplunk) }} | ||
sc4s: ${{ fromJson(needs.meta.outputs.matrix_supportedSC4S) }} | ||
python39: [false] | ||
include: | ||
- splunk: ${{ fromJson(needs.meta.outputs.python39_splunk) }} | ||
sc4s: ${{ fromJson(needs.meta.outputs.python39_sc4s) }} | ||
python39: true | ||
container: | ||
image: ghcr.io/splunk/workflow-engine-base:2.0.12 | ||
env: | ||
ARGO_SERVER: ${{ needs.setup.outputs.argo-server }} | ||
ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }} | ||
ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }} | ||
ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }} | ||
ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }} | ||
TEST_TYPE: "requirement_test" | ||
TEST_ARGS: "" | ||
permissions: | ||
actions: read | ||
deployments: read | ||
contents: read | ||
packages: read | ||
statuses: read | ||
checks: write | ||
steps: | ||
- uses: actions/checkout@v4 | ||
with: | ||
submodules: recursive | ||
- name: configure git # This step configures git to omit "dubious git ownership error" in later test-reporter stage | ||
id: configure-git | ||
run: | | ||
git --version | ||
git_path="$(pwd)" | ||
echo "$git_path" | ||
git config --global --add safe.directory "$git_path" | ||
- name: capture start time | ||
id: capture-start-time | ||
run: | | ||
echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT" | ||
- name: Configure AWS credentials | ||
uses: aws-actions/configure-aws-credentials@v4 | ||
with: | ||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} | ||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} | ||
aws-region: ${{ secrets.AWS_DEFAULT_REGION }} | ||
- name: Read secrets from AWS Secrets Manager into environment variables | ||
id: get-argo-token | ||
run: | | ||
ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id "${{ needs.setup-workflow.outputs.argo_token_secret_id_k8s }}" | jq -r '.SecretString') | ||
echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" | ||
- name: create job name | ||
id: create-job-name | ||
shell: bash | ||
run: | | ||
RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4) | ||
JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING} | ||
JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}} | ||
JOB_NAME=${JOB_NAME//[_.]/-} | ||
JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]') | ||
echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT" | ||
- name: Splunk instance details | ||
id: splunk-instance-details | ||
if: ${{ needs.setup-workflow.outputs.delay-destroy-requirement_test == 'Yes' }} | ||
shell: bash | ||
run: | | ||
BOLD="\033[1m" | ||
NORMAL="\033[0m" | ||
echo "Splunk Web UI will be available at https://${{ steps.create-job-name.outputs.job-name }}.${{ needs.setup.outputs.spl-host-suffix }}:8000 after test execution starts" | ||
echo -e "Splunk username is${BOLD} admin${NORMAL}" | ||
echo "Splunk password is available in SecretServer shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD" | ||
- name: run-tests | ||
id: run-tests | ||
timeout-minutes: 340 | ||
continue-on-error: true | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
uses: splunk/wfe-test-runner-action@v1.6 | ||
with: | ||
splunk: ${{ matrix.splunk.version }} | ||
test-type: ${{ env.TEST_TYPE }} | ||
test-args: "" | ||
job-name: ${{ steps.create-job-name.outputs.job-name }} | ||
labels: ${{ needs.setup.outputs.labels }} | ||
workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }} | ||
workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }} | ||
delay-destroy: ${{ needs.setup-workflow.outputs.delay-destroy-requirement_test }} | ||
addon-url: ${{ needs.setup.outputs.addon-upload-path }} | ||
addon-name: ${{ needs.setup.outputs.addon-name }} | ||
sc4s-version: ${{ matrix.sc4s.version }} | ||
sc4s-docker-registry: ${{ matrix.sc4s.docker_registry }} | ||
k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} | ||
- name: calculate timeout | ||
id: calculate-timeout | ||
run: | | ||
start_time=${{ steps.capture-start-time.outputs.start_time }} | ||
current_time=$(date +%s) | ||
remaining_time_minutes=$(( 350-((current_time-start_time)/60) )) | ||
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT" | ||
- name: Check if pod was deleted | ||
id: is-pod-deleted | ||
timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }} | ||
if: ${{ !cancelled() }} | ||
shell: bash | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
run: | | ||
set -o xtrace | ||
if argo watch ${{ steps.run-tests.outputs.workflow-name }} -n workflows | grep "pod deleted"; then | ||
echo "retry-workflow=true" >> "$GITHUB_OUTPUT" | ||
fi | ||
- name: Cancel workflow | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
if: ${{ cancelled() || steps.is-pod-deleted.outcome != 'success' }} | ||
run: | | ||
cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) | ||
cancel_workflow_name=$( echo "$cancel_response" |jq -r '.metadata.name' ) | ||
cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) | ||
if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then | ||
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" | ||
else | ||
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" | ||
exit 1 | ||
fi | ||
- name: Retrying workflow | ||
id: retry-wf | ||
shell: bash | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
if: ${{ !cancelled() }} | ||
run: | | ||
set -o xtrace | ||
set +e | ||
if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]] | ||
then | ||
WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name) | ||
echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT" | ||
argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..." | ||
else | ||
echo "No retry required" | ||
argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | ||
argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon" | ||
fi | ||
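# Poll the workflow phase until it leaves Running/Pending, preferring the name of | ||
# the resubmitted workflow when a retry happened. | ||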
- name: check if workflow completed | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
shell: bash | ||
if: ${{ !cancelled() }} | ||
run: | | ||
set +e | ||
# shellcheck disable=SC2157 | ||
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | ||
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | ||
else | ||
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | ||
fi | ||
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | ||
echo "Status of workflow:" "$ARGO_STATUS" | ||
while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ] | ||
do | ||
echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete." | ||
argo wait "${WORKFLOW_NAME}" -n workflows || true | ||
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | ||
done | ||
- name: pull artifacts from s3 bucket | ||
if: ${{ !cancelled() }} | ||
run: | | ||
echo "pulling artifacts" | ||
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | ||
tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} | ||
- name: pull logs from s3 bucket | ||
if: ${{ !cancelled() }} | ||
run: | | ||
# shellcheck disable=SC2157 | ||
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | ||
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | ||
else | ||
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | ||
fi | ||
echo "pulling logs" | ||
mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs | ||
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive | ||
- uses: actions/upload-artifact@v4 | ||
if: ${{ !cancelled() }} | ||
with: | ||
name: archive splunk ${{ matrix.splunk.version }} ${{ env.TEST_TYPE }} tests artifacts | ||
path: | | ||
${{ needs.setup.outputs.directory-path }}/test-results | ||
- uses: actions/upload-artifact@v4 | ||
if: ${{ !cancelled() }} | ||
with: | ||
name: archive splunk ${{ matrix.splunk.version }} ${{ env.TEST_TYPE }} tests logs | ||
path: | | ||
${{ needs.setup.outputs.directory-path }}/argo-logs | ||
- name: Test Report | ||
id: test_report | ||
uses: dorny/test-reporter@v1.9.0 | ||
if: ${{ !cancelled() }} | ||
with: | ||
name: splunk ${{ matrix.splunk.version }} ${{ env.TEST_TYPE }} test report | ||
path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" | ||
reporter: java-junit | ||
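# A Splunk diag archive is pulled and uploaded only when the test report concludes | ||
# failure, to aid debugging without bloating successful runs. | ||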
- name: pull diag from s3 bucket | ||
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | ||
run: | | ||
echo "pulling diag" | ||
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/diag-${{ steps.create-job-name.outputs.job-name }}/diag-${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | ||
- uses: actions/upload-artifact@v4 | ||
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | ||
with: | ||
name: archive splunk ${{ matrix.splunk.version }} ${{ env.TEST_TYPE }} tests diag | ||
path: | | ||
${{ needs.setup.outputs.directory-path }}/diag* | ||
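# UI tests run only when the build succeeded, the test inventory contains UI tests, | ||
# and execution was requested via workflow input or a PR label. | ||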
run-ui-tests: | ||
if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.ui == 'true' && (needs.setup-workflow.outputs.execute-ui == 'Yes' || needs.setup-workflow.outputs.execute-ui-labeled == 'true') }} | ||
needs: | ||
- build | ||
- test-inventory | ||
- setup | ||
- meta | ||
- setup-workflow | ||
runs-on: ubuntu-latest | ||
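# continue-on-error is driven by the python39 matrix flag, so the Python 3.9 Splunk | ||
# build added via include below cannot fail the whole job. | ||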
continue-on-error: ${{ matrix.python39 }} | ||
strategy: | ||
fail-fast: false | ||
matrix: | ||
splunk: ${{ fromJson(needs.meta.outputs.matrix_supportedSplunk) }} | ||
browser: [ "chrome" ] | ||
vendor-version: ${{ fromJson(needs.meta.outputs.matrix_supportedUIVendors) }} | ||
python39: [false] | ||
include: | ||
- splunk: ${{ fromJson(needs.meta.outputs.python39_splunk) }} | ||
browser: "chrome" | ||
python39: true | ||
container: | ||
image: ghcr.io/splunk/workflow-engine-base:2.0.12 | ||
env: | ||
ARGO_SERVER: ${{ needs.setup.outputs.argo-server }} | ||
ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }} | ||
ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }} | ||
ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }} | ||
ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }} | ||
SPLUNK_VERSION_BASE: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | ||
TEST_TYPE: "ui" | ||
TEST_ARGS: "--browser ${{ matrix.browser }}" | ||
permissions: | ||
actions: read | ||
deployments: read | ||
contents: read | ||
packages: read | ||
statuses: read | ||
checks: write | ||
steps: | ||
- uses: actions/checkout@v4 | ||
with: | ||
submodules: recursive | ||
- name: configure git # This step configures git to avoid the "dubious git ownership" error in the later test-reporter stage | ||
id: configure-git | ||
run: | | ||
git --version | ||
git_path="$(pwd)" | ||
echo "$git_path" | ||
git config --global --add safe.directory "$git_path" | ||
- name: capture start time | ||
id: capture-start-time | ||
run: | | ||
echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT" | ||
- name: Configure AWS credentials | ||
uses: aws-actions/configure-aws-credentials@v4 | ||
with: | ||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} | ||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} | ||
aws-region: ${{ secrets.AWS_DEFAULT_REGION }} | ||
- name: Read secrets from AWS Secrets Manager into environment variables | ||
id: get-argo-token | ||
run: | | ||
ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id "${{ needs.setup-workflow.outputs.argo_token_secret_id_k8s }}" | jq -r '.SecretString') | ||
echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" | ||
- name: create job name | ||
id: create-job-name | ||
shell: bash | ||
run: | | ||
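# Build a unique, lowercase, dash-separated job name: append a random 4-char suffix, | ||
# substitute the TEST-TYPE placeholder, and map '_', '.' and ':' to '-'. | ||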
RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4) | ||
JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING} | ||
JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}-${{ matrix.browser }}} | ||
JOB_NAME=${JOB_NAME//[_.:]/-} | ||
JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]') | ||
echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT" | ||
- name: Splunk instance details | ||
id: splunk-instance-details | ||
if: ${{ needs.setup-workflow.outputs.delay-destroy-ui == 'Yes' }} | ||
shell: bash | ||
run: | | ||
BOLD="\033[1m" | ||
NORMAL="\033[0m" | ||
echo "Splunk Web UI will be available at https://${{ steps.create-job-name.outputs.job-name }}.${{ needs.setup.outputs.spl-host-suffix }}:8000 after test execution starts" | ||
echo -e "Splunk username is${BOLD} admin${NORMAL}" | ||
echo "Splunk password is available in SecretServer shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD" | ||
- name: run-tests | ||
id: run-tests | ||
timeout-minutes: 340 | ||
continue-on-error: true | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
uses: splunk/wfe-test-runner-action@v1.6 | ||
with: | ||
splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | ||
test-type: ${{ env.TEST_TYPE }} | ||
test-args: ${{ env.TEST_ARGS }} | ||
job-name: ${{ steps.create-job-name.outputs.job-name }} | ||
labels: ${{ needs.setup.outputs.labels }} | ||
workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }} | ||
workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }} | ||
delay-destroy: ${{ needs.setup-workflow.outputs.delay-destroy-ui }} | ||
addon-url: ${{ needs.setup.outputs.addon-upload-path }} | ||
addon-name: ${{ needs.setup.outputs.addon-name }} | ||
vendor-version: ${{ matrix.vendor-version.image }} | ||
sc4s-version: "No" | ||
k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} | ||
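# run-tests can take hours, so the Argo token is re-read here in case the secret | ||
# rotated while the tests were running. | ||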
- name: Read secrets from AWS Secrets Manager again into environment variables in case of credential rotation | ||
id: update-argo-token | ||
if: ${{ !cancelled() }} | ||
run: | | ||
ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id "${{ needs.setup-workflow.outputs.argo_token_secret_id_k8s }}" | jq -r '.SecretString') | ||
echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" | ||
- name: calculate timeout | ||
id: calculate-timeout | ||
run: | | ||
start_time=${{ steps.capture-start-time.outputs.start_time }} | ||
current_time=$(date +%s) | ||
remaining_time_minutes=$(( 350-((current_time-start_time)/60) )) | ||
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT" | ||
- name: Check if pod was deleted | ||
id: is-pod-deleted | ||
timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }} | ||
if: ${{ !cancelled() }} | ||
shell: bash | ||
env: | ||
ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} | ||
run: | | ||
set -o xtrace | ||
if argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "pod deleted"; then | ||
echo "retry-workflow=true" >> "$GITHUB_OUTPUT" | ||
fi | ||
- name: Cancel workflow | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
if: ${{ cancelled() || steps.is-pod-deleted.outcome != 'success' }} | ||
run: | | ||
cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) | ||
cancel_workflow_name=$(echo "$cancel_response" | jq -r '.metadata.name') | ||
cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) | ||
if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then | ||
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" | ||
else | ||
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" | ||
exit 1 | ||
fi | ||
- name: Retrying workflow | ||
id: retry-wf | ||
shell: bash | ||
env: | ||
ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} | ||
if: ${{ !cancelled() }} | ||
run: | | ||
set -o xtrace | ||
set +e | ||
if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]] | ||
then | ||
WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name) | ||
echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT" | ||
argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..." | ||
else | ||
echo "No retry required" | ||
argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | ||
argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon" | ||
fi | ||
- name: check if workflow completed | ||
env: | ||
ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} | ||
if: ${{ !cancelled() }} | ||
shell: bash | ||
run: | | ||
set +e | ||
# shellcheck disable=SC2157 | ||
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | ||
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | ||
else | ||
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | ||
fi | ||
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | ||
echo "Status of workflow:" "$ARGO_STATUS" | ||
while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ] | ||
do | ||
echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete." | ||
argo wait "${WORKFLOW_NAME}" -n workflows || true | ||
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | ||
done | ||
- name: pull artifacts from s3 bucket | ||
if: ${{ !cancelled() }} | ||
run: | | ||
echo "pulling artifacts" | ||
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | ||
tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} | ||
- name: pull logs from s3 bucket | ||
if: ${{ !cancelled() }} | ||
run: | | ||
# shellcheck disable=SC2157 | ||
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | ||
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | ||
else | ||
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | ||
fi | ||
echo "pulling logs" | ||
mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs | ||
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive | ||
- uses: actions/upload-artifact@v4 | ||
if: ${{ !cancelled() }} | ||
with: | ||
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.browser }} ${{ matrix.vendor-version.image }} tests artifacts | ||
path: | | ||
${{ needs.setup.outputs.directory-path }}/test-results | ||
- uses: actions/upload-artifact@v4 | ||
if: ${{ !cancelled() }} | ||
with: | ||
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.browser }} ${{ matrix.vendor-version.image }} tests logs | ||
path: | | ||
${{ needs.setup.outputs.directory-path }}/argo-logs | ||
- name: Test Report | ||
id: test_report | ||
uses: dorny/test-reporter@v1.9.0 | ||
if: ${{ !cancelled() }} | ||
with: | ||
name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.browser }} ${{ matrix.vendor-version.image }} test report | ||
path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" | ||
reporter: java-junit | ||
- name: pull diag from s3 bucket | ||
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | ||
run: | | ||
echo "pulling diag" | ||
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/diag-${{ steps.create-job-name.outputs.job-name }}/diag-${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | ||
- uses: actions/upload-artifact@v4 | ||
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | ||
with: | ||
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.browser }} ${{ matrix.vendor-version.image }} tests diag | ||
path: | | ||
${{ needs.setup.outputs.directory-path }}/diag* | ||
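# Modinput functional tests fan out over Splunk versions, vendor images, and the | ||
# markers supplied via the workflow's "marker" input to enable parallel runs. | ||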
run-modinput-tests: | ||
if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.modinput_functional == 'true' && (needs.setup-workflow.outputs.execute-modinput_functional == 'Yes' || needs.setup-workflow.outputs.execute-modinput-labeled == 'true') }} | ||
needs: | ||
- build | ||
- test-inventory | ||
- setup | ||
- meta | ||
- setup-workflow | ||
runs-on: ubuntu-latest | ||
strategy: | ||
fail-fast: false | ||
matrix: | ||
splunk: ${{ fromJson(needs.meta.outputs.matrix_combinedSplunkversion) }} | ||
modinput-type: [ "modinput_functional" ] | ||
vendor-version: ${{ fromJson(needs.meta.outputs.matrix_supportedModinputFunctionalVendors) }} | ||
marker: ${{ fromJson(inputs.marker) }} | ||
container: | ||
image: ghcr.io/splunk/workflow-engine-base:2.0.12 | ||
env: | ||
ARGO_SERVER: ${{ needs.setup.outputs.argo-server }} | ||
ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }} | ||
ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }} | ||
ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }} | ||
ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }} | ||
SPLUNK_VERSION_BASE: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | ||
TEST_TYPE: "modinput_functional" | ||
TEST_ARGS: "" | ||
permissions: | ||
actions: read | ||
deployments: read | ||
contents: read | ||
packages: read | ||
statuses: read | ||
checks: write | ||
steps: | ||
- uses: actions/checkout@v4 | ||
with: | ||
submodules: recursive | ||
- name: configure git # This step configures git to avoid the "dubious git ownership" error in the later test-reporter stage | ||
id: configure-git | ||
run: | | ||
git --version | ||
git_path="$(pwd)" | ||
echo "$git_path" | ||
git config --global --add safe.directory "$git_path" | ||
- name: capture start time | ||
id: capture-start-time | ||
run: | | ||
echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT" | ||
- name: Configure AWS credentials | ||
uses: aws-actions/configure-aws-credentials@v4 | ||
with: | ||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} | ||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} | ||
aws-region: ${{ secrets.AWS_DEFAULT_REGION }} | ||
- name: Read secrets from AWS Secrets Manager into environment variables | ||
id: get-argo-token | ||
run: | | ||
ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id "${{ needs.setup-workflow.outputs.argo_token_secret_id_k8s }}" | jq -r '.SecretString') | ||
echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" | ||
- name: create job name | ||
id: create-job-name | ||
shell: bash | ||
run: | | ||
RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4) | ||
JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING} | ||
JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}} | ||
JOB_NAME=${JOB_NAME//[_.]/-} | ||
JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]') | ||
echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT" | ||
- name: Splunk instance details | ||
id: splunk-instance-details | ||
if: ${{ needs.setup-workflow.outputs.delay-destroy-modinput_functional == 'Yes' }} | ||
shell: bash | ||
run: | | ||
BOLD="\033[1m" | ||
NORMAL="\033[0m" | ||
echo "Splunk Web UI will be available at https://${{ steps.create-job-name.outputs.job-name }}.${{ needs.setup.outputs.spl-host-suffix }}:8000 after test execution starts" | ||
echo -e "Splunk username is${BOLD} admin${NORMAL}" | ||
echo "Splunk password is available in SecretServer shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD" | ||
- name: create test argument | ||
id: create-test-arg | ||
shell: bash | ||
run: | | ||
export comparing_variable="[]" | ||
if [ "${{ inputs.marker }}" == "$comparing_variable" ] | ||
then | ||
TEST_ARG_M="" | ||
else | ||
TEST_ARG_M="-m" | ||
fi | ||
echo "test-arg=$TEST_ARG_M" >> "$GITHUB_OUTPUT" | ||
- name: run-tests | ||
id: run-tests | ||
timeout-minutes: 340 | ||
continue-on-error: true | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
uses: splunk/wfe-test-runner-action@v1.6 | ||
with: | ||
splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | ||
test-type: ${{ env.TEST_TYPE }} | ||
test-args: ${{ env.TEST_ARGS }} ${{ steps.create-test-arg.outputs.test-arg }} ${{ matrix.marker }} | ||
job-name: ${{ steps.create-job-name.outputs.job-name }} | ||
labels: ${{ needs.setup.outputs.labels }} | ||
workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }} | ||
workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }} | ||
delay-destroy: ${{ needs.setup-workflow.outputs.delay-destroy-modinput_functional }} | ||
addon-url: ${{ needs.setup.outputs.addon-upload-path }} | ||
addon-name: ${{ needs.setup.outputs.addon-name }} | ||
vendor-version: ${{ matrix.vendor-version.image }} | ||
sc4s-version: "No" | ||
k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} | ||
- name: Read secrets from AWS Secrets Manager again into environment variables in case of credential rotation | ||
id: update-argo-token | ||
if: ${{ !cancelled() }} | ||
run: | | ||
ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id "${{ needs.setup-workflow.outputs.argo_token_secret_id_k8s }}" | jq -r '.SecretString') | ||
echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" | ||
- name: calculate timeout | ||
id: calculate-timeout | ||
run: | | ||
start_time=${{ steps.capture-start-time.outputs.start_time }} | ||
current_time=$(date +%s) | ||
remaining_time_minutes=$(( 350-((current_time-start_time)/60) )) | ||
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT" | ||
- name: Check if pod was deleted | ||
id: is-pod-deleted | ||
timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }} | ||
if: ${{ !cancelled() }} | ||
shell: bash | ||
env: | ||
ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} | ||
run: | | ||
set -o xtrace | ||
if argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "pod deleted"; then | ||
echo "retry-workflow=true" >> "$GITHUB_OUTPUT" | ||
fi | ||
- name: Cancel workflow | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
if: ${{ cancelled() || steps.is-pod-deleted.outcome != 'success' }} | ||
run: | | ||
cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) | ||
cancel_workflow_name=$(echo "$cancel_response" | jq -r '.metadata.name') | ||
cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) | ||
if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then | ||
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" | ||
else | ||
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" | ||
exit 1 | ||
fi | ||
- name: Retrying workflow | ||
id: retry-wf | ||
shell: bash | ||
env: | ||
ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} | ||
if: ${{ !cancelled() }} | ||
run: | | ||
set -o xtrace | ||
set +e | ||
if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]] | ||
then | ||
WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name) | ||
echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT" | ||
argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..." | ||
else | ||
echo "No retry required" | ||
argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | ||
argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon" | ||
fi | ||
- name: check if workflow completed | ||
env: | ||
ARGO_TOKEN: ${{ steps.update-argo-token.outputs.argo-token }} | ||
if: ${{ !cancelled() }} | ||
shell: bash | ||
run: | | ||
set +e | ||
# shellcheck disable=SC2157 | ||
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | ||
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | ||
else | ||
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | ||
fi | ||
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | ||
echo "Status of workflow:" "$ARGO_STATUS" | ||
while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ] | ||
do | ||
echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete." | ||
argo wait "${WORKFLOW_NAME}" -n workflows || true | ||
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | ||
done | ||
- name: pull artifacts from s3 bucket | ||
if: ${{ !cancelled() }} | ||
run: | | ||
echo "pulling artifacts" | ||
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | ||
tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} | ||
- name: pull logs from s3 bucket | ||
if: ${{ !cancelled() }} | ||
run: | | ||
# shellcheck disable=SC2157 | ||
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | ||
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | ||
else | ||
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | ||
fi | ||
echo "pulling logs" | ||
mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs | ||
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive | ||
- uses: actions/upload-artifact@v4 | ||
if: ${{ !cancelled() }} | ||
with: | ||
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} tests artifacts | ||
path: | | ||
${{ needs.setup.outputs.directory-path }}/test-results | ||
- uses: actions/upload-artifact@v4 | ||
if: ${{ !cancelled() }} | ||
with: | ||
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} tests logs | ||
path: | | ||
${{ needs.setup.outputs.directory-path }}/argo-logs | ||
- name: Test Report | ||
id: test_report | ||
uses: dorny/test-reporter@v1.9.0 | ||
if: ${{ !cancelled() }} | ||
with: | ||
name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} test report | ||
path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" | ||
reporter: java-junit | ||
- name: pull diag from s3 bucket | ||
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | ||
run: | | ||
echo "pulling diag" | ||
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/diag-${{ steps.create-job-name.outputs.job-name }}/diag-${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | ||
- uses: actions/upload-artifact@v4 | ||
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | ||
with: | ||
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ matrix.vendor-version.image }} tests diag | ||
path: | | ||
${{ needs.setup.outputs.directory-path }}/diag* | ||
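# Scripted input tests come in two flavors: the full OS matrix below runs for the | ||
# main branch, while the canary job further down runs a reduced OS set for develop. | ||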
run-scripted-input-tests-full-matrix: | ||
if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.scripted_inputs == 'true' && ( github.base_ref == 'main' || github.ref_name == 'main' ) && (needs.setup-workflow.outputs.execute-scripted_inputs == 'Yes' || needs.setup-workflow.outputs.execute-scripted_inputs-labeled == 'true') }} | ||
needs: | ||
- build | ||
- test-inventory | ||
- setup | ||
- meta | ||
- setup-workflow | ||
runs-on: ubuntu-latest | ||
continue-on-error: ${{ matrix.python39 }} | ||
strategy: | ||
fail-fast: false | ||
matrix: | ||
splunk: ${{ fromJson(needs.meta.outputs.matrix_supportedSplunk) }} | ||
os: [ "ubuntu:14.04", "ubuntu:16.04","ubuntu:18.04","ubuntu:22.04", "centos:7", "redhat:8.0", "redhat:8.2", "redhat:8.3", "redhat:8.4", "redhat:8.5" ] | ||
python39: [false] | ||
include: | ||
- splunk: ${{ fromJson(needs.meta.outputs.python39_splunk) }} | ||
os: "ubuntu:22.04" | ||
python39: true | ||
container: | ||
image: ghcr.io/splunk/workflow-engine-base:2.0.12 | ||
env: | ||
ARGO_SERVER: ${{ needs.setup.outputs.argo-server }} | ||
ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }} | ||
ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }} | ||
ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }} | ||
ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }} | ||
SPLUNK_VERSION_BASE: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | ||
TEST_TYPE: "scripted_inputs" | ||
permissions: | ||
actions: read | ||
deployments: read | ||
contents: read | ||
packages: read | ||
statuses: read | ||
checks: write | ||
steps: | ||
- uses: actions/checkout@v4 | ||
with: | ||
submodules: recursive | ||
- name: configure git # This step configures git to avoid the "dubious git ownership" error in the later test-reporter stage | ||
id: configure-git | ||
run: | | ||
git --version | ||
git_path="$(pwd)" | ||
echo "$git_path" | ||
git config --global --add safe.directory "$git_path" | ||
- name: capture start time | ||
id: capture-start-time | ||
run: | | ||
echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT" | ||
- name: Configure AWS credentials | ||
uses: aws-actions/configure-aws-credentials@v4 | ||
with: | ||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} | ||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} | ||
aws-region: ${{ secrets.AWS_DEFAULT_REGION }} | ||
- name: Read secrets from AWS Secrets Manager into environment variables | ||
id: get-argo-token | ||
run: | | ||
ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id "${{ needs.setup-workflow.outputs.argo_token_secret_id_k8s }}" | jq -r '.SecretString') | ||
echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" | ||
- name: create job name | ||
id: create-job-name | ||
shell: bash | ||
run: | | ||
RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4) | ||
JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING} | ||
JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}} | ||
JOB_NAME=${JOB_NAME//[_.]/-} | ||
JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]') | ||
echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT" | ||
- name: Splunk instance details | ||
id: splunk-instance-details | ||
if: ${{ needs.setup-workflow.outputs.delay-destroy-scripted_inputs == 'Yes' }} | ||
shell: bash | ||
run: | | ||
BOLD="\033[1m" | ||
NORMAL="\033[0m" | ||
echo "Splunk Web UI will be available at https://${{ steps.create-job-name.outputs.job-name }}.${{ needs.setup.outputs.spl-host-suffix }}:8000 after test execution starts" | ||
echo -e "Splunk username is${BOLD} admin${NORMAL}" | ||
echo "Splunk password is available in SecretServer shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD" | ||
- name: get os name and version | ||
id: os-name-version | ||
shell: bash | ||
run: | | ||
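# Split the matrix entry (e.g. "ubuntu:22.04") on ":" into an OS name and version | ||
# via word splitting into a bash array. | ||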
OS_NAME_VERSION=${{ matrix.os }} | ||
# shellcheck disable=SC2206 | ||
OS_NAME_VERSION=(${OS_NAME_VERSION//:/ }) | ||
OS_NAME=${OS_NAME_VERSION[0]} | ||
OS_VERSION=${OS_NAME_VERSION[1]} | ||
{ | ||
echo "os-name=$OS_NAME" | ||
echo "os-version=$OS_VERSION" | ||
} >> "$GITHUB_OUTPUT" | ||
- name: run-tests | ||
id: run-tests | ||
timeout-minutes: 340 | ||
continue-on-error: true | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
uses: splunk/wfe-test-runner-action@v1.6 | ||
with: | ||
splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | ||
test-type: ${{ env.TEST_TYPE }} | ||
test-args: "--hostname=spl --os-name=${{ steps.os-name-version.outputs.os-name }} --os-version=${{ steps.os-name-version.outputs.os-version }} -m script_input" | ||
job-name: ${{ steps.create-job-name.outputs.job-name }} | ||
labels: ${{ needs.setup.outputs.labels }} | ||
workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }} | ||
workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }} | ||
delay-destroy: ${{ needs.setup-workflow.outputs.delay-destroy-scripted_inputs }} | ||
addon-url: ${{ needs.setup.outputs.addon-upload-path }} | ||
addon-name: ${{ needs.setup.outputs.addon-name }} | ||
sc4s-version: "No" | ||
os-name: ${{ steps.os-name-version.outputs.os-name }} | ||
os-version: ${{ steps.os-name-version.outputs.os-version }} | ||
k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} | ||
- name: calculate timeout | ||
id: calculate-timeout | ||
run: | | ||
start_time=${{ steps.capture-start-time.outputs.start_time }} | ||
current_time=$(date +%s) | ||
remaining_time_minutes=$(( 350-((current_time-start_time)/60) )) | ||
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT" | ||
- name: Check if pod was deleted | ||
id: is-pod-deleted | ||
timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }} | ||
if: ${{ !cancelled() }} | ||
shell: bash | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
run: | | ||
set -o xtrace | ||
if argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "pod deleted"; then | ||
echo "retry-workflow=true" >> "$GITHUB_OUTPUT" | ||
fi | ||
- name: Cancel workflow | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
if: ${{ cancelled() || steps.is-pod-deleted.outcome != 'success' }} | ||
run: | | ||
cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) | ||
cancel_workflow_name=$(echo "$cancel_response" | jq -r '.metadata.name') | ||
cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) | ||
if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then | ||
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" | ||
else | ||
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" | ||
exit 1 | ||
fi | ||
- name: Retrying workflow | ||
id: retry-wf | ||
shell: bash | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
if: ${{ !cancelled() }} | ||
run: | | ||
set -o xtrace | ||
set +e | ||
if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]] | ||
then | ||
WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name) | ||
echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT" | ||
argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..." | ||
else | ||
echo "No retry required" | ||
argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | ||
argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon" | ||
fi | ||
- name: check if workflow completed | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
if: ${{ !cancelled() }} | ||
shell: bash | ||
run: | | ||
set +e | ||
# shellcheck disable=SC2157 | ||
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | ||
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | ||
else | ||
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | ||
fi | ||
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | ||
echo "Status of workflow:" "$ARGO_STATUS" | ||
while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ] | ||
do | ||
echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete." | ||
argo wait "${WORKFLOW_NAME}" -n workflows || true | ||
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | ||
done | ||
- name: pull artifacts from s3 bucket | ||
if: ${{ !cancelled() }} | ||
run: | | ||
echo "pulling artifacts" | ||
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | ||
tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} | ||
- name: pull logs from s3 bucket | ||
if: ${{ !cancelled() }} | ||
run: | | ||
# shellcheck disable=SC2157 | ||
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | ||
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | ||
else | ||
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | ||
fi | ||
echo "pulling logs" | ||
mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs | ||
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive | ||
- uses: actions/upload-artifact@v4 | ||
if: ${{ !cancelled() }} | ||
with: | ||
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests artifacts | ||
path: | | ||
${{ needs.setup.outputs.directory-path }}/test-results | ||
- uses: actions/upload-artifact@v4 | ||
if: ${{ !cancelled() }} | ||
with: | ||
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests logs | ||
path: | | ||
${{ needs.setup.outputs.directory-path }}/argo-logs | ||
- name: Test Report | ||
id: test_report | ||
uses: dorny/test-reporter@v1.9.0 | ||
if: ${{ !cancelled() }} | ||
with: | ||
name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} test report | ||
path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" | ||
reporter: java-junit | ||
- name: pull diag from s3 bucket | ||
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | ||
run: | | ||
echo "pulling diag" | ||
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/diag-${{ steps.create-job-name.outputs.job-name }}/diag-${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | ||
- uses: actions/upload-artifact@v4 | ||
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | ||
with: | ||
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests diag | ||
path: | | ||
${{ needs.setup.outputs.directory-path }}/diag* | ||
run-scripted-input-tests-canary: | ||
if: ${{ !cancelled() && needs.build.result == 'success' && needs.test-inventory.outputs.scripted_inputs == 'true' && ( github.base_ref == 'develop' || github.ref_name == 'develop' ) && (needs.setup-workflow.outputs.execute-scripted_inputs == 'Yes' || needs.setup-workflow.outputs.execute-scripted_inputs-labeled == 'true') }} | ||
needs: | ||
- build | ||
- test-inventory | ||
- setup | ||
- meta | ||
- setup-workflow | ||
runs-on: ubuntu-latest | ||
strategy: | ||
fail-fast: false | ||
matrix: | ||
splunk: ${{ fromJson(needs.meta.outputs.matrix_supportedSplunk) }} | ||
os: [ "ubuntu:22.04", "centos:7","redhat:8.5" ] | ||
container: | ||
image: ghcr.io/splunk/workflow-engine-base:2.0.12 | ||
env: | ||
ARGO_SERVER: ${{ needs.setup.outputs.argo-server }} | ||
ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }} | ||
ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }} | ||
ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }} | ||
ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }} | ||
SPLUNK_VERSION_BASE: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | ||
TEST_TYPE: "scripted_inputs" | ||
permissions: | ||
actions: read | ||
deployments: read | ||
contents: read | ||
packages: read | ||
statuses: read | ||
checks: write | ||
steps: | ||
- uses: actions/checkout@v4 | ||
with: | ||
submodules: recursive | ||
- name: configure git # This step configures git to avoid the "dubious git ownership" error in the later test-reporter stage | ||
id: configure-git | ||
run: | | ||
git --version | ||
git_path="$(pwd)" | ||
echo "$git_path" | ||
git config --global --add safe.directory "$git_path" | ||
- name: capture start time | ||
id: capture-start-time | ||
run: | | ||
echo "start_time=$(date +%s)" >> "$GITHUB_OUTPUT" | ||
- name: Configure AWS credentials | ||
uses: aws-actions/configure-aws-credentials@v4 | ||
with: | ||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} | ||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} | ||
aws-region: ${{ secrets.AWS_DEFAULT_REGION }} | ||
- name: Read secrets from AWS Secrets Manager into environment variables | ||
id: get-argo-token | ||
run: | | ||
ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id "${{ needs.setup-workflow.outputs.argo_token_secret_id_k8s }}" | jq -r '.SecretString') | ||
echo "argo-token=$ARGO_TOKEN" >> "$GITHUB_OUTPUT" | ||
- name: create job name | ||
id: create-job-name | ||
shell: bash | ||
run: | | ||
RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4) | ||
JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING} | ||
JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}} | ||
JOB_NAME=${JOB_NAME//[_.]/-} | ||
JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]') | ||
echo "job-name=$JOB_NAME" >> "$GITHUB_OUTPUT" | ||
- name: Splunk instance details | ||
id: splunk-instance-details | ||
if: ${{ needs.setup-workflow.outputs.delay-destroy-scripted_inputs == 'Yes' }} | ||
shell: bash | ||
run: | | ||
BOLD="\033[1m" | ||
NORMAL="\033[0m" | ||
echo "Splunk Web UI will be available at https://${{ steps.create-job-name.outputs.job-name }}.${{ needs.setup.outputs.spl-host-suffix }}:8000 after test execution starts" | ||
echo -e "Splunk username is${BOLD} admin${NORMAL}" | ||
echo "Splunk password is available in SecretServer shared folder: Shared Splunk - GDI - Lab Credentials under SPLUNK_DEPLOYMENT_PASSWORD" | ||
- name: get os name and version | ||
id: os-name-version | ||
shell: bash | ||
run: | | ||
OS_NAME_VERSION=${{ matrix.os }} | ||
OS_NAME_VERSION=("${OS_NAME_VERSION//:/ }") | ||
OS_NAME=${OS_NAME_VERSION[0]} | ||
OS_VERSION=${OS_NAME_VERSION[1]} | ||
{ | ||
echo "os-name=$OS_NAME" | ||
echo "os-version=$OS_VERSION" | ||
} >> "$GITHUB_OUTPUT" | ||
- name: run-tests | ||
id: run-tests | ||
timeout-minutes: 340 | ||
continue-on-error: true | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
uses: splunk/wfe-test-runner-action@v1.6 | ||
with: | ||
splunk: ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} | ||
test-type: ${{ env.TEST_TYPE }} | ||
test-args: "--hostname=spl --os-name=${{ steps.os-name-version.outputs.os-name }} --os-version=${{ steps.os-name-version.outputs.os-version }} -m script_input" | ||
job-name: ${{ steps.create-job-name.outputs.job-name }} | ||
labels: ${{ needs.setup.outputs.labels }} | ||
workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }} | ||
workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }} | ||
delay-destroy: ${{ needs.setup-workflow.outputs.delay-destroy-scripted_inputs }} | ||
addon-url: ${{ needs.setup.outputs.addon-upload-path }} | ||
addon-name: ${{ needs.setup.outputs.addon-name }} | ||
sc4s-version: "No" | ||
os-name: ${{ steps.os-name-version.outputs.os-name }} | ||
os-version: ${{ steps.os-name-version.outputs.os-version }} | ||
k8s-manifests-branch: ${{ needs.setup.outputs.k8s-manifests-branch }} | ||
- name: calculate timeout | ||
id: calculate-timeout | ||
run: | | ||
start_time=${{ steps.capture-start-time.outputs.start_time }} | ||
current_time=$(date +%s) | ||
remaining_time_minutes=$(( 350-((current_time-start_time)/60) )) | ||
echo "remaining_time_minutes=$remaining_time_minutes" >> "$GITHUB_OUTPUT" | ||
- name: Check if pod was deleted | ||
id: is-pod-deleted | ||
timeout-minutes: ${{ fromJson(steps.calculate-timeout.outputs.remaining_time_minutes) }} | ||
if: ${{ !cancelled() }} | ||
shell: bash | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
run: | | ||
set -o xtrace | ||
if argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "pod deleted"; then | ||
echo "retry-workflow=true" >> "$GITHUB_OUTPUT" | ||
fi | ||
- name: Cancel workflow | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
if: ${{ cancelled() || steps.is-pod-deleted.outcome != 'success' }} | ||
run: | | ||
cancel_response=$(argo submit -v -o json --from wftmpl/${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} -l workflows.argoproj.io/workflow-template=${{ needs.setup.outputs.argo-cancel-workflow-tmpl-name }} --argo-base-href '' -p workflow-to-cancel=${{ steps.run-tests.outputs.workflow-name }}) | ||
cancel_workflow_name=$(echo "$cancel_response" | jq -r '.metadata.name') | ||
cancel_logs=$(argo logs --follow "$cancel_workflow_name" -n workflows) | ||
if echo "$cancel_logs" | grep -q "workflow ${{ steps.run-tests.outputs.workflow-name }} stopped"; then | ||
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} stopped" | ||
else | ||
echo "Workflow ${{ steps.run-tests.outputs.workflow-name }} didn't stop" | ||
exit 1 | ||
fi | ||
- name: Retrying workflow | ||
id: retry-wf | ||
shell: bash | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
if: ${{ !cancelled() }} | ||
run: | | ||
set -o xtrace | ||
set +e | ||
if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]] | ||
then | ||
WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name) | ||
echo "workflow-name=$WORKFLOW_NAME" >> "$GITHUB_OUTPUT" | ||
argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..." | ||
else | ||
echo "No retry required" | ||
argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | ||
argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon" | ||
fi | ||
- name: check if workflow completed | ||
env: | ||
ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }} | ||
if: ${{ !cancelled() }} | ||
shell: bash | ||
run: | | ||
set +e | ||
# shellcheck disable=SC2157 | ||
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | ||
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | ||
else | ||
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | ||
fi | ||
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | ||
echo "Status of workflow:" "$ARGO_STATUS" | ||
while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ] | ||
do | ||
echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete." | ||
argo wait "${WORKFLOW_NAME}" -n workflows || true | ||
ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase') | ||
done | ||
- name: pull artifacts from s3 bucket | ||
if: ${{ !cancelled() }} | ||
run: | | ||
echo "pulling artifacts" | ||
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | ||
tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }} | ||
- name: pull logs from s3 bucket | ||
if: ${{ !cancelled() }} | ||
run: | | ||
# shellcheck disable=SC2157 | ||
if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then | ||
WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }} | ||
else | ||
WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}" | ||
fi | ||
echo "pulling logs" | ||
mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs | ||
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive | ||
- uses: actions/upload-artifact@v4 | ||
if: ${{ !cancelled() }} | ||
with: | ||
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests artifacts | ||
path: | | ||
${{ needs.setup.outputs.directory-path }}/test-results | ||
- uses: actions/upload-artifact@v4 | ||
if: ${{ !cancelled() }} | ||
with: | ||
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests logs | ||
path: | | ||
${{ needs.setup.outputs.directory-path }}/argo-logs | ||
- name: Test Report | ||
id: test_report | ||
uses: dorny/test-reporter@v1.9.0 | ||
if: ${{ !cancelled() && !contains(matrix.splunk.version, 'unreleased-python3_9') }} | ||
with: | ||
name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} test report | ||
path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" | ||
reporter: java-junit | ||
- name: Test Report Python 3.9 | ||
continue-on-error: true | ||
id: test_report_python_3_9 | ||
uses: dorny/test-reporter@v1.9.0 | ||
if: ${{ !cancelled() && contains(matrix.splunk.version, 'unreleased-python3_9') }} | ||
with: | ||
name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} test report | ||
path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml" | ||
reporter: java-junit | ||
- name: pull diag from s3 bucket | ||
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | ||
run: | | ||
echo "pulling diag" | ||
aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/diag-${{ steps.create-job-name.outputs.job-name }}/diag-${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/ | ||
- uses: actions/upload-artifact@v4 | ||
if: ${{ failure() && steps.test_report.outputs.conclusion == 'failure' }} | ||
with: | ||
name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} ${{ steps.os-name-version.outputs.os-name }} ${{ steps.os-name-version.outputs.os-version }} tests diag | ||
path: | | ||
${{ needs.setup.outputs.directory-path }}/diag* | ||
pre-publish: | ||
if: ${{ !cancelled() }} | ||
# The following line keeps the job name 'pre-publish' only for pull requests targeting the main branch | ||
# and renames it to 'pre-publish-not_main_pr' for every other event. This avoids confusion caused by | ||
# GitHub Actions treating pre-publish identically for push-to-develop and pull-request-to-main events. | ||
name: ${{ github.event_name == 'pull_request' && github.base_ref == 'main' && 'pre-publish' || 'pre-publish-not_main_pr' }} | ||
needs: | ||
- meta | ||
- compliance-copyrights | ||
- lint | ||
- review_secrets | ||
- semgrep | ||
- build | ||
- test-inventory | ||
- run-unit-tests | ||
- appinspect | ||
- setup | ||
- run-knowledge-tests | ||
- run-modinput-tests | ||
- run-ui-tests | ||
- validate-pr-title | ||
runs-on: ubuntu-latest | ||
env: | ||
NEEDS: ${{ toJson(needs) }} | ||
steps: | ||
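# Inspect the JSON-serialized needs context: publishing may proceed only if every | ||
# upstream job finished as success or was skipped. | ||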
- name: check if tests have passed or skipped | ||
id: check | ||
shell: bash | ||
run: | | ||
RUN_PUBLISH=$(echo "$NEEDS" | jq ".[] | select( ( .result != \"skipped\" ) and .result != \"success\" ) | length == 0") | ||
if [[ "$RUN_PUBLISH" != *'false'* ]] | ||
then | ||
echo "run-publish=true" >> "$GITHUB_OUTPUT" | ||
else | ||
echo "run-publish=false" >> "$GITHUB_OUTPUT" | ||
fi | ||
- name: exit without publish | ||
if: ${{ steps.check.outputs.run-publish == 'false' || ( github.event.action == 'labeled' && github.event.label.name == 'preserve_infra' ) }} | ||
run: | | ||
echo "Some test job failed or Workflow has triggered on preserve_infra label." | ||
exit 1 | ||
publish: | ||
if: ${{ !cancelled() && needs.pre-publish.result == 'success' && github.event_name != 'pull_request' && github.event_name != 'schedule' }} | ||
needs: | ||
- pre-publish | ||
runs-on: ubuntu-latest | ||
permissions: | ||
contents: write | ||
packages: read | ||
pull-requests: read | ||
statuses: write | ||
steps: | ||
- name: Checkout | ||
uses: actions/checkout@v4 | ||
with: | ||
submodules: false | ||
persist-credentials: false | ||
- name: Semantic Release | ||
id: semantic | ||
uses: splunk/semantic-release-action@v1.3 | ||
env: | ||
GITHUB_TOKEN: ${{ secrets.GH_TOKEN_ADMIN }} | ||
with: | ||
git_committer_name: ${{ secrets.SA_GH_USER_NAME }} | ||
git_committer_email: ${{ secrets.SA_GH_USER_EMAIL }} | ||
gpg_private_key: ${{ secrets.SA_GPG_PRIVATE_KEY }} | ||
passphrase: ${{ secrets.SA_GPG_PASSPHRASE }} | ||
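# The remaining steps run only when semantic-release published a new version: they | ||
# fetch the build artifacts and attach them to the GitHub release. | ||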
- name: Download package-splunkbase | ||
if: ${{ steps.semantic.outputs.new_release_published == 'true' }} | ||
uses: actions/download-artifact@v4 | ||
id: download-package-splunkbase | ||
with: | ||
name: package-splunkbase | ||
path: download/artifacts/deployment | ||
- name: Download cim-compliance-report | ||
id: download-cim-compliance-report | ||
if: ${{ steps.semantic.outputs.new_release_published == 'true' }} | ||
continue-on-error: true | ||
uses: actions/download-artifact@v4 | ||
with: | ||
name: cim-compliance-report | ||
path: download/artifacts/deployment | ||
- name: Download cim-field-report | ||
id: download-cim-field-report | ||
if: ${{ steps.semantic.outputs.new_release_published == 'true' }} | ||
continue-on-error: true | ||
uses: actions/download-artifact@v4 | ||
with: | ||
name: cim-field-report | ||
path: download/artifacts/deployment | ||
- name: List of assets | ||
if: ${{ steps.semantic.outputs.new_release_published == 'true' }} | ||
run: | | ||
ls -la ${{ steps.download-package-splunkbase.outputs.download-path }} | ||
- name: Upload assets to release | ||
if: ${{ steps.semantic.outputs.new_release_published == 'true' }} | ||
uses: svenstaro/upload-release-action@v2 | ||
with: | ||
repo_token: ${{ github.token }} | ||
file: ${{ steps.download-package-splunkbase.outputs.download-path }}/* | ||
overwrite: true | ||
file_glob: true | ||
tag: v${{ steps.semantic.outputs.new_release_version }} |