# PR #22: Implement pwquality macro
name: Automatus Ubuntu 24.04
on:
  pull_request:
    branches: [ master, 'stabilization*' ]
concurrency:
  group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
  cancel-in-progress: true
env:
  DATASTREAM: ssg-ubuntu2404-ds.xml
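# The data stream file name is shared by both jobs: build-content uploads it
# as an artifact and validate-ubuntu downloads it for testing.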
jobs:
  build-content:
    name: Build Content
    runs-on: ubuntu-24.04
    steps:
      - name: Install build deps
        run: sudo apt-get update && sudo apt-get install -y cmake ninja-build xsltproc libxml2-utils python3-yaml python3-jinja2 openscap-utils
      - name: Install workflow deps
        run: sudo apt-get install -y git python3-deepdiff python3-requests jq python3-pip
      - name: Install Python deps
        run: pip3 install gitpython xmldiff
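      # gitpython and xmldiff are presumably needed by the
      # content-test-filtering (CTF) tool checked out below.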
      - name: Checkout
        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
        with:
          fetch-depth: 0
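      # fetch-depth: 0 fetches the full history, which "Find forking point"
      # below needs in order to run git merge-base against the base branch.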
      - name: Checkout (CTF)
        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
        with:
          repository: ComplianceAsCode/content-test-filtering
          path: ctf
      # https://github.com/actions/checkout/issues/766
      - name: Set git safe directory
        run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
      - name: Find forking point
        env:
          BASE_BRANCH: ${{ github.base_ref }}
        run: echo "FORK_POINT=$(git merge-base origin/$BASE_BRANCH ${{ github.event.pull_request.head.sha }})" >> $GITHUB_OUTPUT
        id: fork_point
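      # CTF diffs the PR against the fork point and emits a JSON description
      # of the affected product and rules; an empty output.json means there
      # is nothing to test.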
      - name: Detect content changes in the PR
        run: python3 ./ctf/content_test_filtering.py pr --base ${{ steps.fork_point.outputs.FORK_POINT }} --remote_repo ${{ github.server_url }}/${{ github.repository }} --verbose --rule --output json ${{ github.event.pull_request.number }} > output.json
      - name: Test if there are no content changes
        run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
        id: ctf
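      # Every remaining step is gated on CTF_OUTPUT_SIZE != '0', i.e. it only
      # runs when CTF actually found content changes worth testing.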
      - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4
        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        with:
          name: output.json
          path: output.json
      - name: Print changes to content detected if any
        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        run: cat output.json
      - name: Get product attribute
        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        id: product
        uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
        with:
          path: 'output.json'
          prop_path: 'product'
      - name: Build product
        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        run: ./build_product ubuntu2404 --datastream-only
      - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4
        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        with:
          name: ${{ env.DATASTREAM }}
          path: build/${{ env.DATASTREAM }}
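  # The test job downloads output.json and the built data stream from the job
  # above and runs the Automatus test suite inside a podman container.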
  validate-ubuntu:
    name: Run Tests
    needs: build-content
    runs-on: ubuntu-24.04
    steps:
      - name: Install test deps
        run: sudo apt-get update && sudo apt-get install -y cmake ninja-build xsltproc libxml2-utils python3-yaml python3-jinja2 openscap-utils podman
      - name: Checkout
        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
      - name: Get cached CTF output
        uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4
        id: get_ctf_output
        with:
          name: output.json
        # Continue even if the artifact is unavailable; that means CTF
        # detected no content changes in the previous job.
        continue-on-error: true
      - name: Test if there are no content changes
        if: ${{ steps.get_ctf_output.outcome == 'success' }}
        run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
        id: ctf
      - name: Print changes to content detected if any
        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        run: cat output.json
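      # The test suite reaches the container over SSH, so a throwaway key
      # pair is generated and its public half is baked into the image below
      # via the CLIENT_PUBLIC_KEY build argument.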
      - name: Generate id_rsa key
        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        run: ssh-keygen -N '' -t rsa -f ~/.ssh/id_rsa
      - name: Build test suite container
        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        run: podman build --build-arg "CLIENT_PUBLIC_KEY=$(cat ~/.ssh/id_rsa.pub)" -t ssg_test_suite -f test_suite-ubuntu2404
        working-directory: ./Dockerfiles
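      # The resulting image is referenced by --name ssg_test_suite in the
      # test runs below.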
      - name: Get rule ids to be tested
        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        id: rules
        uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
        with:
          path: 'output.json'
          prop_path: 'rules'
      - name: Get product attribute
        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        id: product
        uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
        with:
          path: 'output.json'
          prop_path: 'product'
      - name: Get bash attribute
        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        id: bash
        uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
        with:
          path: 'output.json'
          prop_path: 'bash'
      - name: Get ansible attribute
        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        id: ansible
        uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
        with:
          path: 'output.json'
          prop_path: 'ansible'
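      # The 'bash' and 'ansible' booleans decide which remediation flavors
      # are exercised; 'rules' is the JSON list of rule ids passed to the
      # test script.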
      - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4
        if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        with:
          name: ${{ env.DATASTREAM }}
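      # For local debugging, roughly the same test can be run outside CI
      # (a sketch; <rule_id> is a placeholder and the data stream must
      # already be built):
      #   tests/test_rule_in_container.sh --dontclean --logdir logs_bash \
      #     --remediate-using bash --name ssg_test_suite \
      #     --datastream ssg-ubuntu2404-ds.xml <rule_id>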
      - name: Run tests in a container - Bash
        if: ${{ steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_bash --remediate-using bash --name ssg_test_suite --datastream ${{ env.DATASTREAM }} ${{ join(fromJSON(steps.rules.outputs.prop)) }}
        env:
          ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified"
      - name: Check for ERROR in logs
        if: ${{ steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        run: grep -q "^ERROR" logs_bash/test_suite.log
        id: check_results_bash
        # When grep returns 1, it didn't find the ^ERROR string in
        # test_suite.log, meaning the tests finished without errors, so the
        # job needs to keep going. With continue-on-error: true the step's
        # "conclusion" is always "success", so it cannot be used to determine
        # whether the step failed; the "outcome" parameter has to be used
        # instead. See the steps below.
        continue-on-error: true
      - name: Upload logs in case of failure
        if: ${{ steps.bash.outputs.prop == 'True' && steps.check_results_bash.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4
        with:
          name: logs_bash
          path: logs_bash/
      - name: Run tests in a container - Ansible
        if: ${{ steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_ansible --remediate-using ansible --name ssg_test_suite --datastream ${{ env.DATASTREAM }} ${{ join(fromJSON(steps.rules.outputs.prop)) }}
        env:
          ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified"
      - name: Check for ERROR in logs
        if: ${{ steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        run: grep -q "^ERROR" logs_ansible/test_suite.log
        id: check_results_ansible
        continue-on-error: true
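      # Same outcome/conclusion caveat as check_results_bash above: grep
      # "succeeding" means ERROR lines were found.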
      - name: Upload logs in case of failure
        if: ${{ steps.ansible.outputs.prop == 'True' && steps.check_results_ansible.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4
        with:
          name: logs_ansible
          path: logs_ansible/
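      # If either grep found ERROR lines, dump the logs (minus DEBUG noise)
      # into the job output and fail the workflow.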
      - name: Fail in case of ERROR present in logs_bash/test_suite.log or logs_ansible/test_suite.log
        if: ${{ (steps.check_results_bash.outcome == 'success' || steps.check_results_ansible.outcome == 'success') && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
        run: |
          [[ -f logs_bash/test_suite.log ]] && echo "---------Bash Remediation Logs---------" && grep -v "DEBUG - " logs_bash/test_suite.log
          [[ -f logs_ansible/test_suite.log ]] && echo "---------Ansible Remediation Logs---------" && grep -v "DEBUG - " logs_ansible/test_suite.log
          exit 1