diff --git a/.github/workflows/entrypoint_nightly.yml b/.github/workflows/entrypoint_nightly.yml index 7a723986d..7ad059803 100644 --- a/.github/workflows/entrypoint_nightly.yml +++ b/.github/workflows/entrypoint_nightly.yml @@ -5,7 +5,7 @@ on: branches: - dev paths-ignore: - - ".github/**" +# - ".github/**" - "**.md" env: @@ -89,12 +89,117 @@ jobs: docker manifest push ${DOCKER_TARGET_REPO}:${IMAGE_BASE_NAME}-${IMAGE_VERSION} docker manifest rm ${DOCKER_TARGET_REPO}:${IMAGE_BASE_NAME}-${IMAGE_VERSION} + tools_list: + name: Export tools list + needs: + - varset + - publish + - build_belt + # only exporting tools list if publish was a success AND the tests were a success + if: always() && ( needs.publish.result == 'success' && !contains(needs.build_belt.outputs.build, 'failure')) + strategy: + fail-fast: false + # only 1 job at a time, else there will be a conflict. Runner 2 will pull exegol-docs before runner 1 pushes changes. + max-parallel: 1 + matrix: + arch: [ arm64, amd64 ] + runs-on: + - self-hosted + - builder + - ${{ matrix.arch }} + steps: + - name: Checkout Exegol-images + uses: actions/checkout@v3 + - name: Prepare image version + id: prepare + run: | + COMMIT_ID=$(git rev-parse "$GITHUB_SHA") + if [ "$IMAGE_VERSION" == "" ]; then + IMAGE_VERSION=${COMMIT_ID:0:8} + fi + echo "image_version=${IMAGE_VERSION}" + echo "image_version=${IMAGE_VERSION}" >> $GITHUB_OUTPUT + - name: Checkout Exegol-docs + uses: actions/checkout@v3 + with: + repository: 'ThePorgs/Exegol-docs' + ref: 'dev-images' + - name: Inspect the built image + run: docker inspect ${{ needs.varset.outputs.DOCKER_BUILD_REPO }}:${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} + - name: Create a container from the built image + run: | + docker run --name exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} --rm -t -d ${{ needs.varset.outputs.DOCKER_BUILD_REPO }}:${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} endless + - name: Export the tools list + 
if: success() + run: | + docker cp exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }}:/.exegol/installed_tools.csv installed_tools.csv + - name: Debug print installed_tools.csv + id: list_exists + run: cat installed_tools.csv + - name: Sanity check for installed_tools.csv + if: success() + run: | + grep -qE '([^,]*,[^,]*){3,}' installed_tools.csv \ + && (echo '[-] Wrong number of columns on the following lines' \ + && grep -oE '([^,]*,[^,]*){3,}' installed_tools.csv || exit 1) \ + || (echo '[+] List contains right number of columns' && exit 0) + - name: Stop the container + if: always() + run: docker stop exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} + - name: Adding list to repo and tables + if: always() && steps.list_exists.outcome == 'success' + run: | + mkdir -p source/assets/installed_tools/lists + echo '[*] [SHOULD BE NIGHTLY ONLY] Moving tools list to latest_nightly_arch.csv' + mv installed_tools.csv source/assets/installed_tools/lists/latest_nightly_${{ matrix.arch }}.csv + echo '[*] Changing lists.csv so that new tools list appears' + echo '[*] [SHOULD BE NIGHTLY ONLY] Removing occurences of nightly,whateverversion,arch so that only the latest nightly is shown' + (head -n 1 source/assets/installed_tools/lists.csv; \ + echo "${{ needs.varset.outputs.IMAGE_BASE_NAME }},${{ steps.prepare.outputs.image_version }},${{ matrix.arch }},$(date -u +"%Y-%m-%dT%H:%M:%SZ"),:download:\`${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv \ + \`"; \ + ( \ + tail -n +2 source/assets/installed_tools/lists.csv | grep -Ev "${{ needs.varset.outputs.IMAGE_BASE_NAME }},[0-9a-fA-F]{8},${{ matrix.arch }}" \ + ) \ + ) | tee source/assets/installed_tools/new_lists.csv + mv source/assets/installed_tools/new_lists.csv source/assets/installed_tools/lists.csv + - name: Debug print lists.csv + if: always() + id: final_list_exists + run: cat source/assets/installed_tools/lists.csv + - 
name: Push Exegol-docs + if: always() && steps.final_list_exists.outcome == 'success' + env: + SSH_DEPLOY_KEY: ${{ secrets.EXEGOL_DOCS_SSH_DEPLOY_KEY }} + run: | + echo '[*] Setting up git env for SSH use' + mkdir -p "$HOME/.ssh" + DEPLOY_KEY_FILE="$HOME/.ssh/deploy_key" + echo "${SSH_DEPLOY_KEY}" > "$DEPLOY_KEY_FILE" + chmod 600 "$DEPLOY_KEY_FILE" + SSH_KNOWN_HOSTS_FILE="$HOME/.ssh/known_hosts" + ssh-keyscan -H github.com > "$SSH_KNOWN_HOSTS_FILE" + export GIT_SSH_COMMAND="ssh -i "$DEPLOY_KEY_FILE" -o UserKnownHostsFile=$SSH_KNOWN_HOSTS_FILE" + GIT_CMD_REPOSITORY="git@github.com:ThePorgs/Exegol-docs.git" + echo '[*] Setting git config' + git config --global user.name "exegol-images[pipeline]" + git config --global user.email "pipeline@exegol.images" + echo '[*] Staging changes' + echo '[*] [SHOULD BE NIGHTLY ONLY] Staging latest_nightly_arch.csv' + git add --verbose source/assets/installed_tools/lists/latest_nightly_${{ matrix.arch }}.csv + git add --verbose source/assets/installed_tools/lists.csv + echo '[*] Commiting changes' + git commit --verbose -m "PIPELINE: tools list for ${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}" + echo '[*] Pushing changes' + git push --verbose "$GIT_CMD_REPOSITORY" + clean_runners: name: Clean runner needs: + - tools_list - publish - build_belt # clean runners, if publish was a success, or at least if build succeeded + # we don't care if tools_list worked or not if: always() && ( needs.publish.result == 'success' || !contains(needs.build_belt.outputs.build, 'failure')) # even if this job fails, it won't affect the success/fail status of the whole workflow continue-on-error: true diff --git a/.github/workflows/entrypoint_preprod_ad.yml b/.github/workflows/entrypoint_preprod_ad.yml index 4c439e49e..bae2fa2c4 100644 --- a/.github/workflows/entrypoint_preprod_ad.yml +++ b/.github/workflows/entrypoint_preprod_ad.yml @@ -94,9 +94,112 @@ jobs: docker manifest push 
${DOCKER_TARGET_REPO}:${IMAGE_BASE_NAME}-${IMAGE_VERSION} docker manifest rm ${DOCKER_TARGET_REPO}:${IMAGE_BASE_NAME}-${IMAGE_VERSION} + tools_list: + name: Export tools list + needs: + - varset + - publish + - build_belt + # only exporting tools list if publish was a success AND the tests were a success + if: always() && ( needs.publish.result == 'success' && !contains(needs.build_belt.outputs.build, 'failure')) + strategy: + fail-fast: false + # only 1 job at a time, else there will be a conflict. Runner 2 will pull exegol-docs before runner 1 pushes changes. + max-parallel: 1 + matrix: + arch: [ arm64, amd64 ] + runs-on: + - self-hosted + - builder + - ${{ matrix.arch }} + steps: + - name: Checkout Exegol-images + uses: actions/checkout@v3 + - name: Prepare image version + id: prepare + run: | + COMMIT_ID=$(git rev-parse "$GITHUB_SHA") + if [ "$IMAGE_VERSION" == "" ]; then + IMAGE_VERSION=${COMMIT_ID:0:8} + fi + echo "image_version=${IMAGE_VERSION}" + echo "image_version=${IMAGE_VERSION}" >> $GITHUB_OUTPUT + - name: Checkout Exegol-docs + uses: actions/checkout@v3 + with: + repository: 'ThePorgs/Exegol-docs' + ref: 'dev-images' + - name: Inspect the built image + run: docker inspect ${{ needs.varset.outputs.DOCKER_BUILD_REPO }}:${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} + - name: Create a container from the built image + run: | + docker run --name exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} --rm -t -d ${{ needs.varset.outputs.DOCKER_BUILD_REPO }}:${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} endless + - name: Export the tools list + if: success() + run: | + docker cp exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }}:/.exegol/installed_tools.csv installed_tools.csv + - name: Debug print installed_tools.csv + id: list_exists + run: cat installed_tools.csv + - name: Sanity check for installed_tools.csv + if: success() + run: | + grep -qE '([^,]*,[^,]*){3,}' installed_tools.csv \ + && 
(echo '[-] Wrong number of columns on the following lines' \ + && grep -oE '([^,]*,[^,]*){3,}' installed_tools.csv || exit 1) \ + || (echo '[+] List contains right number of columns' && exit 0) + - name: Stop the container + if: always() + run: docker stop exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} + - name: Adding list to repo and tables + if: always() && steps.list_exists.outcome == 'success' + run: | + mkdir -p source/assets/installed_tools/lists + echo '[*] Moving tools list to imagetag_version_arch.csv' + mv installed_tools.csv source/assets/installed_tools/lists/${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv + echo '[*] Changing lists.csv so that new tools list appears' + echo "[*] [SHOULD BE RELEASES ONLY] Removing occurences of image,version,arch. This is because this workflow runs in the prerelease pipeline, meaning that there's a possibility tools list is pushed to exegol-docs even if the images are not released for some reasons (e.g. imageA prerelease works but not for imageB). Doing this grep -v will ensure that lists.csv doesn't have duplicates to the same tag, same version and same arch" + (head -n 1 source/assets/installed_tools/lists.csv; \ + echo "${{ needs.varset.outputs.IMAGE_BASE_NAME }},${{ steps.prepare.outputs.image_version }},${{ matrix.arch }},$(date -u +"%Y-%m-%dT%H:%M:%SZ"),:download:\`${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv \ + \`"; \ + ( \ + tail -n +2 source/assets/installed_tools/lists.csv | grep -Ev "${{ needs.varset.outputs.IMAGE_BASE_NAME }},${{ steps.prepare.outputs.image_version }},${{ matrix.arch }}" \ + ) \ + ) | tee source/assets/installed_tools/new_lists.csv + mv source/assets/installed_tools/new_lists.csv source/assets/installed_tools/lists.csv + - name: Debug print lists.csv + if: always() + id: final_list_exists + run: cat source/assets/installed_tools/lists.csv + - name: Push Exegol-docs + if: always() && steps.final_list_exists.outcome == 'success' + env: + SSH_DEPLOY_KEY: ${{ secrets.EXEGOL_DOCS_SSH_DEPLOY_KEY }} + run: | + echo '[*] Setting up git env for SSH use' + mkdir -p "$HOME/.ssh" + DEPLOY_KEY_FILE="$HOME/.ssh/deploy_key" + echo "${SSH_DEPLOY_KEY}" > "$DEPLOY_KEY_FILE" + chmod 600 "$DEPLOY_KEY_FILE" + SSH_KNOWN_HOSTS_FILE="$HOME/.ssh/known_hosts" + ssh-keyscan -H github.com > "$SSH_KNOWN_HOSTS_FILE" + export GIT_SSH_COMMAND="ssh -i "$DEPLOY_KEY_FILE" -o UserKnownHostsFile=$SSH_KNOWN_HOSTS_FILE" + GIT_CMD_REPOSITORY="git@github.com:ThePorgs/Exegol-docs.git" + echo '[*] Setting git config' + git config --global user.name "exegol-images[pipeline]" + git config --global user.email "pipeline@exegol.images" + echo '[*] Staging changes' + git add --verbose source/assets/installed_tools/lists/${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv + git add --verbose source/assets/installed_tools/lists.csv + echo '[*] Commiting changes' + git commit --verbose -m "PIPELINE: tools 
list for ${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}" + echo '[*] Pushing changes' + git push --verbose "$GIT_CMD_REPOSITORY" + clean_runners: name: Clean runner needs: + - tools_list - publish - build_belt # clean runners, if publish was a success, or at least if build succeeded diff --git a/.github/workflows/entrypoint_preprod_full.yml b/.github/workflows/entrypoint_preprod_full.yml index 93086ff30..a5b7d8919 100644 --- a/.github/workflows/entrypoint_preprod_full.yml +++ b/.github/workflows/entrypoint_preprod_full.yml @@ -94,9 +94,112 @@ jobs: docker manifest push ${DOCKER_TARGET_REPO}:${IMAGE_BASE_NAME}-${IMAGE_VERSION} docker manifest rm ${DOCKER_TARGET_REPO}:${IMAGE_BASE_NAME}-${IMAGE_VERSION} + tools_list: + name: Export tools list + needs: + - varset + - publish + - build_belt + # only exporting tools list if publish was a success AND the tests were a success + if: always() && ( needs.publish.result == 'success' && !contains(needs.build_belt.outputs.build, 'failure')) + strategy: + fail-fast: false + # only 1 job at a time, else there will be a conflict. Runner 2 will pull exegol-docs before runner 1 pushes changes. 
+ max-parallel: 1 + matrix: + arch: [ arm64, amd64 ] + runs-on: + - self-hosted + - builder + - ${{ matrix.arch }} + steps: + - name: Checkout Exegol-images + uses: actions/checkout@v3 + - name: Prepare image version + id: prepare + run: | + COMMIT_ID=$(git rev-parse "$GITHUB_SHA") + if [ "$IMAGE_VERSION" == "" ]; then + IMAGE_VERSION=${COMMIT_ID:0:8} + fi + echo "image_version=${IMAGE_VERSION}" + echo "image_version=${IMAGE_VERSION}" >> $GITHUB_OUTPUT + - name: Checkout Exegol-docs + uses: actions/checkout@v3 + with: + repository: 'ThePorgs/Exegol-docs' + ref: 'dev-images' + - name: Inspect the built image + run: docker inspect ${{ needs.varset.outputs.DOCKER_BUILD_REPO }}:${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} + - name: Create a container from the built image + run: | + docker run --name exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} --rm -t -d ${{ needs.varset.outputs.DOCKER_BUILD_REPO }}:${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} endless + - name: Export the tools list + if: success() + run: | + docker cp exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }}:/.exegol/installed_tools.csv installed_tools.csv + - name: Debug print installed_tools.csv + id: list_exists + run: cat installed_tools.csv + - name: Sanity check for installed_tools.csv + if: success() + run: | + grep -qE '([^,]*,[^,]*){3,}' installed_tools.csv \ + && (echo '[-] Wrong number of columns on the following lines' \ + && grep -oE '([^,]*,[^,]*){3,}' installed_tools.csv || exit 1) \ + || (echo '[+] List contains right number of columns' && exit 0) + - name: Stop the container + if: always() + run: docker stop exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} + - name: Adding list to repo and tables + if: always() && steps.list_exists.outcome == 'success' + run: | + mkdir -p source/assets/installed_tools/lists + echo '[*] Moving tools list to imagetag_version_arch.csv' + mv installed_tools.csv 
source/assets/installed_tools/lists/${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv + echo '[*] Changing lists.csv so that new tools list appears' + echo "[*] [SHOULD BE RELEASES ONLY] Removing occurences of image,version,arch. This is because this workflow runs in the prerelease pipeline, meaning that there's a possibility tools list is pushed to exegol-docs even if the images are not released for some reasons (e.g. imageA prerelease works but not for imageB). Doing this grep -v will ensure that lists.csv doesn't have duplicates to the same tag, same version and same arch" + (head -n 1 source/assets/installed_tools/lists.csv; \ + echo "${{ needs.varset.outputs.IMAGE_BASE_NAME }},${{ steps.prepare.outputs.image_version }},${{ matrix.arch }},$(date -u +"%Y-%m-%dT%H:%M:%SZ"),:download:\`${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv \ + \`"; \ + ( \ + tail -n +2 source/assets/installed_tools/lists.csv | grep -Ev "${{ needs.varset.outputs.IMAGE_BASE_NAME }},${{ steps.prepare.outputs.image_version }},${{ matrix.arch }}" \ + ) \ + ) | tee source/assets/installed_tools/new_lists.csv + mv source/assets/installed_tools/new_lists.csv source/assets/installed_tools/lists.csv + - name: Debug print lists.csv + if: always() + id: final_list_exists + run: cat source/assets/installed_tools/lists.csv + - name: Push Exegol-docs + if: always() && steps.final_list_exists.outcome == 'success' + env: + SSH_DEPLOY_KEY: ${{ secrets.EXEGOL_DOCS_SSH_DEPLOY_KEY }} + run: | + echo '[*] Setting up git env for SSH use' + mkdir -p "$HOME/.ssh" + DEPLOY_KEY_FILE="$HOME/.ssh/deploy_key" + echo "${SSH_DEPLOY_KEY}" > "$DEPLOY_KEY_FILE" + chmod 600 "$DEPLOY_KEY_FILE" + SSH_KNOWN_HOSTS_FILE="$HOME/.ssh/known_hosts" + ssh-keyscan -H github.com > "$SSH_KNOWN_HOSTS_FILE" + export GIT_SSH_COMMAND="ssh -i "$DEPLOY_KEY_FILE" -o UserKnownHostsFile=$SSH_KNOWN_HOSTS_FILE" + 
GIT_CMD_REPOSITORY="git@github.com:ThePorgs/Exegol-docs.git" + echo '[*] Setting git config' + git config --global user.name "exegol-images[pipeline]" + git config --global user.email "pipeline@exegol.images" + echo '[*] Staging changes' + git add --verbose source/assets/installed_tools/lists/${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv + git add --verbose source/assets/installed_tools/lists.csv + echo '[*] Commiting changes' + git commit --verbose -m "PIPELINE: tools list for ${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}" + echo '[*] Pushing changes' + git push --verbose "$GIT_CMD_REPOSITORY" + clean_runners: name: Clean runner needs: + - tools_list - publish - build_belt # clean runners, if publish was a success, or at least if build succeeded diff --git a/.github/workflows/entrypoint_preprod_light.yml b/.github/workflows/entrypoint_preprod_light.yml index 9ce80daaa..c3f147a89 100644 --- a/.github/workflows/entrypoint_preprod_light.yml +++ b/.github/workflows/entrypoint_preprod_light.yml @@ -94,9 +94,112 @@ jobs: docker manifest push ${DOCKER_TARGET_REPO}:${IMAGE_BASE_NAME}-${IMAGE_VERSION} docker manifest rm ${DOCKER_TARGET_REPO}:${IMAGE_BASE_NAME}-${IMAGE_VERSION} + tools_list: + name: Export tools list + needs: + - varset + - publish + - build_belt + # only exporting tools list if publish was a success AND the tests were a success + if: always() && ( needs.publish.result == 'success' && !contains(needs.build_belt.outputs.build, 'failure')) + strategy: + fail-fast: false + # only 1 job at a time, else there will be a conflict. Runner 2 will pull exegol-docs before runner 1 pushes changes. 
+ max-parallel: 1 + matrix: + arch: [ arm64, amd64 ] + runs-on: + - self-hosted + - builder + - ${{ matrix.arch }} + steps: + - name: Checkout Exegol-images + uses: actions/checkout@v3 + - name: Prepare image version + id: prepare + run: | + COMMIT_ID=$(git rev-parse "$GITHUB_SHA") + if [ "$IMAGE_VERSION" == "" ]; then + IMAGE_VERSION=${COMMIT_ID:0:8} + fi + echo "image_version=${IMAGE_VERSION}" + echo "image_version=${IMAGE_VERSION}" >> $GITHUB_OUTPUT + - name: Checkout Exegol-docs + uses: actions/checkout@v3 + with: + repository: 'ThePorgs/Exegol-docs' + ref: 'dev-images' + - name: Inspect the built image + run: docker inspect ${{ needs.varset.outputs.DOCKER_BUILD_REPO }}:${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} + - name: Create a container from the built image + run: | + docker run --name exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} --rm -t -d ${{ needs.varset.outputs.DOCKER_BUILD_REPO }}:${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} endless + - name: Export the tools list + if: success() + run: | + docker cp exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }}:/.exegol/installed_tools.csv installed_tools.csv + - name: Debug print installed_tools.csv + id: list_exists + run: cat installed_tools.csv + - name: Sanity check for installed_tools.csv + if: success() + run: | + grep -qE '([^,]*,[^,]*){3,}' installed_tools.csv \ + && (echo '[-] Wrong number of columns on the following lines' \ + && grep -oE '([^,]*,[^,]*){3,}' installed_tools.csv || exit 1) \ + || (echo '[+] List contains right number of columns' && exit 0) + - name: Stop the container + if: always() + run: docker stop exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} + - name: Adding list to repo and tables + if: always() && steps.list_exists.outcome == 'success' + run: | + mkdir -p source/assets/installed_tools/lists + echo '[*] Moving tools list to imagetag_version_arch.csv' + mv installed_tools.csv 
source/assets/installed_tools/lists/${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv + echo '[*] Changing lists.csv so that new tools list appears' + echo "[*] [SHOULD BE RELEASES ONLY] Removing occurences of image,version,arch. This is because this workflow runs in the prerelease pipeline, meaning that there's a possibility tools list is pushed to exegol-docs even if the images are not released for some reasons (e.g. imageA prerelease works but not for imageB). Doing this grep -v will ensure that lists.csv doesn't have duplicates to the same tag, same version and same arch" + (head -n 1 source/assets/installed_tools/lists.csv; \ + echo "${{ needs.varset.outputs.IMAGE_BASE_NAME }},${{ steps.prepare.outputs.image_version }},${{ matrix.arch }},$(date -u +"%Y-%m-%dT%H:%M:%SZ"),:download:\`${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv \ + \`"; \ + ( \ + tail -n +2 source/assets/installed_tools/lists.csv | grep -Ev "${{ needs.varset.outputs.IMAGE_BASE_NAME }},${{ steps.prepare.outputs.image_version }},${{ matrix.arch }}" \ + ) \ + ) | tee source/assets/installed_tools/new_lists.csv + mv source/assets/installed_tools/new_lists.csv source/assets/installed_tools/lists.csv + - name: Debug print lists.csv + if: always() + id: final_list_exists + run: cat source/assets/installed_tools/lists.csv + - name: Push Exegol-docs + if: always() && steps.final_list_exists.outcome == 'success' + env: + SSH_DEPLOY_KEY: ${{ secrets.EXEGOL_DOCS_SSH_DEPLOY_KEY }} + run: | + echo '[*] Setting up git env for SSH use' + mkdir -p "$HOME/.ssh" + DEPLOY_KEY_FILE="$HOME/.ssh/deploy_key" + echo "${SSH_DEPLOY_KEY}" > "$DEPLOY_KEY_FILE" + chmod 600 "$DEPLOY_KEY_FILE" + SSH_KNOWN_HOSTS_FILE="$HOME/.ssh/known_hosts" + ssh-keyscan -H github.com > "$SSH_KNOWN_HOSTS_FILE" + export GIT_SSH_COMMAND="ssh -i "$DEPLOY_KEY_FILE" -o UserKnownHostsFile=$SSH_KNOWN_HOSTS_FILE" + 
GIT_CMD_REPOSITORY="git@github.com:ThePorgs/Exegol-docs.git" + echo '[*] Setting git config' + git config --global user.name "exegol-images[pipeline]" + git config --global user.email "pipeline@exegol.images" + echo '[*] Staging changes' + git add --verbose source/assets/installed_tools/lists/${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv + git add --verbose source/assets/installed_tools/lists.csv + echo '[*] Commiting changes' + git commit --verbose -m "PIPELINE: tools list for ${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}" + echo '[*] Pushing changes' + git push --verbose "$GIT_CMD_REPOSITORY" + clean_runners: name: Clean runner needs: + - tools_list - publish - build_belt # clean runners, if publish was a success, or at least if build succeeded diff --git a/.github/workflows/entrypoint_preprod_osint.yml b/.github/workflows/entrypoint_preprod_osint.yml index 9d8cb6ea1..ddcbb0e38 100644 --- a/.github/workflows/entrypoint_preprod_osint.yml +++ b/.github/workflows/entrypoint_preprod_osint.yml @@ -94,9 +94,112 @@ jobs: docker manifest push ${DOCKER_TARGET_REPO}:${IMAGE_BASE_NAME}-${IMAGE_VERSION} docker manifest rm ${DOCKER_TARGET_REPO}:${IMAGE_BASE_NAME}-${IMAGE_VERSION} + tools_list: + name: Export tools list + needs: + - varset + - publish + - build_belt + # only exporting tools list if publish was a success AND the tests were a success + if: always() && ( needs.publish.result == 'success' && !contains(needs.build_belt.outputs.build, 'failure')) + strategy: + fail-fast: false + # only 1 job at a time, else there will be a conflict. Runner 2 will pull exegol-docs before runner 1 pushes changes. 
+ max-parallel: 1 + matrix: + arch: [ arm64, amd64 ] + runs-on: + - self-hosted + - builder + - ${{ matrix.arch }} + steps: + - name: Checkout Exegol-images + uses: actions/checkout@v3 + - name: Prepare image version + id: prepare + run: | + COMMIT_ID=$(git rev-parse "$GITHUB_SHA") + if [ "$IMAGE_VERSION" == "" ]; then + IMAGE_VERSION=${COMMIT_ID:0:8} + fi + echo "image_version=${IMAGE_VERSION}" + echo "image_version=${IMAGE_VERSION}" >> $GITHUB_OUTPUT + - name: Checkout Exegol-docs + uses: actions/checkout@v3 + with: + repository: 'ThePorgs/Exegol-docs' + ref: 'dev-images' + - name: Inspect the built image + run: docker inspect ${{ needs.varset.outputs.DOCKER_BUILD_REPO }}:${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} + - name: Create a container from the built image + run: | + docker run --name exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} --rm -t -d ${{ needs.varset.outputs.DOCKER_BUILD_REPO }}:${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} endless + - name: Export the tools list + if: success() + run: | + docker cp exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }}:/.exegol/installed_tools.csv installed_tools.csv + - name: Debug print installed_tools.csv + id: list_exists + run: cat installed_tools.csv + - name: Sanity check for installed_tools.csv + if: success() + run: | + grep -qE '([^,]*,[^,]*){3,}' installed_tools.csv \ + && (echo '[-] Wrong number of columns on the following lines' \ + && grep -oE '([^,]*,[^,]*){3,}' installed_tools.csv || exit 1) \ + || (echo '[+] List contains right number of columns' && exit 0) + - name: Stop the container + if: always() + run: docker stop exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} + - name: Adding list to repo and tables + if: always() && steps.list_exists.outcome == 'success' + run: | + mkdir -p source/assets/installed_tools/lists + echo '[*] Moving tools list to imagetag_version_arch.csv' + mv installed_tools.csv 
source/assets/installed_tools/lists/${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv + echo '[*] Changing lists.csv so that new tools list appears' + echo "[*] [SHOULD BE RELEASES ONLY] Removing occurences of image,version,arch. This is because this workflow runs in the prerelease pipeline, meaning that there's a possibility tools list is pushed to exegol-docs even if the images are not released for some reasons (e.g. imageA prerelease works but not for imageB). Doing this grep -v will ensure that lists.csv doesn't have duplicates to the same tag, same version and same arch" + (head -n 1 source/assets/installed_tools/lists.csv; \ + echo "${{ needs.varset.outputs.IMAGE_BASE_NAME }},${{ steps.prepare.outputs.image_version }},${{ matrix.arch }},$(date -u +"%Y-%m-%dT%H:%M:%SZ"),:download:\`${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv \ + \`"; \ + ( \ + tail -n +2 source/assets/installed_tools/lists.csv | grep -Ev "${{ needs.varset.outputs.IMAGE_BASE_NAME }},${{ steps.prepare.outputs.image_version }},${{ matrix.arch }}" \ + ) \ + ) | tee source/assets/installed_tools/new_lists.csv + mv source/assets/installed_tools/new_lists.csv source/assets/installed_tools/lists.csv + - name: Debug print lists.csv + if: always() + id: final_list_exists + run: cat source/assets/installed_tools/lists.csv + - name: Push Exegol-docs + if: always() && steps.final_list_exists.outcome == 'success' + env: + SSH_DEPLOY_KEY: ${{ secrets.EXEGOL_DOCS_SSH_DEPLOY_KEY }} + run: | + echo '[*] Setting up git env for SSH use' + mkdir -p "$HOME/.ssh" + DEPLOY_KEY_FILE="$HOME/.ssh/deploy_key" + echo "${SSH_DEPLOY_KEY}" > "$DEPLOY_KEY_FILE" + chmod 600 "$DEPLOY_KEY_FILE" + SSH_KNOWN_HOSTS_FILE="$HOME/.ssh/known_hosts" + ssh-keyscan -H github.com > "$SSH_KNOWN_HOSTS_FILE" + export GIT_SSH_COMMAND="ssh -i "$DEPLOY_KEY_FILE" -o UserKnownHostsFile=$SSH_KNOWN_HOSTS_FILE" + 
GIT_CMD_REPOSITORY="git@github.com:ThePorgs/Exegol-docs.git" + echo '[*] Setting git config' + git config --global user.name "exegol-images[pipeline]" + git config --global user.email "pipeline@exegol.images" + echo '[*] Staging changes' + git add --verbose source/assets/installed_tools/lists/${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv + git add --verbose source/assets/installed_tools/lists.csv + echo '[*] Commiting changes' + git commit --verbose -m "PIPELINE: tools list for ${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}" + echo '[*] Pushing changes' + git push --verbose "$GIT_CMD_REPOSITORY" + clean_runners: name: Clean runner needs: + - tools_list - publish - build_belt # clean runners, if publish was a success, or at least if build succeeded diff --git a/.github/workflows/entrypoint_preprod_web.yml b/.github/workflows/entrypoint_preprod_web.yml index ef2e7b854..7153b86ee 100644 --- a/.github/workflows/entrypoint_preprod_web.yml +++ b/.github/workflows/entrypoint_preprod_web.yml @@ -94,9 +94,112 @@ jobs: docker manifest push ${DOCKER_TARGET_REPO}:${IMAGE_BASE_NAME}-${IMAGE_VERSION} docker manifest rm ${DOCKER_TARGET_REPO}:${IMAGE_BASE_NAME}-${IMAGE_VERSION} + tools_list: + name: Export tools list + needs: + - varset + - publish + - build_belt + # only exporting tools list if publish was a success AND the tests were a success + if: always() && ( needs.publish.result == 'success' && !contains(needs.build_belt.outputs.build, 'failure')) + strategy: + fail-fast: false + # only 1 job at a time, else there will be a conflict. Runner 2 will pull exegol-docs before runner 1 pushes changes. 
+ max-parallel: 1 + matrix: + arch: [ arm64, amd64 ] + runs-on: + - self-hosted + - builder + - ${{ matrix.arch }} + steps: + - name: Checkout Exegol-images + uses: actions/checkout@v3 + - name: Prepare image version + id: prepare + run: | + COMMIT_ID=$(git rev-parse "$GITHUB_SHA") + if [ "$IMAGE_VERSION" == "" ]; then + IMAGE_VERSION=${COMMIT_ID:0:8} + fi + echo "image_version=${IMAGE_VERSION}" + echo "image_version=${IMAGE_VERSION}" >> $GITHUB_OUTPUT + - name: Checkout Exegol-docs + uses: actions/checkout@v3 + with: + repository: 'ThePorgs/Exegol-docs' + ref: 'dev-images' + - name: Inspect the built image + run: docker inspect ${{ needs.varset.outputs.DOCKER_BUILD_REPO }}:${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} + - name: Create a container from the built image + run: | + docker run --name exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} --rm -t -d ${{ needs.varset.outputs.DOCKER_BUILD_REPO }}:${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} endless + - name: Export the tools list + if: success() + run: | + docker cp exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }}:/.exegol/installed_tools.csv installed_tools.csv + - name: Debug print installed_tools.csv + id: list_exists + run: cat installed_tools.csv + - name: Sanity check for installed_tools.csv + if: success() + run: | + grep -qE '([^,]*,[^,]*){3,}' installed_tools.csv \ + && (echo '[-] Wrong number of columns on the following lines' \ + && grep -oE '([^,]*,[^,]*){3,}' installed_tools.csv || exit 1) \ + || (echo '[+] List contains right number of columns' && exit 0) + - name: Stop the container + if: always() + run: docker stop exegol-${{ needs.varset.outputs.IMAGE_BASE_NAME }}-${{ matrix.arch }} + - name: Adding list to repo and tables + if: always() && steps.list_exists.outcome == 'success' + run: | + mkdir -p source/assets/installed_tools/lists + echo '[*] Moving tools list to imagetag_version_arch.csv' + mv installed_tools.csv 
source/assets/installed_tools/lists/${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv + echo '[*] Changing lists.csv so that new tools list appears' + echo "[*] [SHOULD BE RELEASES ONLY] Removing occurences of image,version,arch. This is because this workflow runs in the prerelease pipeline, meaning that there's a possibility tools list is pushed to exegol-docs even if the images are not released for some reasons (e.g. imageA prerelease works but not for imageB). Doing this grep -v will ensure that lists.csv doesn't have duplicates to the same tag, same version and same arch" + (head -n 1 source/assets/installed_tools/lists.csv; \ + echo "${{ needs.varset.outputs.IMAGE_BASE_NAME }},${{ steps.prepare.outputs.image_version }},${{ matrix.arch }},$(date -u +"%Y-%m-%dT%H:%M:%SZ"),:download:\`${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv \ + \`"; \ + ( \ + tail -n +2 source/assets/installed_tools/lists.csv | grep -Ev "${{ needs.varset.outputs.IMAGE_BASE_NAME }},${{ steps.prepare.outputs.image_version }},${{ matrix.arch }}" \ + ) \ + ) | tee source/assets/installed_tools/new_lists.csv + mv source/assets/installed_tools/new_lists.csv source/assets/installed_tools/lists.csv + - name: Debug print lists.csv + if: always() + id: final_list_exists + run: cat source/assets/installed_tools/lists.csv + - name: Push Exegol-docs + if: always() && steps.final_list_exists.outcome == 'success' + env: + SSH_DEPLOY_KEY: ${{ secrets.EXEGOL_DOCS_SSH_DEPLOY_KEY }} + run: | + echo '[*] Setting up git env for SSH use' + mkdir -p "$HOME/.ssh" + DEPLOY_KEY_FILE="$HOME/.ssh/deploy_key" + echo "${SSH_DEPLOY_KEY}" > "$DEPLOY_KEY_FILE" + chmod 600 "$DEPLOY_KEY_FILE" + SSH_KNOWN_HOSTS_FILE="$HOME/.ssh/known_hosts" + ssh-keyscan -H github.com > "$SSH_KNOWN_HOSTS_FILE" + export GIT_SSH_COMMAND="ssh -i "$DEPLOY_KEY_FILE" -o UserKnownHostsFile=$SSH_KNOWN_HOSTS_FILE" + 
GIT_CMD_REPOSITORY="git@github.com:ThePorgs/Exegol-docs.git" + echo '[*] Setting git config' + git config --global user.name "exegol-images[pipeline]" + git config --global user.email "pipeline@exegol.images" + echo '[*] Staging changes' + git add --verbose source/assets/installed_tools/lists/${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}.csv + git add --verbose source/assets/installed_tools/lists.csv + echo '[*] Committing changes' + git commit --verbose -m "PIPELINE: tools list for ${{ needs.varset.outputs.IMAGE_BASE_NAME }}_${{ steps.prepare.outputs.image_version }}_${{ matrix.arch }}" + echo '[*] Pushing changes' + git push --verbose "$GIT_CMD_REPOSITORY" + clean_runners: name: Clean runner needs: + - tools_list - publish - build_belt # clean runners, if publish was a success, or at least if build succeeded diff --git a/Dockerfile b/Dockerfile index b038e8e7a..4ee30712b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -16,7 +16,7 @@ COPY sources /root/sources/ WORKDIR /root/sources/install -# WARNING: package_most_used can't be used with other functions other than: package_base, post_install_clean +# WARNING: package_most_used can't be used with other functions other than: package_base, post_install # ./entrypoint.sh package_most_used RUN echo "${TAG}-${VERSION}" > /opt/.exegol_version && \ @@ -49,7 +49,7 @@ RUN echo "${TAG}-${VERSION}" > /opt/.exegol_version && \ ./entrypoint.sh package_reverse && \ ./entrypoint.sh package_crypto && \ ./entrypoint.sh package_code_analysis && \ - ./entrypoint.sh post_install_clean && \ + ./entrypoint.sh post_install && \ rm -rf /root/sources /var/lib/apt/lists/* WORKDIR /workspace diff --git a/ad.dockerfile b/ad.dockerfile index d8418ac08..8513b25ba 100644 --- a/ad.dockerfile +++ b/ad.dockerfile @@ -16,7 +16,7 @@ COPY sources /root/sources/ WORKDIR /root/sources/install -# WARNING: package_most_used can't be used with other functions other than: package_base, 
post_install_clean +# WARNING: package_most_used can't be used with other functions other than: package_base, post_install # ./entrypoint.sh package_most_used RUN echo "${TAG}-${VERSION}" > /opt/.exegol_version && \ @@ -35,7 +35,7 @@ RUN echo "${TAG}-${VERSION}" > /opt/.exegol_version && \ ./entrypoint.sh package_ad && \ ./entrypoint.sh package_ad_configure && \ - ./entrypoint.sh package_network - ./entrypoint.sh post_install_clean && \ + ./entrypoint.sh package_network && \ + ./entrypoint.sh post_install && \ rm -rf /root/sources /var/lib/apt/lists/* WORKDIR /workspace diff --git a/debug.dockerfile b/debug.dockerfile index c1a5180dc..a53fe6b98 100644 --- a/debug.dockerfile +++ b/debug.dockerfile @@ -19,7 +19,7 @@ WORKDIR /root/sources/install RUN echo "${TAG}-${VERSION}" > /opt/.exegol_version && \ chmod +x entrypoint.sh && \ ./entrypoint.sh package_base_debug && \ - ./entrypoint.sh post_install_clean && \ + ./entrypoint.sh post_install && \ rm -rf /root/sources /var/lib/apt/lists/* WORKDIR /workspace diff --git a/light.dockerfile b/light.dockerfile index e472cc716..4ce620d4f 100644 --- a/light.dockerfile +++ b/light.dockerfile @@ -16,7 +16,7 @@ COPY sources /root/sources/ WORKDIR /root/sources/install -# WARNING: package_most_used can't be used with other functions other than: package_base, post_install_clean +# WARNING: package_most_used can't be used with other functions other than: package_base, post_install # ./entrypoint.sh package_most_used RUN echo "${TAG}-${VERSION}" > /opt/.exegol_version && \ @@ -25,7 +25,7 @@ RUN echo "${TAG}-${VERSION}" > /opt/.exegol_version && \ ./entrypoint.sh package_most_used && \ ./entrypoint.sh package_misc && \ ./entrypoint.sh package_misc_configure && \ - ./entrypoint.sh post_install_clean && \ + ./entrypoint.sh post_install && \ rm -rf /root/sources /var/lib/apt/lists/* WORKDIR /workspace diff --git a/osint.dockerfile b/osint.dockerfile index 949867bf1..9a75751bc 100644 --- a/osint.dockerfile +++ b/osint.dockerfile @@ -16,7 +16,7 @@ COPY sources /root/sources/ 
WORKDIR /root/sources/install -# WARNING: package_most_used can't be used with other functions other than: package_base, post_install_clean +# WARNING: package_most_used can't be used with other functions other than: package_base, post_install # ./entrypoint.sh package_most_used RUN echo "${TAG}-${VERSION}" > /opt/.exegol_version && \ @@ -26,7 +26,7 @@ RUN echo "${TAG}-${VERSION}" > /opt/.exegol_version && \ ./entrypoint.sh package_misc_configure && \ ./entrypoint.sh package_osint && \ ./entrypoint.sh package_osint_configure && \ - ./entrypoint.sh post_install_clean && \ + ./entrypoint.sh post_install && \ rm -rf /root/sources /var/lib/apt/lists/* WORKDIR /workspace diff --git a/sources/assets/zsh/aliases.d/arsenal b/sources/assets/zsh/aliases.d/arsenal index b80738672..05852b277 100644 --- a/sources/assets/zsh/aliases.d/arsenal +++ b/sources/assets/zsh/aliases.d/arsenal @@ -1 +1 @@ -alias a='/opt/tools/arsenal/run' +alias a='arsenal' diff --git a/sources/assets/zsh/history.d/geowordlists b/sources/assets/zsh/history.d/geowordlists new file mode 100644 index 000000000..98477e831 --- /dev/null +++ b/sources/assets/zsh/history.d/geowordlists @@ -0,0 +1 @@ +geowordlists --postal-code 75001 --kilometers 25 --output-file /tmp/around_paris.txt \ No newline at end of file diff --git a/sources/assets/zsh/history.d/scrcpy b/sources/assets/zsh/history.d/scrcpy new file mode 100644 index 000000000..1021f02f6 --- /dev/null +++ b/sources/assets/zsh/history.d/scrcpy @@ -0,0 +1 @@ +scrcpy --tcpip=$TARGET diff --git a/sources/install/common.sh b/sources/install/common.sh index 21b6797d3..dccb11b51 100644 --- a/sources/install/common.sh +++ b/sources/install/common.sh @@ -23,7 +23,7 @@ function criticalecho-noexit () { } function add-to-list() { - echo $1 >> "/.exegol/installed_tools" + echo $1 >> "/.exegol/installed_tools.csv" } function add-aliases() { diff --git a/sources/install/package_ad.sh b/sources/install/package_ad.sh index db1e82748..1eb901f84 100644 --- 
a/sources/install/package_ad.sh +++ b/sources/install/package_ad.sh @@ -31,7 +31,7 @@ function install_responder() { add-aliases responder add-history responder add-test-command "responder --version" - add-to-list "responder,https://github.com/lgandx/Responder,a LLMNR, NBT-NS and MDNS poisoner." + add-to-list "responder,https://github.com/lgandx/Responder,a LLMNR / NBT-NS and MDNS poisoner." } function configure_responder() { @@ -100,7 +100,7 @@ function install_bloodhound-py() { add-aliases bloodhound-py add-history bloodhound-py add-test-command "bloodhound.py --help" - add-to-list "bloodhound-py,https://github.com/fox-it/BloodHound.py,Trust relationship analysis tool for Active Directory environments." + add-to-list "bloodhound.py,https://github.com/fox-it/BloodHound.py,BloodHound ingestor in Python." } function install_bloodhound() { @@ -141,7 +141,7 @@ function install_cypheroth() { add-aliases cypheroth add-history cypheroth add-test-command "cypheroth --help|& grep 'Example with Defaults:'" - add-to-list "cyperoth,https://github.com/seajaysec/cypheroth/,Automated, extensible toolset that runs cypher queries against Bloodhound's Neo4j backend and saves output to spreadsheets." + add-to-list "cyperoth,https://github.com/seajaysec/cypheroth,Automated extensible toolset that runs cypher queries against Bloodhound's Neo4j backend and saves output to spreadsheets." } function install_mitm6_pip() { @@ -193,7 +193,7 @@ function install_pykek() { add-aliases pykek add-history pykek add-test-command "ms14-068.py |& grep ''" - add-to-list "pykek,https://github.com/preempt/pykek,PyKEK (Python Kerberos Exploitation Kit), a python library to manipulate KRB5-related data." + add-to-list "pykek,https://github.com/preempt/pykek,PyKEK (Python Kerberos Exploitation Kit) a python library to manipulate KRB5-related data." 
} function install_lsassy() { @@ -248,7 +248,7 @@ function install_amber() { go install -v github.com/EgeBalci/amber@latest add-history amber add-test-command "amber --help" - add-to-list "amber,https://github.com/EgeBalci/amber,Forensic tool to recover browser history, cookies, and credentials" + add-to-list "amber,https://github.com/EgeBalci/amber,Forensic tool to recover browser history / cookies and credentials" } function install_powershell() { @@ -401,7 +401,7 @@ function install_lnkup() { add-aliases lnkup add-history lnkup add-test-command "lnk-generate.py --help" - add-to-list "lnkup,https://github.com/Plazmaz/lnkUp,This tool will allow you to generate LNK payloads. Upon rendering or being run, they will exfiltrate data." + add-to-list "lnkup,https://github.com/Plazmaz/lnkUp,This tool will allow you to generate LNK payloads. Upon rendering or being run they will exfiltrate data." } function install_polenum() { @@ -616,7 +616,7 @@ function install_manspider() { add-aliases manspider add-history manspider add-test-command "manspider --help" - add-to-list "manspider,https://github.com/blacklanternsecurity/MANSPIDER,Manspider will crawl every share on every target system. If provided creds don't work, it will fall back to "guest", then to a null session." + add-to-list "manspider,https://github.com/blacklanternsecurity/MANSPIDER,Manspider will crawl every share on every target system. If provided creds don't work it will fall back to 'guest' then to a null session." 
} function install_targetedKerberoast() { @@ -761,7 +761,7 @@ function install_crackhound() { add-aliases crackhound add-history crackhound add-test-command "crackhound.py --help" - add-to-list "crackhound,https://github.com/trustedsec/crackhound.git,A fast WPA/WPA2/WPA3 WiFi Handshake capture, password recovery and analysis tool" + add-to-list "crackhound,https://github.com/trustedsec/crackhound.git,A fast WPA/WPA2/WPA3 WiFi Handshake capture / password recovery and analysis tool" } function install_kerbrute() { @@ -788,10 +788,12 @@ function install_rusthound() { # Sourcing rustup shell setup, so that rust binaries are found when installing cme source "$HOME/.cargo/env" cargo build --release + # Clean dependencies used to build the binary + rm -rf target/release/{deps,build} ln -s /opt/tools/RustHound/target/release/rusthound /opt/tools/bin/rusthound add-history rusthound add-test-command "rusthound --help" - add-to-list "rusthound,https://github.com/OPENCYBER-FR/RustHound,Rusthound is a tool for searching through git repositories for secrets and sensitive information." + add-to-list "rusthound,https://github.com/OPENCYBER-FR/RustHound,BloodHound ingestor in Rust." } function install_certsync() { @@ -831,7 +833,7 @@ function install_masky() { python3 -m pipx install git+https://github.com/Z4kSec/Masky add-history masky add-test-command "masky --help" - add-to-list "masky,https://github.com/Z4kSec/masky,masky is a tool to mask sensitive data, such as credit card numbers, in logs and other files." + add-to-list "masky,https://github.com/Z4kSec/masky,masky is a tool to mask sensitive data / such as credit card numbers / in logs and other files." 
} function install_roastinthemiddle() { diff --git a/sources/install/package_base.sh b/sources/install/package_base.sh index 737b4fd8a..ad1990b08 100644 --- a/sources/install/package_base.sh +++ b/sources/install/package_base.sh @@ -38,6 +38,8 @@ function filesystem() { colorecho "Preparing filesystem" mkdir -p /opt/tools/bin/ /data/ /var/log/exegol /.exegol/build_pipeline_tests/ touch /.exegol/build_pipeline_tests/all_commands.txt + touch /.exegol/installed_tools.csv + echo "Tool,Link,Description" >> /.exegol/installed_tools.csv } function install_go() { @@ -175,11 +177,15 @@ function install_gf() { add-test-command "ls ~/.gf | grep 'redirect.json'" } -function post_install_clean() { +function post_install() { # Function used to clean up post-install files colorecho "Cleaning..." updatedb rm -rfv /tmp/* + colorecho "Sorting tools list" + (head -n 1 /.exegol/installed_tools.csv && tail -n +2 /.exegol/installed_tools.csv | sort -f ) | tee /tmp/installed_tools.csv.sorted + mv /tmp/installed_tools.csv.sorted /.exegol/installed_tools.csv + colorecho "Adding end-of-preset in zsh_history" echo "# -=-=-=-=-=-=-=- YOUR COMMANDS BELOW -=-=-=-=-=-=-=- #" >> ~/.zsh_history } diff --git a/sources/install/package_cracking.sh b/sources/install/package_cracking.sh index 365331c57..8aa88416d 100644 --- a/sources/install/package_cracking.sh +++ b/sources/install/package_cracking.sh @@ -53,6 +53,14 @@ function install_haiti() { add-to-list "haiti,https://github.com/noraj/haiti is a A CLI tool (and library) to identify hash types (hash type identifier)." } +function install_geowordlists() { + colorecho "Installing GeoWordlists" + python3 -m pipx install git+https://github.com/p0dalirius/GeoWordlists + add-history geowordlists + add-test-command "geowordlists --help" + add-to-list "geowordlists,https://github.com/p0dalirius/GeoWordlists,tool to generate wordlists of passwords containing cities at a defined distance around the client city." 
+} + # Package dedicated to offline cracking/bruteforcing tools function package_cracking() { install_cracking_apt_tools diff --git a/sources/install/package_forensic.sh b/sources/install/package_forensic.sh index 0e5922e02..a216ec90f 100644 --- a/sources/install/package_forensic.sh +++ b/sources/install/package_forensic.sh @@ -19,10 +19,10 @@ function install_forensic_apt_tools() { add-test-command "blkcalc -V" # Collection of command line tools that allow you to investigate disk images add-to-list "pst-utils,https://manpages.debian.org/jessie/pst-utils/readpst.1,pst-utils is a set of tools for working with Outlook PST files." - add-to-list "binwalk,https://github.com/ReFirmLabs/binwalk,Binwalk is a tool for analyzing, reverse engineering, and extracting firmware images." - add-to-list "foremost,https://doc.ubuntu-fr.org/foremost,Foremost is a forensic tool for recovering files based on their headers, footers, and internal data structures." + add-to-list "binwalk,https://github.com/ReFirmLabs/binwalk,Binwalk is a tool for analyzing / reverse engineering / and extracting firmware images." + add-to-list "foremost,https://doc.ubuntu-fr.org/foremost,Foremost is a forensic tool for recovering files based on their headers / footers / and internal data structures." 
add-to-list "testdisk,https://github.com/cgsecurity/testdisk,Partition recovery and file undelete utility" - add-to-list "fdisk,https://github.com/karelzak/util-linux,Collection of basic system utilities, including fdisk partitioning tool" + add-to-list "fdisk,https://github.com/karelzak/util-linux,Collection of basic system utilities / including fdisk partitioning tool" add-to-list "sleuthkit,https://github.com/sleuthkit/sleuthkit,Forensic toolkit to analyze volume and file system data" } diff --git a/sources/install/package_misc.sh b/sources/install/package_misc.sh index 090516b70..57cd03851 100644 --- a/sources/install/package_misc.sh +++ b/sources/install/package_misc.sh @@ -16,8 +16,8 @@ function install_misc_apt_tools() { add-test-command "ascii -v" # The ascii table in the shell add-test-command "rsync -h" # File synchronization tool for efficiently copying and updating data between local or remote locations. - add-to-list "rlwrap,https://github.com/hanslub42/rlwrap,rlwrap is a small utility that wraps input and output streams of executables, making it possible to edit and re-run input history" - add-to-list "imagemagick,https://github.com/ImageMagick/ImageMagick,ImageMagick is a free and open-source image manipulation tool used to create, edit, compose, or convert bitmap images." + add-to-list "rlwrap,https://github.com/hanslub42/rlwrap,rlwrap is a small utility that wraps input and output streams of executables / making it possible to edit and re-run input history" + add-to-list "imagemagick,https://github.com/ImageMagick/ImageMagick,ImageMagick is a free and open-source image manipulation tool used to create / edit / compose / or convert bitmap images." add-to-list "ascii,https://github.com/moul/ascii,ASCII command-line tool to replace images with color-coded ASCII art." 
add-to-list "rsync,https://packages.debian.org/sid/rsync,File synchronization tool for efficiently copying and updating data between local or remote locations" } @@ -43,7 +43,7 @@ function install_uberfile() { python3 -m pipx install git+https://github.com/ShutdownRepo/uberfile add-history uberfile add-test-command "uberfile --help" - add-to-list "uberfile,https://github.com/ShutdownRepo/Uberfile,Uberfile is a simple command-line tool aimed to help pentesters quickly generate file downloader one-liners in multiple contexts (wget, curl, powershell, certutil...). This project code is based on my other similar project for one-liner reverseshell generation Shellerator." + add-to-list "uberfile,https://github.com/ShutdownRepo/Uberfile,Uberfile is a simple command-line tool aimed to help pentesters quickly generate file downloader one-liners in multiple contexts (wget / curl / powershell / certutil...). This project code is based on my other similar project for one-liner reverseshell generation Shellerator." 
} function install_arsenal() { diff --git a/sources/install/package_mobile.sh b/sources/install/package_mobile.sh index c19c7834c..7bac0f547 100644 --- a/sources/install/package_mobile.sh +++ b/sources/install/package_mobile.sh @@ -4,22 +4,25 @@ source common.sh function install_mobile_apt_tools() { - fapt android-tools-adb zipalign apksigner apktool + fapt android-tools-adb zipalign apksigner apktool scrcpy add-history adb add-history zipalign add-history apksigner add-history apktool + add-history scrcpy add-test-command "adb --help" add-test-command "zipalign --help |& grep 'verbose output'" add-test-command "apksigner --version" add-test-command "apktool --version" + add-test-command "scrcpy --version" add-to-list "android-tools-adb,https://developer.android.com/studio/command-line/adb,A collection of tools for debugging Android applications" add-to-list "zipalign,https://developer.android.com/studio/command-line/zipalign,arguably the most important step to optimize your APK file" add-to-list "apksigner,https://source.android.com/security/apksigning,arguably the most important step to optimize your APK file" - add-to-list "apktool,https://github.com/iBotPeaches/Apktool,It is a tool for reverse engineering 3rd party, closed, binary Android apps." + add-to-list "apktool,https://github.com/iBotPeaches/Apktool,It is a tool for reverse engineering 3rd party / closed / binary Android apps." + add-to-list "scrcpy,https://github.com/Genymobile/scrcpy,Display and control your Android device." 
} function install_smali(){ diff --git a/sources/install/package_network.sh b/sources/install/package_network.sh index 5bc12c4b1..29774f272 100644 --- a/sources/install/package_network.sh +++ b/sources/install/package_network.sh @@ -46,13 +46,13 @@ function install_network_apt_tools() { add-to-list "iptables,https://linux.die.net/man/8/iptables,Userspace command line tool for configuring kernel firewall" add-to-list "traceroute,https://github.com/iputils/iputils,Traceroute is a command which can show you the path a packet of information takes from your computer to one you specify." add-to-list "dns2tcp,https://github.com/alex-sector/dns2tcp,dns2tcp is a tool for relaying TCP connections over DNS." - add-to-list "freerdp2-x11,https://github.com/FreeRDP/FreeRDP,FreeRDP is a free implementation of the Remote Desktop Protocol (RDP), released under the Apache license." - add-to-list "rdesktop,https://github.com/rdesktop/rdesktop,rdesktop is a client for Remote Desktop Protocol (RDP), used in a number of Microsoft products including Windows NT Terminal Server, Windows 2000 Server, Windows XP and Windows 2003 Server." + add-to-list "freerdp2-x11,https://github.com/FreeRDP/FreeRDP,FreeRDP is a free implementation of the Remote Desktop Protocol (RDP) released under the Apache license." + add-to-list "rdesktop,https://github.com/rdesktop/rdesktop,rdesktop is a client for Remote Desktop Protocol (RDP) used in a number of Microsoft products including Windows NT Terminal Server / Windows 2000 Server / Windows XP and Windows 2003 Server." add-to-list "xtightvncviewer,https://www.commandlinux.com/man-page/man1/xtightvncviewer.1.html,xtightvncviewer is an open source VNC client software." add-to-list "ssh-audit,https://github.com/arthepsy/ssh-audit,ssh-audit is a tool to test SSH server configuration for best practices." add-to-list "hydra,https://github.com/vanhauser-thc/thc-hydra,Hydra is a parallelized login cracker which supports numerous protocols to attack." 
add-to-list "mariadb-client,https://github.com/MariaDB/server,MariaDB is a community-developed fork of the MySQL relational database management system. The mariadb-client package includes command-line utilities for interacting with a MariaDB server." - add-to-list "redis-tools,https://github.com/antirez/redis-tools,redis-tools is a collection of Redis client utilities, including redis-cli and redis-benchmark." + add-to-list "redis-tools,https://github.com/antirez/redis-tools,redis-tools is a collection of Redis client utilities including redis-cli and redis-benchmark." } function install_proxychains() { @@ -125,7 +125,7 @@ function install_chisel() { # TODO: add windows pre-compiled binaries in /opt/ressources/windows ? add-history chisel add-test-command "chisel --help" - add-to-list "chisel,https://github.com/jpillora/chisel,Go based TCP tunnel, with authentication and encryption support" + add-to-list "chisel,https://github.com/jpillora/chisel,Go based TCP tunnel with authentication and encryption support" } function install_sshuttle() { diff --git a/sources/install/package_osint.sh b/sources/install/package_osint.sh index 06ce18be4..1ce9223ba 100644 --- a/sources/install/package_osint.sh +++ b/sources/install/package_osint.sh @@ -24,7 +24,7 @@ function install_osint_apt_tools() { add-test-command "whois --help" # See information about a specific domain name or IP address add-test-command "recon-ng --help" # External recon tool - add-to-list "exiftool,https://github.com/exiftool/exiftool,ExifTool is a Perl library and command-line tool for reading, writing and editing meta information in image, audio and video files." + add-to-list "exiftool,https://github.com/exiftool/exiftool,ExifTool is a Perl library and command-line tool for reading / writing and editing meta information in image / audio and video files." add-to-list "exifprobe,https://github.com/hfiguiere/exifprobe,Exifprobe is a command-line tool to parse EXIF data from image files." 
add-to-list "dnsenum,https://github.com/fwaeytens/dnsenum,dnsenum is a tool for enumerating DNS information about a domain." add-to-list "tor,https://github.com/torproject/tor,Anonymity tool that can help protect your privacy and online identity by routing your traffic through a network of servers." @@ -115,7 +115,7 @@ function install_theharvester() { add-aliases theharvester add-history theharvester add-test-command "theHarvester.py --help" - add-to-list "theharvester,https://github.com/laramies/theHarvester,Tool for gathering e-mail accounts, subdomain names, virtual hosts, open ports/ banners, and employee names from different public sources" + add-to-list "theharvester,https://github.com/laramies/theHarvester,Tool for gathering e-mail accounts / subdomain names / virtual host / open ports / banners / and employee names from different public sources" } function install_h8mail() { @@ -211,7 +211,7 @@ function install_toutatis() { python3 -m pipx install git+https://github.com/megadose/toutatis add-history toutatis add-test-command "toutatis --help" - add-to-list "toutatis,https://github.com/megadose/Toutatis,Toutatis is a tool that allows you to extract information from instagrams accounts such as e-mails, phone numbers and more." + add-to-list "toutatis,https://github.com/megadose/Toutatis,Toutatis is a tool that allows you to extract information from instagrams accounts such as e-mails / phone numbers and more." } function install_waybackurls() { @@ -243,7 +243,7 @@ function install_photon() { add-aliases photon add-history photon add-test-command "photon.py --help" - add-to-list "photon,https://github.com/s0md3v/Photon,a fast web crawler which extracts URLs, files, intel & endpoints from a target." + add-to-list "photon,https://github.com/s0md3v/Photon,a fast web crawler which extracts URLs / files / intel & endpoints from a target." 
} function install_ipinfo() { @@ -313,7 +313,7 @@ function install_osrframework() { python3 -m pipx inject osrframework 'pip==21.2' add-history osrframework add-test-command "osrframework-cli --help" - add-to-list "osrframework,https://github.com/i3visio/osrframework,Include references to a bunch of different applications related to username checking, DNS lookups, information leaks research, deep web search, regular expressions extraction and many others." + add-to-list "osrframework,https://github.com/i3visio/osrframework,Include references to a bunch of different applications related to username checking / DNS lookups / information leaks research / deep web search / regular expressions extraction and many others." } function install_pwndb() { @@ -373,7 +373,7 @@ function install_trevorspray() { python3 -m pipx install . add-history trevorspray add-test-command "trevorspray --help" - add-to-list "trevorspray,https://github.com/blacklanternsecurity/TREVORspray,TREVORspray is a modular password sprayer with threading SSH proxying loot modules, and more" + add-to-list "trevorspray,https://github.com/blacklanternsecurity/TREVORspray,TREVORspray is a modular password sprayer with threading SSH proxying loot modules / and more" } # Package dedicated to osint, recon and passive tools diff --git a/sources/install/package_reverse.sh b/sources/install/package_reverse.sh index 175c09655..06d60bf4c 100644 --- a/sources/install/package_reverse.sh +++ b/sources/install/package_reverse.sh @@ -22,7 +22,7 @@ function install_reverse_apt_tools() { add-test-command "strace --version" add-to-list "nasm,https://github.com/netwide-assembler/nasm,NASM is an 80x86 assembler designed for portability and modularity." - add-to-list "wabt,https://github.com/WebAssembly/wabt,The WebAssembly Binary Toolkit (WABT) is a suite of tools for WebAssembly (Wasm), including assembler and disassembler, a syntax checker, and a binary format validator." 
+ add-to-list "wabt,https://github.com/WebAssembly/wabt,The WebAssembly Binary Toolkit (WABT) is a suite of tools for WebAssembly (Wasm) including assembler and disassembler / a syntax checker / and a binary format validator." add-to-list "strace,https://github.com/strace/strace,strace is a debugging utility for Linux that allows you to monitor and diagnose system calls made by a process." } diff --git a/sources/install/package_steganography.sh b/sources/install/package_steganography.sh index 742a0f396..d8bcf48d1 100644 --- a/sources/install/package_steganography.sh +++ b/sources/install/package_steganography.sh @@ -21,7 +21,7 @@ function install_steganography_apt_tools() { add-to-list "stegosuite,https://github.com/osde8info/stegosuite,Stegosuite is a free steganography tool that allows you to hide data in image and audio files." add-to-list "steghide,https://github.com/StefanoDeVuono/steghide,steghide is a steganography program that is able to hide data in various kinds of image and audio files." 
- add-to-list "exif,https://exiftool.org/,Utility to read, write and edit metadata in image, audio and video files" + add-to-list "exif,https://exiftool.org/,Utility to read / write and edit metadata in image / audio and video files" add-to-list "exiv2,https://github.com/Exiv2/exiv2,Image metadata library and toolset" add-to-list "hexedit,https://github.com/pixel/hexedit,View and edit binary files" } diff --git a/sources/install/package_web.sh b/sources/install/package_web.sh index 5b78cdf74..076cbd5a4 100644 --- a/sources/install/package_web.sh +++ b/sources/install/package_web.sh @@ -67,7 +67,7 @@ function install_amass(){ go install -v github.com/owasp-amass/amass/v3/...@master add-history amass add-test-command "amass -version" - add-to-list "amass,https://github.com/OWASP/Amass,A DNS enumeration, attack surface mapping & external assets discovery tool" + add-to-list "amass,https://github.com/OWASP/Amass,A DNS enumeration / attack surface mapping & external assets discovery tool" } function install_ffuf() { @@ -328,7 +328,7 @@ function install_eyewitness() { add-aliases eyewitness add-history eyewitness add-test-command "eyewitness --help" - add-to-list "eyewitness,https://github.com/FortyNorthSecurity/EyeWitness,a tool to take screenshots of websites, provide some server header info, and identify default credentials if possible." + add-to-list "eyewitness,https://github.com/FortyNorthSecurity/EyeWitness,a tool to take screenshots of websites / provide some server header info / and identify default credentials if possible." } function install_oneforall() { @@ -485,7 +485,7 @@ function install_jdwp_shellifier(){ add-aliases jdwp-shellifier add-history jdwp-shellifier add-test-command "jdwp-shellifier.py --help" - add-to-list "jdwp,https://github.com/IOActive/jdwp-shellifier,This exploitation script is meant to be used by pentesters against active JDWP service, in order to gain Remote Code Execution." 
+ add-to-list "jdwp,https://github.com/IOActive/jdwp-shellifier,This exploitation script is meant to be used by pentesters against active JDWP service / in order to gain Remote Code Execution." } function install_httpmethods() { @@ -497,7 +497,7 @@ function install_httpmethods() { add-aliases httpmethods add-history httpmethods add-test-command "httpmethods --help" - add-to-list "httpmethods,https://github.com/ShutdownRepo/httpmethods,Tool for exploiting HTTP methods (e.g. PUT, DELETE, etc.)" + add-to-list "httpmethods,https://github.com/ShutdownRepo/httpmethods,Tool for exploiting HTTP methods (e.g. PUT / DELETE / etc.)" } function install_h2csmuggler() { @@ -530,7 +530,7 @@ function install_feroxbuster() { add-aliases feroxbuster add-history feroxbuster add-test-command "feroxbuster --help" - add-to-list "feroxbuster,https://github.com/epi052/feroxbuster,Simple, fast and recursive content discovery tool" + add-to-list "feroxbuster,https://github.com/epi052/feroxbuster,Simple / fast and recursive content discovery tool" } function install_tomcatwardeployer() { @@ -574,7 +574,7 @@ function install_nuclei() { go install -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei@latest add-history nuclei add-test-command "nuclei --version" - add-to-list "nuclei,https://github.com/projectdiscovery/nuclei,A fast and customizable vulnerability scanner that can detect a wide range of issues, including XSS, SQL injection, and misconfigured servers." + add-to-list "nuclei,https://github.com/projectdiscovery/nuclei,A fast and customizable vulnerability scanner that can detect a wide range of issues / including XSS / SQL injection / and misconfigured servers." 
} function configure_nuclei() { @@ -610,7 +610,7 @@ function install_httpx() { go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest add-history httpx add-test-command "httpx --help" - add-to-list "httpx,https://github.com/projectdiscovery/httpx,A tool for identifying web technologies and vulnerabilities, including outdated software versions and weak encryption protocols." + add-to-list "httpx,https://github.com/projectdiscovery/httpx,A tool for identifying web technologies and vulnerabilities / including outdated software versions and weak encryption protocols." } function install_anew() { @@ -618,7 +618,7 @@ function install_anew() { go install -v github.com/tomnomnom/anew@latest add-history anew add-test-command "anew --help" - add-to-list "anew,https://github.com/tomnomnom/anew,A simple tool for filtering and manipulating text data, such as log files and other outputs." + add-to-list "anew,https://github.com/tomnomnom/anew,A simple tool for filtering and manipulating text data / such as log files and other outputs." } function install_robotstester() { @@ -682,7 +682,7 @@ function install_kraken() { add-aliases kraken add-history kraken add-test-command "kraken.py -h" - add-to-list "Modular multi-language webshell,https://github.com/kraken-ng/Kraken.git,Kraken is a modular multi-language webshell focused on web post-exploitation and defense evasion. It supports three technologies (PHP, JSP and ASPX) and is core is developed in Python." + add-to-list "Kraken,https://github.com/kraken-ng/Kraken.git,Kraken is a modular multi-language webshell focused on web post-exploitation and defense evasion. It supports three technologies (PHP / JSP and ASPX) and is core is developed in Python." 
} function install_soapui() { diff --git a/sources/install/package_wifi.sh b/sources/install/package_wifi.sh index 7a5274c2d..2d4103ed3 100644 --- a/sources/install/package_wifi.sh +++ b/sources/install/package_wifi.sh @@ -69,7 +69,7 @@ function install_bettercap() { add-aliases bettercap add-history bettercap add-test-command "bettercap --version" - add-to-list "bettercap,https://github.com/bettercap/bettercap,The Swiss Army knife for 802.11, BLE, and Ethernet networks reconnaissance and MITM attacks." + add-to-list "bettercap,https://github.com/bettercap/bettercap,The Swiss Army knife for 802.11 / BLE / and Ethernet networks reconnaissance and MITM attacks." } function install_hcxtools() { diff --git a/web.dockerfile b/web.dockerfile index a700b61e3..23be4fafd 100644 --- a/web.dockerfile +++ b/web.dockerfile @@ -16,7 +16,7 @@ COPY sources /root/sources/ WORKDIR /root/sources/install -# WARNING: package_most_used can't be used with other functions other than: package_base, post_install_clean +# WARNING: package_most_used can't be used with other functions other than: package_base, post_install # ./entrypoint.sh package_most_used RUN echo "${TAG}-${VERSION}" > /opt/.exegol_version && \ @@ -33,7 +33,7 @@ RUN echo "${TAG}-${VERSION}" > /opt/.exegol_version && \ ./entrypoint.sh package_web && \ ./entrypoint.sh package_web_configure && \ ./entrypoint.sh package_code_analysis && \ - ./entrypoint.sh post_install_clean && \ + ./entrypoint.sh post_install && \ rm -rf /root/sources /var/lib/apt/lists/* WORKDIR /workspace