diff --git a/.github/scripts/performance-tests/produce_metric_widget_images.py b/.github/scripts/performance-tests/produce_metric_widget_images.py
index a38ac90..cb243e1 100644
--- a/.github/scripts/performance-tests/produce_metric_widget_images.py
+++ b/.github/scripts/performance-tests/produce_metric_widget_images.py
@@ -366,17 +366,21 @@ def parse_args():
     f"Will create a snapshot at URL: https://github.com/{args.github_repository}/blob/gh-pages/{snapshot_location}",
 )
 
-# Delete oldest snapshots
-
-snapshot_dirs_length = len(os.listdir(SOAK_TESTS_SNAPSHOTS_COMMITS_DIR))
-
-if snapshot_dirs_length > args.max_benchmarks_to_keep:
-    oldest_snapshot_dirs = nsmallest(
-        snapshot_dirs_length - args.max_benchmarks_to_keep,
-        Path(SOAK_TESTS_SNAPSHOTS_COMMITS_DIR).iterdir(),
-        key=os.path.getmtime,
-    )
-    for old_snapshots_dir in oldest_snapshot_dirs:
-        shutil.rmtree(old_snapshots_dir, ignore_errors=True)
+# Delete oldest run folders in most recent commit and oldest commit folders
+
+for snapshots_dir in [
+    SOAK_TESTS_SNAPSHOTS_COMMITS_DIR,
+    f"{SOAK_TESTS_SNAPSHOTS_COMMITS_DIR}/{ args.target_sha }/runs",
+]:
+    snapshot_dirs_length = len(os.listdir(snapshots_dir))
+
+    if snapshot_dirs_length > args.max_benchmarks_to_keep:
+        oldest_snapshot_dirs = nsmallest(
+            snapshot_dirs_length - args.max_benchmarks_to_keep,
+            Path(snapshots_dir).iterdir(),
+            key=os.path.getmtime,
+        )
+        for old_snapshots_dir in oldest_snapshot_dirs:
+            shutil.rmtree(old_snapshots_dir, ignore_errors=True)
 
 logger.info("Done creating metric widget images.")
diff --git a/.github/workflows/soak-testing.yml b/.github/workflows/soak-testing.yml
index 78b2e3a..72c73d3 100644
--- a/.github/workflows/soak-testing.yml
+++ b/.github/workflows/soak-testing.yml
@@ -216,7 +216,7 @@ jobs:
         with:
          filename: .github/auto-issue-templates/failure-during-soak_tests.md
       - name: Publish Issue if failed AFTER Performance Tests
-        uses: NathanielRN/create-an-issue@v2.5.1-alpha2
+        uses: JasonEtco/create-an-issue@v2
        if: ${{ github.event_name == 'schedule' && steps.check-failure-after-performance-tests.outcome == 'failure' }}
        env: