Upload average CPU consumption of CI jobs to DataDog
Kobzol committed Sep 27, 2024
1 parent 76ed7a1 commit c1c0bd7
Showing 3 changed files with 92 additions and 1 deletion.
10 changes: 10 additions & 0 deletions .github/workflows/ci.yml
@@ -212,6 +212,16 @@ jobs:
        # erroring about invalid credentials instead.
        if: github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1'

      - name: upload job metrics to DataDog
        if: needs.calculate_matrix.outputs.run_type != 'pr'
        env:
          DATADOG_SITE: datadoghq.com
          DATADOG_API_KEY: ${{ secrets.DATADOG_API_KEY }}
          DD_GITHUB_JOB_NAME: ${{ matrix.name }}
        run: |
          npm install -g @datadog/datadog-ci@^2.x.x
          python3 src/ci/scripts/upload-build-metrics.py build/cpu-usage.csv
  # This job is used to tell bors the final status of the build, as there is no practical
  # way to detect when a workflow is successful by listening only to webhooks in our
  # current bors implementation (homu).
  outcome:
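For context, `build/cpu-usage.csv` is produced during the build by `src/ci/cpu-usage-over-time.py`. The parser in the new script below only relies on two-column rows with the idle-CPU percentage in the second column; a minimal sketch of that shape, with invented timestamps and values, might look like this:

```python
# Hypothetical sketch of the cpu-usage.csv shape the new step consumes:
# one <timestamp>,<idle CPU %> sample per row (all values invented).
import csv

samples = [
    ("2024-09-27T10:00:00", 81.2),
    ("2024-09-27T10:00:10", 35.7),
    ("2024-09-27T10:00:20", 12.9),
]
with open("cpu-usage.csv", "w", newline="") as f:
    csv.writer(f).writerows(samples)
```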
2 changes: 1 addition & 1 deletion src/ci/scripts/upload-artifacts.sh
@@ -23,7 +23,7 @@ if [[ "${DEPLOY-0}" -eq "1" ]] || [[ "${DEPLOY_ALT-0}" -eq "1" ]]; then
fi

# CPU usage statistics.
-mv build/cpu-usage.csv "${upload_dir}/cpu-${CI_JOB_NAME}.csv"
+cp build/cpu-usage.csv "${upload_dir}/cpu-${CI_JOB_NAME}.csv"

# Build metrics generated by x.py.
mv "${build_dir}/metrics.json" "${upload_dir}/metrics-${CI_JOB_NAME}.json"
81 changes: 81 additions & 0 deletions src/ci/scripts/upload-build-metrics.py
@@ -0,0 +1,81 @@
"""
This script postprocesses data gathered during a CI run, computes certain metrics
from them, and uploads these metrics to DataDog.
This script is expected to be executed from within a GitHub Actions job.
It expects the following environment variables:
- DATADOG_SITE: path to the DataDog API endpoint
- DATADOG_API_KEY: DataDog API token
- DD_GITHUB_JOB_NAME: Name of the current GitHub Actions job
And it also expects the presence of a binary called `datadog-ci` to be in PATH.
It can be installed with `npm install -g @datadog/datadog-ci`.
Usage:
```bash
$ python3 upload-build-metrics.py <path-to-CPU-usage-CSV>
```
`path-to-CPU-usage-CSV` is a path to a CSV generated by the `src/ci/cpu-usage-over-time.py` script.
"""
import argparse
import csv
import os
import subprocess
import sys
from pathlib import Path
from typing import List


def load_cpu_usage(path: Path) -> List[float]:
    usage = []
    with open(path) as f:
        reader = csv.reader(f, delimiter=',')
        for row in reader:
            # The log might contain incomplete rows or interleaved Python
            # exception traces; only use well-formed two-column rows.
            if len(row) == 2:
                try:
                    # The second column is the idle CPU percentage;
                    # usage is its complement.
                    idle = float(row[1])
                    usage.append(100.0 - idle)
                except ValueError:
                    pass
    return usage


def upload_datadog_measure(name: str, value: float):
    """
    Uploads a single numeric metric for the current GitHub Actions job to DataDog.
    """
    print(f"Metric {name}: {value:.4f}")

    datadog_cmd = "datadog-ci"
    if os.getenv("GITHUB_ACTIONS") is not None and sys.platform.lower().startswith("win"):
        # Due to a weird interaction of MSYS2 and Python, we need to use an absolute
        # path and also specify the ".cmd" suffix.
        # See https://github.com/rust-lang/rust/pull/125771.
        datadog_cmd = "C:\\npm\\prefix\\datadog-ci.cmd"

    subprocess.run(
        [
            datadog_cmd,
            "measure",
            "--level", "job",
            "--measures", f"{name}:{value}",
        ],
        check=False,
    )


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        prog="DataDog metric uploader"
    )
    parser.add_argument("cpu-usage-history-csv")
    args = parser.parse_args()

    build_usage_csv = vars(args)["cpu-usage-history-csv"]
    usage_timeseries = load_cpu_usage(Path(build_usage_csv))
    if len(usage_timeseries) > 0:
        avg_cpu_usage = sum(usage_timeseries) / len(usage_timeseries)
    else:
        avg_cpu_usage = 0
    upload_datadog_measure("avg-cpu-usage", avg_cpu_usage)
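
As a quick sanity check of the averaging logic (a sketch over invented values, not part of the commit), the metric the script would upload for a tiny hand-written input can be reproduced like this:

```python
# Mirrors load_cpu_usage: malformed rows are skipped, usage = 100 - idle.
rows = [
    ["2024-09-27T10:00:00", "80.0"],  # usage 20.0
    ["2024-09-27T10:00:10", "40.0"],  # usage 60.0
    ["truncated row"],                # skipped: not two columns
    ["2024-09-27T10:00:20", "oops"],  # skipped: ValueError on float()
]

usage = []
for row in rows:
    if len(row) == 2:
        try:
            usage.append(100.0 - float(row[1]))
        except ValueError:
            pass

avg = sum(usage) / len(usage) if usage else 0
print(f"Metric avg-cpu-usage: {avg:.4f}")  # Metric avg-cpu-usage: 40.0000
```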
