
Commit 870653f

Upload average CPU consumption of CI jobs to DataDog
1 parent 7d97c59 commit 870653f

2 files changed: +82 -0 lines changed


.github/workflows/ci.yml

+10
@@ -212,6 +212,16 @@ jobs:
         # erroring about invalid credentials instead.
         if: github.event_name == 'push' || env.DEPLOY == '1' || env.DEPLOY_ALT == '1'
 
+      - name: Upload job metrics to DataDog
+        if: needs.calculate_matrix.outputs.run_type == 'try'
+        env:
+          DATADOG_SITE: datadoghq.com
+          DATADOG_API_KEY: ${{ secrets.DATADOG_API_KEY }}
+          DD_GITHUB_JOB_NAME: ${{ matrix.name }}
+        run: |
+          npm install -g @datadog/datadog-ci
+          python3 src/ci/scripts/upload-build-metrics.py build/cpu-usage.csv
+
   # This job is used to tell bors the final status of the build, as there is no practical way to detect
   # when a workflow is successful listening to webhooks only in our current bors implementation (homu).
   outcome:
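
For context, `build/cpu-usage.csv` is the sampling log that `src/ci/cpu-usage-over-time.py` writes during the build; that script is not part of this diff. Judging by the parser added below, each usable row has exactly two columns, with the idle-CPU percentage in the second one. A minimal sketch of producing a compatible file (the timestamp in the first column is an assumption, since the parser ignores it):

```python
# Hypothetical sketch of a CSV that the load_cpu_usage() parser below accepts.
# Only the second column (idle CPU %) is actually read; the first column is
# assumed here to be a timestamp and is ignored by the parser.
import csv
from datetime import datetime, timezone
from pathlib import Path

Path("build").mkdir(exist_ok=True)
with open("build/cpu-usage.csv", "w", newline="") as f:
    writer = csv.writer(f)
    for idle in (42.0, 35.5, 60.2):  # illustrative idle-CPU percentages
        writer.writerow([datetime.now(timezone.utc).isoformat(), idle])
```
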
src/ci/scripts/upload-build-metrics.py

+72
@@ -0,0 +1,72 @@
+"""
+This script postprocesses the CPU usage CSV recorded during a CI run, computes
+high-level metrics from it (currently the average CPU utilization of the job),
+and then uploads the computed metrics to DataDog.
+
+This script is expected to be executed from within a GitHub Actions job.
+
+It expects the following environment variables:
+- DATADOG_SITE: the DataDog site to send data to (e.g. datadoghq.com)
+- DATADOG_API_KEY: DataDog API token
+- DD_GITHUB_JOB_NAME: Name of the current GitHub Actions job
+
+It also expects the `datadog-ci` binary to be available on PATH.
+It can be installed with `npm install -g @datadog/datadog-ci`.
+
+Usage:
+```bash
+$ python3 upload-build-metrics.py <path-to-CPU-usage-CSV>
+```
+
+`path-to-CPU-usage-CSV` is a path to a CSV generated by the `src/ci/cpu-usage-over-time.py` script.
+"""
+import argparse
+import csv
+import subprocess
+from pathlib import Path
+from typing import List
+
+
+def load_cpu_usage(path: Path) -> List[float]:
+    usage = []
+    with open(path) as f:
+        reader = csv.reader(f, delimiter=',')
+        for row in reader:
+            # The log might contain incomplete rows or some Python exception
+            if len(row) == 2:
+                try:
+                    idle = float(row[1])
+                    usage.append(100.0 - idle)
+                except ValueError:
+                    pass
+    return usage
+
+
+def upload_datadog_measure(name: str, value: float):
+    """
+    Uploads a single numeric metric for the current GitHub Actions job to DataDog.
+    """
+    subprocess.run([
+        "datadog-ci",
+        "measure",
+        "--level", "job",
+        "--measures", f"{name}:{value}"
+    ],
+        check=False
+    )
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        prog="DataDog metric uploader"
+    )
+    parser.add_argument("cpu-usage-history-csv")
+    args = parser.parse_args()
+
+    build_usage_csv = vars(args)["cpu-usage-history-csv"]
+    usage_timeseries = load_cpu_usage(Path(build_usage_csv))
+    if len(usage_timeseries) > 0:
+        avg_cpu_usage = sum(usage_timeseries) / len(usage_timeseries)
+    else:
+        avg_cpu_usage = 0
+    upload_datadog_measure("avg-cpu-usage", avg_cpu_usage)
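
Taken together: on `try` builds the workflow step installs `datadog-ci` and runs the script, which averages `100 - idle` over all valid rows and reports the result as the `avg-cpu-usage` job measure. A hedged sketch of a local dry run, assuming `datadog-ci` is on PATH and the DataDog environment variables from the workflow step are exported (file contents and values are illustrative):

```python
# Hypothetical local dry run of src/ci/scripts/upload-build-metrics.py.
import subprocess
from pathlib import Path

# Two samples at 75% and 65% idle -> 25% and 35% usage -> average 30.0.
Path("cpu-usage.csv").write_text(
    "2024-05-01T12:00:00,75.0\n"
    "2024-05-01T12:00:10,65.0\n"
)
subprocess.run(
    ["python3", "src/ci/scripts/upload-build-metrics.py", "cpu-usage.csv"],
    check=True,
)
# The script then effectively invokes:
#   datadog-ci measure --level job --measures avg-cpu-usage:30.0
```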
