-
-
Notifications
You must be signed in to change notification settings - Fork 450
/
upload.mjs
151 lines (128 loc) · 4.86 KB
/
upload.mjs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
/*
 * Combine benchmark data from different jobs, and upload to Codspeed.
 */
import {createReadStream} from 'fs';
import fs from 'fs/promises';
import {join as pathJoin, dirname} from 'path';
import {fileURLToPath} from 'url';
import {createHash} from 'crypto';
import assert from 'assert';
import {create as createTar, extract as extractTar} from 'tar';
import axios from 'axios';
// ES modules have no `__dirname` global; derive it from this module's URL
const __dirname = dirname(fileURLToPath(import.meta.url));
const METADATA_SUFFIX = '_metadata.json',
CODSPEED_UPLOAD_URL = 'https://api.codspeed.io/upload';
// Configuration via environment:
// - DATA_DIR: directory containing per-job benchmark outputs
// - CODSPEED_TOKEN: API token; absent for tokenless runs (PRs from forks)
const dataDir = process.env.DATA_DIR,
token = process.env.CODSPEED_TOKEN;
// Scan the data dir, collecting profile (`.out`) files and recording which
// components have fresh results. The first metadata file found is kept as the
// template for the upload metadata.
const profileFiles = [],
  components = new Set();
let metadataPath;
for (const filename of await fs.readdir(dataDir)) {
  const path = pathJoin(dataDir, filename);
  if (filename.endsWith(METADATA_SUFFIX)) {
    if (!metadataPath) metadataPath = path;
    // Component name is the filename minus the metadata suffix.
    // NB: Must slice `filename`, not `metadataPath` — `metadataPath` holds the
    // full path of the *first* metadata file only, so slicing it would add the
    // same wrong entry on every iteration and the cache-skip check below
    // (`components.has(component)`) would never match.
    components.add(filename.slice(0, -METADATA_SUFFIX.length));
  } else {
    // Profile files are named `<component>_<pid>.out`
    const match = filename.match(/_(\d+)\.out$/);
    assert(match, `Unexpected file: ${filename}`);
    const pid = +match[1];
    profileFiles.push({pid, path});
  }
}
// Add cached results for benchmarks which weren't run in this CI run.
// The cache tarball contains `.out` files named `<component>_<pid>.out`.
const cacheZipPath = pathJoin(__dirname, 'cachedBenches.tar.gz'),
  cacheDir = pathJoin(dataDir, 'cache');
await fs.mkdir(cacheDir);
await extractTar({file: cacheZipPath, cwd: cacheDir});
for (const filename of await fs.readdir(cacheDir)) {
  const match = filename.match(/^(.+)_(\d+)\.out$/);
  // Was `$(unknown)` — a mangled template literal; interpolate the filename
  assert(match, `Unexpected file in cache: ${filename}`);
  const [, component, pid] = match;
  // Skip components which produced fresh results in this run
  if (components.has(component)) continue;
  const outPath = pathJoin(dataDir, filename);
  await fs.rename(pathJoin(cacheDir, filename), outPath);
  profileFiles.push({pid: +pid, path: outPath});
}
// Gather all `.out` files into a single directory, renamed to `<pid>.out`.
// Files whose PID clashes with one already moved are set aside; their PIDs
// get rewritten in the next step.
console.log('Combining profiles');
const outDir = pathJoin(dataDir, 'out');
await fs.mkdir(outDir);
const pids = new Set();
const duplicates = [];
let highestPid = -1;
for (const profile of profileFiles) {
  const {pid, path} = profile;
  if (pids.has(pid)) {
    // PID already taken by another profile file
    duplicates.push(profile);
    continue;
  }
  pids.add(pid);
  if (highestPid < pid) highestPid = pid;
  await fs.rename(path, pathJoin(outDir, `${pid}.out`));
}
// Rewrite PIDs inside `.out` files whose PID clashed with an already-moved
// file. Each clashing file is assigned a fresh PID above any seen so far, its
// `pid:` header line is rewritten, and it's saved under the new PID.
for (const duplicate of duplicates) {
  const content = await fs.readFile(duplicate.path, 'utf8');
  const oldPidLine = `\npid: ${duplicate.pid}\n`;
  const index = content.indexOf(oldPidLine);
  assert(index !== -1, `Could not locate PID in ${duplicate.path}`);
  const header = content.slice(0, index);
  // The `pid:` line is expected to be the 3rd line of the file
  assert(header.split('\n').length === 3, `Unexpected formatting in ${duplicate.path}`);
  highestPid++;
  const rest = content.slice(index + oldPidLine.length);
  const updated = `${header}\npid: ${highestPid}\n${rest}`;
  await fs.writeFile(pathJoin(outDir, `${highestPid}.out`), updated);
  await fs.rm(duplicate.path);
}
// Bundle the combined profile directory into a gzipped tarball
console.log('Zipping combined profile directory');
const archivePath = pathJoin(dataDir, 'archive.tar.gz');
await createTar({file: archivePath, gzip: true, cwd: outDir}, ['./']);

// Compute the archive's size and base64 MD5 digest (both sent with the upload)
console.log('Hashing ZIP');
const {size} = await fs.stat(archivePath);
const hasher = createHash('md5');
for await (const chunk of createReadStream(archivePath)) {
  hasher.update(chunk);
}
const md5 = hasher.digest('base64');
// Patch the archive's MD5 hash into the metadata object
const metadata = JSON.parse(await fs.readFile(metadataPath, 'utf8'));
metadata.profileMd5 = md5;
// Tokenless runs (PRs from forks): set `metadata.tokenless` and log the
// SHA256 hash of the metadata JSON ourselves. `codspeed-runner` would
// normally log this hash, and CodSpeed scans the job logs for a line matching
// `CodSpeed Run Hash: "..."` — but we gave `CodSpeedHQ/action` a dummy token
// to suppress its (incorrect) hash log, so the correct one is emitted here.
if (!token) metadata.tokenless = true;
const metadataJson = JSON.stringify(metadata);
if (!token) {
  const runHash = createHash('sha256').update(metadataJson).digest('hex');
  console.log(`CodSpeed Run Hash: "${runHash}"`);
}
// POST the metadata to CodSpeed; a successful response carries a pre-signed
// URL to which the profile archive is then uploaded.
console.log('Uploading metadata to CodSpeed');
const metadataHeaders = {'Content-Type': 'application/json'};
if (token) metadataHeaders.Authorization = token;
const {data} = await axios({
  method: 'post',
  url: CODSPEED_UPLOAD_URL,
  data: metadataJson,
  headers: metadataHeaders,
});
assert(data?.status === 'success', 'Failed to upload metadata to Codspeed');
const {uploadUrl} = data;

// PUT the gzipped profile archive to the pre-signed URL, streamed from disk
console.log('Uploading profile ZIP to CodSpeed');
await axios({
  method: 'put',
  url: uploadUrl,
  data: createReadStream(archivePath),
  headers: {
    'Content-Type': 'application/gzip',
    'Content-Length': size,
    'Content-MD5': md5,
  },
});