benchmark: add --track to benchmark · nodejs/node@28538f2
@@ -3,6 +3,7 @@
 3   3   const path = require('path');
 4   4   const { spawn, fork } = require('node:child_process');
 5   5   const CLI = require('./_cli.js');
     6  +const { styleText } = require('node:util');
 6   7
 7   8   const cli = new CLI(`usage: ./node run.js [options] [--] <category> ...
89 Run each benchmark in the <category> directory a single time, more than one
@@ -16,6 +17,7 @@ const cli = new CLI(`usage: ./node run.js [options] [--] <category> ...
1617 Default: 1
1718 --set variable=value set benchmark variable (can be repeated)
1819 --format [simple|csv] optional value that specifies the output format
20+ --track Display the time elapsed to run each benchmark file.
1921 test only run a single configuration from the options
2022 matrix
2123 all each benchmark category is run one after the other
@@ -25,7 +27,7 @@ const cli = new CLI(`usage: ./node run.js [options] [--] <category> ...
2527 --set CPUSET=0-2 Specifies that benchmarks should run on CPU cores 0 to 2.
26  28
27  29    Note: The CPUSET format should match the specifications of the 'taskset' command on your system.
28-`, { arrayArgs: ['set', 'filter', 'exclude'] });
30+`, { arrayArgs: ['set', 'filter', 'exclude'], boolArgs: ['track'] });
29  31
30  32  const benchmarks = cli.benchmarks();
31  33
@@ -107,7 +109,12 @@ async function run() {
107 109      }
108 110
109 111      while (runs-- > 0) {
    112 +      const start = performance.now();
110 113        await runBenchmark(filename);
    114 +      if (format !== 'csv' && cli.optional.track) {
    115 +        const ms = styleText(['bold', 'yellow'], `${Math.round(performance.now() - start)}ms`);
    116 +        console.log(`[${ms}] ${filename}`);
    117 +      }
111 118      }
112 119    }
113 120  }