Diffstat (limited to 'benchmark/compare.js')
-rw-r--r--  benchmark/compare.js  229
1 file changed, 67 insertions(+), 162 deletions(-)
diff --git a/benchmark/compare.js b/benchmark/compare.js
index 4faa8f8638..fb179e0e47 100644
--- a/benchmark/compare.js
+++ b/benchmark/compare.js
@@ -1,181 +1,86 @@
'use strict';
-var usage = 'node benchmark/compare.js ' +
- '<node-binary1> <node-binary2> ' +
- '[--html] [--red|-r] [--green|-g] ' +
- '[-- <type> [testFilter]]';
-var show = 'both';
-var nodes = [];
-var html = false;
-var benchmarks;
+const fork = require('child_process').fork;
+const path = require('path');
+const CLI = require('./_cli.js');
+
+//
+// Parse arguments
+//
+const cli = CLI(`usage: ./node compare.js [options] [--] <category> ...
+ Run each benchmark in the <category> directory many times using two different
+ node versions. More than one <category> directory can be specified.
+ The output is formatted as csv, which can be processed using, for
+ example, 'compare.R'.
+
+ --new ./new-node-binary new node binary (required)
+ --old ./old-node-binary old node binary (required)
+ --runs 30 number of samples
+ --filter pattern string to filter benchmark scripts
+ --set variable=value set benchmark variable (can be repeated)
+`, {
+ arrayArgs: ['set']
+});
+
+if (!cli.optional.new || !cli.optional.old) {
+ cli.abort(cli.usage);
+ return;
+}
-for (var i = 2; i < process.argv.length; i++) {
- var arg = process.argv[i];
- switch (arg) {
- case '--red': case '-r':
- show = show === 'green' ? 'both' : 'red';
- break;
- case '--green': case '-g':
- show = show === 'red' ? 'both' : 'green';
- break;
- case '--html':
- html = true;
- break;
- case '-h': case '-?': case '--help':
- console.log(usage);
- process.exit(0);
- break;
- case '--':
- benchmarks = [];
- break;
- default:
- if (Array.isArray(benchmarks))
- benchmarks.push(arg);
- else
- nodes.push(arg);
- break;
- }
+const binaries = ['old', 'new'];
+const runs = cli.optional.runs ? parseInt(cli.optional.runs, 10) : 30;
+const benchmarks = cli.benchmarks();
+
+if (benchmarks.length === 0) {
+ console.error('no benchmarks found');
+ process.exit(1);
}
-var start, green, red, reset, end;
-if (!html) {
- start = '';
- green = '\u001b[1;32m';
- red = '\u001b[1;31m';
- reset = '\u001b[m';
- end = '';
-} else {
- start = '<pre style="background-color:#333;color:#eee">';
- green = '<span style="background-color:#0f0;color:#000">';
- red = '<span style="background-color:#f00;color:#fff">';
- reset = '</span>';
- end = '</pre>';
+// Create queue from the benchmarks list such that both node versions are tested
+// `runs` times each.
+const queue = [];
+for (let iter = 0; iter < runs; iter++) {
+ for (const filename of benchmarks) {
+ for (const binary of binaries) {
+ queue.push({ binary, filename, iter });
+ }
+ }
}
-var runBench = process.env.NODE_BENCH || 'bench';
+// Print csv header
+console.log('"binary", "filename", "configuration", "rate", "time"');
-if (nodes.length !== 2)
- return console.error('usage:\n %s', usage);
+(function recursive(i) {
+ const job = queue[i];
-var spawn = require('child_process').spawn;
-var results = {};
-var toggle = 1;
-var r = (+process.env.NODE_BENCH_RUNS || 1) * 2;
+ const child = fork(path.resolve(__dirname, job.filename), cli.optional.set, {
+ execPath: cli.optional[job.binary]
+ });
-run();
-function run() {
- if (--r < 0)
- return compare();
- toggle = ++toggle % 2;
+ child.on('message', function(data) {
+ // Construct configuration string, " A=a, B=b, ..."
+ let conf = '';
+ for (const key of Object.keys(data.conf)) {
+ conf += ' ' + key + '=' + JSON.stringify(data.conf[key]);
+ }
+ conf = conf.slice(1);
- var node = nodes[toggle];
- console.error('running %s', node);
- var env = {};
- for (var i in process.env)
- env[i] = process.env[i];
- env.NODE = node;
+ // Escape quotes (") for correct csv formatting
+ conf = conf.replace(/"/g, '""');
- var out = '';
- var child;
- if (Array.isArray(benchmarks) && benchmarks.length) {
- child = spawn(
- node,
- ['benchmark/run.js'].concat(benchmarks),
- { env: env }
- );
- } else {
- child = spawn('make', [runBench], { env: env });
- }
- child.stdout.setEncoding('utf8');
- child.stdout.on('data', function(c) {
- out += c;
+ console.log(`"${job.binary}", "${job.filename}", "${conf}", ` +
+ `${data.rate}, ${data.time}`);
});
- child.stderr.pipe(process.stderr);
-
- child.on('close', function(code) {
+ child.once('close', function(code) {
if (code) {
- console.error('%s exited with code=%d', node, code);
process.exit(code);
- } else {
- out.trim().split(/\r?\n/).forEach(function(line) {
- line = line.trim();
- if (!line)
- return;
-
- var s = line.split(':');
- var num = +s.pop();
- if (!num && num !== 0)
- return;
-
- line = s.join(':');
- var res = results[line] = results[line] || {};
- res[node] = res[node] || [];
- res[node].push(num);
- });
-
- run();
- }
- });
-}
-
-function compare() {
- // each result is an object with {"foo.js arg=bar":12345,...}
- // compare each thing, and show which node did the best.
- // node[0] is shown in green, node[1] shown in red.
- var maxLen = -Infinity;
- var util = require('util');
- console.log(start);
-
- Object.keys(results).map(function(bench) {
- var res = results[bench];
- var n0 = avg(res[nodes[0]]);
- var n1 = avg(res[nodes[1]]);
-
- var pct = ((n0 - n1) / n1 * 100).toFixed(2);
-
- var g = n0 > n1 ? green : '';
- var r = n0 > n1 ? '' : red;
- var c = r || g;
-
- if (show === 'green' && !g || show === 'red' && !r)
return;
+ }
- var r0 = util.format(
- '%s%s: %d%s',
- g,
- nodes[0],
- n0.toPrecision(5), g ? reset : ''
- );
- var r1 = util.format(
- '%s%s: %d%s',
- r,
- nodes[1],
- n1.toPrecision(5), r ? reset : ''
- );
- pct = c + pct + '%' + reset;
- var l = util.format('%s: %s %s', bench, r0, r1);
- maxLen = Math.max(l.length + pct.length, maxLen);
- return [l, pct];
- }).filter(function(l) {
- return l;
- }).forEach(function(line) {
- var l = line[0];
- var pct = line[1];
- var dotLen = maxLen - l.length - pct.length + 2;
- var dots = ' ' + new Array(Math.max(0, dotLen)).join('.') + ' ';
- console.log(l + dots + pct);
+ // If there are more benchmarks in the queue, execute the next one
+ if (i + 1 < queue.length) {
+ recursive(i + 1);
+ }
});
- console.log(end);
-}
-
-function avg(list) {
- if (list.length >= 3) {
- list = list.sort();
- var q = Math.floor(list.length / 4) || 1;
- list = list.slice(q, -q);
- }
- return list.reduce(function(a, b) {
- return a + b;
- }, 0) / list.length;
-}
+})(0);
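As a usage sketch based on the options documented in the usage text above (the 'string' category and the 'map' filter are placeholder values, and piping into 'compare.R' via Rscript assumes R is installed), the rewritten script could be invoked along these lines:

  ./node compare.js --old ./old-node-binary --new ./new-node-binary --runs 5 --filter map string > compare.csv
  cat compare.csv | Rscript compare.R

Each forked benchmark reports its results over the 'message' event, and every report is printed as one CSV row matching the "binary", "filename", "configuration", "rate", "time" header emitted before the queue starts.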