@@ -97,36 +97,6 @@ function findFiles(cwd, pattern) {
   return out.split('\n').filter(Boolean);
 }
 
-// Run a given benchmark test with the provided revisions.
-function runBenchmark(benchmark, environments) {
-  let benchmarkName;
-  const benches = environments.map(environment => {
-    const module = require(path.join(environment.distPath, benchmark));
-    benchmarkName = module.name;
-    return {
-      name: environment.revision,
-      fn: module.measure,
-    };
-  });
-
-  console.log('⏱️ ' + benchmarkName);
-  const results = [];
-  for (let i = 0; i < benches.length; ++i) {
-    const { name, fn } = benches[i];
-    try {
-      const samples = collectSamples(fn);
-      results.push({ name, samples, ...computeStats(samples) });
-      process.stdout.write(' ' + cyan(i + 1) + ' tests completed.\u000D');
-    } catch (error) {
-      console.log(' ' + name + ': ' + red(String(error)));
-    }
-  }
-  console.log('\n');
-
-  beautifyBenchmark(results);
-  console.log('');
-}
-
 function collectSamples(fn) {
   clock(initCount, fn); // initial warm up
 
@@ -269,29 +239,57 @@ function maxBy(array, fn) {
 
 // Prepare all revisions and run benchmarks matching a pattern against them.
 function prepareAndRunBenchmarks(benchmarkPatterns, revisions) {
-  // Find all benchmark tests to be run.
+  const environments = revisions.map(revision => ({
+    revision,
+    distPath: prepareRevision(revision),
+  }));
+
+  for (const benchmark of matchBenchmarks(benchmarkPatterns)) {
+    const results = [];
+    for (let i = 0; i < environments.length; ++i) {
+      const environment = environments[i];
+      const module = require(path.join(environment.distPath, benchmark));
+
+      if (i === 0) {
+        console.log('⏱️ ' + module.name);
+      }
+
+      try {
+        const samples = collectSamples(module.measure);
+        results.push({
+          name: environment.revision,
+          samples,
+          ...computeStats(samples),
+        });
+        process.stdout.write(' ' + cyan(i + 1) + ' tests completed.\u000D');
+      } catch (error) {
+        console.log(' ' + module.name + ': ' + red(String(error)));
+      }
+    }
+    console.log('\n');
+
+    beautifyBenchmark(results);
+    console.log('');
+  }
+}
+
+// Find all benchmark tests to be run.
+function matchBenchmarks(patterns) {
   let benchmarks = findFiles(LOCAL_DIR('src'), '*/__tests__/*-benchmark.js');
-  if (benchmarkPatterns.length !== 0) {
+  if (patterns.length > 0) {
     benchmarks = benchmarks.filter(benchmark =>
-      benchmarkPatterns.some(pattern =>
-        path.join('src', benchmark).includes(pattern),
-      ),
+      patterns.some(pattern => path.join('src', benchmark).includes(pattern)),
     );
   }
 
   if (benchmarks.length === 0) {
     console.warn(
       'No benchmarks matching: ' +
-        `\u001b[1m${benchmarkPatterns.join('\u001b[0m or \u001b[1m')}\u001b[0m`,
+        `\u001b[1m${patterns.join('\u001b[0m or \u001b[1m')}\u001b[0m`,
     );
-    return;
   }
 
-  const environments = revisions.map(revision => ({
-    revision,
-    distPath: prepareRevision(revision),
-  }));
-  benchmarks.forEach(benchmark => runBenchmark(benchmark, environments));
+  return benchmarks;
 }
 
 function getArguments(argv) {
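
For reference, the loop above relies on each *-benchmark.js file exporting only two things: a name string used as the console heading and a measure function passed to collectSamples(). matchBenchmarks() locates these files under src/*/__tests__/, and the runner then requires the corresponding compiled copy from each revision's distPath. A minimal sketch of such a module, with a purely hypothetical name and workload, might look like:

// Hypothetical sketch of a benchmark module, for illustration only.
// The runner reads just two exports: `name` for the console heading and
// `measure`, the function whose execution time collectSamples() records.
'use strict';

module.exports = {
  // Label printed next to the stopwatch emoji, once per benchmark.
  name: 'Example benchmark (hypothetical)',

  // Synchronous body that is invoked repeatedly while timing.
  measure() {
    JSON.parse(JSON.stringify({ hello: 'world', nested: [1, 2, 3] }));
  },
};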