+import 'dart:async';
 import 'dart:io';
 
 import 'package:meta/meta.dart';
@@ -12,40 +13,71 @@ class Benchmark {
   final watch = Stopwatch();
   final Emitter emitter;
 
+  /// Creates a benchmark with the given [name] and starts measuring total run time.
+  ///
+  /// ```dart
+  /// await Benchmark('Name', coefficient: 1 / count).report();
+  /// ```
+  ///
+  /// Call [report] on this to await the results.
+  ///
+  /// Runs the [runIteration] function at least [iterations] times (defaults
+  /// to 1). If possible, leave this at the default. The benchmark is
+  /// automatically re-run a) to warm up until at least 100 ms have passed
+  /// (e.g. to avoid caching skewing the results) and b) to measure until at
+  /// least 2 seconds have passed. This is similar to how Dart's
+  /// benchmark_harness and benchmarking libraries in other languages work.
+  ///
+  /// Set a fraction in [coefficient] to multiply each measured value with;
+  /// defaults to 1. Use this if a run calls the measured function multiple
+  /// times (e.g. `1 / times`) to get the duration of a single call.
+  ///
+  /// Results are printed to the command line.
   Benchmark(this.name, {this.iterations = 1, this.coefficient = 1})
       : emitter = Emitter(iterations, coefficient) {
     print('-------------------------------------------------------------');
-    print('$name (iterations): ' +
+    print('$name (iterations): ' +
         Emitter._format(iterations.toDouble(), decimalPoints: 0));
-    print('$name (count): ' +
-        Emitter._format(iterations / coefficient));
+    print('$name (unit count): ' +
+        Emitter._format(iterations / coefficient, decimalPoints: 0));
     // Measure the total time of the test - if it's too high, you should
     // decrease the number of iterations. Expected time is between 2 and 3 sec.
     watch.start();
   }
 
-  // Not measured setup code executed prior to the benchmark runs.
+  /// Setup code that is not measured, executed before [run] is called.
   void setup() {}
 
+  /// Called after all [run] calls have completed; measures the total run
+  /// time of the benchmark.
+  ///
+  /// Overrides must call super.
   @mustCallSuper
   void teardown() {
     final millis = watch.elapsedMilliseconds;
     final color = millis > 3000 ? '\x1B[31m' : '';
-    print('$name (total time taken): '
+    print('$name (benchmark run time): '
         '$color${Emitter._format(millis.toDouble(), suffix: ' ms')}\x1B[0m');
   }
 
+  /// Calls [runIteration] [iterations] times.
   Future<void> run() async {
-    for (var i = 0; i < iterations; i++) runIteration(i);
+    for (var i = 0; i < iterations; i++) {
+      final result = runIteration(i);
+      if (result is Future) {
+        await result;
+      }
+    }
     return Future.value();
   }
 
-  void runIteration(int iteration) {
+  /// A single test iteration, given the [iteration] index, starting at 0.
+  FutureOr<void> runIteration(int iteration) {
     throw UnimplementedError('Please override runIteration() or run()');
   }
 
-  // Runs [f] for at least [minimumMillis] milliseconds.
-  static Future<double> _measureFor(Function f, int minimumMillis) async {
+  /// Runs [f] for at least [minimumMillis] milliseconds.
+  Future<double> _measureFor(Function f, int minimumMillis, bool warmUp) async {
     final minimumMicros = minimumMillis * 1000;
     var iter = 0;
     final watch = Stopwatch()..start();
@@ -55,21 +87,38 @@ class Benchmark {
       elapsed = watch.elapsedMicroseconds;
       iter++;
     }
+    if (!warmUp) {
+      // Print how often f had to be re-run to reach the minimum run time.
+      final reruns = iter - 1;
+      print('$name (re-runs): '
+          '${Emitter._format(reruns.toDouble(), decimalPoints: 0)}');
+    }
     return elapsed / iter;
   }
 
-  // Measures the score for the benchmark and returns it.
+  /// Measures the score for the benchmark and returns it.
+  ///
+  /// See [report] for details.
   @nonVirtual
   Future<double> _measure() async {
     setup();
     // Warmup for at least 100ms. Discard result.
-    await _measureFor(run, 100);
+    await _measureFor(run, 100, true);
     // Run the benchmark for at least 2000ms.
-    var result = await _measureFor(run, 2000);
+    var result = await _measureFor(run, 2000, false);
     teardown();
     return result;
   }
 
+  /// Starts the benchmark and waits for the result.
+  ///
+  /// - Calls [setup], then
+  /// - repeatedly calls [run] for at least 100 ms to warm up and avoid
+  ///   effects e.g. due to caching, then
+  /// - calls [run] repeatedly until at least 2000 ms have passed, to ensure
+  ///   stable results, and records the average elapsed time per call (if run
+  ///   multiple times), then
+  /// - calls [teardown] and returns the result.
   @nonVirtual
   Future<void> report() async {
     emitter.emit(name, await _measure());
@@ -87,12 +136,14 @@ class Emitter {
   void emit(String testName, double value) {
     final timePerIter = value / iterations;
     final timePerUnit = timePerIter * coefficient;
-    print(
-        '$testName (Single iteration): ${_format(timePerIter, suffix: ' us')}');
-    print(
-        '$testName (Runtime per unit): ${_format(timePerUnit, suffix: ' us')}');
-    print('$testName (Runs per second): ${_format(usInSec / timePerIter)}');
-    print('$testName (Units per second): ${_format(usInSec / timePerUnit)}');
+    print('$testName (single iteration): '
+        '${_format(timePerIter, suffix: ' us')}');
+    print('$testName (per unit): '
+        '${_format(timePerUnit, suffix: ' us')}');
+    print('$testName (iterations / second): '
+        '${_format(usInSec / timePerIter)}');
+    print('$testName (units / second): '
+        '${_format(usInSec / timePerUnit)}');
   }
 
   // Simple number formatting, maybe use a lib?
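
For reference, a minimal usage sketch of the API changed above, assuming the file is available as `benchmark.dart` (path assumed); the `JsonDecodeBenchmark` class and its test document are hypothetical. Each run decodes the document `count` times, so `coefficient: 1 / count` makes the per-unit figures report the cost of a single `jsonDecode` call.

```dart
import 'dart:convert';

import 'benchmark.dart'; // Assumed location of the file changed above.

/// Hypothetical benchmark: measures jsonDecode on a small document.
class JsonDecodeBenchmark extends Benchmark {
  static const count = 1000;
  static const doc = '{"a": 1, "b": [1, 2, 3], "c": "hello"}';

  // Each run decodes the document [count] times; the coefficient turns the
  // per-unit figures into the time of a single decode.
  JsonDecodeBenchmark() : super('jsonDecode', coefficient: 1 / count);

  @override
  void runIteration(int iteration) {
    for (var i = 0; i < count; i++) {
      jsonDecode(doc);
    }
  }
}

Future<void> main() async {
  // Prints iteration count, unit count, per-unit time, and units per second.
  await JsonDecodeBenchmark().report();
}
```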
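
And a sketch of the new asynchronous support: `runIteration` now returns `FutureOr<void>`, and `run` awaits any returned `Future` before starting the next iteration. The `FileReadBenchmark` class and the `pubspec.yaml` path are assumptions for illustration only.

```dart
import 'dart:io';

import 'benchmark.dart'; // Assumed location of the file changed above.

/// Hypothetical async benchmark: each iteration awaits a file read.
class FileReadBenchmark extends Benchmark {
  FileReadBenchmark() : super('File.readAsString');

  @override
  Future<void> runIteration(int iteration) async {
    // Assumes a pubspec.yaml exists in the working directory.
    await File('pubspec.yaml').readAsString();
  }
}

Future<void> main() async {
  await FileReadBenchmark().report();
}
```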