
Commit f91a09b

Rename class Benchmark -> ScoreGenerator
1 parent d148e1c commit f91a09b

5 files changed (+296, -263 lines)


lib/src/base/async_benchmark.dart

Lines changed: 14 additions & 131 deletions
@@ -4,136 +4,12 @@ import 'dart:isolate';
 import 'package:ansi_modifier/ansi_modifier.dart';
 
 import '../emitter/score_emitter.dart';
-import '../extension/benchmark_helper.dart';
 import '../extension/color_profile.dart';
 import '../extension/string_utils.dart';
-import '../util/stats.dart';
+import 'async_score_generator.dart';
 import 'group.dart';
-import 'score.dart';
 
-typedef AsyncFunction = Future<void> Function();
-
-/// An asynchronous function that does nothing.
-Future<void> futureDoNothing() async {}
-
-/// A class used to benchmark asynchronous functions.
-/// The benchmarked function is provided as a constructor argument.
-class AsyncBenchmark {
-  /// Constructs an [AsyncBenchmark] object using the following arguments:
-
-  /// * [run]: the asynchronous function to be benchmarked,
-  /// * [setup]: an asynchronous function that is executed
-  /// once before running the benchmark,
-  /// * [teardown]: an asynchronous function that is executed once after
-  /// the benchmark has completed.
-  const AsyncBenchmark({
-    required AsyncFunction run,
-    AsyncFunction setup = futureDoNothing,
-    AsyncFunction teardown = futureDoNothing,
-  }) : _run = run,
-       _setup = setup,
-       _teardown = teardown;
-
-  final AsyncFunction _run;
-  final AsyncFunction _setup;
-  final AsyncFunction _teardown;
-
-  // The benchmark code.
-  Future<void> run() => _run();
-
-  // Not measured setup code executed prior to the benchmark runs.
-  Future<void> setup() => _setup();
-
-  // Not measures teardown code executed after the benchmark runs.
-  Future<void> teardown() => _teardown();
-
-  // To opt into the reporting the time per run() instead of per 10 run() calls.
-  Future<void> exercise() => run();
-
-  /// Returns a sample of benchmark scores.
-  /// The benchmark scores represent the run time in microseconds. The integer
-  /// `innerIter` is larger than 1 if each score entry was averaged over
-  /// `innerIter` runs.
-  ///
-  Future<({List<double> scores, int innerIter})> sample() async {
-    await _setup();
-    int warmupRuns = 3;
-    final sample = <int>[];
-    final innerIters = <int>[];
-    final overhead = <int>[];
-    final watch = Stopwatch();
-    var innerIterMean = 1;
-
-    try {
-      // Warmup (Default: For 200 ms with 3 pre-runs).
-      final scoreEstimate = await watch.warmupAsync(_run);
-      final sampleSize = BenchmarkHelper.sampleSize(
-        scoreEstimate.ticks,
-      );
-
-      if (sampleSize.inner > 1) {
-        final durationAsTicks = sampleSize.inner * scoreEstimate.ticks;
-        for (var i = 0; i < sampleSize.outer + warmupRuns; i++) {
-          // Averaging each score over at least 25 runs.
-          // For details see function BenchmarkHelper.sampleSize.
-          final score = await watch.measureAsync(
-            _run,
-            durationAsTicks,
-          );
-          sample.add(score.ticks);
-          innerIters.add(score.iter);
-        }
-        innerIterMean = innerIters.reduce((sum, element) => sum + element) ~/
-            innerIters.length;
-      } else {
-        for (var i = 0; i < sampleSize.outer + warmupRuns; i++) {
-          watch.reset();
-          await _run();
-          // These scores are not averaged.
-          sample.add(watch.elapsedTicks);
-          watch.reset();
-          overhead.add(watch.elapsedTicks);
-        }
-        for (var i = 0; i < sampleSize.outer; i++) {
-          // Removing overhead of calling elapsedTicks and adding list element.
-          // overhead scores are of the order of 0.1 us.
-          sample[i] = sample[i] - overhead[i];
-        }
-      }
-
-      // Rescale to microseconds.
-      // Note: frequency is expressed in Hz (ticks/second).
-      return (
-        scores: sample
-            .map<double>(
-              (e) => e * (1000000 / watch.frequency),
-            )
-            .skip(warmupRuns)
-            .toList(),
-        innerIter: innerIterMean
-      );
-    } finally {
-      await _teardown();
-    }
-  }
-
-  /// Returns an instance of [Score] holding the total benchmark duration
-  /// and a [Stats] object created from the score samples.
-  /// Note: The run time entries represent microseconds.
-  Future<Score> score() async {
-    final watch = Stopwatch()..start();
-    final sample = await this.sample();
-    watch.stop();
-    //stats.removeOutliers(10);
-    return Score(
-      duration: watch.elapsed,
-      sample: sample.scores,
-      innerIter: sample.innerIter,
-    );
-  }
-}
-
-/// Defines an asynchronous benchmark.
+/// Runs an asynchronous benchmark.
 /// * [run]: the benchmarked function,
 /// * [setup]: executed once before the benchmark,
 /// * [teardown]: executed once after the benchmark runs.
@@ -153,13 +29,14 @@ Future<void> asyncBenchmark(
   final groupDescription =
       group == null ? '' : '${group.description.addSeparator(':')} ';
 
-  final instance = AsyncBenchmark(
+  final scoreGenerator = AsyncScoreGenerator(
     run: run,
     setup: setup,
     teardown: teardown,
   );
 
-  description = groupDescription +
+  description =
+      groupDescription +
       (hourGlass + description).style(ColorProfile.asyncBenchmark);
 
   final watch = Stopwatch()..start();
@@ -168,11 +45,17 @@ Future<void> asyncBenchmark(
     () async {
       try {
         if (runInIsolate) {
-          await Isolate.run(() async => scoreEmitter.emit(
-              description: description, score: await instance.score()));
+          await Isolate.run(
+            () async => scoreEmitter.emit(
+              description: description,
+              score: await scoreGenerator.score(),
+            ),
+          );
         } else {
           scoreEmitter.emit(
-              description: description, score: await instance.score());
+            description: description,
+            score: await scoreGenerator.score(),
+          );
         }
         addSuccessMark();
       } catch (error, stack) {
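
The net effect of this file's changes is that asyncBenchmark no longer measures anything itself: it builds an AsyncScoreGenerator and forwards the resulting score to the emitter, optionally from inside Isolate.run. The sketch below shows how such a benchmark might be invoked after this commit; the package import and the positional (description, run) parameters are assumptions, since the full signature of asyncBenchmark is not part of this diff.

// Hypothetical usage sketch: import path and parameter order are assumed.
import 'package:benchmark_runner/benchmark_runner.dart';

Future<void> main() async {
  // When runInIsolate is true, the score is generated and emitted from within
  // Isolate.run; otherwise it is generated and emitted on the current isolate.
  await asyncBenchmark('await a 50 microsecond delay', () async {
    await Future<void>.delayed(const Duration(microseconds: 50));
  });
}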
lib/src/base/async_score_generator.dart (new file)

Lines changed: 134 additions & 0 deletions
@@ -0,0 +1,134 @@
+import 'dart:async';
+
+import '../extension/benchmark_helper.dart';
+import '../util/stats.dart';
+import 'score.dart';
+
+typedef AsyncFunction = Future<void> Function();
+
+/// An asynchronous function that does nothing.
+Future<void> futureDoNothing() async {}
+
+/// A class used to benchmark asynchronous functions.
+/// The benchmarked function is provided as a constructor argument.
+class AsyncScoreGenerator {
+  /// Constructs an [AsyncScoreGenerator] object using the following arguments:
+
+  /// * [run]: the asynchronous function to be benchmarked,
+  /// * [setup]: an asynchronous function that is executed
+  /// once before running the benchmark,
+  /// * [teardown]: an asynchronous function that is executed once after
+  /// the benchmark has completed.
+  const AsyncScoreGenerator({
+    required AsyncFunction run,
+    AsyncFunction setup = futureDoNothing,
+    AsyncFunction teardown = futureDoNothing,
+  }) : _run = run,
+       _setup = setup,
+       _teardown = teardown;
+
+  final AsyncFunction _run;
+  final AsyncFunction _setup;
+  final AsyncFunction _teardown;
+
+  // The benchmark code.
+  Future<void> run() => _run();
+
+  // Not measured setup code executed prior to the benchmark runs.
+  Future<void> setup() => _setup();
+
+  // Not measures teardown code executed after the benchmark runs.
+  Future<void> teardown() => _teardown();
+
+  // To opt into the reporting the time per run() instead of per 10 run() calls.
+  Future<void> exercise() => run();
+
+  /// Returns a sample of benchmark scores.
+  /// The benchmark scores represent the run time in microseconds. The integer
+  /// `innerIter` is larger than 1 if each score entry was averaged over
+  /// `innerIter` runs.
+  ///
+  Future<({List<double> scores, int innerIter})> sample({
+    final int warmUpRuns = 3,
+    final Duration warmUpDuration = const Duration(milliseconds: 200),
+  }) async {
+    await _setup();
+    final sample = <int>[];
+    final innerIters = <int>[];
+    final overhead = <int>[];
+    final watch = Stopwatch();
+    int innerIterMean = 1;
+
+    try {
+      // Warmup (Default: For 200 ms with 3 pre-runs).
+      final scoreEstimate = await watch.warmUpAsync(
+        _run,
+        duration: warmUpDuration,
+        warmUpRuns: warmUpRuns,
+      );
+      final sampleSize = BenchmarkHelper.sampleSize(scoreEstimate.ticks);
+
+      if (sampleSize.inner > 1) {
+        final durationAsTicks = sampleSize.inner * scoreEstimate.ticks;
+        for (var i = 0; i < sampleSize.outer + warmUpRuns; i++) {
+          // Averaging each score over approx. sampleSize.inner runs.
+          // For details see function BenchmarkHelper.sampleSize.
+          final score = await watch.measureAsync(_run, durationAsTicks);
+          sample.add(score.ticks);
+          innerIters.add(score.iter);
+        }
+        innerIterMean =
+            innerIters.reduce((sum, element) => sum + element) ~/
+            innerIters.length;
+      } else {
+        for (var i = 0; i < sampleSize.outer + warmUpRuns; i++) {
+          watch.reset();
+          await _run();
+          // These scores are not averaged.
+          sample.add(watch.elapsedTicks);
+          watch.reset();
+          overhead.add(watch.elapsedTicks);
+        }
+        for (var i = 0; i < sampleSize.outer; i++) {
+          // Removing overhead of calling elapsedTicks and adding list element.
+          // overhead scores are of the order of 0.1 us.
+          sample[i] = sample[i] - overhead[i];
+        }
+      }
+
+      // Rescale to microseconds.
+      // Note: frequency is expressed in Hz (ticks/second).
+      return (
+        scores:
+            sample
+                .map<double>((e) => e * (1000000 / watch.frequency))
+                .skip(warmUpRuns)
+                .toList(),
+        innerIter: innerIterMean,
+      );
+    } finally {
+      await _teardown();
+    }
+  }
+
+  /// Returns an instance of [Score] holding the total benchmark duration
+  /// and a [Stats] object created from the score sample.
+  /// Note: The run time entries represent microseconds.
+  Future<Score> score({
+    final int warmUpRuns = 3,
+    final Duration warmUpDuration = const Duration(microseconds: 200),
+  }) async {
+    final watch = Stopwatch()..start();
+    final sample = await this.sample(
+      warmUpDuration: warmUpDuration,
+      warmUpRuns: warmUpRuns,
+    );
+    watch.stop();
+    //stats.removeOutliers(10);
+    return Score(
+      duration: watch.elapsed,
+      sample: sample.scores,
+      innerIter: sample.innerIter,
+    );
+  }
+}
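
For completeness, a minimal sketch of driving the renamed class directly, using only the members shown in the hunk above (the constructor, sample(), and score()). The package import path is an assumption, and the Score fields are assumed to mirror the constructor parameters passed in score().

// Minimal sketch (assumed import path; Score.duration assumed to mirror the
// constructor parameter of the same name).
import 'package:benchmark_runner/benchmark_runner.dart';

Future<void> main() async {
  final scoreGenerator = AsyncScoreGenerator(
    run: () async {
      // Workload to be measured: an artificial asynchronous delay.
      await Future<void>.delayed(const Duration(microseconds: 100));
    },
    setup: () async => print('setup (not measured)'),
    teardown: () async => print('teardown (not measured)'),
  );

  // Raw scores in microseconds; innerIter > 1 means each entry is an average.
  final sample = await scoreGenerator.sample(warmUpRuns: 5);
  print('scores: ${sample.scores.length}, innerIter: ${sample.innerIter}');

  // score() bundles the sample with the total wall-clock duration.
  final score = await scoreGenerator.score();
  print('total duration: ${score.duration}');
}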
