Skip to content

Commit d7bbf88

Browse files
authored
Update results.cc | Add another significant digit to percentile latency display (#2066)
1 parent 10d123c commit d7bbf88

File tree

1 file changed

+8
-8
lines changed

1 file changed

+8
-8
lines changed

loadgen/results.cc

Lines changed: 8 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -189,7 +189,7 @@ bool PerformanceSummary::EarlyStopping(
189189
std::to_string(queries_issued) + ").\n" + " * Would discard " +
190190
std::to_string(t - 1) + " highest latency queries.\n" +
191191
" * Early stopping " +
192-
DoubleToString(target_latency_percentile.percentile * 100, 0) +
192+
DoubleToString(target_latency_percentile.percentile * 100, 1) +
193193
"th percentile estimate: " + std::to_string(percentile_estimate);
194194
early_stopping_latency_ss = percentile_estimate;
195195

@@ -202,7 +202,7 @@ bool PerformanceSummary::EarlyStopping(
202202
if (queries_issued < h_min + 1) {
203203
*recommendation +=
204204
"\n * Not enough queries processed for " +
205-
DoubleToString(multi_stream_percentile * 100, 0) +
205+
DoubleToString(multi_stream_percentile * 100, 1) +
206206
"th percentile\n" +
207207
" early stopping estimate (would need to process at\n least " +
208208
std::to_string(h_min + 1) + " total queries).";
@@ -218,7 +218,7 @@ bool PerformanceSummary::EarlyStopping(
218218
percentile_estimate = (*sample_latencies)[queries_issued - t];
219219
*recommendation +=
220220
"\n * Early stopping " +
221-
DoubleToString(multi_stream_percentile * 100, 0) +
221+
DoubleToString(multi_stream_percentile * 100, 1) +
222222
"th percentile estimate: " + std::to_string(percentile_estimate);
223223
early_stopping_latency_ms = percentile_estimate;
224224
}
@@ -273,7 +273,7 @@ bool PerformanceSummary::EarlyStopping(
273273
std::to_string(queries_issued) + ").\n" + " * Would discard " +
274274
std::to_string(t - 1) + " highest latency queries.\n" +
275275
" * Early stopping " +
276-
DoubleToString(target_latency_percentile.percentile * 100, 0) +
276+
DoubleToString(target_latency_percentile.percentile * 100, 1) +
277277
"th percentile estimate: " + std::to_string(percentile_estimate);
278278
early_stopping_latency_ms = percentile_estimate;
279279
break;
@@ -392,13 +392,13 @@ void PerformanceSummary::LogSummary(AsyncSummary& summary) {
392392

393393
switch (settings.scenario) {
394394
case TestScenario::SingleStream: {
395-
summary(DoubleToString(target_latency_percentile.percentile * 100, 0) +
395+
summary(DoubleToString(target_latency_percentile.percentile * 100, 1) +
396396
"th percentile latency (ns) : ",
397397
target_latency_percentile.sample_latency);
398398
break;
399399
}
400400
case TestScenario::MultiStream: {
401-
summary(DoubleToString(target_latency_percentile.percentile * 100, 0) +
401+
summary(DoubleToString(target_latency_percentile.percentile * 100, 1) +
402402
"th percentile latency (ns) : ",
403403
target_latency_percentile.query_latency);
404404
break;
@@ -430,14 +430,14 @@ void PerformanceSummary::LogSummary(AsyncSummary& summary) {
430430
switch (settings.scenario) {
431431
case TestScenario::SingleStream: {
432432
summary(DoubleToString(token_target_latency_percentile.percentile * 100,
433-
0) +
433+
1) +
434434
"th first token percentile latency (ns) : ",
435435
token_target_latency_percentile.sample_latency);
436436
break;
437437
}
438438
case TestScenario::MultiStream: {
439439
summary(DoubleToString(token_target_latency_percentile.percentile * 100,
440-
0) +
440+
1) +
441441
"th first token percentile latency (ns) : ",
442442
token_target_latency_percentile.sample_latency);
443443
break;

0 commit comments

Comments (0)