
Commit 8a7f03b

RUST-611 Add back code mistakenly lost in rebase
1 parent 0a0f82f commit 8a7f03b

12 files changed: +121 −69 lines


benchmarks/Cargo.toml

Lines changed: 3 additions & 2 deletions
@@ -6,7 +6,7 @@ edition = "2018"
 
 [features]
 default = ["tokio-runtime"]
-tokio-runtime = ["tokio/macros", "tokio/rt-core", "tokio/rt-threaded"]
+tokio-runtime = ["tokio/fs", "tokio/macros", "tokio/rt-core", "tokio/rt-threaded"]
 async-std-runtime = ["async-std"]
 
 [dependencies]
@@ -17,6 +17,7 @@ clap = "2.33.3"
 indicatif = "0.15.0"
 async-trait = "0.1.41"
 tokio = { version = "0.2.23", features = ["sync"] }
-async-std = { version = "1.6.2", optional = true, features = ["attributes"] }
+# "unstable" feature is needed for `spawn_blocking`, which is only used in task setup
+async-std = { version = "=1.6.2", optional = true, features = ["attributes", "unstable"] }
 futures = "0.3.8"
 anyhow = "1.0.34"
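
Note on the "unstable" feature: async-std gates `task::spawn_blocking` behind it, and per the comment added above it is only called during task setup. A minimal sketch of that usage, assuming async-std 1.6 with the "attributes" and "unstable" features enabled (the helper name below is illustrative, not code from this commit):

// Sketch only: shows why the "unstable" feature is required.
// `async_std::task::spawn_blocking` runs a blocking closure on a background
// thread pool so blocking file I/O during setup doesn't stall the executor.
use std::fs::File;

async fn open_fixture(path: String) -> std::io::Result<File> {
    // Only available with async-std's "unstable" feature.
    async_std::task::spawn_blocking(move || File::open(path)).await
}

#[async_std::main] // provided by the "attributes" feature
async fn main() -> std::io::Result<()> {
    let _file = open_fixture("Cargo.toml".to_string()).await?;
    Ok(())
}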

benchmarks/README.md

Lines changed: 1 addition & 2 deletions
@@ -34,7 +34,6 @@ the single-doc benchmarks. By default, all benchmarks are executed. The table be
 
 Note that in order to compare against the other drivers, an inMemory mongod instance should be used.
 
-At this point, BSON and GridFS benchmarks are not implemented because we do not own the Rust BSON library, and GridFS has not been implemented
-in the driver.
+At this point, GridFS benchmarks are not implemented because it has not been implemented in the driver.
 
 Also note that the parallel benchmarks are implemented to mirror the C++ driver's interpretation of the spec.

benchmarks/src/bench/find_many.rs

Lines changed: 7 additions & 4 deletions
@@ -1,11 +1,14 @@
-use std::{convert::TryInto, fs::File, path::PathBuf};
+use std::{convert::TryInto, path::PathBuf};
 
 use anyhow::{bail, Result};
 use futures::stream::StreamExt;
 use mongodb::{bson::Bson, Client, Collection, Database};
 use serde_json::Value;
 
-use crate::bench::{Benchmark, COLL_NAME, DATABASE_NAME};
+use crate::{
+    bench::{Benchmark, COLL_NAME, DATABASE_NAME},
+    fs::read_to_string,
+};
 
 pub struct FindManyBenchmark {
     db: Database,
@@ -30,9 +33,9 @@ impl Benchmark for FindManyBenchmark {
 
         let num_iter = options.num_iter;
 
-        let mut file = spawn_blocking_and_await!(File::open(options.path))?;
+        let mut file = read_to_string(&options.path).await?;
 
-        let json: Value = spawn_blocking_and_await!(serde_json::from_reader(&mut file))?;
+        let json: Value = serde_json::from_str(&mut file)?;
         let doc = match json.try_into()? {
             Bson::Document(doc) => doc,
             _ => bail!("invalid json test file"),
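
The `crate::fs::read_to_string` helper called above comes from a runtime-abstraction module that isn't included in this excerpt of the diff. A rough sketch of what such a helper might look like, assuming it dispatches on the crate's `tokio-runtime` / `async-std-runtime` features (this is an illustration, not the module's actual contents):

use std::{io, path::Path};

// Hypothetical stand-in for `crate::fs::read_to_string`. Exactly one runtime
// feature is expected to be enabled; the tokio branch relies on the "tokio/fs"
// feature added to Cargo.toml in this commit.
pub async fn read_to_string(path: &Path) -> io::Result<String> {
    #[cfg(feature = "tokio-runtime")]
    let contents = tokio::fs::read_to_string(path).await?;

    #[cfg(feature = "async-std-runtime")]
    let contents = async_std::fs::read_to_string(path).await?;

    Ok(contents)
}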

benchmarks/src/bench/find_one.rs

Lines changed: 7 additions & 4 deletions
@@ -1,4 +1,4 @@
-use std::{convert::TryInto, fs::File, path::PathBuf};
+use std::{convert::TryInto, path::PathBuf};
 
 use anyhow::{bail, Result};
 use mongodb::{
@@ -9,7 +9,10 @@ use mongodb::{
 };
 use serde_json::Value;
 
-use crate::bench::{Benchmark, COLL_NAME, DATABASE_NAME};
+use crate::{
+    bench::{Benchmark, COLL_NAME, DATABASE_NAME},
+    fs::read_to_string,
+};
 
 pub struct FindOneBenchmark {
     db: Database,
@@ -35,9 +38,9 @@ impl Benchmark for FindOneBenchmark {
 
         let num_iter = options.num_iter;
 
-        let mut file = spawn_blocking_and_await!(File::open(options.path))?;
+        let mut file = read_to_string(&options.path).await?;
 
-        let json: Value = spawn_blocking_and_await!(serde_json::from_reader(&mut file))?;
+        let json: Value = serde_json::from_str(&mut file)?;
         let mut doc = match json.try_into()? {
             Bson::Document(doc) => doc,
             _ => bail!("invalid json test file"),

benchmarks/src/bench/insert_many.rs

Lines changed: 6 additions & 1 deletion
@@ -36,8 +36,13 @@ impl Benchmark for InsertManyBenchmark {
 
         let num_copies = options.num_copies;
 
+        // This benchmark uses a file that's quite large, and unfortunately `serde_json` has no
+        // async version of `from_reader`, so rather than read the whole file into memory at once,
+        // we use the runtime's `spawn_blocking` functionality to do this efficiently.
+        //
+        // Note that the setup is _not_ measured as part of the benchmark runtime, so even if
+        // `spawn_blocking` turned out not to be super efficient, it wouldn't be a big deal.
         let mut file = spawn_blocking_and_await!(File::open(options.path))?;
-
         let json: Value = spawn_blocking_and_await!(serde_json::from_reader(&mut file))?;
 
         let coll = db.collection(&COLL_NAME);
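
The `spawn_blocking_and_await!` macro itself is defined elsewhere in the crate and doesn't appear in this excerpt. A plausible sketch of such a macro, assuming it dispatches on the same runtime features as the rest of the crate (this is a guess at its shape, not the actual definition):

// Hypothetical runtime-dispatching macro; not taken from this commit.
#[cfg(feature = "tokio-runtime")]
macro_rules! spawn_blocking_and_await {
    ($blocking_call:expr) => {
        // tokio 0.2's spawn_blocking (used here with the threaded runtime)
        // yields Result<T, JoinError>, hence the unwrap on the join result.
        tokio::task::spawn_blocking(move || $blocking_call)
            .await
            .unwrap()
    };
}

#[cfg(feature = "async-std-runtime")]
macro_rules! spawn_blocking_and_await {
    ($blocking_call:expr) => {
        // async-std's spawn_blocking (behind its "unstable" feature) yields T directly.
        async_std::task::spawn_blocking(move || $blocking_call).await
    };
}

With a definition along these lines, the call above expands to a `spawn_blocking` of `File::open(options.path)` followed by an `.await`, keeping the blocking read off the async executor during setup.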

benchmarks/src/bench/insert_one.rs

Lines changed: 6 additions & 1 deletion
@@ -36,8 +36,13 @@ impl Benchmark for InsertOneBenchmark {
 
         let num_iter = options.num_iter;
 
+        // This benchmark uses a file that's quite large, and unfortunately `serde_json` has no
+        // async version of `from_reader`, so rather than read the whole file into memory at once,
+        // we use the runtime's `spawn_blocking` functionality to do this efficiently.
+        //
+        // Note that the setup is _not_ measured as part of the benchmark runtime, so even if
+        // `spawn_blocking` turned out not to be super efficient, it wouldn't be a big deal.
         let mut file = spawn_blocking_and_await!(File::open(options.path))?;
-
        let json: Value = spawn_blocking_and_await!(serde_json::from_reader(&mut file))?;
 
         let coll = db.collection(&COLL_NAME);

benchmarks/src/bench/json_multi_export.rs

Lines changed: 11 additions & 29 deletions
@@ -1,14 +1,13 @@
-use std::{
-    fs::{File, OpenOptions},
-    io::Write,
-    path::PathBuf,
-};
+use std::path::PathBuf;
 
 use anyhow::Result;
 use futures::stream::{FuturesUnordered, StreamExt, TryStreamExt};
 use mongodb::{bson::doc, Client, Collection, Database};
 
-use crate::bench::{parse_json_file_to_documents, Benchmark, COLL_NAME, DATABASE_NAME};
+use crate::{
+    bench::{parse_json_file_to_documents, Benchmark, COLL_NAME, DATABASE_NAME},
+    fs::File,
+};
 
 const TOTAL_FILES: usize = 100;
 
@@ -42,9 +41,9 @@ impl Benchmark for JsonMultiExportBenchmark {
 
             tasks.push(async move {
                let json_file_name = path.join(format!("ldjson{:03}.txt", i));
-                let file = spawn_blocking_and_await!(File::open(&json_file_name))?;
+                let file = File::open_read(&json_file_name).await?;
 
-                let docs = spawn_blocking_and_await!(parse_json_file_to_documents(file))?;
+                let docs = parse_json_file_to_documents(file).await?;
 
                for mut doc in docs {
                    doc.insert("file", i as i32);
@@ -57,7 +56,6 @@ impl Benchmark for JsonMultiExportBenchmark {
         }
 
         while let Some(result) = tasks.next().await {
-            println!("done!");
             result?;
         }
 
@@ -77,32 +75,16 @@ impl Benchmark for JsonMultiExportBenchmark {
             // lot of work for little gain since we `unwrap()` in
             // main.rs anyway.
             let file_name = path.join(format!("ldjson{:03}.txt", i));
-            let mut file = OpenOptions::new()
-                .create(true)
-                .write(true)
-                .open(&file_name)
-                .unwrap();
+            let mut file = File::open_write(&file_name).await.unwrap();
 
             let mut cursor = coll_ref
                 .find(Some(doc! { "file": i as i32 }), None)
                 .await
                 .unwrap();
 
-            let (sender, mut receiver) = tokio::sync::mpsc::unbounded_channel();
-
-            let send_future = spawn!(async move {
-                while let Some(doc) = cursor.try_next().await.unwrap() {
-                    sender.send(doc.to_string()).unwrap();
-                }
-            });
-
-            let rec_future = spawn_blocking_and_await!(async move {
-                while let Some(s) = receiver.next().await {
-                    writeln!(file, "{}", s).unwrap();
-                }
-            });
-
-            futures::future::join(send_future, rec_future).await
+            while let Some(doc) = cursor.try_next().await.unwrap() {
+                file.write_line(&doc.to_string()).await.unwrap();
+            }
         });
     }
 
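
`File::open_read`, `File::open_write`, and `write_line` come from the crate's `fs` module, which isn't part of this excerpt. A rough sketch of the write side of such a wrapper, assuming the tokio runtime (the types and details here are illustrative, not the module's actual code):

use std::{io, path::Path};

use tokio::io::AsyncWriteExt;

// Hypothetical wrapper in the spirit of `crate::fs::File`; only the write side
// is sketched here (open_read would wrap an async reader similarly).
pub struct File {
    inner: tokio::fs::File,
}

impl File {
    pub async fn open_write(path: &Path) -> io::Result<Self> {
        Ok(Self {
            inner: tokio::fs::File::create(path).await?,
        })
    }

    pub async fn write_line(&mut self, line: &str) -> io::Result<()> {
        self.inner.write_all(line.as_bytes()).await?;
        self.inner.write_all(b"\n").await
    }
}

Writing each document as it comes off the cursor replaces the old channel-plus-`spawn_blocking` pipeline with a single sequential async loop; a real wrapper would likely also buffer writes and expose a flush step, which is omitted in this sketch.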

benchmarks/src/bench/json_multi_import.rs

Lines changed: 7 additions & 5 deletions
@@ -1,10 +1,13 @@
-use std::{fs::File, path::PathBuf};
+use std::path::PathBuf;
 
 use anyhow::Result;
 use futures::stream::{FuturesUnordered, StreamExt};
 use mongodb::{options::InsertManyOptions, Client, Collection, Database};
 
-use crate::bench::{parse_json_file_to_documents, Benchmark, COLL_NAME, DATABASE_NAME};
+use crate::{
+    bench::{parse_json_file_to_documents, Benchmark, COLL_NAME, DATABASE_NAME},
+    fs::File,
+};
 
 const TOTAL_FILES: usize = 100;
 
@@ -59,10 +62,9 @@ impl Benchmark for JsonMultiImportBenchmark {
             let mut docs = Vec::new();
 
             let json_file_name = path.join(format!("ldjson{:03}.txt", i));
-            let file: File = spawn_blocking_and_await!(File::open(&json_file_name)).unwrap();
+            let file = File::open_read(&json_file_name).await.unwrap();
 
-            let mut new_docs =
-                spawn_blocking_and_await!(parse_json_file_to_documents(file)).unwrap();
+            let mut new_docs = parse_json_file_to_documents(file).await.unwrap();
 
             docs.append(&mut new_docs);
 
benchmarks/src/bench/mod.rs

Lines changed: 8 additions & 5 deletions
@@ -8,17 +8,18 @@ pub mod run_command;
 
 use std::{
     convert::TryInto,
-    fs::File,
-    io::{BufRead, BufReader},
     time::{Duration, Instant},
 };
 
 use anyhow::{bail, Result};
+use futures::stream::TryStreamExt;
 use indicatif::{ProgressBar, ProgressStyle};
 use lazy_static::lazy_static;
 use mongodb::bson::{Bson, Document};
 use serde_json::Value;
 
+use crate::fs::{BufReader, File};
+
 lazy_static! {
     static ref DATABASE_NAME: String = option_env!("DATABASE_NAME")
         .unwrap_or("perftest")
@@ -61,11 +62,13 @@ pub trait Benchmark: Sized {
     async fn teardown(&self) -> Result<()>;
 }
 
-pub fn parse_json_file_to_documents(file: File) -> Result<Vec<Document>> {
+pub(crate) async fn parse_json_file_to_documents(file: File) -> Result<Vec<Document>> {
     let mut docs: Vec<Document> = Vec::new();
 
-    for line in BufReader::new(file).lines() {
-        let json: Value = serde_json::from_str(&line?)?;
+    let mut lines = BufReader::new(file).lines();
+
+    while let Some(line) = lines.try_next().await? {
+        let json: Value = serde_json::from_str(&line)?;
 
         docs.push(match json.try_into()? {
             Bson::Document(doc) => doc,
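
The loop above consumes an async stream of lines through `TryStreamExt::try_next` instead of the old blocking `BufRead::lines` iterator. The standalone sketch below shows the same parsing loop driven by a plain `futures` stream over an in-memory string, since the crate's `fs::BufReader` isn't included in this excerpt (the function name and the in-memory input are illustrative only):

use std::convert::TryInto;

use anyhow::{bail, Result};
use futures::stream::{self, TryStreamExt};
use mongodb::bson::{Bson, Document};
use serde_json::Value;

// Same shape as `parse_json_file_to_documents`, but fed from a string so it
// doesn't depend on the crate's runtime-specific `fs` wrappers.
async fn parse_ldjson(data: &str) -> Result<Vec<Document>> {
    // Turn the lines into a fallible stream to mirror what an async
    // `BufReader::lines()` would produce.
    let mut lines = stream::iter(data.lines().map(Ok::<_, anyhow::Error>));

    let mut docs = Vec::new();
    while let Some(line) = lines.try_next().await? {
        let json: Value = serde_json::from_str(line)?;
        docs.push(match json.try_into()? {
            Bson::Document(doc) => doc,
            _ => bail!("expected a JSON object per line"),
        });
    }
    Ok(docs)
}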

benchmarks/src/error.rs

Lines changed: 0 additions & 1 deletion
This file was deleted.
