diff --git a/.evergreen/run-fuzzer.sh b/.evergreen/run-fuzzer.sh index 559913d2..9a3e8c27 100755 --- a/.evergreen/run-fuzzer.sh +++ b/.evergreen/run-fuzzer.sh @@ -30,11 +30,12 @@ run_fuzzer() { } # Run existing targets -run_fuzzer "deserialize" +run_fuzzer "decode" run_fuzzer "raw_deserialize" +run_fuzzer "raw_deserialize_utf8_lossy" run_fuzzer "iterate" # Run new security-focused targets run_fuzzer "type_markers" run_fuzzer "string_handling" -run_fuzzer "serialization" +run_fuzzer "encoding" diff --git a/.evergreen/run-tests.sh b/.evergreen/run-tests.sh index 56c57f23..5b01e50f 100755 --- a/.evergreen/run-tests.sh +++ b/.evergreen/run-tests.sh @@ -4,7 +4,9 @@ set -o errexit . ~/.cargo/env -RUST_BACKTRACE=1 cargo test +# Test with default features and excluding doctests (some of which require the 'serde' feature) +RUST_BACKTRACE=1 cargo test --all-targets +# Test with all features and including doctests RUST_BACKTRACE=1 cargo test --all-features cd serde-tests diff --git a/Cargo.toml b/Cargo.toml index c772468c..abff6263 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,6 +47,7 @@ serde_path_to_error = ["dep:serde_path_to_error"] # if enabled, include serde_with interop. # should be used in conjunction with chrono-0_4 or uuid-0_8. serde_with-3 = ["dep:serde_with"] +serde = ["dep:serde"] [lib] name = "bson" @@ -55,7 +56,7 @@ name = "bson" ahash = "0.8.0" chrono = { version = "0.4.15", features = ["std"], default-features = false, optional = true } rand = "0.9" -serde = { version = "1.0", features = ["derive"] } +serde = { version = "1.0", features = ["derive"], optional = true } serde_json = { version = "1.0", features = ["preserve_order"] } indexmap = "2.1.0" hex = "0.4.2" diff --git a/examples/deserialize.rs b/examples/deserialize.rs deleted file mode 100644 index cfb911d2..00000000 --- a/examples/deserialize.rs +++ /dev/null @@ -1,11 +0,0 @@ -use std::fs::File; - -use bson::Document; - -fn main() { - let mut f = File::open("examples/test.bson").unwrap(); - - while let Ok(deserialized) = Document::from_reader(&mut f) { - println!("{:?}", deserialized); - } -} diff --git a/examples/serialize.rs b/examples/serialize.rs deleted file mode 100644 index 9dbba5a1..00000000 --- a/examples/serialize.rs +++ /dev/null @@ -1,26 +0,0 @@ -use std::io::Cursor; - -use bson::{oid, Bson, DateTime, Document}; - -fn main() { - let mut doc = Document::new(); - doc.insert("foo".to_string(), Bson::String("bar".to_string())); - - let arr = vec![ - Bson::String("blah".to_string()), - Bson::DateTime(DateTime::now()), - Bson::ObjectId(oid::ObjectId::from_bytes([ - 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, - ])), - ]; - - doc.insert("array".to_string(), Bson::Array(arr)); - - let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); - - println!("Serialized: {:?}", buf); - - let doc = Document::from_reader(&mut Cursor::new(&buf[..])).unwrap(); - println!("Deserialized: {:?}", doc); -} diff --git a/examples/test.bson b/examples/test.bson deleted file mode 100644 index 7b619563..00000000 Binary files a/examples/test.bson and /dev/null differ diff --git a/fuzz/.gitignore b/fuzz/.gitignore index 572e03bd..bf9c8cb7 100644 --- a/fuzz/.gitignore +++ b/fuzz/.gitignore @@ -2,3 +2,4 @@ target corpus artifacts +Cargo.lock diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml index abd24a57..27a01ff3 100644 --- a/fuzz/Cargo.toml +++ b/fuzz/Cargo.toml @@ -10,6 +10,7 @@ cargo-fuzz = true [dependencies.bson] path = ".." 
+features = ["serde"] [dependencies.libfuzzer-sys] version = "0.4.0" @@ -24,8 +25,8 @@ version = "1.0" members = ["."] [[bin]] -name = "deserialize" -path = "fuzz_targets/deserialize.rs" +name = "decode" +path = "fuzz_targets/decode.rs" [[bin]] name = "iterate" @@ -48,8 +49,8 @@ name = "string_handling" path = "fuzz_targets/string_handling.rs" [[bin]] -name = "serialization" -path = "fuzz_targets/serialization.rs" +name = "encoding" +path = "fuzz_targets/encoding.rs" [[bin]] name = "generate_corpus" diff --git a/fuzz/fuzz_targets/deserialize.rs b/fuzz/fuzz_targets/decode.rs similarity index 61% rename from fuzz/fuzz_targets/deserialize.rs rename to fuzz/fuzz_targets/decode.rs index 4995394d..ef71f959 100644 --- a/fuzz/fuzz_targets/deserialize.rs +++ b/fuzz/fuzz_targets/decode.rs @@ -7,8 +7,8 @@ use bson::Document; use std::io::Cursor; fuzz_target!(|buf: &[u8]| { - if let Ok(doc) = Document::from_reader(&mut Cursor::new(&buf[..])) { + if let Ok(doc) = Document::decode_from_reader(&mut Cursor::new(&buf[..])) { let mut vec = Vec::with_capacity(buf.len()); - let _ = doc.to_writer(&mut vec); + let _ = doc.encode_to_writer(&mut vec); } }); diff --git a/fuzz/fuzz_targets/serialization.rs b/fuzz/fuzz_targets/encoding.rs similarity index 75% rename from fuzz/fuzz_targets/serialization.rs rename to fuzz/fuzz_targets/encoding.rs index ea6b13e5..99e66cda 100644 --- a/fuzz/fuzz_targets/serialization.rs +++ b/fuzz/fuzz_targets/encoding.rs @@ -45,20 +45,19 @@ fn compare_values(val1: &Bson, val2: &Bson) -> bool { } fuzz_target!(|input: &[u8]| { - if let Ok(rawdoc) = RawDocument::from_bytes(&input) { + if let Ok(rawdoc) = RawDocument::decode_from_bytes(&input) { if let Ok(doc) = Document::try_from(rawdoc) { let out = RawDocumentBuf::try_from(&doc).unwrap(); let out_bytes = out.as_bytes(); if input != out_bytes { - let reserialized = RawDocument::from_bytes(&out_bytes).unwrap(); - let reserialized_doc = Document::try_from(reserialized).unwrap(); - // Ensure that the reserialized document is the same as the original document, the + let reencoded = RawDocument::decode_from_bytes(&out_bytes).unwrap(); + let reencoded_doc = Document::try_from(reencoded).unwrap(); + // Ensure that the re-encoded document is the same as the original document, the // bytes can differ while still resulting in the same Document. 
- if !compare_docs(&doc, &reserialized_doc) { + if !compare_docs(&doc, &reencoded_doc) { panic!( - "Reserialized document is not the same as the original document: {:?} != \ - {:?}", - doc, reserialized_doc + "Reencoded document is not the same as the original document: {:?} != {:?}", + doc, reencoded_doc ); } } diff --git a/fuzz/fuzz_targets/iterate.rs b/fuzz/fuzz_targets/iterate.rs index 7dece7ca..8dc9404a 100644 --- a/fuzz/fuzz_targets/iterate.rs +++ b/fuzz/fuzz_targets/iterate.rs @@ -5,7 +5,7 @@ extern crate bson; use bson::RawDocument; fuzz_target!(|buf: &[u8]| { - if let Ok(doc) = RawDocument::from_bytes(buf) { + if let Ok(doc) = RawDocument::decode_from_bytes(buf) { for _ in doc {} } }); diff --git a/fuzz/fuzz_targets/raw_deserialize.rs b/fuzz/fuzz_targets/raw_deserialize.rs index 4fb91b09..3e7e8e18 100644 --- a/fuzz/fuzz_targets/raw_deserialize.rs +++ b/fuzz/fuzz_targets/raw_deserialize.rs @@ -5,8 +5,7 @@ extern crate bson; use bson::Document; fuzz_target!(|buf: &[u8]| { - if let Ok(doc) = bson::from_slice::(buf) { - let mut vec = Vec::with_capacity(buf.len()); - let _ = doc.to_writer(&mut vec); + if let Ok(doc) = bson::deserialize_from_slice::(buf) { + let _ = bson::serialize_to_vec(&doc); } }); diff --git a/fuzz/fuzz_targets/raw_deserialize_utf8_lossy.rs b/fuzz/fuzz_targets/raw_deserialize_utf8_lossy.rs index fb5343b4..cf872965 100644 --- a/fuzz/fuzz_targets/raw_deserialize_utf8_lossy.rs +++ b/fuzz/fuzz_targets/raw_deserialize_utf8_lossy.rs @@ -1,11 +1,11 @@ #![no_main] -#[macro_use] extern crate libfuzzer_sys; +#[macro_use] +extern crate libfuzzer_sys; extern crate bson; -use bson::Document; +use bson::{serde_helpers::Utf8LossyDeserialization, Document}; fuzz_target!(|buf: &[u8]| { - if let Ok(doc) = bson::from_slice_utf8_lossy::(buf) { - let mut vec = Vec::with_capacity(buf.len()); - let _ = doc.to_writer(&mut vec); + if let Ok(doc) = bson::deserialize_from_slice::>(buf) { + let _ = bson::serialize_to_vec(&doc.0); } }); diff --git a/fuzz/fuzz_targets/string_handling.rs b/fuzz/fuzz_targets/string_handling.rs index 090d132f..3b403bd0 100644 --- a/fuzz/fuzz_targets/string_handling.rs +++ b/fuzz/fuzz_targets/string_handling.rs @@ -6,7 +6,7 @@ use bson::{RawBsonRef, RawDocument}; use std::convert::TryInto; fuzz_target!(|buf: &[u8]| { - if let Ok(doc) = RawDocument::from_bytes(buf) { + if let Ok(doc) = RawDocument::decode_from_bytes(buf) { for elem in doc.iter_elements().flatten() { // Convert to RawBsonRef and check string-related types if let Ok(bson) = elem.try_into() { diff --git a/fuzz/fuzz_targets/type_markers.rs b/fuzz/fuzz_targets/type_markers.rs index 3506b96e..f6dd7aa7 100644 --- a/fuzz/fuzz_targets/type_markers.rs +++ b/fuzz/fuzz_targets/type_markers.rs @@ -6,7 +6,7 @@ use bson::{RawBsonRef, RawDocument}; use std::convert::TryInto; fuzz_target!(|buf: &[u8]| { - if let Ok(doc) = RawDocument::from_bytes(buf) { + if let Ok(doc) = RawDocument::decode_from_bytes(buf) { for elem in doc.iter_elements().flatten() { let _: Result = elem.try_into(); } diff --git a/fuzz/generate_corpus.rs b/fuzz/generate_corpus.rs index c67c4cfc..06b16f02 100644 --- a/fuzz/generate_corpus.rs +++ b/fuzz/generate_corpus.rs @@ -29,14 +29,18 @@ fn generate_length_edge_cases(dir: &Path) -> std::io::Result<()> { let min_doc = doc! {}; fs::write( target_dir.join("min_doc"), - bson::to_vec(&min_doc).map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, + min_doc + .encode_to_vec() + .map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, )?; // Document with length near i32::MAX let large_doc = doc! 
{ "a": "b".repeat(i32::MAX as usize / 2) }; fs::write( target_dir.join("large_doc"), - bson::to_vec(&large_doc).map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, + large_doc + .encode_to_vec() + .map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, )?; Ok(()) @@ -73,7 +77,9 @@ fn generate_type_marker_cases(dir: &Path) -> std::io::Result<()> { }; fs::write( target_dir.join("all_types"), - bson::to_vec(&all_types).map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, + all_types + .encode_to_vec() + .map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, )?; Ok(()) @@ -100,7 +106,9 @@ fn generate_string_edge_cases(dir: &Path) -> std::io::Result<()> { }; fs::write( target_dir.join("utf8_cases"), - bson::to_vec(&utf8_cases).map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, + utf8_cases + .encode_to_vec() + .map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, )?; Ok(()) @@ -124,7 +132,9 @@ fn generate_serialization_cases(dir: &Path) -> std::io::Result<()> { } fs::write( target_dir.join("nested_doc"), - bson::to_vec(&nested_doc).map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, + nested_doc + .encode_to_vec() + .map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, )?; // Document with large binary data @@ -136,7 +146,9 @@ fn generate_serialization_cases(dir: &Path) -> std::io::Result<()> { }; fs::write( target_dir.join("large_binary"), - bson::to_vec(&large_binary).map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, + large_binary + .encode_to_vec() + .map_err(|e| Error::new(ErrorKind::Other, e.to_string()))?, )?; Ok(()) diff --git a/serde-tests/Cargo.toml b/serde-tests/Cargo.toml index 99236d60..12a09e8c 100644 --- a/serde-tests/Cargo.toml +++ b/serde-tests/Cargo.toml @@ -8,7 +8,7 @@ edition = "2018" default = [] [dependencies] -bson = { path = "..", features = ["uuid-1", "chrono-0_4", "serde_with-3"] } +bson = { path = "..", features = ["uuid-1", "chrono-0_4", "serde", "serde_with-3"] } serde = { version = "1.0", features = ["derive"] } pretty_assertions = "0.6.1" hex = "0.4.2" diff --git a/serde-tests/json.rs b/serde-tests/json.rs index 417ea6c9..7d6d3b2d 100644 --- a/serde-tests/json.rs +++ b/serde-tests/json.rs @@ -99,18 +99,18 @@ fn owned_raw_bson() { }); let mut doc_buf = RawDocumentBuf::new(); - doc_buf.append("a", "key"); - doc_buf.append("number", 12); - doc_buf.append("bool", false); - doc_buf.append("nu", RawBson::Null); + doc_buf.append("a", "key").unwrap(); + doc_buf.append("number", 12).unwrap(); + doc_buf.append("bool", false).unwrap(); + doc_buf.append("nu", RawBson::Null).unwrap(); let mut array_buf = RawArrayBuf::new(); - array_buf.push(1); - array_buf.push("string"); + array_buf.push(1).unwrap(); + array_buf.push("string").unwrap(); let mut bson_doc = RawDocumentBuf::new(); - bson_doc.append("first", true); - bson_doc.append("second", "string"); + bson_doc.append("first", true).unwrap(); + bson_doc.append("second", "string").unwrap(); let expected = Foo { doc_buf, diff --git a/serde-tests/test.rs b/serde-tests/test.rs index 719f63c6..9fbafdc4 100644 --- a/serde-tests/test.rs +++ b/serde-tests/test.rs @@ -52,44 +52,42 @@ use bson::{ /// - serializing `expected_value` to BSON bytes matches the raw BSON bytes of `expected_doc` /// - deserializing a `T` from the serialized bytes produces `expected_value` /// - deserializing a `Document` from the serialized bytes produces `expected_doc` -/// - `bson::to_writer` and `Document::to_writer` produce the same result given the same input +/// - `bson::to_vec` and 
`Document::to_vec` produce the same result given the same input fn run_test(expected_value: &T, expected_doc: &Document, description: &str) where T: Serialize + DeserializeOwned + PartialEq + std::fmt::Debug, { - let mut expected_bytes = Vec::new(); - expected_doc - .to_writer(&mut expected_bytes) - .expect(description); + let expected_bytes = expected_doc.encode_to_vec().expect(description); - let expected_bytes_serde = bson::to_vec(&expected_value).expect(description); + let expected_bytes_serde = bson::serialize_to_vec(&expected_value).expect(description); assert_eq!(expected_bytes_serde, expected_bytes, "{}", description); - let expected_bytes_from_doc_serde = bson::to_vec(&expected_doc).expect(description); + let expected_bytes_from_doc_serde = bson::serialize_to_vec(&expected_doc).expect(description); assert_eq!( expected_bytes_from_doc_serde, expected_bytes, "{}", description ); - let serialized_doc = bson::to_document(&expected_value).expect(description); + let serialized_doc = bson::serialize_to_document(&expected_value).expect(description); assert_eq!(&serialized_doc, expected_doc, "{}", description); assert_eq!( expected_value, - &bson::from_document::(serialized_doc).expect(description), + &bson::deserialize_from_document::(serialized_doc).expect(description), "{}", description ); assert_eq!( - &bson::from_reader::<_, T>(expected_bytes.as_slice()).expect(description), + &bson::deserialize_from_reader::<_, T>(expected_bytes.as_slice()).expect(description), expected_value, "{}", description ); assert_eq!( - &bson::from_reader::<_, Document>(expected_bytes.as_slice()).expect(description), + &bson::deserialize_from_reader::<_, Document>(expected_bytes.as_slice()) + .expect(description), expected_doc, "{}", description @@ -104,25 +102,23 @@ fn run_deserialize_test(expected_value: &T, expected_doc: &Document, descript where T: DeserializeOwned + PartialEq + std::fmt::Debug, { - let mut expected_bytes = Vec::new(); - expected_doc - .to_writer(&mut expected_bytes) - .expect(description); + let expected_bytes = expected_doc.encode_to_vec().expect(description); assert_eq!( - &bson::from_document::(expected_doc.clone()).expect(description), + &bson::deserialize_from_document::(expected_doc.clone()).expect(description), expected_value, "{}", description ); assert_eq!( - &bson::from_reader::<_, T>(expected_bytes.as_slice()).expect(description), + &bson::deserialize_from_reader::<_, T>(expected_bytes.as_slice()).expect(description), expected_value, "{}", description ); assert_eq!( - &bson::from_reader::<_, Document>(expected_bytes.as_slice()).expect(description), + &bson::deserialize_from_reader::<_, Document>(expected_bytes.as_slice()) + .expect(description), expected_doc, "{}", description @@ -136,8 +132,8 @@ fn run_raw_round_trip_test<'de, T>(bytes: &'de [u8], description: &str) where T: Deserialize<'de> + Serialize + std::fmt::Debug, { - let t: T = bson::from_slice(bytes).expect(description); - let vec = bson::to_vec(&t).expect(description); + let t: T = bson::deserialize_from_slice(bytes).expect(description); + let vec = bson::serialize_to_vec(&t).expect(description); assert_eq!(vec.as_slice(), bytes); } @@ -444,13 +440,12 @@ fn type_conversion() { let doc = doc! 
{ "bar": 1_i64 }; - let deserialized: Foo = bson::from_document(doc.clone()).unwrap(); + let deserialized: Foo = bson::deserialize_from_document(doc.clone()).unwrap(); assert_eq!(deserialized, v); - let mut bytes = Vec::new(); - doc.to_writer(&mut bytes).unwrap(); + let bytes = doc.encode_to_vec().unwrap(); - let bson_deserialized: Foo = bson::from_reader(bytes.as_slice()).unwrap(); + let bson_deserialized: Foo = bson::deserialize_from_reader(bytes.as_slice()).unwrap(); assert_eq!(bson_deserialized, v); } @@ -463,12 +458,11 @@ fn missing_errors() { let doc = doc! {}; - bson::from_document::(doc.clone()).unwrap_err(); + bson::deserialize_from_document::(doc.clone()).unwrap_err(); - let mut bytes = Vec::new(); - doc.to_writer(&mut bytes).unwrap(); + let bytes = doc.encode_to_vec().unwrap(); - bson::from_reader::<_, Foo>(bytes.as_slice()).unwrap_err(); + bson::deserialize_from_reader::<_, Foo>(bytes.as_slice()).unwrap_err(); } #[test] @@ -682,11 +676,12 @@ fn unused_fields_deny() { "a": 1, "b": 2, }; - bson::from_document::(doc.clone()).expect_err("extra fields should cause failure"); + bson::deserialize_from_document::(doc.clone()) + .expect_err("extra fields should cause failure"); - let mut bytes = Vec::new(); - doc.to_writer(&mut bytes).unwrap(); - bson::from_reader::<_, Foo>(bytes.as_slice()).expect_err("extra fields should cause failure"); + let bytes = doc.encode_to_vec().unwrap(); + bson::deserialize_from_reader::<_, Foo>(bytes.as_slice()) + .expect_err("extra fields should cause failure"); } #[test] @@ -741,7 +736,7 @@ fn raw_doc_buf() { d: RawDocumentBuf, } - let bytes = bson::to_vec(&doc! { + let bytes = bson::serialize_to_vec(&doc! { "d": { "a": 12, "b": 5.5, @@ -763,7 +758,7 @@ fn raw_doc() { d: &'a RawDocument, } - let bytes = bson::to_vec(&doc! { + let bytes = bson::serialize_to_vec(&doc! { "d": { "a": 12, "b": 5.5, @@ -785,7 +780,7 @@ fn raw_array() { d: &'a RawArray, } - let bytes = bson::to_vec(&doc! { + let bytes = bson::serialize_to_vec(&doc! { "d": [1, true, { "ok": 1 }, [ "sub", "array" ], Uuid::new()] }) .expect("raw_array"); @@ -810,7 +805,7 @@ fn raw_binary() { other: RawBinaryRef<'a>, } - let bytes = bson::to_vec(&doc! { + let bytes = bson::serialize_to_vec(&doc! { "generic": Binary { bytes: vec![1, 2, 3, 4, 5], subtype: BinarySubtype::Generic, @@ -838,7 +833,7 @@ fn raw_regex() { r: RawRegexRef<'a>, } - let bytes = bson::to_vec(&doc! { + let bytes = bson::serialize_to_vec(&doc! { "r": Regex { pattern: "a[b-c]d".to_string(), options: "ab".to_string(), @@ -857,7 +852,7 @@ fn raw_code_w_scope() { r: RawJavaScriptCodeWithScopeRef<'a>, } - let bytes = bson::to_vec(&doc! { + let bytes = bson::serialize_to_vec(&doc! { "r": JavaScriptCodeWithScope { code: "console.log(x)".to_string(), scope: doc! { "x": 1 }, @@ -949,7 +944,7 @@ impl AllTypes { let decimal = { let bytes = hex::decode("18000000136400D0070000000000000000000000003A3000").unwrap(); - let d = Document::from_reader(bytes.as_slice()).unwrap(); + let d = Document::decode_from_reader(bytes.as_slice()).unwrap(); match d.get("d") { Some(Bson::Decimal128(d)) => *d, c => panic!("expected decimal128, got {:?}", c), @@ -1053,7 +1048,7 @@ fn all_raw_types_rmp() { regex: RawRegexRef<'a>, } - let doc_bytes = bson::to_vec(&doc! { + let doc_bytes = bson::serialize_to_vec(&doc! 
{ "bson": "some string", "array": [1, 2, 3], "binary": Binary { bytes: vec![1, 2, 3], subtype: BinarySubtype::Generic }, @@ -1068,7 +1063,7 @@ fn all_raw_types_rmp() { } }) .unwrap(); - let doc_buf = RawDocumentBuf::from_bytes(doc_bytes).unwrap(); + let doc_buf = RawDocumentBuf::decode_from_bytes(doc_bytes).unwrap(); let document = &doc_buf; let array = document.get_array("array").unwrap(); @@ -1124,8 +1119,7 @@ fn borrowed() { "cow": "cow", "array": ["borrowed string"], }; - let mut bson = Vec::new(); - doc.to_writer(&mut bson).unwrap(); + let bson = doc.encode_to_vec().unwrap(); let s = "borrowed string".to_string(); let ss = "another borrowed string".to_string(); @@ -1142,7 +1136,7 @@ fn borrowed() { }; let deserialized: Foo = - bson::from_slice(bson.as_slice()).expect("deserialization should succeed"); + bson::deserialize_from_slice(bson.as_slice()).expect("deserialization should succeed"); assert_eq!(deserialized, v); } @@ -1185,8 +1179,8 @@ fn u2i() { let v = TooBig { u_64: i64::MAX as u64 + 1, }; - bson::to_document(&v).unwrap_err(); - bson::to_vec(&v).unwrap_err(); + bson::serialize_to_document(&v).unwrap_err(); + bson::serialize_to_vec(&v).unwrap_err(); } #[test] @@ -1268,14 +1262,16 @@ fn owned_raw_types() { RawBson::JavaScriptCodeWithScope(raw_code_w_scope.clone()), ), ("decimal128", RawBson::Decimal128(d128)), - ]), + ]) + .unwrap(), array: RawArrayBuf::from_iter([ RawBson::String("a string".to_string()), RawBson::ObjectId(oid), RawBson::DateTime(dt), RawBson::JavaScriptCodeWithScope(raw_code_w_scope), RawBson::Decimal128(d128), - ]), + ]) + .unwrap(), }; let expected = doc! { @@ -1317,15 +1313,15 @@ fn hint_cleared() { "binary": binary_value.clone() }; - let bytes = bson::to_vec(&doc_value).unwrap(); + let bytes = bson::serialize_to_vec(&doc_value).unwrap(); - let doc = RawDocument::from_bytes(&bytes).unwrap(); + let doc = RawDocument::decode_from_bytes(&bytes).unwrap(); let binary = doc.get_binary("binary").unwrap(); let f = Foo { doc, binary }; - let serialized_bytes = bson::to_vec(&f).unwrap(); - let round_doc: Document = bson::from_slice(&serialized_bytes).unwrap(); + let serialized_bytes = bson::serialize_to_vec(&f).unwrap(); + let round_doc: Document = bson::deserialize_from_slice(&serialized_bytes).unwrap(); assert_eq!(round_doc, doc! { "doc": doc_value, "binary": binary_value }); } @@ -1333,5 +1329,5 @@ fn hint_cleared() { #[test] fn invalid_length() { // This is a regression test for fuzzer-generated input (RUST-1240). 
- assert!(bson::from_slice::(&[4, 0, 0, 128, 0, 87]).is_err()); + assert!(bson::deserialize_from_slice::(&[4, 0, 0, 128, 0, 87]).is_err()); } diff --git a/src/binary.rs b/src/binary.rs index f977088b..bd00a8db 100644 --- a/src/binary.rs +++ b/src/binary.rs @@ -2,9 +2,8 @@ mod vector; -use crate::{base64, spec::BinarySubtype, Document, RawBinaryRef}; +use crate::{base64, spec::BinarySubtype, RawBinaryRef}; use std::{ - convert::TryFrom, error, fmt::{self, Display}, }; @@ -61,7 +60,8 @@ impl Binary { Ok(Binary { subtype, bytes }) } - pub(crate) fn from_extended_doc(doc: &Document) -> Option { + #[cfg(feature = "serde")] + pub(crate) fn from_extended_doc(doc: &crate::Document) -> Option { let binary_doc = doc.get_document("$binary").ok()?; if let Ok(bytes) = binary_doc.get_str("base64") { diff --git a/src/binary/vector.rs b/src/binary/vector.rs index 7226e867..d67f3757 100644 --- a/src/binary/vector.rs +++ b/src/binary/vector.rs @@ -3,8 +3,6 @@ use std::{ mem::size_of, }; -use serde::{Deserialize, Serialize}; - use super::{Binary, Error, Result}; use crate::{spec::BinarySubtype, Bson, RawBson}; @@ -32,10 +30,10 @@ const PACKED_BIT: u8 = 0x10; /// } /// /// let data = Data { vector: Vector::Int8(vec![0, 1, 2]) }; -/// let document = bson::to_document(&data).unwrap(); +/// let document = bson::serialize_to_document(&data).unwrap(); /// assert_eq!(document.get("vector").unwrap().element_type(), ElementType::Binary); /// -/// let data: Data = bson::from_document(document).unwrap(); +/// let data: Data = bson::deserialize_from_document(document).unwrap(); /// assert_eq!(data.vector, Vector::Int8(vec![0, 1, 2])); /// ``` /// @@ -267,7 +265,8 @@ impl From for RawBson { } } -impl Serialize for Vector { +#[cfg(feature = "serde")] +impl serde::Serialize for Vector { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, @@ -277,7 +276,8 @@ impl Serialize for Vector { } } -impl<'de> Deserialize<'de> for Vector { +#[cfg(feature = "serde")] +impl<'de> serde::Deserialize<'de> for Vector { fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, diff --git a/src/bson.rs b/src/bson.rs index 9b5f0ca4..8f35899d 100644 --- a/src/bson.rs +++ b/src/bson.rs @@ -22,7 +22,7 @@ //! BSON definition use std::{ - convert::{TryFrom, TryInto}, + convert::TryFrom, fmt::{self, Debug, Display, Formatter}, hash::Hash, ops::Index, @@ -31,13 +31,7 @@ use std::{ use serde_json::{json, Value}; pub use crate::document::Document; -use crate::{ - base64, - oid::{self, ObjectId}, - spec::{BinarySubtype, ElementType}, - Binary, - Decimal128, -}; +use crate::{base64, oid, spec::ElementType, Binary, Decimal128}; /// Possible BSON value types. #[derive(Clone, Default, PartialEq)] @@ -501,7 +495,7 @@ impl Bson { Bson::JavaScriptCode(code) => json!({ "$code": code }), Bson::JavaScriptCodeWithScope(JavaScriptCodeWithScope { code, scope }) => json!({ "$code": code, - "$scope": scope, + "$scope": Bson::Document(scope).into_relaxed_extjson(), }), Bson::Int32(v) => v.into(), Bson::Int64(v) => v.into(), @@ -618,6 +612,7 @@ impl Bson { /// This function mainly used for [extended JSON format](https://www.mongodb.com/docs/manual/reference/mongodb-extended-json/). // TODO RUST-426: Investigate either removing this from the serde implementation or unifying // with the extended JSON implementation. 
+ #[cfg(feature = "serde")] pub(crate) fn into_extended_document(self, rawbson: bool) -> Document { match self { Bson::RegularExpression(Regex { @@ -660,7 +655,7 @@ impl Bson { if rawbson { doc! { "$binary": { - "bytes": Binary { subtype: BinarySubtype::Generic, bytes }, + "bytes": Binary { subtype: crate::spec::BinarySubtype::Generic, bytes }, "subType": Bson::Int32(tval.into()) } } @@ -727,6 +722,7 @@ impl Bson { } } + #[cfg(feature = "serde")] pub(crate) fn from_extended_document(doc: Document) -> Bson { if doc.len() > 2 { return Bson::Document(doc); @@ -738,7 +734,7 @@ impl Bson { match keys.as_slice() { ["$oid"] => { if let Ok(oid) = doc.get_str("$oid") { - if let Ok(oid) = ObjectId::parse_str(oid) { + if let Ok(oid) = crate::oid::ObjectId::parse_str(oid) { return Bson::ObjectId(oid); } } diff --git a/src/datetime.rs b/src/datetime.rs index b1394c32..41dca1df 100644 --- a/src/datetime.rs +++ b/src/datetime.rs @@ -73,7 +73,7 @@ use serde::{Deserialize, Deserializer, Serialize}; /// /// # fn main() -> bson::ser::Result<()> { /// let f = Foo { date_time: bson::DateTime::now(), chrono_datetime: chrono::Utc::now() }; -/// println!("{:?}", bson::to_document(&f)?); +/// println!("{:?}", bson::serialize_to_document(&f)?); /// # Ok(()) /// # } /// ``` @@ -170,7 +170,7 @@ use serde::{Deserialize, Deserializer, Serialize}; /// "as_bson": bson::DateTime::from_chrono(dt), /// }; /// -/// assert_eq!(bson::to_document(&foo)?, expected); +/// assert_eq!(bson::serialize_to_document(&foo)?, expected); /// # } /// # Ok::<(), Box>(()) /// ``` @@ -421,10 +421,6 @@ impl crate::DateTime { self.checked_duration_since(earlier) .unwrap_or(Duration::ZERO) } - - pub(crate) fn as_le_bytes(&self) -> [u8; 8] { - self.0.to_le_bytes() - } } impl fmt::Debug for crate::DateTime { diff --git a/src/de.rs b/src/de.rs index c1e1312a..821a4dca 100644 --- a/src/de.rs +++ b/src/de.rs @@ -33,25 +33,20 @@ pub use self::{ use std::io::Read; use crate::{ - bson::{Bson, Document, Timestamp}, - ser::write_i32, + bson::{Bson, Document}, + raw::reader_to_vec, spec::BinarySubtype, }; #[rustfmt::skip] -use ::serde::{ - de::{DeserializeOwned, Error as _}, - Deserialize, -}; +use ::serde::{de::DeserializeOwned, Deserialize}; pub(crate) use self::serde::{convert_unsigned_to_signed_raw, BsonVisitor}; +#[cfg(test)] pub(crate) use self::raw::Deserializer as RawDeserializer; pub(crate) const MAX_BSON_SIZE: i32 = i32::MAX; -pub(crate) const MIN_BSON_DOCUMENT_SIZE: i32 = 4 + 1; // 4 bytes for length, one byte for null terminator -pub(crate) const MIN_BSON_STRING_SIZE: i32 = 4 + 1; // 4 bytes for length, one byte for null terminator -pub(crate) const MIN_CODE_WITH_SCOPE_SIZE: i32 = 4 + MIN_BSON_STRING_SIZE + MIN_BSON_DOCUMENT_SIZE; /// Hint provided to the deserializer via `deserialize_newtype_struct` as to the type of thing /// being deserialized. @@ -69,21 +64,13 @@ enum DeserializerHint { RawBson, } -impl Timestamp { - pub(crate) fn from_reader(mut reader: R) -> Result { - let mut bytes = [0; 8]; - reader.read_exact(&mut bytes)?; - Ok(Timestamp::from_le_bytes(bytes)) - } -} - /// Deserialize a `T` from the provided [`Bson`] value. /// /// The [`Deserializer`] used by this function presents itself as human readable, whereas the -/// one used in [`from_slice`] does not. This means that this function may deserialize differently -/// than [`from_slice`] for types that change their deserialization logic depending on whether -/// the format is human readable or not. 
-pub fn from_bson(bson: Bson) -> Result +/// one used in [`deserialize_from_slice`] does not. This means that this function may deserialize +/// differently than [`deserialize_from_slice`] for types that change their deserialization logic +/// depending on whether the format is human readable or not. +pub fn deserialize_from_bson(bson: Bson) -> Result where T: DeserializeOwned, { @@ -101,51 +88,35 @@ where /// Deserialize a `T` from the provided [`Document`]. /// /// The [`Deserializer`] used by this function presents itself as human readable, whereas the -/// one used in [`from_slice`] does not. This means that this function may deserialize differently -/// than [`from_slice`] for types that change their deserialization logic depending on whether -/// the format is human readable or not. -pub fn from_document(doc: Document) -> Result +/// one used in [`deserialize_from_slice`] does not. This means that this function may deserialize +/// differently than [`deserialize_from_slice`] for types that change their deserialization logic +/// depending on whether the format is human readable or not. +pub fn deserialize_from_document(doc: Document) -> Result where T: DeserializeOwned, { - from_bson(Bson::Document(doc)) -} - -pub(crate) fn reader_to_vec(mut reader: R) -> Result> { - let mut buf = [0; 4]; - reader.read_exact(&mut buf)?; - let length = i32::from_le_bytes(buf); - - if length < MIN_BSON_DOCUMENT_SIZE { - return Err(Error::custom("document size too small")); - } - - let mut bytes = Vec::with_capacity(length as usize); - write_i32(&mut bytes, length).map_err(Error::custom)?; - - reader.take(length as u64 - 4).read_to_end(&mut bytes)?; - Ok(bytes) + deserialize_from_bson(Bson::Document(doc)) } /// Deserialize an instance of type `T` from an I/O stream of BSON. -pub fn from_reader(reader: R) -> Result +pub fn deserialize_from_reader(reader: R) -> Result where T: DeserializeOwned, R: Read, { let bytes = reader_to_vec(reader)?; - from_slice(bytes.as_slice()) + deserialize_from_slice(bytes.as_slice()) } /// Deserialize an instance of type `T` from a slice of BSON bytes. -pub fn from_slice<'de, T>(bytes: &'de [u8]) -> Result +pub fn deserialize_from_slice<'de, T>(bytes: &'de [u8]) -> Result where T: Deserialize<'de>, { - from_raw(raw::Deserializer::new(bytes)?) + deserialize_from_raw(raw::Deserializer::new(bytes)?) } -pub(crate) fn from_raw<'de, T: Deserialize<'de>>( +pub(crate) fn deserialize_from_raw<'de, T: Deserialize<'de>>( deserializer: raw::Deserializer<'de>, ) -> Result { #[cfg(feature = "serde_path_to_error")] diff --git a/src/decimal128.rs b/src/decimal128.rs index 546b9619..036b1f77 100644 --- a/src/decimal128.rs +++ b/src/decimal128.rs @@ -38,6 +38,7 @@ impl Decimal128 { self.bytes } + #[cfg(feature = "serde")] pub(crate) fn deserialize_from_slice( bytes: &[u8], ) -> std::result::Result { diff --git a/src/document.rs b/src/document.rs index eed8213c..5af4026b 100644 --- a/src/document.rs +++ b/src/document.rs @@ -673,7 +673,12 @@ impl Document { } } - /// Attempts to serialize the [`Document`] into a byte stream. + /// Attempt to encode the [`Document`] into a byte [`Vec`]. + pub fn encode_to_vec(&self) -> Result> { + Ok(crate::RawDocumentBuf::from_document(self)?.into_bytes()) + } + + /// Attempts to encode the [`Document`] into a byte stream. 
/// /// While the method signature indicates an owned writer must be passed in, a mutable reference /// may also be passed in due to blanket implementations of [`Write`] provided in the standard @@ -685,22 +690,17 @@ impl Document { /// /// let mut v: Vec = Vec::new(); /// let doc = doc! { "x" : 1 }; - /// doc.to_writer(&mut v)?; + /// doc.encode_to_writer(&mut v)?; /// # Ok(()) /// # } /// ``` - pub fn to_writer(&self, mut writer: W) -> crate::ser::Result<()> { - let buf = crate::to_vec(self)?; - writer.write_all(&buf)?; + pub fn encode_to_writer(&self, mut writer: W) -> crate::error::Result<()> { + let buf = crate::RawDocumentBuf::from_document(self)?; + writer.write_all(buf.as_bytes())?; Ok(()) } - fn decode(reader: &mut R) -> crate::de::Result { - let buf = crate::de::reader_to_vec(reader)?; - crate::de::from_raw(crate::de::RawDeserializer::new(&buf)?) - } - - /// Attempts to deserialize a [`Document`] from a byte stream. + /// Attempts to decode a [`Document`] from a byte stream. /// /// While the method signature indicates an owned reader must be passed in, a mutable reference /// may also be passed in due to blanket implementations of [`Read`] provided in the standard @@ -714,22 +714,23 @@ impl Document { /// /// let mut v: Vec = Vec::new(); /// let doc = doc! { "x" : 1 }; - /// doc.to_writer(&mut v)?; + /// doc.encode_to_writer(&mut v)?; /// /// // read from mutable reference /// let mut reader = Cursor::new(v.clone()); - /// let doc1 = Document::from_reader(&mut reader)?; + /// let doc1 = Document::decode_from_reader(&mut reader)?; /// /// // read from owned value - /// let doc2 = Document::from_reader(Cursor::new(v))?; + /// let doc2 = Document::decode_from_reader(Cursor::new(v))?; /// /// assert_eq!(doc, doc1); /// assert_eq!(doc, doc2); /// # Ok(()) /// # } /// ``` - pub fn from_reader(mut reader: R) -> crate::de::Result { - Self::decode(&mut reader) + pub fn decode_from_reader(reader: R) -> crate::error::Result { + let raw = crate::raw::RawDocumentBuf::decode_from_reader(reader)?; + raw.try_into() } } diff --git a/src/error.rs b/src/error.rs index d1497760..20380dcf 100644 --- a/src/error.rs +++ b/src/error.rs @@ -51,9 +51,14 @@ pub enum ErrorKind { kind: ValueAccessErrorKind, }, + /// A [`std::io::Error`] occurred. + #[error("An IO error occurred: {0}")] + Io(std::io::Error), + /// A wrapped deserialization error. /// TODO RUST-1406: collapse this - #[error("Deserialization error")] + #[cfg(feature = "serde")] + #[error("Deserialization error: {0}")] DeError(crate::de::Error), } @@ -67,13 +72,16 @@ impl From for Error { } } +impl From for Error { + fn from(value: std::io::Error) -> Self { + ErrorKind::Io(value).into() + } +} + +#[cfg(feature = "serde")] impl From for Error { fn from(value: crate::de::Error) -> Self { - Self { - kind: ErrorKind::DeError(value), - key: None, - index: None, - } + ErrorKind::DeError(value).into() } } @@ -162,4 +170,9 @@ impl Error { } ) } + + #[cfg(all(test, feature = "serde"))] + pub(crate) fn is_malformed_value(&self) -> bool { + matches!(self.kind, ErrorKind::MalformedValue { .. },) + } } diff --git a/src/lib.rs b/src/lib.rs index 3780be14..c5572c5d 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -132,7 +132,7 @@ //! use std::io::Read; //! //! let mut bytes = hex::decode("0C0000001069000100000000").unwrap(); -//! let doc = Document::from_reader(&mut bytes.as_slice()).unwrap(); // { "i": 1 } +//! let doc = Document::decode_from_reader(&mut bytes.as_slice()).unwrap(); // { "i": 1 } //! //! let doc = doc! { //! 
"hello": "world", @@ -200,14 +200,14 @@ //! // Deserialize the Person struct from the BSON data, automatically //! // verifying that the necessary keys are present and that they are of //! // the correct types. -//! let mut person: Person = bson::from_bson(bson_data).unwrap(); +//! let mut person: Person = bson::deserialize_from_bson(bson_data).unwrap(); //! //! // Do things just like with any other Rust data structure. //! println!("Redacting {}'s record.", person.name); //! person.name = "REDACTED".to_string(); //! //! // Get a serialized version of the input data as a [`Bson`]. -//! let redacted_bson = bson::to_bson(&person).unwrap(); +//! let redacted_bson = bson::serialize_to_bson(&person).unwrap(); //! ``` //! //! Any types that implement [`Serialize`](serde::Serialize) and [`Deserialize`](serde::Deserialize) @@ -289,7 +289,6 @@ pub use self::{ binary::Binary, bson::{Array, Bson, DbPointer, Document, JavaScriptCodeWithScope, Regex, Timestamp}, datetime::DateTime, - de::{from_bson, from_document, from_reader, from_slice, Deserializer}, decimal128::Decimal128, raw::{ RawArray, @@ -304,24 +303,46 @@ pub use self::{ RawJavaScriptCodeWithScopeRef, RawRegexRef, }, - ser::{to_bson, to_document, to_raw_document_buf, to_vec, Serializer}, uuid::{Uuid, UuidRepresentation}, }; +#[cfg(feature = "serde")] +#[doc(inline)] +pub use self::{ + de::{ + deserialize_from_bson, + deserialize_from_document, + deserialize_from_reader, + deserialize_from_slice, + Deserializer, + }, + ser::{ + serialize_to_bson, + serialize_to_document, + serialize_to_raw_document_buf, + serialize_to_vec, + Serializer, + }, +}; + #[macro_use] mod macros; mod base64; pub mod binary; mod bson; pub mod datetime; +#[cfg(feature = "serde")] pub mod de; pub mod decimal128; pub mod document; pub mod error; +#[cfg(feature = "serde")] pub mod extjson; pub mod oid; pub mod raw; +#[cfg(feature = "serde")] pub mod ser; +#[cfg(feature = "serde")] pub mod serde_helpers; pub mod spec; pub mod uuid; diff --git a/src/macros.rs b/src/macros.rs index 3a0cb77b..62d640b7 100644 --- a/src/macros.rs +++ b/src/macros.rs @@ -240,12 +240,12 @@ macro_rules! rawbson { // Finished with trailing comma. (@array [$($elems:expr,)*]) => { - <$crate::RawArrayBuf as std::iter::FromIterator::<$crate::RawBson>>::from_iter(vec![$($elems,)*]) + $crate::RawArrayBuf::from_iter(vec![$($elems,)*]).expect("invalid bson value") }; // Finished without trailing comma. (@array [$($elems:expr),*]) => { - <$crate::RawArrayBuf as std::iter::FromIterator::<$crate::RawBson>>::from_iter(vec![$($elems),*]) + $crate::RawArrayBuf::from_iter(vec![$($elems),*]).expect("invalid bson value") }; // Next element is `null`. @@ -293,13 +293,13 @@ macro_rules! rawbson { // Insert the current entry followed by trailing comma. (@object $object:ident [$($key:tt)+] ($value:expr) , $($rest:tt)*) => { - $object.append(($($key)+), $value); + $object.append(($($key)+), $value).expect("invalid bson value"); $crate::rawbson!(@object $object () ($($rest)*) ($($rest)*)); }; // Insert the last entry without trailing comma. (@object $object:ident [$($key:tt)+] ($value:expr)) => { - $object.append(($($key)+), $value); + $object.append(($($key)+), $value).expect("invalid bson value"); }; // Next value is `null`. 
diff --git a/src/oid.rs b/src/oid.rs index a6bb7dbd..074b4a06 100644 --- a/src/oid.rs +++ b/src/oid.rs @@ -92,7 +92,7 @@ impl error::Error for Error {} /// /// # fn main() -> std::result::Result<(), Box> { /// let f = Foo { oid: ObjectId::new() }; -/// println!("bson: {}", bson::to_document(&f)?); +/// println!("bson: {}", bson::serialize_to_document(&f)?); /// println!("json: {}", serde_json::to_string(&f)?); /// # Ok(()) /// # } @@ -127,7 +127,7 @@ impl error::Error for Error {} /// } /// # fn main() -> std::result::Result<(), Box> { /// let f = Foo { oid: ObjectId::new(), oid_as_hex: ObjectId::new() }; -/// println!("bson: {}", bson::to_document(&f)?); +/// println!("bson: {}", bson::serialize_to_document(&f)?); /// println!("json: {}", serde_json::to_string(&f)?); /// # Ok(()) /// # } diff --git a/src/raw.rs b/src/raw.rs index 252832c2..9537e3da 100644 --- a/src/raw.rs +++ b/src/raw.rs @@ -23,7 +23,7 @@ //! }; //! //! // See http://bsonspec.org/spec.html for details on the binary encoding of BSON. -//! let doc = RawDocumentBuf::from_bytes(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00".to_vec())?; +//! let doc = RawDocumentBuf::decode_from_bytes(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00".to_vec())?; //! let elem = doc.get("hi")?.unwrap(); //! //! assert_eq!( @@ -76,7 +76,7 @@ //! use bson::raw::RawDocument; //! //! let bytes = b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00"; -//! assert_eq!(RawDocument::from_bytes(bytes)?.get_str("hi")?, "y'all"); +//! assert_eq!(RawDocument::decode_from_bytes(bytes)?.get_str("hi")?, "y'all"); //! # Ok::<(), Box>(()) //! ``` //! @@ -119,17 +119,18 @@ mod bson_ref; mod document; mod document_buf; mod iter; +#[cfg(feature = "serde")] pub(crate) mod serde; #[cfg(test)] mod test; -use std::convert::{TryFrom, TryInto}; - -use crate::{ - de::MIN_BSON_STRING_SIZE, - error::{Error, ErrorKind, Result}, +use std::{ + convert::{TryFrom, TryInto}, + io::Read, }; +use crate::error::{Error, ErrorKind, Result}; + pub use self::{ array::{RawArray, RawArrayIter}, array_buf::RawArrayBuf, @@ -146,15 +147,23 @@ pub use self::{ iter::{RawElement, RawIter}, }; +pub(crate) const MIN_BSON_STRING_SIZE: i32 = 4 + 1; // 4 bytes for length, one byte for null terminator +pub(crate) const MIN_BSON_DOCUMENT_SIZE: i32 = 4 + 1; // 4 bytes for length, one byte for null terminator +pub(crate) const MIN_CODE_WITH_SCOPE_SIZE: i32 = 4 + MIN_BSON_STRING_SIZE + MIN_BSON_DOCUMENT_SIZE; + +#[cfg(feature = "serde")] pub(crate) use self::iter::{Utf8LossyBson, Utf8LossyJavaScriptCodeWithScope}; /// Special newtype name indicating that the type being (de)serialized is a raw BSON document. +#[cfg(feature = "serde")] pub(crate) const RAW_DOCUMENT_NEWTYPE: &str = "$__private__bson_RawDocument"; /// Special newtype name indicating that the type being (de)serialized is a raw BSON array. +#[cfg(feature = "serde")] pub(crate) const RAW_ARRAY_NEWTYPE: &str = "$__private__bson_RawArray"; /// Special newtype name indicating that the type being (de)serialized is a raw BSON value. 
+#[cfg(feature = "serde")] pub(crate) const RAW_BSON_NEWTYPE: &str = "$__private__bson_RawBson"; /// Given a u8 slice, return an i32 calculated from the first four bytes in @@ -285,3 +294,37 @@ fn checked_add(lhs: usize, rhs: usize) -> Result { lhs.checked_add(rhs) .ok_or_else(|| Error::malformed_value("attempted to add with overflow")) } + +pub(crate) fn reader_to_vec(mut reader: R) -> Result> { + let mut buf = [0; 4]; + reader.read_exact(&mut buf)?; + let length = i32::from_le_bytes(buf); + + if length < MIN_BSON_DOCUMENT_SIZE { + return Err(Error::malformed_value("document size too small")); + } + + let mut bytes = Vec::with_capacity(length as usize); + bytes.extend(buf); + + reader.take(length as u64 - 4).read_to_end(&mut bytes)?; + Ok(bytes) +} + +pub(crate) fn write_string(buf: &mut Vec, s: &str) { + buf.extend(&(s.len() as i32 + 1).to_le_bytes()); + buf.extend(s.as_bytes()); + buf.push(0); +} + +pub(crate) fn write_cstring(buf: &mut Vec, s: &str) -> Result<()> { + if s.contains('\0') { + return Err(Error::malformed_value(format!( + "cstring with interior null: {:?}", + s + ))); + } + buf.extend(s.as_bytes()); + buf.push(0); + Ok(()) +} diff --git a/src/raw/array.rs b/src/raw/array.rs index a55daa03..0c6020d3 100644 --- a/src/raw/array.rs +++ b/src/raw/array.rs @@ -1,9 +1,6 @@ use std::{borrow::Cow, convert::TryFrom}; -use serde::{ser::SerializeSeq, Deserialize, Serialize}; - use super::{ - serde::OwnedOrBorrowedRawArray, Error as RawError, RawBinaryRef, RawBsonRef, @@ -15,7 +12,6 @@ use super::{ use crate::{ error::{Error, Result}, oid::ObjectId, - raw::RAW_ARRAY_NEWTYPE, spec::ElementType, Bson, DateTime, @@ -41,9 +37,9 @@ use crate::{ /// let doc = doc! { /// "x": [1, true, "two", 5.5] /// }; -/// let bytes = bson::to_vec(&doc)?; +/// let bytes = bson::serialize_to_vec(&doc)?; /// -/// let rawdoc = RawDocument::from_bytes(bytes.as_slice())?; +/// let rawdoc = RawDocument::decode_from_bytes(bytes.as_slice())?; /// let rawarray = rawdoc.get_array("x")?; /// /// for v in rawarray { @@ -63,9 +59,9 @@ use crate::{ /// let doc = doc! { /// "x": [1, true, "two", 5.5] /// }; -/// let bytes = bson::to_vec(&doc)?; +/// let bytes = doc.encode_to_vec()?; /// -/// let rawdoc = RawDocument::from_bytes(bytes.as_slice())?; +/// let rawdoc = RawDocument::decode_from_bytes(bytes.as_slice())?; /// let rawarray = rawdoc.get_array("x")?; /// /// assert_eq!(rawarray.get_bool(1)?, true); @@ -91,6 +87,7 @@ impl RawArray { unsafe { &*(doc as *const RawDocument as *const RawArray) } } + #[cfg(feature = "serde")] pub(crate) fn as_doc(&self) -> &RawDocument { &self.doc } @@ -294,11 +291,13 @@ impl<'a> Iterator for RawArrayIter<'a> { } } -impl<'de: 'a, 'a> Deserialize<'de> for &'a RawArray { +#[cfg(feature = "serde")] +impl<'de: 'a, 'a> serde::Deserialize<'de> for &'a RawArray { fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { + use super::serde::OwnedOrBorrowedRawArray; match OwnedOrBorrowedRawArray::deserialize(deserializer)? 
{ OwnedOrBorrowedRawArray::Borrowed(b) => Ok(b), o => Err(serde::de::Error::custom(format!( @@ -309,18 +308,20 @@ impl<'de: 'a, 'a> Deserialize<'de> for &'a RawArray { } } -impl Serialize for &RawArray { +#[cfg(feature = "serde")] +impl serde::Serialize for &RawArray { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, { struct SeqSerializer<'a>(&'a RawArray); - impl Serialize for SeqSerializer<'_> { + impl serde::Serialize for SeqSerializer<'_> { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, { + use serde::ser::SerializeSeq as _; if serializer.is_human_readable() { let mut seq = serializer.serialize_seq(None)?; for v in self.0 { @@ -334,6 +335,6 @@ impl Serialize for &RawArray { } } - serializer.serialize_newtype_struct(RAW_ARRAY_NEWTYPE, &SeqSerializer(self)) + serializer.serialize_newtype_struct(crate::raw::RAW_ARRAY_NEWTYPE, &SeqSerializer(self)) } } diff --git a/src/raw/array_buf.rs b/src/raw/array_buf.rs index b8ac1f06..d7e2068a 100644 --- a/src/raw/array_buf.rs +++ b/src/raw/array_buf.rs @@ -1,18 +1,15 @@ use std::{ borrow::{Borrow, Cow}, fmt::Debug, - iter::FromIterator, }; -use serde::{Deserialize, Serialize}; - use crate::{RawArray, RawBsonRef, RawDocumentBuf}; -use super::{document_buf::BindRawBsonRef, serde::OwnedOrBorrowedRawArray, RawArrayIter}; +use super::{document_buf::BindRawBsonRef, RawArrayIter}; /// An owned BSON array value (akin to [`std::path::PathBuf`]), backed by a buffer of raw BSON /// bytes. This type can be used to construct owned array values, which can be used to append to -/// [`RawDocumentBuf`] or as a field in a [`Deserialize`] struct. +/// [`RawDocumentBuf`] or as a field in a [`Deserialize`](serde::Deserialize) struct. /// /// Iterating over a [`RawArrayBuf`] yields either an error or a [`RawBson`](crate::raw::RawBson) /// value that borrows from the original document without making any additional allocations. @@ -56,6 +53,19 @@ impl RawArrayBuf { } } + #[allow(clippy::should_implement_trait)] + pub fn from_iter(iter: I) -> crate::error::Result + where + B: BindRawBsonRef, + I: IntoIterator, + { + let mut array_buf = RawArrayBuf::new(); + for item in iter { + array_buf.push(item)?; + } + Ok(array_buf) + } + /// Construct a new [`RawArrayBuf`] from the provided [`Vec`] of bytes. /// /// This involves a traversal of the array to count the values. 
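// A sketch (not part of the patch) of the new inherent, fallible `RawArrayBuf::from_iter`
// introduced above, which replaces the infallible `FromIterator` implementation removed
// later in this file:
use bson::{RawArrayBuf, RawBson};

fn array_from_iter() -> Result<RawArrayBuf, bson::error::Error> {
    RawArrayBuf::from_iter([
        RawBson::Int32(1),
        RawBson::String("two".to_string()),
        RawBson::Boolean(true),
    ])
}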
@@ -92,9 +102,10 @@ impl RawArrayBuf { /// assert!(iter.next().is_none()); /// # Ok::<(), Error>(()) /// ``` - pub fn push(&mut self, value: impl BindRawBsonRef) { - self.inner.append(self.len.to_string(), value); + pub fn push(&mut self, value: impl BindRawBsonRef) -> crate::error::Result<()> { + self.inner.append(self.len.to_string(), value)?; self.len += 1; + Ok(()) } } @@ -148,26 +159,18 @@ impl<'a> From<&'a RawArrayBuf> for Cow<'a, RawArray> { } } -impl FromIterator for RawArrayBuf { - fn from_iter>(iter: I) -> Self { - let mut array_buf = RawArrayBuf::new(); - for item in iter { - array_buf.push(item); - } - array_buf - } -} - -impl<'de> Deserialize<'de> for RawArrayBuf { +#[cfg(feature = "serde")] +impl<'de> serde::Deserialize<'de> for RawArrayBuf { fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { - Ok(OwnedOrBorrowedRawArray::deserialize(deserializer)?.into_owned()) + Ok(super::serde::OwnedOrBorrowedRawArray::deserialize(deserializer)?.into_owned()) } } -impl Serialize for RawArrayBuf { +#[cfg(feature = "serde")] +impl serde::Serialize for RawArrayBuf { fn serialize(&self, serializer: S) -> Result where S: serde::Serializer, @@ -181,3 +184,24 @@ impl Default for RawArrayBuf { Self::new() } } + +impl TryFrom<&crate::Array> for RawArrayBuf { + type Error = crate::error::Error; + + fn try_from(value: &crate::Array) -> Result { + Self::try_from(value.clone()) + } +} + +impl TryFrom for RawArrayBuf { + type Error = crate::error::Error; + + fn try_from(value: crate::Array) -> Result { + let mut tmp = RawArrayBuf::new(); + for val in value { + let raw: super::RawBson = val.try_into()?; + tmp.push(raw)?; + } + Ok(tmp) + } +} diff --git a/src/raw/bson.rs b/src/raw/bson.rs index 7bb475a4..fb2c7252 100644 --- a/src/raw/bson.rs +++ b/src/raw/bson.rs @@ -1,10 +1,7 @@ use std::convert::{TryFrom, TryInto}; -use serde::{Deserialize, Serialize}; - use crate::{ oid::{self, ObjectId}, - raw::RAW_BSON_NEWTYPE, spec::ElementType, Binary, Bson, @@ -23,11 +20,7 @@ use crate::{ Timestamp, }; -use super::{ - serde::{bson_visitor::OwnedOrBorrowedRawBsonVisitor, OwnedOrBorrowedRawBson}, - Error, - Result, -}; +use super::{Error, Result}; /// A BSON value backed by owned raw BSON bytes. #[derive(Debug, Clone, PartialEq)] @@ -424,21 +417,25 @@ impl From for RawBson { } } -impl<'de> Deserialize<'de> for RawBson { +#[cfg(feature = "serde")] +impl<'de> serde::Deserialize<'de> for RawBson { fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { - match deserializer - .deserialize_newtype_struct(RAW_BSON_NEWTYPE, OwnedOrBorrowedRawBsonVisitor)? - { + use super::serde::{bson_visitor::OwnedOrBorrowedRawBsonVisitor, OwnedOrBorrowedRawBson}; + match deserializer.deserialize_newtype_struct( + crate::raw::RAW_BSON_NEWTYPE, + OwnedOrBorrowedRawBsonVisitor, + )? 
{ OwnedOrBorrowedRawBson::Owned(o) => Ok(o), OwnedOrBorrowedRawBson::Borrowed(b) => Ok(b.to_raw_bson()), } } } -impl Serialize for RawBson { +#[cfg(feature = "serde")] +impl serde::Serialize for RawBson { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, @@ -483,18 +480,14 @@ impl TryFrom for Bson { } impl TryFrom for RawBson { - type Error = Error; + type Error = crate::error::Error; - fn try_from(bson: Bson) -> Result { + fn try_from(bson: Bson) -> crate::error::Result { Ok(match bson { Bson::Double(d) => RawBson::Double(d), Bson::String(s) => RawBson::String(s), - Bson::Document(doc) => RawBson::Document((&doc).try_into()?), - Bson::Array(arr) => RawBson::Array( - arr.into_iter() - .map(|b| -> Result { b.try_into() }) - .collect::>()?, - ), + Bson::Document(doc) => RawBson::Document(doc.try_into()?), + Bson::Array(arr) => RawBson::Array(arr.try_into()?), Bson::Binary(bin) => RawBson::Binary(bin), Bson::ObjectId(id) => RawBson::ObjectId(id), Bson::Boolean(b) => RawBson::Boolean(b), @@ -511,7 +504,7 @@ impl TryFrom for RawBson { Bson::JavaScriptCodeWithScope(jcws) => { RawBson::JavaScriptCodeWithScope(crate::RawJavaScriptCodeWithScope { code: jcws.code, - scope: (&jcws.scope).try_into()?, + scope: jcws.scope.try_into()?, }) } Bson::Decimal128(d) => RawBson::Decimal128(d), @@ -531,7 +524,8 @@ pub struct RawJavaScriptCodeWithScope { pub scope: RawDocumentBuf, } -impl<'de> Deserialize<'de> for RawJavaScriptCodeWithScope { +#[cfg(feature = "serde")] +impl<'de> serde::Deserialize<'de> for RawJavaScriptCodeWithScope { fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, @@ -546,7 +540,8 @@ impl<'de> Deserialize<'de> for RawJavaScriptCodeWithScope { } } -impl Serialize for RawJavaScriptCodeWithScope { +#[cfg(feature = "serde")] +impl serde::Serialize for RawJavaScriptCodeWithScope { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, diff --git a/src/raw/bson_ref.rs b/src/raw/bson_ref.rs index 94a9b9fb..2edceaa7 100644 --- a/src/raw/bson_ref.rs +++ b/src/raw/bson_ref.rs @@ -1,21 +1,9 @@ use std::convert::{TryFrom, TryInto}; -use serde::{ser::SerializeStruct, Deserialize, Serialize}; -use serde_bytes::Bytes; - -use super::{ - bson::RawBson, - serde::{bson_visitor::OwnedOrBorrowedRawBsonVisitor, OwnedOrBorrowedRawBson}, - Error, - RawArray, - RawDocument, - Result, -}; +use super::{bson::RawBson, Error, RawArray, RawDocument, Result}; use crate::{ - base64, - extjson, oid::{self, ObjectId}, - raw::{RawJavaScriptCodeWithScope, RAW_BSON_NEWTYPE}, + raw::RawJavaScriptCodeWithScope, spec::{BinarySubtype, ElementType}, Binary, Bson, @@ -27,6 +15,9 @@ use crate::{ Timestamp, }; +#[cfg(feature = "serde")] +use serde::ser::SerializeStruct as _; + /// A BSON value referencing raw bytes stored elsewhere. #[derive(Debug, Clone, Copy, PartialEq)] pub enum RawBsonRef<'a> { @@ -296,14 +287,17 @@ impl<'a> RawBsonRef<'a> { } } -impl<'de: 'a, 'a> Deserialize<'de> for RawBsonRef<'a> { +#[cfg(feature = "serde")] +impl<'de: 'a, 'a> serde::Deserialize<'de> for RawBsonRef<'a> { fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { - match deserializer - .deserialize_newtype_struct(RAW_BSON_NEWTYPE, OwnedOrBorrowedRawBsonVisitor)? - { + use super::serde::{bson_visitor::OwnedOrBorrowedRawBsonVisitor, OwnedOrBorrowedRawBson}; + match deserializer.deserialize_newtype_struct( + crate::raw::RAW_BSON_NEWTYPE, + OwnedOrBorrowedRawBsonVisitor, + )? 
{ OwnedOrBorrowedRawBson::Borrowed(b) => Ok(b), o => Err(serde::de::Error::custom(format!( "RawBson must be deserialized from borrowed content, instead got {:?}", @@ -313,7 +307,8 @@ impl<'de: 'a, 'a> Deserialize<'de> for RawBsonRef<'a> { } } -impl Serialize for RawBsonRef<'_> { +#[cfg(feature = "serde")] +impl serde::Serialize for RawBsonRef<'_> { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, @@ -477,7 +472,8 @@ impl RawBinaryRef<'_> { } } -impl<'de: 'a, 'a> Deserialize<'de> for RawBinaryRef<'a> { +#[cfg(feature = "serde")] +impl<'de: 'a, 'a> serde::Deserialize<'de> for RawBinaryRef<'a> { fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, @@ -492,7 +488,8 @@ impl<'de: 'a, 'a> Deserialize<'de> for RawBinaryRef<'a> { } } -impl Serialize for RawBinaryRef<'_> { +#[cfg(feature = "serde")] +impl serde::Serialize for RawBinaryRef<'_> { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, @@ -500,7 +497,9 @@ impl Serialize for RawBinaryRef<'_> { if let BinarySubtype::Generic = self.subtype { serializer.serialize_bytes(self.bytes) } else if !serializer.is_human_readable() { - #[derive(Serialize)] + use serde_bytes::Bytes; + + #[derive(serde::Serialize)] struct BorrowedBinary<'a> { bytes: &'a Bytes, @@ -517,8 +516,8 @@ impl Serialize for RawBinaryRef<'_> { state.end() } else { let mut state = serializer.serialize_struct("$binary", 1)?; - let body = extjson::models::BinaryBody { - base64: base64::encode(self.bytes), + let body = crate::extjson::models::BinaryBody { + base64: crate::base64::encode(self.bytes), subtype: hex::encode([self.subtype.into()]), }; state.serialize_field("$binary", &body)?; @@ -555,7 +554,8 @@ pub struct RawRegexRef<'a> { pub options: &'a str, } -impl<'de: 'a, 'a> Deserialize<'de> for RawRegexRef<'a> { +#[cfg(feature = "serde")] +impl<'de: 'a, 'a> serde::Deserialize<'de> for RawRegexRef<'a> { fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, @@ -570,12 +570,13 @@ impl<'de: 'a, 'a> Deserialize<'de> for RawRegexRef<'a> { } } -impl Serialize for RawRegexRef<'_> { +#[cfg(feature = "serde")] +impl serde::Serialize for RawRegexRef<'_> { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, { - #[derive(Serialize)] + #[derive(serde::Serialize)] struct BorrowedRegexBody<'a> { pattern: &'a str, options: &'a str, @@ -613,7 +614,8 @@ impl RawJavaScriptCodeWithScopeRef<'_> { } } -impl<'de: 'a, 'a> Deserialize<'de> for RawJavaScriptCodeWithScopeRef<'a> { +#[cfg(feature = "serde")] +impl<'de: 'a, 'a> serde::Deserialize<'de> for RawJavaScriptCodeWithScopeRef<'a> { fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, @@ -628,7 +630,8 @@ impl<'de: 'a, 'a> Deserialize<'de> for RawJavaScriptCodeWithScopeRef<'a> { } } -impl Serialize for RawJavaScriptCodeWithScopeRef<'_> { +#[cfg(feature = "serde")] +impl serde::Serialize for RawJavaScriptCodeWithScopeRef<'_> { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, @@ -653,7 +656,8 @@ pub struct RawDbPointerRef<'a> { pub(crate) id: ObjectId, } -impl<'de: 'a, 'a> Deserialize<'de> for RawDbPointerRef<'a> { +#[cfg(feature = "serde")] +impl<'de: 'a, 'a> serde::Deserialize<'de> for RawDbPointerRef<'a> { fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, @@ -668,12 +672,13 @@ impl<'de: 'a, 'a> Deserialize<'de> for RawDbPointerRef<'a> { } } -impl Serialize for 
RawDbPointerRef<'_> { +#[cfg(feature = "serde")] +impl serde::Serialize for RawDbPointerRef<'_> { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, { - #[derive(Serialize)] + #[derive(serde::Serialize)] struct BorrowedDbPointerBody<'a> { #[serde(rename = "$ref")] ref_ns: &'a str, diff --git a/src/raw/document.rs b/src/raw/document.rs index e62eb62e..262df7ec 100644 --- a/src/raw/document.rs +++ b/src/raw/document.rs @@ -3,12 +3,8 @@ use std::{ convert::{TryFrom, TryInto}, }; -use serde::{ser::SerializeMap, Deserialize, Serialize}; - use crate::{ - de::MIN_BSON_DOCUMENT_SIZE, error::{Error, Result}, - raw::{serde::OwnedOrBorrowedRawDocument, RAW_DOCUMENT_NEWTYPE}, Bson, DateTime, JavaScriptCodeWithScope, @@ -29,6 +25,7 @@ use super::{ RawIter, RawRegexRef, Result as RawResult, + MIN_BSON_DOCUMENT_SIZE, }; use crate::{oid::ObjectId, spec::ElementType, Document}; @@ -49,7 +46,7 @@ use crate::{oid::ObjectId, spec::ElementType, Document}; /// # use bson::error::Error; /// use bson::raw::RawDocument; /// -/// let doc = RawDocument::from_bytes(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00")?; +/// let doc = RawDocument::decode_from_bytes(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00")?; /// let mut iter = doc.into_iter(); /// let (key, value) = iter.next().unwrap()?; /// assert_eq!(key, "hi"); @@ -66,7 +63,7 @@ use crate::{oid::ObjectId, spec::ElementType, Document}; /// ``` /// use bson::raw::RawDocument; /// -/// let doc = RawDocument::from_bytes(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00")?; +/// let doc = RawDocument::decode_from_bytes(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00")?; /// assert_eq!(doc.get_str("hi")?, "y'all"); /// # Ok::<(), Box>(()) /// ``` @@ -92,10 +89,10 @@ impl RawDocument { /// ``` /// use bson::raw::RawDocument; /// - /// let doc = RawDocument::from_bytes(b"\x05\0\0\0\0")?; + /// let doc = RawDocument::decode_from_bytes(b"\x05\0\0\0\0")?; /// # Ok::<(), bson::error::Error>(()) /// ``` - pub fn from_bytes + ?Sized>(data: &D) -> RawResult<&RawDocument> { + pub fn decode_from_bytes + ?Sized>(data: &D) -> RawResult<&RawDocument> { let data = data.as_ref(); if data.len() < 5 { @@ -135,12 +132,12 @@ impl RawDocument { /// use bson::raw::{RawDocument, RawDocumentBuf}; /// /// let data = b"\x05\0\0\0\0"; - /// let doc_ref = RawDocument::from_bytes(data)?; + /// let doc_ref = RawDocument::decode_from_bytes(data)?; /// let doc: RawDocumentBuf = doc_ref.to_raw_document_buf(); /// # Ok::<(), bson::error::Error>(()) pub fn to_raw_document_buf(&self) -> RawDocumentBuf { // unwrap is ok here because we already verified the bytes in `RawDocumentRef::new` - RawDocumentBuf::from_bytes(self.data.to_owned()).unwrap() + RawDocumentBuf::decode_from_bytes(self.data.to_owned()).unwrap() } /// Gets a reference to the value corresponding to the given key by iterating until the key is @@ -563,11 +560,13 @@ fn deep_utf8_lossy(src: RawBson) -> RawResult { } } -impl<'de: 'a, 'a> Deserialize<'de> for &'a RawDocument { +#[cfg(feature = "serde")] +impl<'de: 'a, 'a> serde::Deserialize<'de> for &'a RawDocument { fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { + use super::serde::OwnedOrBorrowedRawDocument; match OwnedOrBorrowedRawDocument::deserialize(deserializer)? 
{ OwnedOrBorrowedRawDocument::Borrowed(b) => Ok(b), OwnedOrBorrowedRawDocument::Owned(d) => Err(serde::de::Error::custom(format!( @@ -578,18 +577,20 @@ impl<'de: 'a, 'a> Deserialize<'de> for &'a RawDocument { } } -impl Serialize for &RawDocument { +#[cfg(feature = "serde")] +impl serde::Serialize for &RawDocument { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, { struct KvpSerializer<'a>(&'a RawDocument); - impl Serialize for KvpSerializer<'_> { + impl serde::Serialize for KvpSerializer<'_> { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, { + use serde::ser::SerializeMap as _; if serializer.is_human_readable() { let mut map = serializer.serialize_map(None)?; for kvp in self.0 { @@ -602,7 +603,7 @@ impl Serialize for &RawDocument { } } } - serializer.serialize_newtype_struct(RAW_DOCUMENT_NEWTYPE, &KvpSerializer(self)) + serializer.serialize_newtype_struct(super::RAW_DOCUMENT_NEWTYPE, &KvpSerializer(self)) } } @@ -645,6 +646,22 @@ impl TryFrom<&RawDocument> for crate::Document { } } +impl TryFrom for Document { + type Error = crate::error::Error; + + fn try_from(raw: RawDocumentBuf) -> Result { + Document::try_from(raw.as_ref()) + } +} + +impl TryFrom<&RawDocumentBuf> for Document { + type Error = crate::error::Error; + + fn try_from(raw: &RawDocumentBuf) -> Result { + Document::try_from(raw.as_ref()) + } +} + impl<'a> IntoIterator for &'a RawDocument { type IntoIter = Iter<'a>; type Item = RawResult<(&'a str, RawBsonRef<'a>)>; diff --git a/src/raw/document_buf.rs b/src/raw/document_buf.rs index e89ef704..2f5c22b9 100644 --- a/src/raw/document_buf.rs +++ b/src/raw/document_buf.rs @@ -1,24 +1,12 @@ use std::{ borrow::{Borrow, Cow}, convert::TryFrom, - iter::FromIterator, ops::Deref, }; -use serde::{Deserialize, Serialize}; +use crate::{raw::MIN_BSON_DOCUMENT_SIZE, Document}; -use crate::{de::MIN_BSON_DOCUMENT_SIZE, Document}; - -use super::{ - bson::RawBson, - iter::Iter, - serde::OwnedOrBorrowedRawDocument, - Error, - RawBsonRef, - RawDocument, - RawIter, - Result, -}; +use super::{bson::RawBson, iter::Iter, RawBsonRef, RawDocument, RawIter, Result}; mod raw_writer; @@ -36,7 +24,7 @@ mod raw_writer; /// # use bson::error::Error; /// use bson::raw::RawDocumentBuf; /// -/// let doc = RawDocumentBuf::from_bytes(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00".to_vec())?; +/// let doc = RawDocumentBuf::decode_from_bytes(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00".to_vec())?; /// let mut iter = doc.iter(); /// let (key, value) = iter.next().unwrap()?; /// assert_eq!(key, "hi"); @@ -54,7 +42,7 @@ mod raw_writer; /// ``` /// use bson::raw::RawDocumentBuf; /// -/// let doc = RawDocumentBuf::from_bytes(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00".to_vec())?; +/// let doc = RawDocumentBuf::decode_from_bytes(b"\x13\x00\x00\x00\x02hi\x00\x06\x00\x00\x00y'all\x00\x00".to_vec())?; /// assert_eq!(doc.get_str("hi")?, "y'all"); /// # Ok::<(), Box>(()) /// ``` @@ -86,17 +74,31 @@ impl RawDocumentBuf { /// /// ``` /// # use bson::raw::RawDocumentBuf; - /// let doc = RawDocumentBuf::from_bytes(b"\x05\0\0\0\0".to_vec())?; + /// let doc = RawDocumentBuf::decode_from_bytes(b"\x05\0\0\0\0".to_vec())?; /// # Ok::<(), bson::error::Error>(()) /// ``` - pub fn from_bytes(data: Vec) -> Result { - let _ = RawDocument::from_bytes(data.as_slice())?; + pub fn decode_from_bytes(data: Vec) -> Result { + let _ = RawDocument::decode_from_bytes(data.as_slice())?; Ok(Self { data }) } - pub fn from_reader(reader: R) 
-> Result { - let buf = crate::de::reader_to_vec(reader)?; - Self::from_bytes(buf) + pub fn decode_from_reader(reader: R) -> Result { + let buf = crate::raw::reader_to_vec(reader)?; + Self::decode_from_bytes(buf) + } + + #[allow(clippy::should_implement_trait)] + pub fn from_iter(iter: I) -> Result + where + S: AsRef, + B: BindRawBsonRef, + I: IntoIterator, + { + let mut buf = RawDocumentBuf::new(); + for (k, v) in iter { + buf.append(k, v)?; + } + Ok(buf) } /// Create a [`RawDocumentBuf`] from a [`Document`]. @@ -112,11 +114,13 @@ impl RawDocumentBuf { /// let doc = RawDocumentBuf::from_document(&document)?; /// # Ok::<(), bson::error::Error>(()) /// ``` - pub fn from_document(doc: &Document) -> Result { - let mut data = Vec::new(); - doc.to_writer(&mut data).map_err(Error::malformed_value)?; - - Ok(Self { data }) + pub fn from_document(doc: impl Borrow) -> Result { + let mut out = RawDocumentBuf::new(); + for (k, v) in doc.borrow() { + let val: RawBson = v.clone().try_into()?; + out.append(k, val)?; + } + Ok(out) } /// Gets an iterator over the elements in the [`RawDocumentBuf`], which yields @@ -211,11 +215,13 @@ impl RawDocumentBuf { /// assert_eq!(doc.to_document()?, expected); /// # Ok::<(), Error>(()) /// ``` - pub fn append(&mut self, key: impl AsRef, value: impl BindRawBsonRef) { + pub fn append( + &mut self, + key: impl AsRef, + value: impl BindRawBsonRef, + ) -> crate::error::Result<()> { value.bind(|value_ref| { - raw_writer::RawWriter::new(&mut self.data) - .append(key.as_ref(), value_ref) - .expect("key should not contain interior null byte") + raw_writer::RawWriter::new(&mut self.data).append(key.as_ref(), value_ref) }) } } @@ -226,16 +232,18 @@ impl Default for RawDocumentBuf { } } -impl<'de> Deserialize<'de> for RawDocumentBuf { +#[cfg(feature = "serde")] +impl<'de> serde::Deserialize<'de> for RawDocumentBuf { fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { - Ok(OwnedOrBorrowedRawDocument::deserialize(deserializer)?.into_owned()) + Ok(super::serde::OwnedOrBorrowedRawDocument::deserialize(deserializer)?.into_owned()) } } -impl Serialize for RawDocumentBuf { +#[cfg(feature = "serde")] +impl serde::Serialize for RawDocumentBuf { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, @@ -265,19 +273,24 @@ impl<'a> From<&'a RawDocumentBuf> for Cow<'a, RawDocument> { } } -impl TryFrom for Document { - type Error = Error; +impl TryFrom<&Document> for RawDocumentBuf { + type Error = crate::error::Error; - fn try_from(raw: RawDocumentBuf) -> Result { - Document::try_from(raw.as_ref()) + fn try_from(doc: &Document) -> std::result::Result { + RawDocumentBuf::from_document(doc) } } -impl TryFrom<&Document> for RawDocumentBuf { - type Error = Error; +impl TryFrom for RawDocumentBuf { + type Error = crate::error::Error; - fn try_from(doc: &Document) -> Result { - RawDocumentBuf::from_document(doc) + fn try_from(doc: Document) -> std::result::Result { + let mut out = RawDocumentBuf::new(); + for (k, v) in doc { + let val: RawBson = v.try_into()?; + out.append(k, val)?; + } + Ok(out) } } @@ -310,16 +323,6 @@ impl Borrow for RawDocumentBuf { } } -impl, T: BindRawBsonRef> FromIterator<(S, T)> for RawDocumentBuf { - fn from_iter>(iter: I) -> Self { - let mut buf = RawDocumentBuf::new(); - for (k, v) in iter { - buf.append(k, v); - } - buf - } -} - /// Types that can be consumed to produce raw bson references valid for a limited lifetime. 
/// Conceptually a union between `T: Into` and `T: Into`; if your type /// implements `Into` it will automatically implement this, but if it only diff --git a/src/raw/document_buf/raw_writer.rs b/src/raw/document_buf/raw_writer.rs index 1d6fae22..59edfc82 100644 --- a/src/raw/document_buf/raw_writer.rs +++ b/src/raw/document_buf/raw_writer.rs @@ -1,5 +1,5 @@ use crate::{ - ser::{write_cstring, write_string}, + raw::{write_cstring, write_string}, spec::BinarySubtype, RawBsonRef, }; @@ -13,7 +13,7 @@ impl<'a> RawWriter<'a> { Self { data } } - pub(super) fn append(&mut self, key: &str, value: RawBsonRef) -> crate::ser::Result<()> { + pub(super) fn append(&mut self, key: &str, value: RawBsonRef) -> crate::error::Result<()> { let original_len = self.data.len(); self.data[original_len - 1] = value.element_type() as u8; diff --git a/src/raw/iter.rs b/src/raw/iter.rs index 8dddbf5b..6c3c640e 100644 --- a/src/raw/iter.rs +++ b/src/raw/iter.rs @@ -1,9 +1,8 @@ use std::convert::TryInto; use crate::{ - de::{MIN_BSON_DOCUMENT_SIZE, MIN_CODE_WITH_SCOPE_SIZE}, oid::ObjectId, - raw::{Error, Result}, + raw::{Error, Result, MIN_BSON_DOCUMENT_SIZE, MIN_CODE_WITH_SCOPE_SIZE}, spec::{BinarySubtype, ElementType}, Bson, DateTime, @@ -144,8 +143,9 @@ impl TryInto for RawElement<'_> { } impl<'a> RawElement<'a> { + #[cfg(feature = "serde")] pub(crate) fn toplevel(bytes: &'a [u8]) -> Result { - let doc = RawDocument::from_bytes(bytes)?; + let doc = RawDocument::decode_from_bytes(bytes)?; Ok(Self { key: "TOPLEVEL", kind: ElementType::EmbeddedDocument, @@ -179,11 +179,11 @@ impl<'a> RawElement<'a> { ElementType::Double => RawBsonRef::Double(f64_from_slice(self.slice())?), ElementType::String => RawBsonRef::String(self.read_str()?), ElementType::EmbeddedDocument => { - RawBsonRef::Document(RawDocument::from_bytes(self.slice())?) - } - ElementType::Array => { - RawBsonRef::Array(RawArray::from_doc(RawDocument::from_bytes(self.slice())?)) + RawBsonRef::Document(RawDocument::decode_from_bytes(self.slice())?) 
} + ElementType::Array => RawBsonRef::Array(RawArray::from_doc( + RawDocument::decode_from_bytes(self.slice())?, + )), ElementType::Boolean => RawBsonRef::Boolean( bool_from_slice(self.slice()).map_err(|e| self.malformed_error(e))?, ), @@ -210,9 +210,12 @@ impl<'a> RawElement<'a> { .read_cstring_at(self.start_at + pattern.len() + 1)?, }) } - ElementType::Timestamp => RawBsonRef::Timestamp( - Timestamp::from_reader(self.slice()).map_err(|e| self.malformed_error(e))?, - ), + ElementType::Timestamp => RawBsonRef::Timestamp({ + let bytes: [u8; 8] = self.slice()[0..8] + .try_into() + .map_err(|e| self.malformed_error(e))?; + Timestamp::from_le_bytes(bytes) + }), ElementType::Binary => { let len = self.size.checked_sub(4 + 1).ok_or_else(|| { self.malformed_error(format!("length exceeds maximum: {}", self.size)) @@ -258,7 +261,7 @@ impl<'a> RawElement<'a> { let slice = self.slice(); let code = read_lenencode(&slice[4..])?; let scope_start = 4 + 4 + code.len() + 1; - let scope = RawDocument::from_bytes(&slice[scope_start..])?; + let scope = RawDocument::decode_from_bytes(&slice[scope_start..])?; RawBsonRef::JavaScriptCodeWithScope(RawJavaScriptCodeWithScopeRef { code, scope }) } @@ -284,7 +287,7 @@ impl<'a> RawElement<'a> { let slice = self.slice(); let code = String::from_utf8_lossy(read_lenencode_bytes(&slice[4..])?).into_owned(); let scope_start = 4 + 4 + code.len() + 1; - let scope = RawDocument::from_bytes(&slice[scope_start..])?; + let scope = RawDocument::decode_from_bytes(&slice[scope_start..])?; Utf8LossyBson::JavaScriptCodeWithScope(Utf8LossyJavaScriptCodeWithScope { code, diff --git a/src/raw/serde.rs b/src/raw/serde.rs index 40dc03fb..6e2da2e2 100644 --- a/src/raw/serde.rs +++ b/src/raw/serde.rs @@ -108,8 +108,8 @@ impl<'a, 'de: 'a> TryFrom> for OwnedOrBorrowedRawDocument<'a> fn try_from(buffer: CowByteBuffer<'de>) -> Result { let doc = match buffer.0 { - Some(Cow::Borrowed(borrowed)) => RawDocument::from_bytes(borrowed)?.into(), - Some(Cow::Owned(owned)) => RawDocumentBuf::from_bytes(owned)?.into(), + Some(Cow::Borrowed(borrowed)) => RawDocument::decode_from_bytes(borrowed)?.into(), + Some(Cow::Owned(owned)) => RawDocumentBuf::decode_from_bytes(owned)?.into(), None => RawDocumentBuf::new().into(), }; Ok(doc) @@ -134,14 +134,14 @@ impl<'a, 'de: 'a> Deserialize<'de> for OwnedOrBorrowedRawDocument<'a> { if b.subtype == BinarySubtype::Generic => { Ok(Self::Borrowed( - RawDocument::from_bytes(b.bytes).map_err(SerdeError::custom)?, + RawDocument::decode_from_bytes(b.bytes).map_err(SerdeError::custom)?, )) } OwnedOrBorrowedRawBson::Owned(RawBson::Binary(b)) if b.subtype == BinarySubtype::Generic => { Ok(Self::Owned( - RawDocumentBuf::from_bytes(b.bytes).map_err(SerdeError::custom)?, + RawDocumentBuf::decode_from_bytes(b.bytes).map_err(SerdeError::custom)?, )) } @@ -186,13 +186,13 @@ impl<'a, 'de: 'a> Deserialize<'de> for OwnedOrBorrowedRawArray<'a> { OwnedOrBorrowedRawBson::Borrowed(RawBsonRef::Binary(b)) if b.subtype == BinarySubtype::Generic => { - let doc = RawDocument::from_bytes(b.bytes).map_err(SerdeError::custom)?; + let doc = RawDocument::decode_from_bytes(b.bytes).map_err(SerdeError::custom)?; Ok(Self::Borrowed(RawArray::from_doc(doc))) } OwnedOrBorrowedRawBson::Owned(RawBson::Binary(b)) if b.subtype == BinarySubtype::Generic => { - let doc = RawDocumentBuf::from_bytes(b.bytes).map_err(SerdeError::custom)?; + let doc = RawDocumentBuf::decode_from_bytes(b.bytes).map_err(SerdeError::custom)?; Ok(Self::Owned(RawArrayBuf::from_raw_document_buf(doc))) } diff --git 
a/src/raw/serde/bson_visitor.rs b/src/raw/serde/bson_visitor.rs index 24d7af35..8739321e 100644 --- a/src/raw/serde/bson_visitor.rs +++ b/src/raw/serde/bson_visitor.rs @@ -185,12 +185,12 @@ impl OwnedOrBorrowedRawBsonVisitor { } RAW_DOCUMENT_NEWTYPE => { let bson = map.next_value::<&[u8]>()?; - let doc = RawDocument::from_bytes(bson).map_err(SerdeError::custom)?; + let doc = RawDocument::decode_from_bytes(bson).map_err(SerdeError::custom)?; RawBsonRef::Document(doc).into() } RAW_ARRAY_NEWTYPE => { let bson = map.next_value::<&[u8]>()?; - let doc = RawDocument::from_bytes(bson).map_err(SerdeError::custom)?; + let doc = RawDocument::decode_from_bytes(bson).map_err(SerdeError::custom)?; RawBsonRef::Array(RawArray::from_doc(doc)).into() } _ => return Ok(MapParse::Aggregate(first_key)), diff --git a/src/raw/serde/seeded_visitor.rs b/src/raw/serde/seeded_visitor.rs index 16bbf466..2b5034fa 100644 --- a/src/raw/serde/seeded_visitor.rs +++ b/src/raw/serde/seeded_visitor.rs @@ -6,8 +6,7 @@ use serde::{ }; use crate::{ - raw::RAW_BSON_NEWTYPE, - ser::{write_cstring, write_string}, + raw::{write_cstring, write_string, RAW_BSON_NEWTYPE}, spec::{BinarySubtype, ElementType}, RawBson, RawBsonRef, @@ -294,7 +293,8 @@ impl<'de> Visitor<'de> for SeededVisitor<'_, 'de> { } RawBsonRef::Undefined => Ok(ElementType::Undefined), RawBsonRef::DateTime(dt) => { - self.buffer.append_bytes(&dt.as_le_bytes()); + self.buffer + .append_bytes(&dt.timestamp_millis().to_le_bytes()); Ok(ElementType::DateTime) } RawBsonRef::Timestamp(ts) => { diff --git a/src/raw/test.rs b/src/raw/test.rs index caf1fc99..0bc6db71 100644 --- a/src/raw/test.rs +++ b/src/raw/test.rs @@ -120,12 +120,13 @@ fn rawdoc_to_doc() { }; let doc: crate::Document = rawdoc.clone().try_into().expect("invalid bson"); - let round_tripped_bytes = crate::to_vec(&doc).expect("serialize should work"); - assert_eq!(round_tripped_bytes.as_slice(), rawdoc.as_bytes()); + #[cfg(feature = "serde")] + { + let round_tripped_bytes = crate::serialize_to_vec(&doc).expect("serialize should work"); + assert_eq!(round_tripped_bytes.as_slice(), rawdoc.as_bytes()); + } - let mut vec_writer_bytes = vec![]; - doc.to_writer(&mut vec_writer_bytes) - .expect("to writer should work"); + let vec_writer_bytes = doc.encode_to_vec().expect("encode should work"); assert_eq!(vec_writer_bytes, rawdoc.into_bytes()); } @@ -430,7 +431,7 @@ fn into_bson_conversion() { "binary": Binary { subtype: BinarySubtype::Generic, bytes: vec![1u8, 2, 3] }, "boolean": false, }; - let rawbson = RawBsonRef::Document(RawDocument::from_bytes(rawdoc.as_bytes()).unwrap()); + let rawbson = RawBsonRef::Document(RawDocument::decode_from_bytes(rawdoc.as_bytes()).unwrap()); let b: Bson = rawbson.try_into().expect("invalid bson"); let doc = b.as_document().expect("not a document"); assert_eq!(*doc.get("f64").expect("f64 not found"), Bson::Double(2.5)); @@ -470,12 +471,13 @@ fn into_bson_conversion() { ); } +#[cfg(feature = "serde")] #[test] fn fuzz_oom() { let bytes: &[u8] = &[ 17, 0, 0, 0, 11, 36, 100, 97, 116, 101, 0, 111, 112, 101, 0, 4, 0, ]; - let _ = crate::from_slice::(bytes); + let _ = crate::deserialize_from_slice::(bytes); } use props::arbitrary_bson; @@ -485,15 +487,21 @@ use std::convert::TryInto; proptest! { #[test] fn no_crashes(s: Vec) { - let _ = RawDocumentBuf::from_bytes(s); + let _ = RawDocumentBuf::decode_from_bytes(s); } #[test] fn roundtrip_bson(bson in arbitrary_bson()) { let doc = doc! 
{ "bson": bson }; - let raw = crate::to_vec(&doc); - prop_assert!(raw.is_ok()); - let raw = RawDocumentBuf::from_bytes(raw.unwrap()); + let bytes = doc.encode_to_vec(); + prop_assert!(bytes.is_ok()); + let bytes = bytes.unwrap(); + #[cfg(feature = "serde")] + { + let raw = crate::serialize_to_vec(&doc); + prop_assert!(raw.is_ok()); + } + let raw = RawDocumentBuf::decode_from_bytes(bytes); prop_assert!(raw.is_ok()); let raw = raw.unwrap(); let roundtrip: Result = raw.try_into(); diff --git a/src/raw/test/append.rs b/src/raw/test/append.rs index 147fa152..fdaeef7f 100644 --- a/src/raw/test/append.rs +++ b/src/raw/test/append.rs @@ -1,5 +1,3 @@ -use std::iter::FromIterator; - use crate::{ oid::ObjectId, raw::RawJavaScriptCodeWithScope, @@ -21,10 +19,13 @@ use crate::{ use pretty_assertions::assert_eq; -fn append_test(expected: Document, append: impl FnOnce(&mut RawDocumentBuf)) { - let bytes = crate::to_vec(&expected).unwrap(); +fn append_test( + expected: Document, + append: impl FnOnce(&mut RawDocumentBuf) -> crate::error::Result<()>, +) { + let bytes = expected.encode_to_vec().unwrap(); let mut buf = RawDocumentBuf::new(); - append(&mut buf); + assert!(append(&mut buf).is_ok()); assert_eq!(buf.as_bytes(), bytes); } @@ -36,9 +37,10 @@ fn i32() { "c": 0_i32 }; append_test(expected, |doc| { - doc.append("a", -1_i32); - doc.append("b", 123_i32); - doc.append("c", 0_i32); + doc.append("a", -1_i32)?; + doc.append("b", 123_i32)?; + doc.append("c", 0_i32)?; + Ok(()) }); } @@ -50,9 +52,10 @@ fn i64() { "c": 0_i64 }; append_test(expected, |doc| { - doc.append("a", -1_i64); - doc.append("b", 123_i64); - doc.append("c", 0_i64); + doc.append("a", -1_i64)?; + doc.append("b", 123_i64)?; + doc.append("c", 0_i64)?; + Ok(()) }); } @@ -65,10 +68,11 @@ fn str() { "last": "the lazy sheep dog", }; append_test(expected, |doc| { - doc.append("first", "the quick"); - doc.append("second", "brown fox"); - doc.append("third", "jumped over"); - doc.append("last", "the lazy sheep dog"); + doc.append("first", "the quick")?; + doc.append("second", "brown fox")?; + doc.append("third", "jumped over")?; + doc.append("last", "the lazy sheep dog")?; + Ok(()) }); } @@ -82,11 +86,12 @@ fn double() { "inf": f64::INFINITY, }; append_test(expected, |doc| { - doc.append("positive", 12.5); - doc.append("0", 0.0); - doc.append("negative", -123.24); - doc.append("nan", f64::NAN); - doc.append("inf", f64::INFINITY); + doc.append("positive", 12.5)?; + doc.append("0", 0.0)?; + doc.append("negative", -123.24)?; + doc.append("nan", f64::NAN)?; + doc.append("inf", f64::INFINITY)?; + Ok(()) }); } @@ -97,8 +102,9 @@ fn boolean() { "false": false, }; append_test(expected, |doc| { - doc.append("true", true); - doc.append("false", false); + doc.append("true", true)?; + doc.append("false", false)?; + Ok(()) }); } @@ -107,9 +113,7 @@ fn null() { let expected = doc! 
{ "null": null, }; - append_test(expected, |doc| { - doc.append("null", RawBson::Null); - }); + append_test(expected, |doc| doc.append("null", RawBson::Null)); } #[test] @@ -122,11 +126,12 @@ fn document() { } }; append_test(expected, |doc| { - doc.append("empty", RawDocumentBuf::new()); + doc.append("empty", RawDocumentBuf::new())?; let mut buf = RawDocumentBuf::new(); - buf.append("a", 1_i32); - buf.append("b", true); - doc.append("subdoc", buf); + buf.append("a", 1_i32)?; + buf.append("b", true)?; + doc.append("subdoc", buf)?; + Ok(()) }); } @@ -142,15 +147,16 @@ fn array() { ] }; append_test(expected, |doc| { - doc.append("empty", RawArrayBuf::new()); + doc.append("empty", RawArrayBuf::new())?; let mut buf = RawArrayBuf::new(); - buf.push(true); - buf.push("string"); + buf.push(true)?; + buf.push("string")?; let mut subdoc = RawDocumentBuf::new(); - subdoc.append("a", "subdoc"); - buf.push(subdoc); - buf.push(123_i32); - doc.append("array", buf); + subdoc.append("a", "subdoc")?; + buf.push(subdoc)?; + buf.push(123_i32)?; + doc.append("array", buf)?; + Ok(()) }); } @@ -176,8 +182,9 @@ fn datetime() { }; append_test(expected, |doc| { - doc.append("now", dt); - doc.append("old", old); + doc.append("now", dt)?; + doc.append("old", old)?; + Ok(()) }); } @@ -192,9 +199,7 @@ fn timestamp() { "ts": ts, }; - append_test(expected, |doc| { - doc.append("ts", ts); - }); + append_test(expected, |doc| doc.append("ts", ts)); } #[test] @@ -217,8 +222,9 @@ fn binary() { }; append_test(expected, |doc| { - doc.append("generic", bin); - doc.append("binary_old", old); + doc.append("generic", bin)?; + doc.append("binary_old", old)?; + Ok(()) }); } @@ -230,8 +236,9 @@ fn min_max_key() { }; append_test(expected, |doc| { - doc.append("min", RawBson::MinKey); - doc.append("max", RawBson::MaxKey); + doc.append("min", RawBson::MinKey)?; + doc.append("max", RawBson::MaxKey)?; + Ok(()) }); } @@ -241,9 +248,7 @@ fn undefined() { "undefined": Bson::Undefined, }; - append_test(expected, |doc| { - doc.append("undefined", RawBson::Undefined); - }); + append_test(expected, |doc| doc.append("undefined", RawBson::Undefined)); } #[test] @@ -253,7 +258,7 @@ fn regex() { }; append_test(expected, |doc| { - doc.append("regex", Regex::new("some pattern", "abc")); + doc.append("regex", Regex::new("some pattern", "abc")) }); } @@ -270,18 +275,19 @@ fn code() { }; append_test(expected, |doc| { - doc.append("code", RawBson::JavaScriptCode("some code".to_string())); + doc.append("code", RawBson::JavaScriptCode("some code".to_string()))?; let mut scope = RawDocumentBuf::new(); - scope.append("a", 1_i32); - scope.append("b", true); + scope.append("a", 1_i32)?; + scope.append("b", true)?; doc.append( "code_w_scope", RawJavaScriptCodeWithScope { code: "some code".to_string(), scope, }, - ); + )?; + Ok(()) }); } @@ -292,7 +298,7 @@ fn symbol() { }; append_test(expected, |doc| { - doc.append("symbol", RawBson::Symbol("symbol".to_string())); + doc.append("symbol", RawBson::Symbol("symbol".to_string())) }); } @@ -316,7 +322,7 @@ fn dbpointer() { namespace: "ns".to_string(), id, }), - ); + ) }); } @@ -327,9 +333,7 @@ fn decimal128() { "decimal": decimal }; - append_test(expected, |doc| { - doc.append("decimal", decimal); - }); + append_test(expected, |doc| doc.append("decimal", decimal)); } #[test] @@ -348,33 +352,34 @@ fn general() { }; append_test(expected, |doc| { - doc.append("a", true); - doc.append("second key", 123.4); - doc.append("third", 15_i64); - doc.append("32", -100101_i32); + doc.append("a", true)?; + doc.append("second key", 
123.4)?; + doc.append("third", 15_i64)?; + doc.append("32", -100101_i32)?; let mut subdoc = RawDocumentBuf::new(); - subdoc.append("a", "subkey"); + subdoc.append("a", "subkey")?; let mut subsubdoc = RawDocumentBuf::new(); - subsubdoc.append("subdoc", dt); - subdoc.append("another", subsubdoc); - doc.append("subdoc", subdoc); + subsubdoc.append("subdoc", dt)?; + subdoc.append("another", subsubdoc)?; + doc.append("subdoc", subdoc)?; let mut array = RawArrayBuf::new(); - array.push(1_i64); - array.push(true); + array.push(1_i64)?; + array.push(true)?; let mut array_subdoc = RawDocumentBuf::new(); - array_subdoc.append("doc", 23_i64); - array.push(array_subdoc); + array_subdoc.append("doc", 23_i64)?; + array.push(array_subdoc)?; let mut sub_array = RawArrayBuf::new(); - sub_array.push("another"); - sub_array.push("array"); - array.push(sub_array); + sub_array.push("another")?; + sub_array.push("array")?; + array.push(sub_array)?; - doc.append("array", array); + doc.append("array", array)?; + Ok(()) }); } @@ -383,17 +388,24 @@ fn from_iter() { let doc_buf = RawDocumentBuf::from_iter([ ( "array", - RawBson::Array(RawArrayBuf::from_iter([ - RawBson::Boolean(true), - RawBson::Document(RawDocumentBuf::from_iter([ - ("ok", RawBson::Boolean(false)), - ("other", RawBson::String("hello".to_string())), - ])), - ])), + RawBson::Array( + RawArrayBuf::from_iter([ + RawBson::Boolean(true), + RawBson::Document( + RawDocumentBuf::from_iter([ + ("ok", RawBson::Boolean(false)), + ("other", RawBson::String("hello".to_string())), + ]) + .unwrap(), + ), + ]) + .unwrap(), + ), ), ("bool", RawBson::Boolean(true)), ("string", RawBson::String("some string".to_string())), - ]); + ]) + .unwrap(); let doc = doc! { "array": [ @@ -408,24 +420,22 @@ fn from_iter() { }; let expected = doc! { "expected": doc }; - append_test(expected, |doc| { - doc.append("expected", doc_buf); - }); + append_test(expected, |doc| doc.append("expected", doc_buf)); } #[test] fn array_buf() { let mut arr_buf = RawArrayBuf::new(); - arr_buf.push(true); + arr_buf.push(true).unwrap(); let mut doc_buf = RawDocumentBuf::new(); - doc_buf.append("x", 3_i32); - doc_buf.append("string", "string"); - arr_buf.push(doc_buf); + doc_buf.append("x", 3_i32).unwrap(); + doc_buf.append("string", "string").unwrap(); + arr_buf.push(doc_buf).unwrap(); let mut sub_arr = RawArrayBuf::new(); - sub_arr.push("a string"); - arr_buf.push(sub_arr); + sub_arr.push("a string").unwrap(); + arr_buf.push(sub_arr).unwrap(); let arr = rawbson!([ true, diff --git a/src/ser.rs b/src/ser.rs index 65f6c9b1..43ead226 100644 --- a/src/ser.rs +++ b/src/ser.rs @@ -43,21 +43,6 @@ use crate::{ RawDocumentBuf, }; -pub(crate) fn write_string(buf: &mut Vec, s: &str) { - buf.extend(&(s.len() as i32 + 1).to_le_bytes()); - buf.extend(s.as_bytes()); - buf.push(0); -} - -pub(crate) fn write_cstring(buf: &mut Vec, s: &str) -> Result<()> { - if s.contains('\0') { - return Err(Error::InvalidCString(s.into())); - } - buf.extend(s.as_bytes()); - buf.push(0); - Ok(()) -} - #[inline] pub(crate) fn write_i32(writer: &mut W, val: i32) -> Result<()> { writer @@ -110,10 +95,10 @@ fn write_binary(mut writer: W, bytes: &[u8], subtype: BinarySubtype) - /// Encode a `T` Serializable into a [`Bson`] value. /// /// The [`Serializer`] used by this function presents itself as human readable, whereas the -/// one used in [`to_vec`] does not. 
This means that this function will produce different BSON than -/// [`to_vec`] for types that change their serialization output depending on whether -/// the format is human readable or not. -pub fn to_bson(value: &T) -> Result +/// one used in [`serialize_to_vec`] does not. This means that this function will produce different +/// BSON than [`serialize_to_vec`] for types that change their serialization output depending on +/// whether the format is human readable or not. +pub fn serialize_to_bson(value: &T) -> Result where T: Serialize + ?Sized, { @@ -135,17 +120,17 @@ where value.serialize(ser) } -/// Encode a `T` Serializable into a BSON [`Document`]. +/// Serialize a `T` Serializable into a BSON [`Document`]. /// /// The [`Serializer`] used by this function presents itself as human readable, whereas the -/// one used in [`to_vec`] does not. This means that this function will produce different BSON than -/// [`to_vec`] for types that change their serialization output depending on whether -/// the format is human readable or not. -pub fn to_document(value: &T) -> Result +/// one used in [`serialize_to_vec`] does not. This means that this function will produce different +/// BSON than [`serialize_to_vec`] for types that change their serialization output depending on +/// whether the format is human readable or not. +pub fn serialize_to_document(value: &T) -> Result where T: Serialize + ?Sized, { - match to_bson(value)? { + match serialize_to_bson(value)? { Bson::Document(doc) => Ok(doc), bson => Err(Error::SerializationError { message: format!( @@ -158,7 +143,7 @@ where /// Serialize the given `T` as a BSON byte vector. #[inline] -pub fn to_vec(value: &T) -> Result> +pub fn serialize_to_vec(value: &T) -> Result> where T: Serialize, { @@ -187,14 +172,14 @@ where /// } /// /// let cat = Cat { name: "Garfield".to_string(), age: 43 }; -/// let doc = bson::to_raw_document_buf(&cat)?; +/// let doc = bson::serialize_to_raw_document_buf(&cat)?; /// assert_eq!(doc, rawdoc! { "name": "Garfield", "age": 43 }); /// # Ok::<(), Box>(()) /// ``` #[inline] -pub fn to_raw_document_buf(value: &T) -> Result +pub fn serialize_to_raw_document_buf(value: &T) -> Result where T: Serialize, { - RawDocumentBuf::from_bytes(to_vec(value)?).map_err(Error::custom) + RawDocumentBuf::decode_from_bytes(serialize_to_vec(value)?).map_err(Error::custom) } diff --git a/src/ser/error.rs b/src/ser/error.rs index 6f3b8877..1bddb7df 100644 --- a/src/ser/error.rs +++ b/src/ser/error.rs @@ -14,9 +14,6 @@ pub enum Error { /// A key could not be serialized to a BSON string. InvalidDocumentKey(Bson), - /// An invalid string was specified. - InvalidCString(String), - /// A general error that occurred during serialization. /// See: #[non_exhaustive] @@ -38,6 +35,9 @@ pub enum Error { /// The original error. 
source: Box, }, + + /// TODO RUST-1406 remove this + Crate(Arc), } impl Error { @@ -68,14 +68,17 @@ impl From for Error { } } +impl From for Error { + fn from(err: crate::error::Error) -> Error { + Error::Crate(Arc::new(err)) + } +} + impl fmt::Display for Error { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match self { Error::Io(inner) => inner.fmt(fmt), Error::InvalidDocumentKey(key) => write!(fmt, "Invalid map key type: {}", key), - Error::InvalidCString(ref string) => { - write!(fmt, "cstrings cannot contain null bytes: {:?}", string) - } Error::SerializationError { message } => message.fmt(fmt), Error::UnsignedIntegerExceededRange(value) => write!( fmt, @@ -86,6 +89,7 @@ impl fmt::Display for Error { ), #[cfg(feature = "serde_path_to_error")] Error::WithPath { path, source } => write!(fmt, "error at {}: {}", path, source), + Error::Crate(inner) => inner.fmt(fmt), } } } diff --git a/src/ser/raw.rs b/src/ser/raw.rs index 69b7320e..480fdd46 100644 --- a/src/ser/raw.rs +++ b/src/ser/raw.rs @@ -10,9 +10,9 @@ use serde::{ use self::value_serializer::{ValueSerializer, ValueType}; -use super::{write_binary, write_cstring, write_f64, write_i32, write_i64, write_string}; +use super::{write_binary, write_f64, write_i32, write_i64}; use crate::{ - raw::{RAW_ARRAY_NEWTYPE, RAW_DOCUMENT_NEWTYPE}, + raw::{write_cstring, write_string, RAW_ARRAY_NEWTYPE, RAW_DOCUMENT_NEWTYPE}, ser::{Error, Result}, serde_helpers::HUMAN_READABLE_NEWTYPE, spec::{BinarySubtype, ElementType}, diff --git a/src/ser/raw/document_serializer.rs b/src/ser/raw/document_serializer.rs index 114a11e9..10e3b93e 100644 --- a/src/ser/raw/document_serializer.rs +++ b/src/ser/raw/document_serializer.rs @@ -1,8 +1,9 @@ use serde::{ser::Impossible, Serialize}; use crate::{ - ser::{write_cstring, write_i32, Error, Result}, - to_bson, + raw::write_cstring, + ser::{write_i32, Error, Result}, + serialize_to_bson, Bson, }; @@ -185,7 +186,7 @@ struct KeySerializer<'a> { impl KeySerializer<'_> { fn invalid_key(v: T) -> Error { - Error::InvalidDocumentKey(to_bson(&v).unwrap_or(Bson::Null)) + Error::InvalidDocumentKey(serialize_to_bson(&v).unwrap_or(Bson::Null)) } } @@ -264,7 +265,7 @@ impl serde::Serializer for KeySerializer<'_> { #[inline] fn serialize_str(self, v: &str) -> Result { - write_cstring(&mut self.root_serializer.bytes, v) + Ok(write_cstring(&mut self.root_serializer.bytes, v)?) 
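// write_cstring now returns crate::error::Result, so an invalid key (e.g. one containing
// an interior NUL byte) is reported here as ser::Error::Crate via the new From impl.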
} #[inline] diff --git a/src/ser/raw/value_serializer.rs b/src/ser/raw/value_serializer.rs index 8c0b2215..45a4ff6a 100644 --- a/src/ser/raw/value_serializer.rs +++ b/src/ser/raw/value_serializer.rs @@ -8,8 +8,8 @@ use serde::{ use crate::{ base64, oid::ObjectId, - raw::RAW_DOCUMENT_NEWTYPE, - ser::{write_binary, write_cstring, write_i32, write_i64, write_string, Error, Result}, + raw::{write_cstring, write_string, RAW_DOCUMENT_NEWTYPE}, + ser::{write_binary, write_i32, write_i64, Error, Result}, spec::{BinarySubtype, ElementType}, RawDocument, RawJavaScriptCodeWithScopeRef, @@ -311,7 +311,7 @@ impl<'b> serde::Serializer for &'b mut ValueSerializer<'_> { SerializationStep::CodeWithScopeScope { ref code, raw } if raw => { let raw = RawJavaScriptCodeWithScopeRef { code, - scope: RawDocument::from_bytes(v).map_err(Error::custom)?, + scope: RawDocument::decode_from_bytes(v).map_err(Error::custom)?, }; write_i32(&mut self.root_serializer.bytes, raw.len())?; write_string(&mut self.root_serializer.bytes, code); diff --git a/src/ser/serde.rs b/src/ser/serde.rs index 053f1e93..99abc7c9 100644 --- a/src/ser/serde.rs +++ b/src/ser/serde.rs @@ -305,7 +305,8 @@ impl ser::Serializer for Serializer { .serialize(self)? { Bson::Binary(b) => { - let doc = Document::from_reader(b.bytes.as_slice()).map_err(Error::custom)?; + let doc = + Document::decode_from_reader(b.bytes.as_slice()).map_err(Error::custom)?; if name == RAW_DOCUMENT_NEWTYPE { Ok(Bson::Document(doc)) diff --git a/src/tests.rs b/src/tests.rs index a3134674..2093ae83 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -1,8 +1,11 @@ mod binary_subtype; mod datetime; mod modules; +#[cfg(feature = "serde")] mod serde; +#[cfg(feature = "serde")] mod serde_helpers; +#[cfg(feature = "serde")] mod spec; use modules::TestLock; diff --git a/src/tests/modules.rs b/src/tests/modules.rs index d55a287e..c37da5af 100644 --- a/src/tests/modules.rs +++ b/src/tests/modules.rs @@ -4,7 +4,9 @@ mod document; mod lock; mod macros; mod oid; +#[cfg(feature = "serde")] mod ser; +#[cfg(feature = "serde")] mod serializer_deserializer; pub use self::lock::TestLock; diff --git a/src/tests/modules/bson.rs b/src/tests/modules/bson.rs index c3ccabeb..2f80e7c5 100644 --- a/src/tests/modules/bson.rs +++ b/src/tests/modules/bson.rs @@ -18,7 +18,7 @@ use crate::{ Timestamp, }; -use serde_json::{json, Value}; +use serde_json::Value; #[test] fn to_json() { @@ -173,8 +173,10 @@ fn from_impls() { Bson::from(vec![1, 2, 3]), Bson::Array(vec![Bson::Int32(1), Bson::Int32(2), Bson::Int32(3)]) ); + #[cfg(feature = "serde")] assert_eq!( - Bson::try_from(json!({"_id": {"$oid": oid.to_hex()}, "name": ["bson-rs"]})).unwrap(), + Bson::try_from(serde_json::json!({"_id": {"$oid": oid.to_hex()}, "name": ["bson-rs"]})) + .unwrap(), Bson::Document(doc! {"_id": &oid, "name": ["bson-rs"]}) ); @@ -204,15 +206,18 @@ fn from_impls() { assert_eq!(doc! {"x": Some(4)}, doc! {"x": 4}); assert_eq!(doc! {"x": None::}, doc! 
{"x": Bson::Null}); - let db_pointer = Bson::try_from(json!({ - "$dbPointer": { - "$ref": "db.coll", - "$id": { "$oid": "507f1f77bcf86cd799439011" }, - } - })) - .unwrap(); - let db_pointer = db_pointer.as_db_pointer().unwrap(); - assert_eq!(Bson::from(db_pointer), Bson::DbPointer(db_pointer.clone())); + #[cfg(feature = "serde")] + { + let db_pointer = Bson::try_from(serde_json::json!({ + "$dbPointer": { + "$ref": "db.coll", + "$id": { "$oid": "507f1f77bcf86cd799439011" }, + } + })) + .unwrap(); + let db_pointer = db_pointer.as_db_pointer().unwrap(); + assert_eq!(Bson::from(db_pointer), Bson::DbPointer(db_pointer.clone())); + } } #[test] diff --git a/src/tests/modules/macros.rs b/src/tests/modules/macros.rs index 12e718dd..642dfe58 100644 --- a/src/tests/modules/macros.rs +++ b/src/tests/modules/macros.rs @@ -46,29 +46,6 @@ fn standard_format() { "date": Bson::DateTime(crate::DateTime::from_time_0_3(date)), }; - let rawdoc = rawdoc! { - "float": 2.4, - "string": "hello", - "array": ["testing", 1, true, [1, 2]], - "doc": { - "fish": "in", - "a": "barrel", - "!": 1, - }, - "bool": true, - "null": null, - "regexp": Regex { pattern: "s[ao]d".to_owned(), options: "i".to_owned() }, - "with_wrapped_parens": (-20), - "code": RawBson::JavaScriptCode("function(x) { return x._id; }".to_owned()), - "i32": 12, - "i64": -55, - "timestamp": Timestamp { time: 0, increment: 229_999_444 }, - "binary": Binary { subtype: BinarySubtype::Md5, bytes: "thingies".to_owned().into_bytes() }, - "encrypted": Binary { subtype: BinarySubtype::Encrypted, bytes: "secret".to_owned().into_bytes() }, - "_id": id, - "date": crate::DateTime::from_time_0_3(date), - }; - let ts_nanos = date.unix_timestamp_nanos(); let ts_millis = ts_nanos - (ts_nanos % 1_000_000); let date_trunc = time::OffsetDateTime::from_unix_timestamp_nanos(ts_millis).unwrap(); @@ -87,7 +64,32 @@ fn standard_format() { assert_eq!(expected, format!("{}", doc)); - assert_eq!(rawdoc.into_bytes(), crate::to_vec(&doc).unwrap()); + #[cfg(feature = "serde")] + { + let rawdoc = rawdoc! { + "float": 2.4, + "string": "hello", + "array": ["testing", 1, true, [1, 2]], + "doc": { + "fish": "in", + "a": "barrel", + "!": 1, + }, + "bool": true, + "null": null, + "regexp": Regex { pattern: "s[ao]d".to_owned(), options: "i".to_owned() }, + "with_wrapped_parens": (-20), + "code": RawBson::JavaScriptCode("function(x) { return x._id; }".to_owned()), + "i32": 12, + "i64": -55, + "timestamp": Timestamp { time: 0, increment: 229_999_444 }, + "binary": Binary { subtype: BinarySubtype::Md5, bytes: "thingies".to_owned().into_bytes() }, + "encrypted": Binary { subtype: BinarySubtype::Encrypted, bytes: "secret".to_owned().into_bytes() }, + "_id": id, + "date": crate::DateTime::from_time_0_3(date), + }; + assert_eq!(rawdoc.into_bytes(), crate::serialize_to_vec(&doc).unwrap()); + } } #[test] @@ -124,24 +126,6 @@ fn recursive_macro() { "e": { "single": "test" }, "n": (Bson::Null), }; - let rawdoc = rawdoc! { - "a": "foo", - "b": { - "bar": { - "harbor": ["seal", false], - "jelly": 42.0, - }, - "grape": 27, - }, - "c": [-7], - "d": [ - { - "apple": "ripe", - } - ], - "e": { "single": "test" }, - "n": (RawBson::Null), - }; match doc.get("a") { Some(Bson::String(s)) => assert_eq!("foo", s), @@ -241,7 +225,28 @@ fn recursive_macro() { _ => panic!("Null was not inserted correctly."), } - assert_eq!(rawdoc.into_bytes(), crate::to_vec(&doc).unwrap()); + #[cfg(feature = "serde")] + { + let rawdoc = rawdoc! 
{ + "a": "foo", + "b": { + "bar": { + "harbor": ["seal", false], + "jelly": 42.0, + }, + "grape": 27, + }, + "c": [-7], + "d": [ + { + "apple": "ripe", + } + ], + "e": { "single": "test" }, + "n": (RawBson::Null), + }; + assert_eq!(rawdoc.into_bytes(), crate::serialize_to_vec(&doc).unwrap()); + } } #[test] diff --git a/src/tests/modules/ser.rs b/src/tests/modules/ser.rs index aa95651c..9eb839db 100644 --- a/src/tests/modules/ser.rs +++ b/src/tests/modules/ser.rs @@ -2,17 +2,27 @@ use std::collections::BTreeMap; use assert_matches::assert_matches; -use crate::{from_bson, oid::ObjectId, ser, tests::LOCK, to_bson, to_vec, Bson, Document, Regex}; +use crate::{ + deserialize_from_bson, + oid::ObjectId, + ser, + serialize_to_bson, + serialize_to_vec, + tests::LOCK, + Bson, + Document, + Regex, +}; #[test] #[allow(clippy::float_cmp)] fn floating_point() { let _guard = LOCK.run_concurrently(); let obj = Bson::Double(240.5); - let f: f64 = from_bson(obj.clone()).unwrap(); + let f: f64 = deserialize_from_bson(obj.clone()).unwrap(); assert_eq!(f, 240.5); - let deser: Bson = to_bson(&f).unwrap(); + let deser: Bson = serialize_to_bson(&f).unwrap(); assert_eq!(obj, deser); } @@ -20,10 +30,10 @@ fn floating_point() { fn string() { let _guard = LOCK.run_concurrently(); let obj = Bson::String("avocado".to_owned()); - let s: String = from_bson(obj.clone()).unwrap(); + let s: String = deserialize_from_bson(obj.clone()).unwrap(); assert_eq!(s, "avocado"); - let deser: Bson = to_bson(&s).unwrap(); + let deser: Bson = serialize_to_bson(&s).unwrap(); assert_eq!(obj, deser); } @@ -36,10 +46,10 @@ fn arr() { Bson::Int32(2), Bson::Int32(3), ]); - let arr: Vec = from_bson(obj.clone()).unwrap(); + let arr: Vec = deserialize_from_bson(obj.clone()).unwrap(); assert_eq!(arr, vec![0i32, 1i32, 2i32, 3i32]); - let deser: Bson = to_bson(&arr).unwrap(); + let deser: Bson = serialize_to_bson(&arr).unwrap(); assert_eq!(deser, obj); } @@ -47,10 +57,10 @@ fn arr() { fn boolean() { let _guard = LOCK.run_concurrently(); let obj = Bson::Boolean(true); - let b: bool = from_bson(obj.clone()).unwrap(); + let b: bool = deserialize_from_bson(obj.clone()).unwrap(); assert!(b); - let deser: Bson = to_bson(&b).unwrap(); + let deser: Bson = serialize_to_bson(&b).unwrap(); assert_eq!(deser, obj); } @@ -58,58 +68,58 @@ fn boolean() { fn int32() { let _guard = LOCK.run_concurrently(); let obj = Bson::Int32(101); - let i: i32 = from_bson(obj.clone()).unwrap(); + let i: i32 = deserialize_from_bson(obj.clone()).unwrap(); assert_eq!(i, 101); - let deser: Bson = to_bson(&i).unwrap(); + let deser: Bson = serialize_to_bson(&i).unwrap(); assert_eq!(deser, obj); } #[test] fn uint8_u2i() { let _guard = LOCK.run_concurrently(); - let obj: Bson = to_bson(&u8::MIN).unwrap(); - let deser: u8 = from_bson(obj).unwrap(); + let obj: Bson = serialize_to_bson(&u8::MIN).unwrap(); + let deser: u8 = deserialize_from_bson(obj).unwrap(); assert_eq!(deser, u8::MIN); - let obj_max: Bson = to_bson(&u8::MAX).unwrap(); - let deser_max: u8 = from_bson(obj_max).unwrap(); + let obj_max: Bson = serialize_to_bson(&u8::MAX).unwrap(); + let deser_max: u8 = deserialize_from_bson(obj_max).unwrap(); assert_eq!(deser_max, u8::MAX); } #[test] fn uint16_u2i() { let _guard = LOCK.run_concurrently(); - let obj: Bson = to_bson(&u16::MIN).unwrap(); - let deser: u16 = from_bson(obj).unwrap(); + let obj: Bson = serialize_to_bson(&u16::MIN).unwrap(); + let deser: u16 = deserialize_from_bson(obj).unwrap(); assert_eq!(deser, u16::MIN); - let obj_max: Bson = to_bson(&u16::MAX).unwrap(); - let 
deser_max: u16 = from_bson(obj_max).unwrap(); + let obj_max: Bson = serialize_to_bson(&u16::MAX).unwrap(); + let deser_max: u16 = deserialize_from_bson(obj_max).unwrap(); assert_eq!(deser_max, u16::MAX); } #[test] fn uint32_u2i() { let _guard = LOCK.run_concurrently(); - let obj_min: Bson = to_bson(&u32::MIN).unwrap(); - let deser_min: u32 = from_bson(obj_min).unwrap(); + let obj_min: Bson = serialize_to_bson(&u32::MIN).unwrap(); + let deser_min: u32 = deserialize_from_bson(obj_min).unwrap(); assert_eq!(deser_min, u32::MIN); - let obj_max: Bson = to_bson(&u32::MAX).unwrap(); - let deser_max: u32 = from_bson(obj_max).unwrap(); + let obj_max: Bson = serialize_to_bson(&u32::MAX).unwrap(); + let deser_max: u32 = deserialize_from_bson(obj_max).unwrap(); assert_eq!(deser_max, u32::MAX); } #[test] fn uint64_u2i() { let _guard = LOCK.run_concurrently(); - let obj_min: Bson = to_bson(&u64::MIN).unwrap(); - let deser_min: u64 = from_bson(obj_min).unwrap(); + let obj_min: Bson = serialize_to_bson(&u64::MIN).unwrap(); + let deser_min: u64 = deserialize_from_bson(obj_min).unwrap(); assert_eq!(deser_min, u64::MIN); - let err: ser::Error = to_bson(&u64::MAX).unwrap_err().strip_path(); + let err: ser::Error = serialize_to_bson(&u64::MAX).unwrap_err().strip_path(); assert_matches!(err, ser::Error::UnsignedIntegerExceededRange(u64::MAX)); } @@ -117,10 +127,10 @@ fn uint64_u2i() { fn int64() { let _guard = LOCK.run_concurrently(); let obj = Bson::Int64(101); - let i: i64 = from_bson(obj.clone()).unwrap(); + let i: i64 = deserialize_from_bson(obj.clone()).unwrap(); assert_eq!(i, 101); - let deser: Bson = to_bson(&i).unwrap(); + let deser: Bson = serialize_to_bson(&i).unwrap(); assert_eq!(deser, obj); } @@ -129,13 +139,13 @@ fn oid() { let _guard = LOCK.run_concurrently(); let oid = ObjectId::new(); let obj = Bson::ObjectId(oid); - let s: BTreeMap = from_bson(obj.clone()).unwrap(); + let s: BTreeMap = deserialize_from_bson(obj.clone()).unwrap(); let mut expected = BTreeMap::new(); expected.insert("$oid".to_owned(), oid.to_string()); assert_eq!(s, expected); - let deser: Bson = to_bson(&s).unwrap(); + let deser: Bson = serialize_to_bson(&s).unwrap(); assert_eq!(deser, obj); } @@ -156,14 +166,15 @@ fn cstring_null_bytes_error() { verify_doc(regex); fn verify_doc(doc: Document) { - let mut vec = Vec::new(); - assert!(matches!( - doc.to_writer(&mut vec).unwrap_err().strip_path(), - ser::Error::InvalidCString(_) - )); - assert!(matches!( - to_vec(&doc).unwrap_err().strip_path(), - ser::Error::InvalidCString(_) - )); + let result = doc.encode_to_vec(); + assert!(result.is_err(), "unexpected success"); + let err = result.unwrap_err(); + assert!(err.is_malformed_value(), "unexpected error: {:?}", err); + let result = serialize_to_vec(&doc); + assert!(result.is_err(), "unexpected success"); + match result.unwrap_err().strip_path() { + ser::Error::Crate(inner) if inner.is_malformed_value() => (), + err => panic!("unexpected error: {:?}", err), + }; } } diff --git a/src/tests/modules/serializer_deserializer.rs b/src/tests/modules/serializer_deserializer.rs index 63182cde..2afc6a73 100644 --- a/src/tests/modules/serializer_deserializer.rs +++ b/src/tests/modules/serializer_deserializer.rs @@ -6,13 +6,13 @@ use std::{ use serde::{Deserialize, Serialize}; use crate::{ - de::from_document, + de::deserialize_from_document, doc, oid::ObjectId, ser::Error, + serialize_to_document, spec::BinarySubtype, tests::LOCK, - to_document, Binary, Bson, Decimal128, @@ -35,11 +35,11 @@ fn test_serialize_deserialize_floating_point() { let 
doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -55,11 +55,11 @@ fn test_serialize_deserialize_utf8_string() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -71,10 +71,10 @@ fn test_encode_decode_utf8_string_invalid() { let doc = doc! { "key": &src, "subdoc": { "subkey": &src } }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); let expected = doc! { "key": "��", "subdoc": { "subkey": "��" } }; - let decoded = RawDocumentBuf::from_reader(&mut Cursor::new(buf)) + let decoded = RawDocumentBuf::decode_from_reader(&mut Cursor::new(buf)) .unwrap() .to_document_utf8_lossy() .unwrap(); @@ -93,11 +93,11 @@ fn test_serialize_deserialize_array() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -113,11 +113,11 @@ fn test_serialize_deserialize() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -130,11 +130,11 @@ fn test_serialize_deserialize_boolean() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -147,11 +147,11 @@ fn test_serialize_deserialize_null() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -167,11 +167,11 @@ fn test_serialize_deserialize_regexp() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -184,11 +184,11 @@ fn test_serialize_deserialize_javascript_code() { let doc = doc! 
{ "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -206,11 +206,11 @@ fn test_serialize_deserialize_javascript_code_with_scope() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -223,11 +223,11 @@ fn test_serialize_deserialize_i32() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -242,11 +242,11 @@ fn test_serialize_deserialize_i64() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -264,11 +264,11 @@ fn test_serialize_deserialize_timestamp() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -286,11 +286,11 @@ fn test_serialize_binary_generic() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -305,11 +305,11 @@ fn test_serialize_deserialize_object_id() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -334,11 +334,11 @@ fn test_serialize_utc_date_time() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -353,11 +353,11 @@ fn test_serialize_deserialize_symbol() { let doc = doc! 
{ "key": symbol }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -375,7 +375,7 @@ fn test_deserialize_utc_date_time_overflows() { raw.write_all(&raw0).unwrap(); raw.write_all(&[0]).unwrap(); - let deserialized = Document::from_reader(&mut Cursor::new(raw)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(raw)).unwrap(); let expected = doc! { "A": crate::DateTime::from_time_0_3(time::OffsetDateTime::from_unix_timestamp(1_530_492_218).unwrap() + time::Duration::nanoseconds(999 * 1_000_000))}; assert_eq!(deserialized, expected); @@ -386,7 +386,7 @@ fn test_deserialize_invalid_utf8_string_issue64() { let _guard = LOCK.run_concurrently(); let buffer = b"\x13\x00\x00\x00\x02\x01\x00\x00\x00\x00\x00\x00\x00foo\x00\x13\x05\x00\x00\x00"; - assert!(Document::from_reader(&mut Cursor::new(buffer)).is_err()); + assert!(Document::decode_from_reader(&mut Cursor::new(buffer)).is_err()); } #[test] @@ -394,7 +394,7 @@ fn test_deserialize_multiply_overflows_issue64() { let _guard = LOCK.run_concurrently(); let buffer = b"*\xc9*\xc9\t\x00\x00\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\xca\x01\t\x00\x00\x01\x10"; - assert!(Document::from_reader(&mut Cursor::new(&buffer[..])).is_err()); + assert!(Document::decode_from_reader(&mut Cursor::new(&buffer[..])).is_err()); } #[test] @@ -410,11 +410,11 @@ fn test_serialize_deserialize_decimal128() { let doc = doc! { "key": val }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -424,7 +424,7 @@ fn test_illegal_size() { let buffer = [ 0x06, 0xcc, 0xf9, 0x0a, 0x05, 0x00, 0x00, 0x03, 0x00, 0xff, 0xff, ]; - assert!(Document::from_reader(&mut Cursor::new(&buffer[..])).is_err()); + assert!(Document::decode_from_reader(&mut Cursor::new(&buffer[..])).is_err()); } #[test] @@ -436,11 +436,11 @@ fn test_serialize_deserialize_undefined() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -453,11 +453,11 @@ fn test_serialize_deserialize_min_key() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -470,11 +470,11 @@ fn test_serialize_deserialize_max_key() { let doc = doc! 
{"key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -496,11 +496,11 @@ fn test_serialize_deserialize_db_pointer() { let doc = doc! { "key": src }; let mut buf = Vec::new(); - doc.to_writer(&mut buf).unwrap(); + doc.encode_to_writer(&mut buf).unwrap(); assert_eq!(buf, dst); - let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap(); + let deserialized = Document::decode_from_reader(&mut Cursor::new(buf)).unwrap(); assert_eq!(deserialized, doc); } @@ -515,10 +515,10 @@ fn test_serialize_deserialize_document() { } let src = Point { x: 1, y: 2 }; - let doc = to_document(&src).unwrap(); + let doc = serialize_to_document(&src).unwrap(); assert_eq!(doc, doc! { "x": 1, "y": 2 }); - let point: Point = from_document(doc).unwrap(); + let point: Point = deserialize_from_document(doc).unwrap(); assert_eq!(src, point); #[derive(Debug, Deserialize, Serialize, PartialEq)] @@ -531,17 +531,17 @@ fn test_serialize_deserialize_document() { p2: Point { x: 1, y: 1 }, }; - let doc = to_document(&src).unwrap(); + let doc = serialize_to_document(&src).unwrap(); assert_eq!( doc, doc! { "p1": { "x": 0, "y": 0 }, "p2": { "x": 1, "y": 1 } } ); - let line: Line = from_document(doc).unwrap(); + let line: Line = deserialize_from_document(doc).unwrap(); assert_eq!(src, line); let x = 1; - let err = to_document(&x).unwrap_err(); + let err = serialize_to_document(&x).unwrap_err(); match err { Error::SerializationError { message } => { assert!(message.contains("Could not be serialized to Document")); @@ -550,7 +550,7 @@ fn test_serialize_deserialize_document() { } let bad_point = doc! 
{ "x": "one", "y": "two" }; - let bad_point: Result = from_document(bad_point); + let bad_point: Result = deserialize_from_document(bad_point); assert!(bad_point.is_err()); } @@ -559,7 +559,7 @@ fn test_serialize_deserialize_document() { fn test_deserialize_invalid_array_length() { let _guard = LOCK.run_concurrently(); let buffer = b"\n\x00\x00\x00\x04\x00\x00\x00\x00\x00"; - Document::from_reader(&mut std::io::Cursor::new(buffer)) + Document::decode_from_reader(&mut std::io::Cursor::new(buffer)) .expect_err("expected deserialization to fail"); } @@ -568,10 +568,10 @@ fn test_deserialize_invalid_array_length() { fn test_deserialize_invalid_old_binary_length() { let _guard = LOCK.run_concurrently(); let buffer = b"\x0F\x00\x00\x00\x05\x00\x00\x00\x00\x00\x02\xFC\xFF\xFF\xFF"; - Document::from_reader(&mut std::io::Cursor::new(buffer)) + Document::decode_from_reader(&mut std::io::Cursor::new(buffer)) .expect_err("expected deserialization to fail"); let buffer = b".\x00\x00\x00\x05\x01\x00\x00\x00\x00\x00\x02\xfc\xff\xff\xff\xff\xff\xff\xff\x00\x00*\x00h\x0e\x10++\x00h\x0e++\x00\x00\t\x00\x00\x00\x00\x00*\x0e\x10++"; - Document::from_reader(&mut std::io::Cursor::new(buffer)) + Document::decode_from_reader(&mut std::io::Cursor::new(buffer)) .expect_err("expected deserialization to fail"); } diff --git a/src/tests/serde.rs b/src/tests/serde.rs index f92d064d..aedc739d 100644 --- a/src/tests/serde.rs +++ b/src/tests/serde.rs @@ -2,9 +2,9 @@ use crate::{ bson, + deserialize_from_bson, + deserialize_from_document, doc, - from_bson, - from_document, oid::ObjectId, serde_helpers, serde_helpers::{ @@ -16,10 +16,10 @@ use crate::{ timestamp_as_u32, u32_as_timestamp, }, + serialize_to_bson, + serialize_to_document, spec::BinarySubtype, tests::LOCK, - to_bson, - to_document, Binary, Bson, DateTime, @@ -106,13 +106,13 @@ fn test_ser_timestamp() { }, }; - let x = to_bson(&foo).unwrap(); + let x = serialize_to_bson(&foo).unwrap(); assert_eq!( x.as_document().unwrap(), &doc! { "ts": Bson::Timestamp(Timestamp { time: 0x0000_000C, increment: 0x0000_000A }) } ); - let xfoo: Foo = from_bson(x).unwrap(); + let xfoo: Foo = deserialize_from_bson(x).unwrap(); assert_eq!(xfoo, foo); } @@ -126,7 +126,7 @@ fn test_de_timestamp() { ts: Timestamp, } - let foo: Foo = from_bson(Bson::Document(doc! { + let foo: Foo = deserialize_from_bson(Bson::Document(doc! { "ts": Bson::Timestamp(Timestamp { time: 0x0000_000C, increment: 0x0000_000A }), })) .unwrap(); @@ -159,13 +159,13 @@ fn test_ser_regex() { regex: regex.clone(), }; - let x = to_bson(&foo).unwrap(); + let x = serialize_to_bson(&foo).unwrap(); assert_eq!( x.as_document().unwrap(), &doc! { "regex": Bson::RegularExpression(regex) } ); - let xfoo: Foo = from_bson(x).unwrap(); + let xfoo: Foo = deserialize_from_bson(x).unwrap(); assert_eq!(xfoo, foo); } @@ -184,7 +184,7 @@ fn test_de_regex() { options: "01".into(), }; - let foo: Foo = from_bson(Bson::Document(doc! { + let foo: Foo = deserialize_from_bson(Bson::Document(doc! { "regex": Bson::RegularExpression(regex.clone()), })) .unwrap(); @@ -211,13 +211,13 @@ fn test_ser_code_with_scope() { code_with_scope: code_with_scope.clone(), }; - let x = to_bson(&foo).unwrap(); + let x = serialize_to_bson(&foo).unwrap(); assert_eq!( x.as_document().unwrap(), &doc! { "code_with_scope": Bson::JavaScriptCodeWithScope(code_with_scope) } ); - let xfoo: Foo = from_bson(x).unwrap(); + let xfoo: Foo = deserialize_from_bson(x).unwrap(); assert_eq!(xfoo, foo); } @@ -236,7 +236,7 @@ fn test_de_code_with_scope() { scope: doc! 
{ "x": 12 }, }; - let foo: Foo = from_bson(Bson::Document(doc! { + let foo: Foo = deserialize_from_bson(Bson::Document(doc! { "code_with_scope": Bson::JavaScriptCodeWithScope(code_with_scope.clone()), })) .unwrap(); @@ -258,13 +258,13 @@ fn test_ser_datetime() { let foo = Foo { date: now }; - let x = to_bson(&foo).unwrap(); + let x = serialize_to_bson(&foo).unwrap(); assert_eq!( x.as_document().unwrap(), &doc! { "date": (Bson::DateTime(now)) } ); - let xfoo: Foo = from_bson(x).unwrap(); + let xfoo: Foo = deserialize_from_bson(x).unwrap(); assert_eq!(xfoo, foo); } @@ -283,13 +283,13 @@ fn test_binary_generic_roundtrip() { }), }; - let b = to_bson(&x).unwrap(); + let b = serialize_to_bson(&x).unwrap(); assert_eq!( b.as_document().unwrap(), &doc! {"data": Bson::Binary(Binary { subtype: BinarySubtype::Generic, bytes: b"12345abcde".to_vec() })} ); - let f = from_bson::(b).unwrap(); + let f = deserialize_from_bson::(b).unwrap(); assert_eq!(x, f); } @@ -308,13 +308,13 @@ fn test_binary_non_generic_roundtrip() { }), }; - let b = to_bson(&x).unwrap(); + let b = serialize_to_bson(&x).unwrap(); assert_eq!( b.as_document().unwrap(), &doc! {"data": Bson::Binary(Binary { subtype: BinarySubtype::BinaryOld, bytes: b"12345abcde".to_vec() })} ); - let f = from_bson::(b).unwrap(); + let f = deserialize_from_bson::(b).unwrap(); assert_eq!(x, f); } @@ -333,13 +333,13 @@ fn test_binary_helper_generic_roundtrip() { }, }; - let b = to_bson(&x).unwrap(); + let b = serialize_to_bson(&x).unwrap(); assert_eq!( b.as_document().unwrap(), &doc! {"data": Bson::Binary(Binary { subtype: BinarySubtype::Generic, bytes: b"12345abcde".to_vec() })} ); - let f = from_bson::(b).unwrap(); + let f = deserialize_from_bson::(b).unwrap(); assert_eq!(x, f); } @@ -358,13 +358,13 @@ fn test_binary_helper_non_generic_roundtrip() { }, }; - let b = to_bson(&x).unwrap(); + let b = serialize_to_bson(&x).unwrap(); assert_eq!( b.as_document().unwrap(), &doc! {"data": Bson::Binary(Binary { subtype: BinarySubtype::BinaryOld, bytes: b"12345abcde".to_vec() })} ); - let f = from_bson::(b).unwrap(); + let f = deserialize_from_bson::(b).unwrap(); assert_eq!(x, f); } @@ -381,19 +381,13 @@ fn test_byte_vec() { challenge: b"18762b98b7c34c25bf9dc3154e4a5ca3", }; - let b = to_bson(&x).unwrap(); + let b = serialize_to_bson(&x).unwrap(); assert_eq!( b, Bson::Document( doc! { "challenge": (Bson::Binary(Binary { subtype: BinarySubtype::Generic, bytes: x.challenge.to_vec() }))} ) ); - - // let mut buf = Vec::new(); - // b.as_document().unwrap().to_writer(&mut buf).unwrap(); - - // let xb = Document::from_reader(&mut Cursor::new(buf)).unwrap(); - // assert_eq!(b.as_document().unwrap(), &xb); } #[test] @@ -409,13 +403,13 @@ fn test_serde_bytes() { data: b"12345abcde".to_vec(), }; - let b = to_bson(&x).unwrap(); + let b = serialize_to_bson(&x).unwrap(); assert_eq!( b.as_document().unwrap(), &doc! 
{"data": Bson::Binary(Binary { subtype: BinarySubtype::Generic, bytes: b"12345abcde".to_vec() })} ); - let f = from_bson::(b).unwrap(); + let f = deserialize_from_bson::(b).unwrap(); assert_eq!(x, f); } @@ -426,12 +420,12 @@ fn test_serde_newtype_struct() { struct Email(String); let email_1 = Email(String::from("bson@serde.rs")); - let b = to_bson(&email_1).unwrap(); + let b = serialize_to_bson(&email_1).unwrap(); assert_eq!(b, Bson::String(email_1.0)); let s = String::from("root@localho.st"); let de = Bson::String(s.clone()); - let email_2 = from_bson::(de).unwrap(); + let email_2 = deserialize_from_bson::(de).unwrap(); assert_eq!(email_2, Email(s)); } @@ -442,12 +436,12 @@ fn test_serde_tuple_struct() { struct Name(String, String); // first, last let name_1 = Name(String::from("Graydon"), String::from("Hoare")); - let b = to_bson(&name_1).unwrap(); + let b = serialize_to_bson(&name_1).unwrap(); assert_eq!(b, bson!([name_1.0.clone(), name_1.1])); let (first, last) = (String::from("Donald"), String::from("Knuth")); let de = bson!([first.clone(), last.clone()]); - let name_2 = from_bson::(de).unwrap(); + let name_2 = deserialize_from_bson::(de).unwrap(); assert_eq!(name_2, Name(first, last)); } @@ -463,12 +457,12 @@ fn test_serde_newtype_variant() { let n = 42; let num_1 = Number::Int(n); - let b = to_bson(&num_1).unwrap(); + let b = serialize_to_bson(&num_1).unwrap(); assert_eq!(b, bson!({ "type": "Int", "value": n })); let x = 1337.0; let de = bson!({ "type": "Float", "value": x }); - let num_2 = from_bson::(de).unwrap(); + let num_2 = deserialize_from_bson::(de).unwrap(); assert_eq!(num_2, Number::Float(x)); } @@ -484,12 +478,12 @@ fn test_serde_tuple_variant() { #[allow(clippy::approx_constant)] let (x1, y1) = (3.14, -2.71); let p1 = Point::TwoDim(x1, y1); - let b = to_bson(&p1).unwrap(); + let b = serialize_to_bson(&p1).unwrap(); assert_eq!(b, bson!({ "TwoDim": [x1, y1] })); let (x2, y2, z2) = (0.0, -13.37, 4.2); let de = bson!({ "ThreeDim": [x2, y2, z2] }); - let p2 = from_bson::(de).unwrap(); + let p2 = deserialize_from_bson::(de).unwrap(); assert_eq!(p2, Point::ThreeDim(x2, y2, z2)); } @@ -517,13 +511,13 @@ fn test_ser_db_pointer() { db_pointer: db_pointer.clone(), }; - let x = to_bson(&foo).unwrap(); + let x = serialize_to_bson(&foo).unwrap(); assert_eq!( x.as_document().unwrap(), &doc! {"db_pointer": Bson::DbPointer(db_pointer.clone()) } ); - let xfoo: Foo = from_bson(x).unwrap(); + let xfoo: Foo = deserialize_from_bson(x).unwrap(); assert_eq!(xfoo, foo); } @@ -546,7 +540,7 @@ fn test_de_db_pointer() { .unwrap(); let db_pointer = db_pointer.as_db_pointer().unwrap(); - let foo: Foo = from_bson(Bson::Document( + let foo: Foo = deserialize_from_bson(Bson::Document( doc! {"db_pointer": Bson::DbPointer(db_pointer.clone())}, )) .unwrap(); @@ -577,7 +571,7 @@ fn test_serde_legacy_uuid_1() { csharp_legacy: uuid, }; - let x = to_bson(&foo).unwrap(); + let x = serialize_to_bson(&foo).unwrap(); assert_eq!( x.as_document().unwrap(), &doc! 
{ @@ -596,7 +590,7 @@ fn test_serde_legacy_uuid_1() { } ); - let foo: Foo = from_bson(x).unwrap(); + let foo: Foo = deserialize_from_bson(x).unwrap(); assert_eq!(foo.java_legacy, uuid); assert_eq!(foo.python_legacy, uuid); assert_eq!(foo.csharp_legacy, uuid); @@ -608,7 +602,7 @@ fn test_de_uuid_extjson_string() { let uuid_bson_bytes = hex::decode("1D000000057800100000000473FFD26444B34C6990E8E7D1DFC035D400").unwrap(); - let uuid_document = Document::from_reader(uuid_bson_bytes.as_slice()).unwrap(); + let uuid_document = Document::decode_from_reader(uuid_bson_bytes.as_slice()).unwrap(); let expected_uuid_bson = Bson::from_extended_document(uuid_document); let ext_json_uuid = "{\"x\" : { \"$uuid\" : \"73ffd264-44b3-4c69-90e8-e7d1dfc035d4\"}}"; @@ -677,7 +671,7 @@ fn test_unsigned_helpers() { } let a = A { num_1: 1, num_2: 2 }; - let doc = to_document(&a).unwrap(); + let doc = serialize_to_document(&a).unwrap(); assert!(doc.get_i32("num_1").unwrap() == 1); assert!(doc.get_i32("num_2").unwrap() == 2); @@ -685,14 +679,14 @@ fn test_unsigned_helpers() { num_1: u32::MAX, num_2: 1, }; - let doc_result = to_document(&a); + let doc_result = serialize_to_document(&a); assert!(doc_result.is_err()); let a = A { num_1: 1, num_2: u64::MAX, }; - let doc_result = to_document(&a); + let doc_result = serialize_to_document(&a); assert!(doc_result.is_err()); #[derive(Serialize)] @@ -707,7 +701,7 @@ fn test_unsigned_helpers() { num_1: u32::MAX, num_2: i64::MAX as u64, }; - let doc = to_document(&b).unwrap(); + let doc = serialize_to_document(&b).unwrap(); assert!(doc.get_i64("num_1").unwrap() == u32::MAX as i64); assert!(doc.get_i64("num_2").unwrap() == i64::MAX); @@ -715,7 +709,7 @@ fn test_unsigned_helpers() { num_1: 1, num_2: i64::MAX as u64 + 1, }; - let doc_result = to_document(&b); + let doc_result = serialize_to_document(&b); assert!(doc_result.is_err()); #[derive(Deserialize, Serialize, Debug, PartialEq)] @@ -730,11 +724,11 @@ fn test_unsigned_helpers() { num_1: 101, num_2: 12345, }; - let doc = to_document(&f).unwrap(); + let doc = serialize_to_document(&f).unwrap(); assert!((doc.get_f64("num_1").unwrap() - 101.0).abs() < f64::EPSILON); assert!((doc.get_f64("num_2").unwrap() - 12345.0).abs() < f64::EPSILON); - let back: F = from_document(doc).unwrap(); + let back: F = deserialize_from_document(doc).unwrap(); assert_eq!(back, f); let f = F { @@ -742,14 +736,14 @@ fn test_unsigned_helpers() { // f64 cannot represent many large integers exactly, u64::MAX included num_2: u64::MAX, }; - let doc_result = to_document(&f); + let doc_result = serialize_to_document(&f); assert!(doc_result.is_err()); let f = F { num_1: 1, num_2: u64::MAX - 255, }; - let doc_result = to_document(&f); + let doc_result = serialize_to_document(&f); assert!(doc_result.is_err()); } @@ -770,9 +764,9 @@ fn test_datetime_helpers() { let a = A { date: crate::DateTime::from_time_0_3(date), }; - let doc = to_document(&a).unwrap(); + let doc = serialize_to_document(&a).unwrap(); assert_eq!(doc.get_str("date").unwrap(), iso); - let a: A = from_document(doc).unwrap(); + let a: A = deserialize_from_document(doc).unwrap(); assert_eq!(a.date.to_time_0_3(), date); #[cfg(feature = "time-0_3")] @@ -797,9 +791,9 @@ fn test_datetime_helpers() { let b: B = serde_json::from_value(json).unwrap(); let expected = datetime!(2020-06-09 10:58:07.095 UTC); assert_eq!(b.date, expected); - let doc = to_document(&b).unwrap(); + let doc = serialize_to_document(&b).unwrap(); assert_eq!(doc.get_datetime("date").unwrap().to_time_0_3(), expected); - let b: B = 
from_document(doc).unwrap(); + let b: B = deserialize_from_document(doc).unwrap(); assert_eq!(b.date, expected); } @@ -825,9 +819,9 @@ fn test_datetime_helpers() { let expected: chrono::DateTime = chrono::DateTime::from_str("2020-06-09 10:58:07.095 UTC").unwrap(); assert_eq!(b.date, expected); - let doc = to_document(&b).unwrap(); + let doc = serialize_to_document(&b).unwrap(); assert_eq!(doc.get_datetime("date").unwrap().to_chrono(), expected); - let b: B = from_document(doc).unwrap(); + let b: B = deserialize_from_document(doc).unwrap(); assert_eq!(b.date, expected); } @@ -853,12 +847,12 @@ fn test_datetime_helpers() { let expected: Option> = Some(chrono::DateTime::from_str("2020-06-09 10:58:07.095 UTC").unwrap()); assert_eq!(b.date, expected); - let doc = to_document(&b).unwrap(); + let doc = serialize_to_document(&b).unwrap(); assert_eq!( Some(doc.get_datetime("date").unwrap().to_chrono()), expected ); - let b: B = from_document(doc).unwrap(); + let b: B = deserialize_from_document(doc).unwrap(); assert_eq!(b.date, expected); } @@ -878,9 +872,9 @@ fn test_datetime_helpers() { let b: B = serde_json::from_value(json).unwrap(); let expected = None; assert_eq!(b.date, expected); - let doc = to_document(&b).unwrap(); + let doc = serialize_to_document(&b).unwrap(); assert_eq!(None, expected); - let b: B = from_document(doc).unwrap(); + let b: B = deserialize_from_document(doc).unwrap(); assert_eq!(b.date, expected); } @@ -894,9 +888,9 @@ fn test_datetime_helpers() { let c = C { date: date.to_string(), }; - let doc = to_document(&c).unwrap(); + let doc = serialize_to_document(&c).unwrap(); assert!(doc.get_datetime("date").is_ok()); - let c: C = from_document(doc).unwrap(); + let c: C = deserialize_from_document(doc).unwrap(); assert_eq!(c.date.as_str(), date); } @@ -914,9 +908,9 @@ fn test_oid_helpers() { let a = A { oid: oid.to_string(), }; - let doc = to_document(&a).unwrap(); + let doc = serialize_to_document(&a).unwrap(); assert_eq!(doc.get_object_id("oid").unwrap(), oid); - let a: A = from_document(doc).unwrap(); + let a: A = deserialize_from_document(doc).unwrap(); assert_eq!(a.oid, oid.to_string()); } @@ -934,9 +928,9 @@ fn test_i64_as_bson_datetime() { let a = A { now: now.timestamp_millis(), }; - let doc = to_document(&a).unwrap(); + let doc = serialize_to_document(&a).unwrap(); assert_eq!(doc.get_datetime("now").unwrap(), &now); - let a: A = from_document(doc).unwrap(); + let a: A = deserialize_from_document(doc).unwrap(); assert_eq!(a.now, now.timestamp_millis()); } @@ -956,7 +950,7 @@ fn test_uuid_1_helpers() { let uuid = Uuid::parse_str("936DA01F9ABD4d9d80C702AF85C822A8").unwrap(); let a = A { uuid }; - let doc = to_document(&a).unwrap(); + let doc = serialize_to_document(&a).unwrap(); match doc.get("uuid").unwrap() { Bson::Binary(bin) => { assert_eq!(bin.subtype, BinarySubtype::Uuid); @@ -964,7 +958,7 @@ fn test_uuid_1_helpers() { } _ => panic!("expected Bson::Binary"), } - let a: A = from_document(doc).unwrap(); + let a: A = deserialize_from_document(doc).unwrap(); assert_eq!(a.uuid, uuid); } @@ -980,11 +974,11 @@ fn test_timestamp_helpers() { let time = 12345; let a = A { time }; - let doc = to_document(&a).unwrap(); + let doc = serialize_to_document(&a).unwrap(); let timestamp = doc.get_timestamp("time").unwrap(); assert_eq!(timestamp.time, time); assert_eq!(timestamp.increment, 0); - let a: A = from_document(doc).unwrap(); + let a: A = deserialize_from_document(doc).unwrap(); assert_eq!(a.time, time); #[derive(Deserialize, Serialize)] @@ -1044,14 +1038,14 @@ fn 
oid_as_hex_string() { let oid = ObjectId::new(); let foo = Foo { oid }; - let doc = to_document(&foo).unwrap(); + let doc = serialize_to_document(&foo).unwrap(); assert_eq!(doc.get_str("oid").unwrap(), oid.to_hex()); } #[test] fn fuzz_regression_00() { let buf: &[u8] = &[227, 0, 35, 4, 2, 0, 255, 255, 255, 127, 255, 255, 255, 47]; - let _ = crate::from_slice::(buf); + let _ = crate::deserialize_from_slice::(buf); } #[cfg(feature = "serde_path_to_error")] @@ -1079,7 +1073,7 @@ mod serde_path_to_error { "value": "hello", }, }; - let result: Result = crate::from_document(src); + let result: Result = crate::deserialize_from_document(src); assert!(result.is_err()); match result.unwrap_err() { crate::de::Error::WithPath { source: _, path } => { @@ -1100,7 +1094,7 @@ mod serde_path_to_error { }, } .into_bytes(); - let result: Result = crate::from_slice(&src); + let result: Result = crate::deserialize_from_slice(&src); assert!(result.is_err()); match result.unwrap_err() { crate::de::Error::WithPath { source: _, path } => { @@ -1116,7 +1110,7 @@ mod serde_path_to_error { one: Bar { value: 42 }, two: Bar { value: u64::MAX }, }; - let result = crate::to_bson(&src); + let result = crate::serialize_to_bson(&src); assert!(result.is_err()); match result.unwrap_err() { crate::ser::Error::WithPath { source: _, path } => { @@ -1132,7 +1126,7 @@ mod serde_path_to_error { one: Bar { value: 42 }, two: Bar { value: u64::MAX }, }; - let result = crate::to_vec(&src); + let result = crate::serialize_to_vec(&src); assert!(result.is_err()); match result.unwrap_err() { crate::ser::Error::WithPath { source: _, path } => { diff --git a/src/tests/serde_helpers.rs b/src/tests/serde_helpers.rs index 817b8d88..0f74e352 100644 --- a/src/tests/serde_helpers.rs +++ b/src/tests/serde_helpers.rs @@ -3,7 +3,7 @@ use core::str; use serde::{de::Visitor, Deserialize, Serialize}; use crate::{ - from_slice, + deserialize_from_slice, serde_helpers::{HumanReadable, Utf8LossyDeserialization}, }; @@ -88,7 +88,7 @@ fn human_readable_wrapper() { }), }; // use the raw serializer, which is non-human-readable - let data_doc = crate::to_raw_document_buf(&data).unwrap(); + let data_doc = crate::serialize_to_raw_document_buf(&data).unwrap(); let expected_data_doc = rawdoc! 
{ "first": "human readable", "outer": "not human readable", @@ -99,7 +99,7 @@ fn human_readable_wrapper() { }; assert_eq!(data_doc, expected_data_doc); - let tripped: Data = crate::from_slice(expected_data_doc.as_bytes()).unwrap(); + let tripped: Data = crate::deserialize_from_slice(expected_data_doc.as_bytes()).unwrap(); let expected = Data { first: HumanReadable(Detector { serialized_as: true, @@ -142,11 +142,13 @@ fn utf8_lossy_wrapper() { s2: String, } - from_slice::(&both_strings_invalid_bytes).unwrap_err(); + deserialize_from_slice::(&both_strings_invalid_bytes).unwrap_err(); - let s = from_slice::>(&both_strings_invalid_bytes) - .unwrap() - .0; + let s = deserialize_from_slice::>( + &both_strings_invalid_bytes, + ) + .unwrap() + .0; assert_eq!(s.s1, expected_replacement); assert_eq!(s.s2, expected_replacement); @@ -156,16 +158,17 @@ fn utf8_lossy_wrapper() { s2: String, } - let s = from_slice::(&first_string_invalid_bytes).unwrap(); + let s = deserialize_from_slice::(&first_string_invalid_bytes).unwrap(); assert_eq!(s.s1.0, expected_replacement); assert_eq!(&s.s2, ":)"); - from_slice::(&both_strings_invalid_bytes).unwrap_err(); + deserialize_from_slice::(&both_strings_invalid_bytes).unwrap_err(); - let s = - from_slice::>(&both_strings_invalid_bytes) - .unwrap() - .0; + let s = deserialize_from_slice::>( + &both_strings_invalid_bytes, + ) + .unwrap() + .0; assert_eq!(s.s1.0, expected_replacement); assert_eq!(s.s2, expected_replacement); } diff --git a/src/tests/spec/corpus.rs b/src/tests/spec/corpus.rs index 1ef1893e..0255cf41 100644 --- a/src/tests/spec/corpus.rs +++ b/src/tests/spec/corpus.rs @@ -1,6 +1,5 @@ use std::{ convert::{TryFrom, TryInto}, - iter::FromIterator, marker::PhantomData, str::FromStr, }; @@ -111,70 +110,74 @@ fn run_test(test: TestFile) { // these four cover the four ways to create a [`Document`] from the provided BSON. 
let documentfromreader_cb = - Document::from_reader(canonical_bson.as_slice()).expect(&description); + Document::decode_from_reader(canonical_bson.as_slice()).expect(&description); let fromreader_cb: Document = - crate::from_reader(canonical_bson.as_slice()).expect(&description); + crate::deserialize_from_reader(canonical_bson.as_slice()).expect(&description); let fromdocument_documentfromreader_cb: Document = - crate::from_document(documentfromreader_cb.clone()).expect(&description); + crate::deserialize_from_document(documentfromreader_cb.clone()).expect(&description); let todocument_documentfromreader_cb: Document = - crate::to_document(&documentfromreader_cb).expect(&description); + crate::serialize_to_document(&documentfromreader_cb).expect(&description); let canonical_raw_document = - RawDocument::from_bytes(canonical_bson.as_slice()).expect(&description); + RawDocument::decode_from_bytes(canonical_bson.as_slice()).expect(&description); let document_from_raw_document: Document = canonical_raw_document.try_into().expect(&description); let canonical_raw_bson_from_slice = - crate::from_slice::(canonical_bson.as_slice()) + crate::deserialize_from_slice::(canonical_bson.as_slice()) .expect(&description) .as_document() .expect(&description); let canonical_owned_raw_bson_from_slice = - crate::from_slice::(canonical_bson.as_slice()).expect(&description); + crate::deserialize_from_slice::(canonical_bson.as_slice()) + .expect(&description); let canonical_raw_document_from_slice = - crate::from_slice::<&RawDocument>(canonical_bson.as_slice()).expect(&description); + crate::deserialize_from_slice::<&RawDocument>(canonical_bson.as_slice()) + .expect(&description); // These cover the ways to serialize those [`Documents`] back to BSON. let mut documenttowriter_documentfromreader_cb = Vec::new(); documentfromreader_cb - .to_writer(&mut documenttowriter_documentfromreader_cb) + .encode_to_writer(&mut documenttowriter_documentfromreader_cb) .expect(&description); let mut documenttowriter_fromreader_cb = Vec::new(); fromreader_cb - .to_writer(&mut documenttowriter_fromreader_cb) + .encode_to_writer(&mut documenttowriter_fromreader_cb) .expect(&description); let mut documenttowriter_fromdocument_documentfromreader_cb = Vec::new(); fromdocument_documentfromreader_cb - .to_writer(&mut documenttowriter_fromdocument_documentfromreader_cb) + .encode_to_writer(&mut documenttowriter_fromdocument_documentfromreader_cb) .expect(&description); let mut documenttowriter_todocument_documentfromreader_cb = Vec::new(); todocument_documentfromreader_cb - .to_writer(&mut documenttowriter_todocument_documentfromreader_cb) + .encode_to_writer(&mut documenttowriter_todocument_documentfromreader_cb) .expect(&description); let tovec_documentfromreader_cb = - crate::to_vec(&documentfromreader_cb).expect(&description); + crate::serialize_to_vec(&documentfromreader_cb).expect(&description); let mut documenttowriter_document_from_raw_document = Vec::new(); document_from_raw_document - .to_writer(&mut documenttowriter_document_from_raw_document) + .encode_to_writer(&mut documenttowriter_document_from_raw_document) .expect(&description); // Serialize the raw versions "back" to BSON also. 
- let tovec_rawdocument = crate::to_vec(&canonical_raw_document).expect(&description); + let tovec_rawdocument = + crate::serialize_to_vec(&canonical_raw_document).expect(&description); let tovec_rawdocument_from_slice = - crate::to_vec(&canonical_raw_document_from_slice).expect(&description); - let tovec_rawbson = crate::to_vec(&canonical_raw_bson_from_slice).expect(&description); + crate::serialize_to_vec(&canonical_raw_document_from_slice).expect(&description); + let tovec_rawbson = + crate::serialize_to_vec(&canonical_raw_bson_from_slice).expect(&description); let tovec_ownedrawbson = - crate::to_vec(&canonical_owned_raw_bson_from_slice).expect(&description); + crate::serialize_to_vec(&canonical_owned_raw_bson_from_slice).expect(&description); // test Bson / RawBson field deserialization if let Some(ref test_key) = test.test_key { @@ -199,7 +202,9 @@ fn run_test(test: TestFile) { .deserialize_any(FieldVisitor(test_key.as_str(), PhantomData::)) .expect(&description); let from_slice_owned_vec = - RawDocumentBuf::from_iter([(test_key, owned_raw_bson_field)]).into_bytes(); + RawDocumentBuf::from_iter([(test_key, owned_raw_bson_field)]) + .expect(&description) + .into_bytes(); // deserialize the field from raw Bytes into a Bson let deserializer_value = @@ -234,11 +239,13 @@ fn run_test(test: TestFile) { }; // convert back into raw BSON for comparison with canonical BSON - let from_raw_vec = crate::to_vec(&from_raw_doc).expect(&description); + let from_raw_vec = crate::serialize_to_vec(&from_raw_doc).expect(&description); let from_slice_value_vec = - crate::to_vec(&from_slice_value_doc).expect(&description); - let from_bson_value_vec = crate::to_vec(&from_value_value_doc).expect(&description); - let from_value_raw_vec = crate::to_vec(&from_value_raw_doc).expect(&description); + crate::serialize_to_vec(&from_slice_value_doc).expect(&description); + let from_bson_value_vec = + crate::serialize_to_vec(&from_value_value_doc).expect(&description); + let from_value_raw_vec = + crate::serialize_to_vec(&from_value_raw_doc).expect(&description); assert_eq!(from_raw_vec, canonical_bson, "{}", description); assert_eq!(from_slice_value_vec, canonical_bson, "{}", description); @@ -343,10 +350,11 @@ fn run_test(test: TestFile) { if let Some(db) = valid.degenerate_bson { let db = hex::decode(&db).expect(&description); - let bson_to_native_db = Document::from_reader(db.as_slice()).expect(&description); + let bson_to_native_db = + Document::decode_from_reader(db.as_slice()).expect(&description); let mut native_to_bson_bson_to_native_db = Vec::new(); bson_to_native_db - .to_writer(&mut native_to_bson_bson_to_native_db) + .encode_to_writer(&mut native_to_bson_bson_to_native_db) .unwrap(); assert_eq!( hex::encode(native_to_bson_bson_to_native_db).to_lowercase(), @@ -356,10 +364,10 @@ fn run_test(test: TestFile) { ); let bson_to_native_db_serde: Document = - crate::from_reader(db.as_slice()).expect(&description); + crate::deserialize_from_reader(db.as_slice()).expect(&description); let mut native_to_bson_bson_to_native_db_serde = Vec::new(); bson_to_native_db_serde - .to_writer(&mut native_to_bson_bson_to_native_db_serde) + .encode_to_writer(&mut native_to_bson_bson_to_native_db_serde) .unwrap(); assert_eq!( hex::encode(native_to_bson_bson_to_native_db_serde).to_lowercase(), @@ -368,13 +376,14 @@ fn run_test(test: TestFile) { description, ); - let document_from_raw_document: Document = RawDocument::from_bytes(db.as_slice()) - .expect(&description) - .try_into() - .expect(&description); + let 
document_from_raw_document: Document = + RawDocument::decode_from_bytes(db.as_slice()) + .expect(&description) + .try_into() + .expect(&description); let mut documenttowriter_document_from_raw_document = Vec::new(); document_from_raw_document - .to_writer(&mut documenttowriter_document_from_raw_document) + .encode_to_writer(&mut documenttowriter_document_from_raw_document) .expect(&description); assert_eq!( hex::encode(documenttowriter_document_from_raw_document).to_lowercase(), @@ -468,7 +477,7 @@ fn run_test(test: TestFile) { json_to_native_cej .as_document() .unwrap() - .to_writer(&mut native_to_bson_json_to_native_cej) + .encode_to_writer(&mut native_to_bson_json_to_native_cej) .unwrap(); assert_eq!( @@ -503,7 +512,7 @@ fn run_test(test: TestFile) { json_to_native_dej .as_document() .unwrap() - .to_writer(&mut native_to_bson_json_to_native_dej) + .encode_to_writer(&mut native_to_bson_json_to_native_dej) .unwrap(); assert_eq!( @@ -540,7 +549,7 @@ fn run_test(test: TestFile) { ); let bson = hex::decode(&decode_error.bson).expect("should decode from hex"); - if let Ok(doc) = RawDocument::from_bytes(bson.as_slice()) { + if let Ok(doc) = RawDocument::decode_from_bytes(bson.as_slice()) { Document::try_from(doc).expect_err(description.as_str()); } @@ -551,11 +560,12 @@ fn run_test(test: TestFile) { continue; } - Document::from_reader(bson.as_slice()).expect_err(&description); - crate::from_reader::<_, Document>(bson.as_slice()).expect_err(description.as_str()); + Document::decode_from_reader(bson.as_slice()).expect_err(&description); + crate::deserialize_from_reader::<_, Document>(bson.as_slice()) + .expect_err(description.as_str()); if decode_error.description.contains("invalid UTF-8") { - RawDocumentBuf::from_reader(bson.as_slice()) + RawDocumentBuf::decode_from_reader(bson.as_slice()) .expect(&description) .to_document_utf8_lossy() .unwrap_or_else(|err| { @@ -564,7 +574,7 @@ fn run_test(test: TestFile) { description, err ) }); - crate::from_slice::>(bson.as_slice()) + crate::deserialize_from_slice::>(bson.as_slice()) .expect(&description); } } @@ -579,7 +589,7 @@ fn run_test(test: TestFile) { if let Ok(bson) = Bson::try_from(json.clone()) { // if converting to bson succeeds, assert that translating that bson to bytes fails - assert!(crate::to_vec(&bson).is_err()); + assert!(crate::serialize_to_vec(&bson).is_err()); } } } diff --git a/src/tests/spec/vector.rs b/src/tests/spec/vector.rs index 95f389ce..d25090da 100644 --- a/src/tests/spec/vector.rs +++ b/src/tests/spec/vector.rs @@ -4,11 +4,11 @@ use serde::{Deserialize, Deserializer, Serialize}; use crate::{ binary::{Binary, PackedBitVector, Vector}, - from_document, - from_slice, + deserialize_from_document, + deserialize_from_slice, + serialize_to_document, + serialize_to_raw_document_buf, spec::BinarySubtype, - to_document, - to_raw_document_buf, Bson, Document, RawDocumentBuf, @@ -107,7 +107,7 @@ fn vector_from_numbers( // Only return the binary if it represents a valid vector; otherwise, return an error. 
fn binary_from_bytes(bson: &str, test_key: &str, description: &str) -> Result { let bytes = hex::decode(bson).expect(description); - let mut test_document = Document::from_reader(bytes.as_slice()).expect(description); + let mut test_document = Document::decode_from_reader(bytes.as_slice()).expect(description); let bson = test_document.remove(test_key).expect(description); let binary = match bson { Bson::Binary(binary) => binary, @@ -187,20 +187,21 @@ fn run_test_file(test_file: TestFile) { }; // Serialize for Vector (Document) - let serialized_document = to_document(&data).expect(&description); + let serialized_document = serialize_to_document(&data).expect(&description); assert_eq!(serialized_document, test_document); // Deserialize for Vector (Document) - let deserialized_data: Data = from_document(serialized_document).expect(&description); + let deserialized_data: Data = + deserialize_from_document(serialized_document).expect(&description); assert_eq!(deserialized_data, data); // Serialize for Vector (RawDocumentBuf) - let serialized_raw_document = to_raw_document_buf(&data).expect(&description); + let serialized_raw_document = serialize_to_raw_document_buf(&data).expect(&description); assert_eq!(serialized_raw_document, test_raw_document); // Deserialize for Vector (RawDocumentBuf) let deserialized_data: Data = - from_slice(serialized_raw_document.as_bytes()).expect(&description); + deserialize_from_slice(serialized_raw_document.as_bytes()).expect(&description); assert_eq!(deserialized_data, data); } } diff --git a/src/uuid.rs b/src/uuid.rs index c6bf3aef..61897b2e 100644 --- a/src/uuid.rs +++ b/src/uuid.rs @@ -101,7 +101,7 @@ //! "as_bson": bson::Uuid::from(foo.as_bson.unwrap()), //! }; //! -//! assert_eq!(bson::to_document(&foo)?, expected); +//! assert_eq!(bson::serialize_to_document(&foo)?, expected); //! # } //! # Ok::<(), Box>(()) //! ``` @@ -135,19 +135,15 @@ #[cfg(test)] mod test; -use std::{ - fmt::{self, Display}, - str::FromStr, -}; +use std::fmt::{self, Display}; -use serde::{Deserialize, Serialize}; - -use crate::{de::BsonVisitor, spec::BinarySubtype, Binary, Bson}; +use crate::{spec::BinarySubtype, Binary, Bson}; /// Special type name used in the [`Uuid`] serialization implementation to indicate a BSON /// UUID is being serialized or deserialized. The BSON serializers/deserializers will handle this /// name specially, but other serializers/deserializers will just ignore it and use [`uuid::Uuid`]'s /// serde integration. +#[cfg(feature = "serde")] pub(crate) const UUID_NEWTYPE_NAME: &str = "$__bson_private_uuid"; /// A struct modeling a BSON UUID value (i.e. a Binary value with subtype 4). @@ -227,7 +223,8 @@ impl Uuid { } } -impl Serialize for Uuid { +#[cfg(feature = "serde")] +impl serde::Serialize for Uuid { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, @@ -236,12 +233,13 @@ impl Serialize for Uuid { } } -impl<'de> Deserialize<'de> for Uuid { +#[cfg(feature = "serde")] +impl<'de> serde::Deserialize<'de> for Uuid { fn deserialize(deserializer: D) -> std::result::Result where D: serde::Deserializer<'de>, { - match deserializer.deserialize_newtype_struct(UUID_NEWTYPE_NAME, BsonVisitor)? { + match deserializer.deserialize_newtype_struct(UUID_NEWTYPE_NAME, crate::de::BsonVisitor)? { // Need to support deserializing from generic subtypes for non-BSON formats. // When using the BSON deserializer, the newtype name will ensure the subtype is only // ever BinarySubtype::Uuid. 
@@ -258,6 +256,7 @@ impl<'de> Deserialize<'de> for Uuid { )) } Bson::String(s) => { + use std::str::FromStr as _; let uuid = uuid::Uuid::from_str(s.as_str()).map_err(serde::de::Error::custom)?; Ok(Self::from_external_uuid(uuid)) } @@ -460,6 +459,7 @@ macro_rules! trait_impls { where D: serde::Deserializer<'de>, { + use serde::Deserialize as _; let uuid = Uuid::deserialize(deserializer)?; Ok(uuid.into()) } @@ -472,6 +472,7 @@ macro_rules! trait_impls { where S: serde::Serializer, { + use serde::Serialize as _; let uuid = Uuid::from(*source); uuid.serialize(serializer) } diff --git a/src/uuid/test.rs b/src/uuid/test.rs index 29c0048c..17efdbf6 100644 --- a/src/uuid/test.rs +++ b/src/uuid/test.rs @@ -1,16 +1,12 @@ use crate::{ - from_document, - from_slice, spec::BinarySubtype, uuid::{Uuid, UuidRepresentation}, Binary, Bson, - Document, }; -use serde::{Deserialize, Serialize}; -use serde_json::json; -#[derive(Debug, Serialize, Deserialize, PartialEq)] +#[cfg(feature = "serde")] +#[derive(Debug, serde::Serialize, serde::Deserialize, PartialEq)] struct U { uuid: Uuid, } @@ -28,18 +24,20 @@ fn into_bson() { assert_eq!(bson, Bson::Binary(binary)); } +#[cfg(feature = "serde")] #[test] fn raw_serialization() { let u = U { uuid: Uuid::new() }; - let bytes = crate::to_vec(&u).unwrap(); + let bytes = crate::serialize_to_vec(&u).unwrap(); - let doc: Document = crate::from_slice(bytes.as_slice()).unwrap(); + let doc: crate::Document = crate::deserialize_from_slice(bytes.as_slice()).unwrap(); assert_eq!(doc, doc! { "uuid": u.uuid }); - let u_roundtrip: U = crate::from_slice(bytes.as_slice()).unwrap(); + let u_roundtrip: U = crate::deserialize_from_slice(bytes.as_slice()).unwrap(); assert_eq!(u_roundtrip, u); } +#[cfg(feature = "serde")] #[test] fn bson_serialization() { let u = U { uuid: Uuid::new() }; @@ -52,24 +50,26 @@ fn bson_serialization() { assert_eq!(doc! { "uuid": u.uuid }, correct); - let doc = crate::to_document(&u).unwrap(); + let doc = crate::serialize_to_document(&u).unwrap(); assert_eq!(doc, correct); - let u_roundtrip: U = crate::from_document(doc).unwrap(); + let u_roundtrip: U = crate::deserialize_from_document(doc).unwrap(); assert_eq!(u_roundtrip, u); } +#[cfg(feature = "serde")] #[test] fn json() { let u = U { uuid: Uuid::new() }; let json = serde_json::to_value(&u).unwrap(); - assert_eq!(json, json!({ "uuid": u.uuid.to_string() })); + assert_eq!(json, serde_json::json!({ "uuid": u.uuid.to_string() })); let u_roundtrip_json: U = serde_json::from_value(json).unwrap(); assert_eq!(u_roundtrip_json, u); } +#[cfg(feature = "serde")] #[test] fn wrong_subtype() { let generic = doc! { @@ -78,9 +78,9 @@ fn wrong_subtype() { subtype: BinarySubtype::Generic } }; - crate::from_document::(generic.clone()).unwrap_err(); - let generic_bytes = crate::to_vec(&generic).unwrap(); - crate::from_slice::(&generic_bytes).unwrap_err(); + crate::deserialize_from_document::(generic.clone()).unwrap_err(); + let generic_bytes = crate::serialize_to_vec(&generic).unwrap(); + crate::deserialize_from_slice::(&generic_bytes).unwrap_err(); let old = doc! { "uuid": Binary { @@ -88,9 +88,9 @@ fn wrong_subtype() { subtype: BinarySubtype::UuidOld } }; - crate::from_document::(old.clone()).unwrap_err(); - let old_bytes = crate::to_vec(&old).unwrap(); - crate::from_slice::(&old_bytes).unwrap_err(); + crate::deserialize_from_document::(old.clone()).unwrap_err(); + let old_bytes = crate::serialize_to_vec(&old).unwrap(); + crate::deserialize_from_slice::(&old_bytes).unwrap_err(); let other = doc! 
{ "uuid": Binary { @@ -98,9 +98,9 @@ fn wrong_subtype() { subtype: BinarySubtype::UserDefined(100) } }; - crate::from_document::(other.clone()).unwrap_err(); - let other_bytes = crate::to_vec(&other).unwrap(); - crate::from_slice::(&other_bytes).unwrap_err(); + crate::deserialize_from_document::(other.clone()).unwrap_err(); + let other_bytes = crate::serialize_to_vec(&other).unwrap(); + crate::deserialize_from_slice::(&other_bytes).unwrap_err(); } #[test] @@ -256,9 +256,10 @@ fn interop_1() { assert_eq!(d_bson, d_uuid); } +#[cfg(feature = "serde")] #[test] fn deserialize_uuid_from_string() { - #[derive(Deserialize)] + #[derive(serde::Deserialize)] struct UuidWrapper { uuid: Uuid, } @@ -266,11 +267,12 @@ fn deserialize_uuid_from_string() { let uuid = Uuid::new(); let doc = doc! { "uuid": uuid.to_string() }; - let wrapper: UuidWrapper = from_document(doc).expect("failed to deserialize document"); + let wrapper: UuidWrapper = + crate::deserialize_from_document(doc).expect("failed to deserialize document"); assert_eq!(wrapper.uuid, uuid); let raw_doc = rawdoc! { "uuid": uuid.to_string() }; - let wrapper: UuidWrapper = - from_slice(raw_doc.as_bytes()).expect("failed to deserialize raw document"); + let wrapper: UuidWrapper = crate::deserialize_from_slice(raw_doc.as_bytes()) + .expect("failed to deserialize raw document"); assert_eq!(wrapper.uuid, uuid); }