Skip to content

Commit a1d0db2

Browse files
refactor(serde): drop internal tokenstream types
1 parent 71a3342 commit a1d0db2

File tree

5 files changed

+171
-291
lines changed

5 files changed

+171
-291
lines changed

keyvalues-serde/src/de/mod.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ use std::{
2020
use crate::{
2121
de::{map::ObjEater, seq::SeqBuilder},
2222
error::{Error, Result},
23-
tokens::{Token, TokenStream},
23+
tokens::{tokens_from_vdf, Token},
2424
};
2525

2626
pub fn from_reader<R: Read, T: DeserializeOwned>(rdr: R) -> Result<T> {
@@ -84,15 +84,15 @@ pub struct Deserializer<'de> {
8484
impl<'de> Deserializer<'de> {
8585
/// Attempts to create a new VDF deserializer along with returning the top level VDF key
8686
pub fn new_with_key(vdf: Vdf<'de>) -> Result<(Self, Key<'de>)> {
87-
let token_stream = TokenStream::from(vdf);
87+
let token_stream = tokens_from_vdf(vdf);
8888

8989
let key = if let Some(Token::Key(key)) = token_stream.first() {
9090
key.clone()
9191
} else {
9292
unreachable!("Tokenstream must start with key");
9393
};
9494

95-
let tokens = token_stream.0.into_iter().peekable();
95+
let tokens = token_stream.into_iter().peekable();
9696
Ok((Self { tokens }, key.clone()))
9797
}
9898

keyvalues-serde/src/ser.rs

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,12 @@
11
//! Serialize Rust types to VDF text
22
3-
use keyvalues_parser::Vdf;
43
use serde_core::{ser, Serialize};
54

65
use std::io::Write;
76

87
use crate::{
98
error::{Error, Result},
10-
tokens::{NaiveToken, NaiveTokenStream},
9+
tokens::{naive::vdf_from_naive_tokens, NaiveToken},
1110
};
1211

1312
/// The struct for serializing Rust values into VDF text
@@ -16,7 +15,7 @@ use crate::{
1615
/// [`to_writer_with_key()`] can be used instead
1716
#[derive(Default)]
1817
pub struct Serializer {
19-
tokens: NaiveTokenStream,
18+
tokens: Vec<NaiveToken>,
2019
}
2120

2221
impl Serializer {
@@ -79,7 +78,7 @@ where
7978
}
8079
}
8180

82-
let vdf = Vdf::try_from(&serializer.tokens)?;
81+
let vdf = vdf_from_naive_tokens(&serializer.tokens)?;
8382
write!(writer, "{vdf}")?;
8483

8584
Ok(())

keyvalues-serde/src/tokens/mod.rs

Lines changed: 37 additions & 115 deletions
Original file line numberDiff line numberDiff line change
@@ -1,140 +1,62 @@
1-
// TODO: a lot of this can probably be slimmed down at this point
2-
// TODO: implement a validate function
3-
// TODO: make a note that this has invariants that must be upheld, so it is only exposed internally
1+
// TODO: replace with some kind of iterator that decomposes the original structure instead of using
2+
// an intermediate layer
43

5-
mod naive;
4+
pub(crate) mod naive;
65
#[cfg(test)]
76
mod tests;
87

98
use keyvalues_parser::{Obj, Value, Vdf};
109

11-
use std::{
12-
borrow::Cow,
13-
ops::{Deref, DerefMut},
14-
};
15-
16-
pub use crate::tokens::naive::{NaiveToken, NaiveTokenStream};
17-
18-
// I've been struggling to get serde to play nice with using a more complex internal structure in a
19-
// `Deserializer`. I think the easiest solution I can come up with is to flatten out the `Vdf` into
20-
// a stream of tokens that serde can consume. In this way the Deserializer can just work on
21-
// munching through all the tokens instead of trying to mutate a more complex nested structure
22-
// containing different types
23-
/// A stream of [`Token`]s representing a [`Vdf`]
24-
///
25-
/// I think an example is the easiest way to understand the structure so something like
26-
///
27-
/// ```vdf
28-
/// "Outer Key"
29-
/// {
30-
/// "Inner Key" "Inner Value"
31-
/// "Inner Key"
32-
/// {
33-
/// }
34-
/// }
35-
/// ```
36-
///
37-
/// will be transformed into
38-
///
39-
/// ```ron
40-
/// Vdf(
41-
/// key: "Outer Key",
42-
/// value: Obj({
43-
/// "Inner Key": [
44-
/// Str("Inner Value"),
45-
/// Obj({})
46-
/// ]
47-
/// })
48-
/// )
49-
/// ```
50-
///
51-
/// which has the following token stream
52-
///
53-
/// ```ron
54-
/// TokenStream([
55-
/// Key("Outer Key"),
56-
/// ObjBegin,
57-
/// Key("Inner Key"),
58-
/// SeqBegin,
59-
/// Str("Inner Value"),
60-
/// ObjBegin,
61-
/// ObjEnd,
62-
/// SeqEnd,
63-
/// ObjEnd,
64-
/// ])
65-
/// ```
66-
///
67-
/// So in this way it's a linear sequence of keys and values where the value is either a str or an
68-
/// object.
69-
#[derive(Debug, PartialEq, Eq)]
70-
pub struct TokenStream<'a>(pub Vec<Token<'a>>);
10+
use std::borrow::Cow;
7111

72-
impl<'a> Deref for TokenStream<'a> {
73-
type Target = Vec<Token<'a>>;
12+
pub use crate::tokens::naive::NaiveToken;
7413

75-
fn deref(&self) -> &Self::Target {
76-
&self.0
77-
}
78-
}
14+
pub(crate) fn tokens_from_vdf(vdf: Vdf<'_>) -> Vec<Token<'_>> {
15+
let Vdf { key, value } = vdf;
7916

80-
impl DerefMut for TokenStream<'_> {
81-
fn deref_mut(&mut self) -> &mut Self::Target {
82-
&mut self.0
83-
}
17+
let mut tokens = vec![Token::Key(key)];
18+
tokens.extend(tokens_from_value(value));
19+
tokens
8420
}
8521

86-
impl<'a> From<Vdf<'a>> for TokenStream<'a> {
87-
fn from(vdf: Vdf<'a>) -> Self {
88-
let Vdf { key, value } = vdf;
89-
90-
let mut inner = vec![Token::Key(key)];
91-
inner.extend(TokenStream::from(value).0);
92-
93-
Self(inner)
94-
}
95-
}
22+
// TODO: pass through a `&mut Vec<_>` instead of allocating new ones
23+
fn tokens_from_value(value: Value<'_>) -> Vec<Token<'_>> {
24+
let mut tokens = Vec::new();
9625

97-
impl<'a> From<Value<'a>> for TokenStream<'a> {
98-
fn from(value: Value<'a>) -> Self {
99-
let mut inner = Vec::new();
100-
101-
match value {
102-
Value::Str(s) => inner.push(Token::Str(s)),
103-
Value::Obj(obj) => {
104-
inner.push(Token::ObjBegin);
105-
inner.extend(Self::from(obj).0);
106-
inner.push(Token::ObjEnd);
107-
}
26+
match value {
27+
Value::Str(s) => tokens.push(Token::Str(s)),
28+
Value::Obj(obj) => {
29+
tokens.push(Token::ObjBegin);
30+
tokens.extend(tokens_from_obj(obj));
31+
tokens.push(Token::ObjEnd);
10832
}
109-
110-
Self(inner)
11133
}
112-
}
11334

114-
impl<'a> From<Obj<'a>> for TokenStream<'a> {
115-
fn from(obj: Obj<'a>) -> Self {
116-
let mut inner = Vec::new();
35+
tokens
36+
}
11737

118-
for (key, values) in obj.into_inner().into_iter() {
119-
inner.push(Token::Key(key));
38+
fn tokens_from_obj(obj: Obj<'_>) -> Vec<Token<'_>> {
39+
let mut tokens = Vec::new();
12040

121-
// For ease of use a sequence is only marked when len != 1
122-
let num_values = values.len();
123-
if num_values != 1 {
124-
inner.push(Token::SeqBegin);
125-
}
41+
for (key, values) in obj.into_inner().into_iter() {
42+
tokens.push(Token::Key(key));
12643

127-
for value in values {
128-
inner.extend(TokenStream::from(value).0);
129-
}
44+
// For ease of use a sequence is only marked when len != 1
45+
let num_values = values.len();
46+
if num_values != 1 {
47+
tokens.push(Token::SeqBegin);
48+
}
13049

131-
if num_values != 1 {
132-
inner.push(Token::SeqEnd);
133-
}
50+
for value in values {
51+
tokens.extend(tokens_from_value(value));
13452
}
13553

136-
Self(inner)
54+
if num_values != 1 {
55+
tokens.push(Token::SeqEnd);
56+
}
13757
}
58+
59+
tokens
13860
}
13961

14062
/// A single VDF token

0 commit comments

Comments
 (0)