Skip to content

Commit 039a4ea

Browse files
committed
derive WIP
1 parent 82d8635 commit 039a4ea

File tree

9 files changed

+957
-3
lines changed

9 files changed

+957
-3
lines changed

Cargo.toml

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -14,16 +14,18 @@ categories = [
1414
[dependencies]
1515
lite-parser = { path = "parser", default-features = false }
1616
num-traits = { version = "0.2", optional = true, default-features = false }
17+
lite-json-derive = { path = "derive", default-features = false, optional = true }
1718

1819
[features]
1920
default = ["std"]
2021
std = [
2122
"lite-parser/std"
2223
]
23-
# Enables converting values to floats in no-`std` environment
24-
float = ["num-traits"]
24+
float = ["num-traits"] # Enables converting values to floats in no-`std` environment
25+
derive = ["lite-json-derive"]
2526

2627
[workspace]
2728
members = [
28-
"parser"
29+
"parser",
30+
"derive"
2931
]

derive/Cargo.toml

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
[package]
2+
name = "lite-json-derive"
3+
description = "Serialization and deserialization derive macro for lite-json"
4+
version = "0.1.0"
5+
authors = ["Bryan Chen <[email protected]>"]
6+
license = "Apache-2.0"
7+
edition = "2018"
8+
9+
[lib]
10+
proc-macro = true
11+
12+
[dependencies]
13+
syn = { version = "1.0.8", features = [ "full", "visit" ] }
14+
quote = "1.0.2"
15+
proc-macro2 = "1.0.6"
16+
proc-macro-crate = "0.1.4"
17+
18+
[dev-dependencies]
19+
lite-json = { path = "..", version = "0.1.0" }

derive/src/from_json.rs

Lines changed: 111 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,111 @@
1+
// Heavily inspired by https://github.com/paritytech/parity-scale-codec
2+
3+
use proc_macro2::{Ident, Span, TokenStream};
4+
use syn::{spanned::Spanned, Data, Error, Field, Fields};
5+
6+
use crate::utils;
7+
8+
pub fn quote(data: &Data, type_name: &Ident, input: &TokenStream) -> TokenStream {
9+
match *data {
10+
Data::Struct(ref data) => match data.fields {
11+
Fields::Named(_) | Fields::Unnamed(_) => {
12+
create_instance(quote! { #type_name }, input, &data.fields)
13+
}
14+
Fields::Unit => {
15+
quote_spanned! { data.fields.span() =>
16+
Ok(#type_name)
17+
}
18+
}
19+
},
20+
Data::Enum(ref data) => {
21+
let data_variants = || {
22+
data.variants
23+
.iter()
24+
.filter(|variant| crate::utils::get_skip(&variant.attrs).is_none())
25+
};
26+
27+
let recurse = data_variants().enumerate().map(|(i, v)| {
28+
let name = &v.ident;
29+
let index = utils::index(v, i);
30+
31+
let create = create_instance(quote! { #type_name :: #name }, input, &v.fields);
32+
33+
quote_spanned! { v.span() =>
34+
x if x == #index as u8 => {
35+
#create
36+
},
37+
}
38+
});
39+
40+
// TODO: match string name
41+
42+
quote! {
43+
match #input {
44+
_lite_json::JsonValue::Number(_lite_json::NumberValue { integer, .. }) => match integer {
45+
#( #recurse )*,
46+
_ => None
47+
},
48+
_ => None,
49+
}
50+
}
51+
}
52+
Data::Union(_) => {
53+
Error::new(Span::call_site(), "Union types are not supported.").to_compile_error()
54+
}
55+
}
56+
}
57+
58+
fn create_decode_expr(field: &Field, _name: &str, input: &TokenStream) -> TokenStream {
59+
let skip = utils::get_skip(&field.attrs).is_some();
60+
61+
if skip {
62+
quote_spanned! { field.span() => Default::default() }
63+
} else {
64+
quote_spanned! { field.span() =>
65+
_lite_json::FromJson::from_json(#input)?;
66+
}
67+
}
68+
}
69+
70+
fn create_instance(name: TokenStream, input: &TokenStream, fields: &Fields) -> TokenStream {
71+
match *fields {
72+
Fields::Named(ref fields) => {
73+
let recurse = fields.named.iter().map(|f| {
74+
let name_ident = &f.ident;
75+
let field = match name_ident {
76+
Some(a) => format!("{}.{}", name, a),
77+
None => format!("{}", name),
78+
};
79+
let decode = create_decode_expr(f, &field, input);
80+
81+
quote_spanned! { f.span() =>
82+
#name_ident: #decode
83+
}
84+
});
85+
86+
quote_spanned! { fields.span() =>
87+
Ok(#name {
88+
#( #recurse, )*
89+
})
90+
}
91+
}
92+
Fields::Unnamed(ref fields) => {
93+
let recurse = fields.unnamed.iter().enumerate().map(|(i, f)| {
94+
let name = format!("{}.{}", name, i);
95+
96+
create_decode_expr(f, &name, input)
97+
});
98+
99+
quote_spanned! { fields.span() =>
100+
Ok(#name (
101+
#( #recurse, )*
102+
))
103+
}
104+
}
105+
Fields::Unit => {
106+
quote_spanned! { fields.span() =>
107+
Ok(#name)
108+
}
109+
}
110+
}
111+
}

derive/src/into_json.rs

Lines changed: 175 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,175 @@
1+
// Heavily inspired by https://github.com/paritytech/parity-scale-codec
2+
3+
use std::str::from_utf8;
4+
5+
use proc_macro2::{Ident, Span, TokenStream};
6+
use syn::{punctuated::Punctuated, spanned::Spanned, token::Comma, Data, Error, Field, Fields};
7+
8+
use crate::utils;
9+
10+
type FieldsList = Punctuated<Field, Comma>;
11+
12+
fn encode_named_fields<F>(fields: &FieldsList, field_name: F) -> TokenStream
13+
where
14+
F: Fn(usize, &Option<Ident>) -> TokenStream,
15+
{
16+
let recurse = fields.iter().enumerate().map(|(i, f)| {
17+
let skip = utils::get_skip(&f.attrs).is_some();
18+
let field = field_name(i, &f.ident);
19+
20+
if skip {
21+
quote! {}
22+
} else {
23+
quote_spanned! { f.span() =>
24+
(
25+
stringify!(f.indent.unwrap()).into_iter().collect(),
26+
_lite_json::IntoJson::into_json(#field)
27+
)
28+
}
29+
}
30+
});
31+
32+
quote! {
33+
_lite_json::JsonValue::Object( __core::vec![ #( #recurse, )* ] )
34+
}
35+
}
36+
37+
fn encode_unnamed_fields<F>(fields: &FieldsList, field_name: F) -> TokenStream
38+
where
39+
F: Fn(usize, &Option<Ident>) -> TokenStream,
40+
{
41+
let recurse = fields.iter().enumerate().map(|(i, f)| {
42+
let skip = utils::get_skip(&f.attrs).is_some();
43+
let field = field_name(i, &f.ident);
44+
45+
if skip {
46+
quote! {}
47+
} else {
48+
quote_spanned! { f.span() =>
49+
_lite_json::IntoJson::into_json(#field)
50+
}
51+
}
52+
});
53+
54+
quote! {
55+
_lite_json::JsonValue::Array( __core::vec![ #( #recurse, )* ] )
56+
}
57+
}
58+
59+
pub fn quote(data: &Data, type_name: &Ident) -> TokenStream {
60+
let self_ = quote!(self);
61+
let dest = &quote!(dest);
62+
let encoding = match *data {
63+
Data::Struct(ref data) => match data.fields {
64+
Fields::Named(ref fields) => {
65+
encode_named_fields(&fields.named, |_, name| quote!(&#self_.#name))
66+
}
67+
Fields::Unnamed(ref fields) => encode_unnamed_fields(&fields.unnamed, |i, _| {
68+
let i = syn::Index::from(i);
69+
quote!(&#self_.#i)
70+
}),
71+
Fields::Unit => quote! {
72+
_lite_json::JsonValue::Object( __core::vec![] )
73+
},
74+
},
75+
Data::Enum(ref data) => {
76+
let data_variants = || {
77+
data.variants
78+
.iter()
79+
.filter(|variant| crate::utils::get_skip(&variant.attrs).is_none())
80+
};
81+
82+
// If the enum has no variants, make it null
83+
if data_variants().count() == 0 {
84+
return quote!(_lite_json::JsonValue::Null);
85+
}
86+
87+
let recurse = data_variants().map(|f| {
88+
let name = &f.ident;
89+
90+
match f.fields {
91+
Fields::Named(ref fields) => {
92+
let field_name = |_, ident: &Option<Ident>| quote!(#ident);
93+
let names = fields.named
94+
.iter()
95+
.enumerate()
96+
.map(|(i, f)| field_name(i, &f.ident));
97+
98+
let encode_fields = encode_named_fields(
99+
&fields.named,
100+
|a, b| field_name(a, b),
101+
);
102+
103+
quote_spanned! { f.span() =>
104+
#type_name :: #name { #( ref #names, )* } => {
105+
_lite_json::JsonValue::Array(__core::vec![
106+
_lite_json::JsonValue::String(stringify!(#name).into_iter().collect()),
107+
#encode_fields
108+
])
109+
}
110+
}
111+
},
112+
Fields::Unnamed(ref fields) => {
113+
let field_name = |i, _: &Option<Ident>| {
114+
let data = stringify(i as u8);
115+
let ident = from_utf8(&data).expect("We never go beyond ASCII");
116+
let ident = Ident::new(ident, Span::call_site());
117+
quote!(#ident)
118+
};
119+
let names = fields.unnamed
120+
.iter()
121+
.enumerate()
122+
.map(|(i, f)| field_name(i, &f.ident));
123+
124+
let encode_fields = encode_unnamed_fields(
125+
&fields.unnamed,
126+
|a, b| field_name(a, b),
127+
);
128+
129+
quote_spanned! { f.span() =>
130+
#type_name :: #name { #( ref #names, )* } => {
131+
_lite_json::JsonValue::Array(__core::vec![
132+
_lite_json::JsonValue::String(stringify!(#name).into_iter().collect()),
133+
#encode_fields
134+
])
135+
}
136+
}
137+
},
138+
Fields::Unit => {
139+
quote_spanned! { f.span() =>
140+
#type_name :: #name => {
141+
_lite_json::JsonValue::String(stringify!(#name).into_iter().collect())
142+
}
143+
}
144+
},
145+
}
146+
});
147+
148+
quote! {
149+
match *#self_ {
150+
#( #recurse )*,
151+
_ => (),
152+
}
153+
}
154+
}
155+
Data::Union(ref data) => {
156+
Error::new(data.union_token.span(), "Union types are not supported.").to_compile_error()
157+
}
158+
};
159+
160+
quote! {
161+
fn encode_to<EncOut: _parity_scale_codec::Output>(&#self_, #dest: &mut EncOut) {
162+
#encoding
163+
}
164+
}
165+
}
166+
167+
/// Maps a small numeric id to a two-letter lowercase ASCII pair
/// (`0 -> "aa"`, `1 -> "ba"`, `26 -> "ab"`, ...), used to synthesise
/// identifier names for tuple-field bindings.
pub fn stringify(id: u8) -> [u8; 2] {
    const CHARS: &[u8] = b"abcdefghijklmnopqrstuvwxyz";
    let len = CHARS.len() as u8;
    // Least-significant letter first, then the "carry" letter.
    [
        CHARS[usize::from(id % len)],
        CHARS[usize::from(id / len)],
    ]
}

0 commit comments

Comments
 (0)