From 039a4ea317acf69c954e433dc65b29acb2b2837a Mon Sep 17 00:00:00 2001
From: Bryan Chen
Date: Sat, 18 Apr 2020 18:37:54 +1200
Subject: [PATCH] derive WIP

---
 Cargo.toml                 |   8 +-
 derive/Cargo.toml          |  19 +++
 derive/src/from_json.rs    | 111 ++++++++++++++++
 derive/src/into_json.rs    | 175 +++++++++++++++++++++++++
 derive/src/lib.rs          | 146 +++++++++++++++
 derive/src/trait_bounds.rs | 262 +++++++++++++++++++++++++++++++++++++
 derive/src/utils.rs        | 219 +++++++++++++++++++++++++++++++
 src/lib.rs                 |   8 ++
 src/traits.rs              |  12 ++
 9 files changed, 957 insertions(+), 3 deletions(-)
 create mode 100644 derive/Cargo.toml
 create mode 100644 derive/src/from_json.rs
 create mode 100644 derive/src/into_json.rs
 create mode 100644 derive/src/lib.rs
 create mode 100644 derive/src/trait_bounds.rs
 create mode 100644 derive/src/utils.rs

diff --git a/Cargo.toml b/Cargo.toml
index 2d29a7c..e7f8e4c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -14,16 +14,18 @@ categories = [
 [dependencies]
 lite-parser = { path = "parser", default-features = false }
 num-traits = { version = "0.2", optional = true, default-features = false }
+lite-json-derive = { path = "derive", default-features = false, optional = true }

 [features]
 default = ["std"]
 std = [
 	"lite-parser/std"
 ]
-# Enables converting values to floats in no-`std` environment
-float = ["num-traits"]
+float = ["num-traits"] # Enables converting values to floats in no-`std` environment
+derive = ["lite-json-derive"]

 [workspace]
 members = [
-	"parser"
+	"parser",
+	"derive"
 ]
diff --git a/derive/Cargo.toml b/derive/Cargo.toml
new file mode 100644
index 0000000..e3f8425
--- /dev/null
+++ b/derive/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "lite-json-derive"
+description = "Serialization and deserialization derive macro for lite-json"
+version = "0.1.0"
+authors = ["Bryan Chen "]
+license = "Apache-2.0"
+edition = "2018"
+
+[lib]
+proc-macro = true
+
+[dependencies]
+syn = { version = "1.0.8", features = [ "full", "visit" ] }
+quote = "1.0.2"
+proc-macro2 = "1.0.6"
+proc-macro-crate = "0.1.4"
+
+[dev-dependencies]
+lite-json = { path = "..", version = "0.1.0" }
diff --git a/derive/src/from_json.rs b/derive/src/from_json.rs
new file mode 100644
index 0000000..6a2b756
--- /dev/null
+++ b/derive/src/from_json.rs
@@ -0,0 +1,111 @@
+// Heavily inspired by http://github.com/paritytech/parity-scale-codec
+
+use proc_macro2::{Ident, Span, TokenStream};
+use syn::{spanned::Spanned, Data, Error, Field, Fields};
+
+use crate::utils;
+
+pub fn quote(data: &Data, type_name: &Ident, input: &TokenStream) -> TokenStream {
+    match *data {
+        Data::Struct(ref data) => match data.fields {
+            Fields::Named(_) | Fields::Unnamed(_) => {
+                create_instance(quote! { #type_name }, input, &data.fields)
+            }
+            Fields::Unit => {
+                quote_spanned! { data.fields.span() =>
+                    Some(#type_name)
+                }
+            }
+        },
+        Data::Enum(ref data) => {
+            let data_variants = || {
+                data.variants
+                    .iter()
+                    .filter(|variant| crate::utils::get_skip(&variant.attrs).is_none())
+            };
+
+            let recurse = data_variants().enumerate().map(|(i, v)| {
+                let name = &v.ident;
+                let index = utils::index(v, i);
+
+                let create = create_instance(quote! { #type_name :: #name }, input, &v.fields);
+
+                quote_spanned! { v.span() =>
+                    x if x == #index as u8 => {
+                        #create
+                    },
+                }
+            });
+
+            // TODO: match string name
+
+            quote! {
+                match #input {
+                    _lite_json::JsonValue::Number(_lite_json::NumberValue { integer, .. }) => match integer {
+                        #( #recurse )*
+                        _ => None
+                    },
+                    _ => None,
+                }
+            }
+        }
+        Data::Union(_) => {
+            Error::new(Span::call_site(), "Union types are not supported.").to_compile_error()
+        }
+    }
+}
+
+fn create_decode_expr(field: &Field, _name: &str, input: &TokenStream) -> TokenStream {
+    let skip = utils::get_skip(&field.attrs).is_some();
+
+    if skip {
+        quote_spanned! { field.span() => Default::default() }
+    } else {
+        quote_spanned! { field.span() =>
+            _lite_json::FromJson::from_json(#input)?
+        }
+    }
+}
+
+fn create_instance(name: TokenStream, input: &TokenStream, fields: &Fields) -> TokenStream {
+    match *fields {
+        Fields::Named(ref fields) => {
+            let recurse = fields.named.iter().map(|f| {
+                let name_ident = &f.ident;
+                let field = match name_ident {
+                    Some(a) => format!("{}.{}", name, a),
+                    None => format!("{}", name),
+                };
+                let decode = create_decode_expr(f, &field, input);
+
+                quote_spanned! { f.span() =>
+                    #name_ident: #decode
+                }
+            });
+
+            quote_spanned! { fields.span() =>
+                Some(#name {
+                    #( #recurse, )*
+                })
+            }
+        }
+        Fields::Unnamed(ref fields) => {
+            let recurse = fields.unnamed.iter().enumerate().map(|(i, f)| {
+                let name = format!("{}.{}", name, i);
+
+                create_decode_expr(f, &name, input)
+            });
+
+            quote_spanned! { fields.span() =>
+                Some(#name (
+                    #( #recurse, )*
+                ))
+            }
+        }
+        Fields::Unit => {
+            quote_spanned! { fields.span() =>
+                Some(#name)
+            }
+        }
+    }
+}
diff --git a/derive/src/into_json.rs b/derive/src/into_json.rs
new file mode 100644
index 0000000..5c80ee1
--- /dev/null
+++ b/derive/src/into_json.rs
@@ -0,0 +1,175 @@
+// Heavily inspired by http://github.com/paritytech/parity-scale-codec
+
+use std::str::from_utf8;
+
+use proc_macro2::{Ident, Span, TokenStream};
+use syn::{punctuated::Punctuated, spanned::Spanned, token::Comma, Data, Error, Field, Fields};
+
+use crate::utils;
+
+type FieldsList = Punctuated<Field, Comma>;
+
+fn encode_named_fields<F>(fields: &FieldsList, field_name: F) -> TokenStream
+where
+    F: Fn(usize, &Option<Ident>) -> TokenStream,
+{
+    let recurse = fields.iter().enumerate().map(|(i, f)| {
+        let skip = utils::get_skip(&f.attrs).is_some();
+        let name_ident = &f.ident;
+        let field = field_name(i, &f.ident);
+
+        if skip {
+            quote! {}
+        } else {
+            quote_spanned! { f.span() =>
+                (
+                    stringify!(#name_ident).chars().collect(),
+                    _lite_json::IntoJson::into_json(#field)
+                )
+            }
+        }
+    });
+
+    quote! {
+        _lite_json::JsonValue::Object( __core::vec![ #( #recurse, )* ] )
+    }
+}
+
+fn encode_unnamed_fields<F>(fields: &FieldsList, field_name: F) -> TokenStream
+where
+    F: Fn(usize, &Option<Ident>) -> TokenStream,
+{
+    let recurse = fields.iter().enumerate().map(|(i, f)| {
+        let skip = utils::get_skip(&f.attrs).is_some();
+        let field = field_name(i, &f.ident);
+
+        if skip {
+            quote! {}
+        } else {
+            quote_spanned! { f.span() =>
+                _lite_json::IntoJson::into_json(#field)
+            }
+        }
+    });
+
+    quote! {
+        _lite_json::JsonValue::Array( __core::vec![ #( #recurse, )* ] )
+    }
+}
+
+pub fn quote(data: &Data, type_name: &Ident) -> TokenStream {
+    let self_ = quote!(self);
+    let encoding = match *data {
+        Data::Struct(ref data) => match data.fields {
+            Fields::Named(ref fields) => {
+                encode_named_fields(&fields.named, |_, name| quote!(&#self_.#name))
+            }
+            Fields::Unnamed(ref fields) => encode_unnamed_fields(&fields.unnamed, |i, _| {
+                let i = syn::Index::from(i);
+                quote!(&#self_.#i)
+            }),
+            Fields::Unit => quote! {
+                _lite_json::JsonValue::Object( __core::vec![] )
+            },
+        },
+        Data::Enum(ref data) => {
+            let data_variants = || {
+                data.variants
+                    .iter()
+                    .filter(|variant| crate::utils::get_skip(&variant.attrs).is_none())
+            };
+
+            // If the enum has no variants, make it null
+            if data_variants().count() == 0 {
+                return quote!(_lite_json::JsonValue::Null);
+            }
+
+            let recurse = data_variants().map(|f| {
+                let name = &f.ident;
+
+                match f.fields {
+                    Fields::Named(ref fields) => {
+                        let field_name = |_, ident: &Option<Ident>| quote!(#ident);
+                        let names = fields.named
+                            .iter()
+                            .enumerate()
+                            .map(|(i, f)| field_name(i, &f.ident));
+
+                        let encode_fields = encode_named_fields(
+                            &fields.named,
+                            |a, b| field_name(a, b),
+                        );
+
+                        quote_spanned! { f.span() =>
+                            #type_name :: #name { #( ref #names, )* } => {
+                                _lite_json::JsonValue::Array(__core::vec![
+                                    _lite_json::JsonValue::String(stringify!(#name).chars().collect()),
+                                    #encode_fields
+                                ])
+                            }
+                        }
+                    },
+                    Fields::Unnamed(ref fields) => {
+                        let field_name = |i, _: &Option<Ident>| {
+                            let data = stringify(i as u8);
+                            let ident = from_utf8(&data).expect("We never go beyond ASCII");
+                            let ident = Ident::new(ident, Span::call_site());
+                            quote!(#ident)
+                        };
+                        let names = fields.unnamed
+                            .iter()
+                            .enumerate()
+                            .map(|(i, f)| field_name(i, &f.ident));
+
+                        let encode_fields = encode_unnamed_fields(
+                            &fields.unnamed,
+                            |a, b| field_name(a, b),
+                        );
+
+                        quote_spanned! { f.span() =>
+                            #type_name :: #name ( #( ref #names, )* ) => {
+                                _lite_json::JsonValue::Array(__core::vec![
+                                    _lite_json::JsonValue::String(stringify!(#name).chars().collect()),
+                                    #encode_fields
+                                ])
+                            }
+                        }
+                    },
+                    Fields::Unit => {
+                        quote_spanned! { f.span() =>
+                            #type_name :: #name => {
+                                _lite_json::JsonValue::String(stringify!(#name).chars().collect())
+                            }
+                        }
+                    },
+                }
+            });
+
+            quote! {
+                match #self_ {
+                    #( #recurse )*
+                    _ => _lite_json::JsonValue::Null,
+                }
+            }
+        }
+        Data::Union(ref data) => {
+            Error::new(data.union_token.span(), "Union types are not supported.").to_compile_error()
+        }
+    };
+
+    quote! {
+        #encoding
+    }
+}
+
+pub fn stringify(id: u8) -> [u8; 2] {
+    const CHARS: &[u8] = b"abcdefghijklmnopqrstuvwxyz";
+    let len = CHARS.len() as u8;
+    let symbol = |id: u8| CHARS[(id % len) as usize];
+    let a = symbol(id);
+    let b = symbol(id / len);
+
+    [a, b]
+}
diff --git a/derive/src/lib.rs b/derive/src/lib.rs
new file mode 100644
index 0000000..9c13975
--- /dev/null
+++ b/derive/src/lib.rs
@@ -0,0 +1,146 @@
+// Heavily inspired by parity-scale-codec
+
+#![recursion_limit = "128"]
+extern crate proc_macro;
+
+#[macro_use]
+extern crate syn;
+
+#[macro_use]
+extern crate quote;
+
+use proc_macro2::{Ident, Span};
+use proc_macro_crate::crate_name;
+use syn::{DeriveInput, Error};
+
+mod from_json;
+mod into_json;
+mod trait_bounds;
+mod utils;
+
+/// Include the `lite-json` crate under a known name (`_lite_json`).
+fn include_crate() -> proc_macro2::TokenStream {
+    // This "hack" is required for the tests.
+    if std::env::var("CARGO_PKG_NAME").unwrap() == "lite-json" {
+        quote!(
+            extern crate lite_json as _lite_json;
+        )
+    } else {
+        match crate_name("lite-json") {
+            Ok(lite_json_crate) => {
+                let ident = Ident::new(&lite_json_crate, Span::call_site());
+                quote!( extern crate #ident as _lite_json; )
+            }
+            Err(e) => Error::new(Span::call_site(), &e).to_compile_error(),
+        }
+    }
+}
+
+/// Wraps the impl block in a "dummy const".
+fn wrap_with_dummy_const(impl_block: proc_macro2::TokenStream) -> proc_macro::TokenStream {
+    let crate_name = include_crate();
+
+    let generated = quote! {
+        const _: () = {
+            #[allow(unknown_lints)]
+            #[cfg_attr(feature = "cargo-clippy", allow(useless_attribute))]
+            #[allow(rust_2018_idioms)]
+            #crate_name
+
+            #[cfg(feature = "std")]
+            mod __core {
+                pub use ::core::*;
+                pub use ::std::{vec, vec::Vec};
+            }
+
+            #[cfg(not(feature = "std"))]
+            mod __core {
+                pub use ::core::*;
+                pub use ::alloc::{vec, vec::Vec};
+            }
+
+            #impl_block
+        };
+    };
+
+    generated.into()
+}
+
+/// Derive `lite_json::IntoJson` for structs and enums.
+#[proc_macro_derive(IntoJson, attributes(json))]
+pub fn into_json_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
+    let mut input: DeriveInput = match syn::parse(input) {
+        Ok(input) => input,
+        Err(e) => return e.to_compile_error().into(),
+    };
+
+    if let Err(e) = utils::check_attributes(&input) {
+        return e.to_compile_error().into();
+    }
+
+    if let Err(e) = trait_bounds::add(
+        &input.ident,
+        &mut input.generics,
+        &input.data,
+        parse_quote!(_lite_json::IntoJson),
+        None,
+        utils::get_dumb_trait_bound(&input.attrs),
+    ) {
+        return e.to_compile_error().into();
+    }
+
+    let name = &input.ident;
+    let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
+
+    let body = into_json::quote(&input.data, name);
+
+    let impl_block = quote! {
+        impl #impl_generics _lite_json::IntoJson for #name #ty_generics #where_clause {
+            fn into_json(self) -> _lite_json::JsonValue {
+                #body
+            }
+        }
+    };
+
+    wrap_with_dummy_const(impl_block)
+}
+
+/// Derive `lite_json::FromJson` for structs and enums.
+#[proc_macro_derive(FromJson, attributes(json))]
+pub fn from_json_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
+    let mut input: DeriveInput = match syn::parse(input) {
+        Ok(input) => input,
+        Err(e) => return e.to_compile_error().into(),
+    };
+
+    if let Err(e) = utils::check_attributes(&input) {
+        return e.to_compile_error().into();
+    }
+
+    if let Err(e) = trait_bounds::add(
+        &input.ident,
+        &mut input.generics,
+        &input.data,
+        parse_quote!(_lite_json::FromJson),
+        Some(parse_quote!(Default)),
+        utils::get_dumb_trait_bound(&input.attrs),
+    ) {
+        return e.to_compile_error().into();
+    }
+
+    let name = &input.ident;
+    let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
+
+    let input_ = quote!(input);
+    let body = from_json::quote(&input.data, name, &input_);
+
+    let impl_block = quote! {
+        impl #impl_generics _lite_json::FromJson for #name #ty_generics #where_clause {
+            fn from_json(input: _lite_json::JsonValue) -> Option<Self> {
+                #body
+            }
+        }
+    };
+
+    wrap_with_dummy_const(impl_block)
+}
diff --git a/derive/src/trait_bounds.rs b/derive/src/trait_bounds.rs
new file mode 100644
index 0000000..cafae5b
--- /dev/null
+++ b/derive/src/trait_bounds.rs
@@ -0,0 +1,262 @@
+// Taken from http://github.com/paritytech/parity-scale-codec
+
+// Copyright 2019 Parity Technologies
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+use std::iter;
+
+use proc_macro2::Ident;
+use syn::{
+    spanned::Spanned,
+    visit::{self, Visit},
+    Generics, Result, Type, TypePath,
+};
+
+/// Visits the ast and checks if one of the given idents is found.
+struct ContainIdents<'a> {
+    result: bool,
+    idents: &'a [Ident],
+}
+
+impl<'a, 'ast> Visit<'ast> for ContainIdents<'a> {
+    fn visit_ident(&mut self, i: &'ast Ident) {
+        if self.idents.iter().any(|id| id == i) {
+            self.result = true;
+        }
+    }
+}
+
+/// Checks if the given type contains one of the given idents.
+fn type_contain_idents(ty: &Type, idents: &[Ident]) -> bool {
+    let mut visitor = ContainIdents {
+        result: false,
+        idents,
+    };
+    visitor.visit_type(ty);
+    visitor.result
+}
+
+/// Visits the ast and checks if a type path starts with the given ident.
+struct TypePathStartsWithIdent<'a> {
+    result: bool,
+    ident: &'a Ident,
+}
+
+impl<'a, 'ast> Visit<'ast> for TypePathStartsWithIdent<'a> {
+    fn visit_type_path(&mut self, i: &'ast TypePath) {
+        if let Some(segment) = i.path.segments.first() {
+            if &segment.ident == self.ident {
+                self.result = true;
+                return;
+            }
+        }
+
+        visit::visit_type_path(self, i);
+    }
+}
+
+/// Checks if the given type path or any containing type path starts with the given ident.
+fn type_path_or_sub_starts_with_ident(ty: &TypePath, ident: &Ident) -> bool {
+    let mut visitor = TypePathStartsWithIdent {
+        result: false,
+        ident,
+    };
+    visitor.visit_type_path(ty);
+    visitor.result
+}
+
+/// Checks if the given type or any containing type path starts with the given ident.
+fn type_or_sub_type_path_starts_with_ident(ty: &Type, ident: &Ident) -> bool {
+    let mut visitor = TypePathStartsWithIdent {
+        result: false,
+        ident,
+    };
+    visitor.visit_type(ty);
+    visitor.result
+}
+
+/// Visits the ast and collects all type paths that do not start or contain the given ident.
+///
+/// Returns `T`, `N`, `A` for `Vec<(Recursive<T, N>, A)>` with `Recursive` as ident.
+struct FindTypePathsNotStartOrContainIdent<'a> {
+    result: Vec<TypePath>,
+    ident: &'a Ident,
+}
+
+impl<'a, 'ast> Visit<'ast> for FindTypePathsNotStartOrContainIdent<'a> {
+    fn visit_type_path(&mut self, i: &'ast TypePath) {
+        if type_path_or_sub_starts_with_ident(i, &self.ident) {
+            visit::visit_type_path(self, i);
+        } else {
+            self.result.push(i.clone());
+        }
+    }
+}
+
+/// Collects all type paths that do not start or contain the given ident in the given type.
+///
+/// Returns `T`, `N`, `A` for `Vec<(Recursive<T, N>, A)>` with `Recursive` as ident.
+fn find_type_paths_not_start_or_contain_ident(ty: &Type, ident: &Ident) -> Vec { + let mut visitor = FindTypePathsNotStartOrContainIdent { + result: Vec::new(), + ident, + }; + visitor.visit_type(ty); + visitor.result +} + +/// Add required trait bounds to all generic types. +pub fn add( + input_ident: &Ident, + generics: &mut Generics, + data: &syn::Data, + codec_bound: syn::Path, + codec_skip_bound: Option, + dumb_trait_bounds: bool, +) -> Result<()> { + let ty_params = generics + .type_params() + .map(|p| p.ident.clone()) + .collect::>(); + if ty_params.is_empty() { + return Ok(()); + } + + let codec_types = + get_types_to_add_trait_bound(input_ident, data, &ty_params, dumb_trait_bounds)?; + + let skip_types = if codec_skip_bound.is_some() { + collect_types(&data, needs_default_bound, variant_not_skipped)? + .into_iter() + // Only add a bound if the type uses a generic + .filter(|ty| type_contain_idents(ty, &ty_params)) + .collect::>() + } else { + Vec::new() + }; + + if !codec_types.is_empty() || !skip_types.is_empty() { + let where_clause = generics.make_where_clause(); + + codec_types.into_iter().for_each(|ty| { + where_clause + .predicates + .push(parse_quote!(#ty : #codec_bound)) + }); + + skip_types.into_iter().for_each(|ty| { + let codec_skip_bound = codec_skip_bound.as_ref().unwrap(); + where_clause + .predicates + .push(parse_quote!(#ty : #codec_skip_bound)) + }); + } + + Ok(()) +} + +/// Returns all types that must be added to the where clause with the respective trait bound. +fn get_types_to_add_trait_bound( + input_ident: &Ident, + data: &syn::Data, + ty_params: &[Ident], + dumb_trait_bound: bool, +) -> Result> { + if dumb_trait_bound { + Ok(ty_params.iter().map(|t| parse_quote!( #t )).collect()) + } else { + let res = collect_types(&data, needs_codec_bound, variant_not_skipped)? + .into_iter() + // Only add a bound if the type uses a generic + .filter(|ty| type_contain_idents(ty, &ty_params)) + // If a struct is cotaining itself as field type, we can not add this type into the where clause. + // This is required to work a round the following compiler bug: https://github.com/rust-lang/rust/issues/47032 + .flat_map(|ty| { + find_type_paths_not_start_or_contain_ident(&ty, input_ident) + .into_iter() + .map(|ty| Type::Path(ty.clone())) + // Remove again types that do not contain any of our generic parameters + .filter(|ty| type_contain_idents(ty, &ty_params)) + // Add back the original type, as we don't want to loose him. + .chain(iter::once(ty)) + }) + // Remove all remaining types that start/contain the input ident to not have them in the where clause. + .filter(|ty| !type_or_sub_type_path_starts_with_ident(ty, input_ident)) + .collect(); + + Ok(res) + } +} + +fn needs_codec_bound(field: &syn::Field) -> bool { + crate::utils::get_skip(&field.attrs).is_none() +} + +fn needs_default_bound(field: &syn::Field) -> bool { + crate::utils::get_skip(&field.attrs).is_some() +} + +fn variant_not_skipped(variant: &syn::Variant) -> bool { + crate::utils::get_skip(&variant.attrs).is_none() +} + +fn collect_types( + data: &syn::Data, + type_filter: fn(&syn::Field) -> bool, + variant_filter: fn(&syn::Variant) -> bool, +) -> Result> { + use syn::*; + + let types = match *data { + Data::Struct(ref data) => match &data.fields { + Fields::Named(FieldsNamed { named: fields, .. }) + | Fields::Unnamed(FieldsUnnamed { + unnamed: fields, .. 
+ }) => fields + .iter() + .filter(|f| type_filter(f)) + .map(|f| f.ty.clone()) + .collect(), + + Fields::Unit => Vec::new(), + }, + + Data::Enum(ref data) => data + .variants + .iter() + .filter(|variant| variant_filter(variant)) + .flat_map(|variant| match &variant.fields { + Fields::Named(FieldsNamed { named: fields, .. }) + | Fields::Unnamed(FieldsUnnamed { + unnamed: fields, .. + }) => fields + .iter() + .filter(|f| type_filter(f)) + .map(|f| f.ty.clone()) + .collect(), + + Fields::Unit => Vec::new(), + }) + .collect(), + + Data::Union(ref data) => { + return Err(Error::new( + data.union_token.span(), + "Union types are not supported.", + )) + } + }; + + Ok(types) +} diff --git a/derive/src/utils.rs b/derive/src/utils.rs new file mode 100644 index 0000000..0cd9cb8 --- /dev/null +++ b/derive/src/utils.rs @@ -0,0 +1,219 @@ +// Taken from http://github.com/paritytech/parity-scale-json + +// Copyright 2018 Parity Technologies +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Various internal utils. +//! +//! NOTE: attributes finder must be checked using check_attribute first, otherwise macro can panic. + +use proc_macro2::{Span, TokenStream}; +use syn::{ + spanned::Spanned, Attribute, Data, DeriveInput, Fields, FieldsNamed, FieldsUnnamed, Lit, Meta, + MetaNameValue, NestedMeta, Variant, +}; + +fn find_meta_item<'a, F, R, I>(itr: I, pred: F) -> Option +where + F: FnMut(&NestedMeta) -> Option + Clone, + I: Iterator, +{ + itr.filter_map(|attr| { + if attr.path.is_ident("json") { + if let Meta::List(ref meta_list) = attr + .parse_meta() + .expect("Internal error, parse_meta must have been checked") + { + return meta_list.nested.iter().filter_map(pred.clone()).next(); + } + } + + None + }) + .next() +} + +pub fn index(v: &Variant, i: usize) -> TokenStream { + // look for an index in attributes + let index = find_meta_item(v.attrs.iter(), |meta| { + if let NestedMeta::Meta(Meta::NameValue(ref nv)) = meta { + if nv.path.is_ident("index") { + if let Lit::Int(ref v) = nv.lit { + let byte = v + .base10_parse::() + .expect("Internal error, index attribute must have been checked"); + return Some(byte); + } + } + } + + None + }); + + // then fallback to discriminant or just index + index.map(|i| quote! { #i }).unwrap_or_else(|| { + v.discriminant + .as_ref() + .map(|&(_, ref expr)| quote! { #expr }) + .unwrap_or_else(|| quote! { #i }) + }) +} + +// return span of skip if found +pub fn get_skip(attrs: &[Attribute]) -> Option { + // look for `skip` in the attributes + find_meta_item(attrs.iter(), |meta| { + if let NestedMeta::Meta(Meta::Path(ref path)) = meta { + if path.is_ident("skip") { + return Some(path.span()); + } + } + + None + }) +} + +/// Returns if the `dumb_trait_bound` attribute is given in `attrs`. 
+pub fn get_dumb_trait_bound(attrs: &[Attribute]) -> bool { + find_meta_item(attrs.iter(), |meta| { + if let NestedMeta::Meta(Meta::Path(ref path)) = meta { + if path.is_ident("dumb_trait_bound") { + return Some(()); + } + } + + None + }) + .is_some() +} + +pub fn check_attributes(input: &DeriveInput) -> syn::Result<()> { + for attr in &input.attrs { + check_top_attribute(attr)?; + } + + match input.data { + Data::Struct(ref data) => match &data.fields { + Fields::Named(FieldsNamed { named: fields, .. }) + | Fields::Unnamed(FieldsUnnamed { + unnamed: fields, .. + }) => { + for field in fields { + for attr in &field.attrs { + check_field_attribute(attr)?; + } + } + } + Fields::Unit => (), + }, + Data::Enum(ref data) => { + for variant in data.variants.iter() { + for attr in &variant.attrs { + check_variant_attribute(attr)?; + } + for field in &variant.fields { + for attr in &field.attrs { + check_field_attribute(attr)?; + } + } + } + } + Data::Union(_) => (), + } + Ok(()) +} + +// Is accepted only: +// * `#[json(skip)]` +fn check_field_attribute(attr: &Attribute) -> syn::Result<()> { + let field_error = "Invalid attribute on field, only `#[json(skip)]` is accepted."; + + if attr.path.is_ident("json") { + match attr.parse_meta()? { + Meta::List(ref meta_list) if meta_list.nested.len() == 1 => { + match meta_list.nested.first().unwrap() { + NestedMeta::Meta(Meta::Path(path)) + if path.get_ident().map_or(false, |i| i == "skip") => + { + Ok(()) + } + elt @ _ => Err(syn::Error::new(elt.span(), field_error)), + } + } + meta @ _ => Err(syn::Error::new(meta.span(), field_error)), + } + } else { + Ok(()) + } +} + +// Is accepted only: +// * `#[json(skip)]` +// * `#[json(index = $int)]` +fn check_variant_attribute(attr: &Attribute) -> syn::Result<()> { + let variant_error = "Invalid attribute on variant, only `#[json(skip)]` and \ + `#[json(index = $u8)]` are accepted."; + + if attr.path.is_ident("json") { + match attr.parse_meta()? { + Meta::List(ref meta_list) if meta_list.nested.len() == 1 => { + match meta_list.nested.first().unwrap() { + NestedMeta::Meta(Meta::Path(path)) + if path.get_ident().map_or(false, |i| i == "skip") => + { + Ok(()) + } + + NestedMeta::Meta(Meta::NameValue(MetaNameValue { + path, + lit: Lit::Int(lit_int), + .. + })) if path.get_ident().map_or(false, |i| i == "index") => lit_int + .base10_parse::() + .map(|_| ()) + .map_err(|_| syn::Error::new(lit_int.span(), "Index must be in 0..255")), + + elt @ _ => Err(syn::Error::new(elt.span(), variant_error)), + } + } + meta @ _ => Err(syn::Error::new(meta.span(), variant_error)), + } + } else { + Ok(()) + } +} + +// Only `#[json(dumb_trait_bound)]` is accepted as top attribute +fn check_top_attribute(attr: &Attribute) -> syn::Result<()> { + let top_error = "Invalid attribute only `#[json(dumb_trait_bound)]` is accepted as top \ + attribute"; + if attr.path.is_ident("json") { + match attr.parse_meta()? 
{ + Meta::List(ref meta_list) if meta_list.nested.len() == 1 => { + match meta_list.nested.first().unwrap() { + NestedMeta::Meta(Meta::Path(path)) + if path.get_ident().map_or(false, |i| i == "dumb_trait_bound") => + { + Ok(()) + } + + elt @ _ => Err(syn::Error::new(elt.span(), top_error)), + } + } + meta @ _ => Err(syn::Error::new(meta.span(), top_error)), + } + } else { + Ok(()) + } +} diff --git a/src/lib.rs b/src/lib.rs index 1f361cf..92da3b5 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -7,3 +7,11 @@ pub mod traits; pub use crate::json::*; pub use crate::json_parser::*; pub use crate::traits::*; + +#[cfg(feature = "lite-json-derive")] +#[allow(unused_imports)] +#[macro_use] +extern crate lite_json_derive; + +#[cfg(feature = "lite-json-derive")] +pub use lite_json_derive::*; diff --git a/src/traits.rs b/src/traits.rs index ca7d501..debbe9d 100644 --- a/src/traits.rs +++ b/src/traits.rs @@ -4,6 +4,8 @@ extern crate alloc; #[cfg(not(feature = "std"))] use alloc::vec::Vec; +use crate::json::JsonValue; + pub trait Serialize { fn serialize(&self) -> Vec { let mut res = Vec::new(); @@ -17,3 +19,13 @@ pub trait Serialize { } fn serialize_to(&self, buffer: &mut Vec, indent: u32, level: u32); } + +pub trait IntoJson { + fn into_json(self) -> JsonValue; +} + +pub trait FromJson: Sized { + fn from_json(value: JsonValue) -> Option; +} + +// TODO: implement IntoJson & FromJson for common types such as &str, integers, Vec, Box, Option, Result, etc
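As a note on the final TODO: a minimal, illustrative sketch (not part of this patch) of what manual impls for a primitive type could look like, assuming the `JsonValue::Boolean` variant exposed by `lite-json` and the two traits introduced in src/traits.rs above.

// Hypothetical example; would live in src/traits.rs (or a dedicated impls
// module) where `JsonValue` is already imported.
impl IntoJson for bool {
    fn into_json(self) -> JsonValue {
        JsonValue::Boolean(self)
    }
}

impl FromJson for bool {
    fn from_json(value: JsonValue) -> Option<Self> {
        match value {
            // Only a JSON boolean maps back to `bool`; anything else is a type mismatch.
            JsonValue::Boolean(b) => Some(b),
            _ => None,
        }
    }
}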