Commit
Showing 9 changed files with 957 additions and 3 deletions.
@@ -0,0 +1,19 @@
[package]
name = "lite-json-derive"
description = "Serialization and deserialization derive macro for lite-json"
version = "0.1.0"
authors = ["Bryan Chen <[email protected]>"]
license = "Apache-2.0"
edition = "2018"

[lib]
proc-macro = true

[dependencies]
syn = { version = "1.0.8", features = [ "full", "visit" ] }
quote = "1.0.2"
proc-macro2 = "1.0.6"
proc-macro-crate = "0.1.4"

[dev-dependencies]
lite-json = { path = "..", version = "0.1.0" }
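For context, a downstream crate would depend on both lite-json and lite-json-derive and annotate its types with the derive macros. A minimal usage sketch (the derive macro names FromJson and IntoJson are assumed from the trait paths referenced in the generated code, not confirmed by this diff):

    // Hypothetical consumer crate; the macro names FromJson/IntoJson are assumptions.
    use lite_json_derive::{FromJson, IntoJson};

    #[derive(FromJson, IntoJson)]
    struct Point {
        x: u32,
        y: u32,
    }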
@@ -0,0 +1,111 @@
// Heavily inspired by http://github.com/paritytech/parity-scale-codec

use proc_macro2::{Ident, Span, TokenStream};
use syn::{spanned::Spanned, Data, Error, Field, Fields};

use crate::utils;

/// Builds the decode (from-JSON) expression for the derived type.
pub fn quote(data: &Data, type_name: &Ident, input: &TokenStream) -> TokenStream {
    match *data {
        Data::Struct(ref data) => match data.fields {
            Fields::Named(_) | Fields::Unnamed(_) => {
                create_instance(quote! { #type_name }, input, &data.fields)
            }
            Fields::Unit => {
                quote_spanned! { data.fields.span() =>
                    Ok(#type_name)
                }
            }
        },
        Data::Enum(ref data) => {
            let data_variants = || {
                data.variants
                    .iter()
                    .filter(|variant| crate::utils::get_skip(&variant.attrs).is_none())
            };

            let recurse = data_variants().enumerate().map(|(i, v)| {
                let name = &v.ident;
                let index = utils::index(v, i);

                let create = create_instance(quote! { #type_name :: #name }, input, &v.fields);

                quote_spanned! { v.span() =>
                    x if x == #index as u8 => {
                        #create
                    },
                }
            });

            // TODO: match string name

            quote! {
                match #input {
                    _lite_json::JsonValue::Number(_lite_json::NumberValue { integer, .. }) => match integer {
                        // Each generated arm already ends with a comma.
                        #( #recurse )*
                        _ => None
                    },
                    _ => None,
                }
            }
        }
        Data::Union(_) => {
            Error::new(Span::call_site(), "Union types are not supported.").to_compile_error()
        }
    }
}

fn create_decode_expr(field: &Field, _name: &str, input: &TokenStream) -> TokenStream {
    let skip = utils::get_skip(&field.attrs).is_some();

    if skip {
        quote_spanned! { field.span() => Default::default() }
    } else {
        // No trailing semicolon: this expands in field-initializer position.
        quote_spanned! { field.span() =>
            _lite_json::FromJson::from_json(#input)?
        }
    }
}

fn create_instance(name: TokenStream, input: &TokenStream, fields: &Fields) -> TokenStream {
    match *fields {
        Fields::Named(ref fields) => {
            let recurse = fields.named.iter().map(|f| {
                let name_ident = &f.ident;
                let field = match name_ident {
                    Some(a) => format!("{}.{}", name, a),
                    None => format!("{}", name),
                };
                let decode = create_decode_expr(f, &field, input);

                quote_spanned! { f.span() =>
                    #name_ident: #decode
                }
            });

            quote_spanned! { fields.span() =>
                Ok(#name {
                    #( #recurse, )*
                })
            }
        }
        Fields::Unnamed(ref fields) => {
            let recurse = fields.unnamed.iter().enumerate().map(|(i, f)| {
                let name = format!("{}.{}", name, i);

                create_decode_expr(f, &name, input)
            });

            quote_spanned! { fields.span() =>
                Ok(#name (
                    #( #recurse, )*
                ))
            }
        }
        Fields::Unit => {
            quote_spanned! { fields.span() =>
                Ok(#name)
            }
        }
    }
}
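As the templates stand, create_instance passes the same #input expression to every field (the _name string it builds is currently unused). Hand-expanding the named-struct path for a hypothetical struct Point { x: u32, y: u32 } gives roughly:

    // Hand-expanded sketch (hypothetical type `Point`); `input` is the JSON value
    // handed to the derived from_json implementation.
    Ok(Point {
        x: _lite_json::FromJson::from_json(input)?,
        y: _lite_json::FromJson::from_json(input)?,
    })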
@@ -0,0 +1,175 @@
// Heavily inspired by http://github.com/paritytech/parity-scale-codec

use std::str::from_utf8;

use proc_macro2::{Ident, Span, TokenStream};
use syn::{punctuated::Punctuated, spanned::Spanned, token::Comma, Data, Error, Field, Fields};

use crate::utils;

type FieldsList = Punctuated<Field, Comma>;

fn encode_named_fields<F>(fields: &FieldsList, field_name: F) -> TokenStream
where
    F: Fn(usize, &Option<Ident>) -> TokenStream,
{
    let recurse = fields.iter().enumerate().map(|(i, f)| {
        let skip = utils::get_skip(&f.attrs).is_some();
        let field = field_name(i, &f.ident);
        // Use the field's identifier as the JSON object key.
        let key = f.ident.as_ref().map(|id| id.to_string()).unwrap_or_default();

        if skip {
            quote! {}
        } else {
            quote_spanned! { f.span() =>
                (
                    #key.chars().collect(),
                    _lite_json::IntoJson::into_json(#field)
                )
            }
        }
    });

    quote! {
        _lite_json::JsonValue::Object( __core::vec![ #( #recurse, )* ] )
    }
}

fn encode_unnamed_fields<F>(fields: &FieldsList, field_name: F) -> TokenStream
where
    F: Fn(usize, &Option<Ident>) -> TokenStream,
{
    let recurse = fields.iter().enumerate().map(|(i, f)| {
        let skip = utils::get_skip(&f.attrs).is_some();
        let field = field_name(i, &f.ident);

        if skip {
            quote! {}
        } else {
            quote_spanned! { f.span() =>
                _lite_json::IntoJson::into_json(#field)
            }
        }
    });

    quote! {
        _lite_json::JsonValue::Array( __core::vec![ #( #recurse, )* ] )
    }
}

pub fn quote(data: &Data, type_name: &Ident) -> TokenStream {
    let self_ = quote!(self);
    let dest = &quote!(dest);
    let encoding = match *data {
        Data::Struct(ref data) => match data.fields {
            Fields::Named(ref fields) => {
                encode_named_fields(&fields.named, |_, name| quote!(&#self_.#name))
            }
            Fields::Unnamed(ref fields) => encode_unnamed_fields(&fields.unnamed, |i, _| {
                let i = syn::Index::from(i);
                quote!(&#self_.#i)
            }),
            Fields::Unit => quote! {
                _lite_json::JsonValue::Object( __core::vec![] )
            },
        },
        Data::Enum(ref data) => {
            let data_variants = || {
                data.variants
                    .iter()
                    .filter(|variant| crate::utils::get_skip(&variant.attrs).is_none())
            };

            // If the enum has no variants, make it null
            if data_variants().count() == 0 {
                return quote!(_lite_json::JsonValue::Null);
            }

            let recurse = data_variants().map(|f| {
                let name = &f.ident;

                match f.fields {
                    Fields::Named(ref fields) => {
                        let field_name = |_, ident: &Option<Ident>| quote!(#ident);
                        let names = fields.named
                            .iter()
                            .enumerate()
                            .map(|(i, f)| field_name(i, &f.ident));

                        let encode_fields = encode_named_fields(
                            &fields.named,
                            |a, b| field_name(a, b),
                        );

                        quote_spanned! { f.span() =>
                            #type_name :: #name { #( ref #names, )* } => {
                                _lite_json::JsonValue::Array(__core::vec![
                                    _lite_json::JsonValue::String(stringify!(#name).chars().collect()),
                                    #encode_fields
                                ])
                            }
                        }
                    },
                    Fields::Unnamed(ref fields) => {
                        let field_name = |i, _: &Option<Ident>| {
                            let data = stringify(i as u8);
                            let ident = from_utf8(&data).expect("We never go beyond ASCII");
                            let ident = Ident::new(ident, Span::call_site());
                            quote!(#ident)
                        };
                        let names = fields.unnamed
                            .iter()
                            .enumerate()
                            .map(|(i, f)| field_name(i, &f.ident));

                        let encode_fields = encode_unnamed_fields(
                            &fields.unnamed,
                            |a, b| field_name(a, b),
                        );

                        // Tuple variants are matched with a parenthesised pattern.
                        quote_spanned! { f.span() =>
                            #type_name :: #name ( #( ref #names, )* ) => {
                                _lite_json::JsonValue::Array(__core::vec![
                                    _lite_json::JsonValue::String(stringify!(#name).chars().collect()),
                                    #encode_fields
                                ])
                            }
                        }
                    },
                    Fields::Unit => {
                        quote_spanned! { f.span() =>
                            #type_name :: #name => {
                                _lite_json::JsonValue::String(stringify!(#name).chars().collect())
                            }
                        }
                    },
                }
            });

            quote! {
                match *#self_ {
                    #( #recurse )*
                    // Skipped variants fall back to null, matching the empty-enum case above.
                    _ => _lite_json::JsonValue::Null,
                }
            }
        }
        Data::Union(ref data) => {
            Error::new(data.union_token.span(), "Union types are not supported.").to_compile_error()
        }
    };

    quote! {
        fn encode_to<EncOut: _parity_scale_codec::Output>(&#self_, #dest: &mut EncOut) {
            #encoding
        }
    }
}

/// Maps a tuple-field index to a two-letter lowercase ASCII identifier
/// (0 -> "aa", 1 -> "ba", ..., 26 -> "ab").
pub fn stringify(id: u8) -> [u8; 2] {
    const CHARS: &[u8] = b"abcdefghijklmnopqrstuvwxyz";
    let len = CHARS.len() as u8;
    let symbol = |id: u8| CHARS[(id % len) as usize];
    let a = symbol(id);
    let b = symbol(id / len);

    [a, b]
}
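For a quick sanity check, hand-expanding the struct path of the encode template for the same hypothetical Point { x: u32, y: u32 } (with the field-name keys produced by encode_named_fields) yields roughly:

    // Hand-expanded sketch (hypothetical type `Point`); `__core::vec!` is the
    // crate-local alias used by the generated code.
    _lite_json::JsonValue::Object( __core::vec![
        ("x".chars().collect(), _lite_json::IntoJson::into_json(&self.x)),
        ("y".chars().collect(), _lite_json::IntoJson::into_json(&self.y)),
    ] )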