Updated everything and moved to hard tab indentation

2025-01-08 19:20:34 +01:00
parent 7cdfe7e3c4
commit 52c8d1c95a
100 changed files with 5949 additions and 5998 deletions

@@ -3,28 +3,28 @@ use quote::ToTokens;
use syn::spanned::Spanned;
pub fn add_trait_bounds(mut generics: syn::Generics, bound: syn::TypeParamBound) -> syn::Generics {
	for param in &mut generics.params {
		if let syn::GenericParam::Type(ref mut type_param) = *param {
			type_param.bounds.push(bound.clone())
		}
	}
	generics
}
pub fn destructure(fields: &syn::Fields) -> Option<pm2::TokenStream> {
	match fields {
		syn::Fields::Unit => None,
		syn::Fields::Named(_) => {
			let field_list = fields.iter().map(|f| f.ident.as_ref().unwrap());
			Some(quote! { { #(#field_list),* } })
		},
		syn::Fields::Unnamed(un) => {
			let field_list = (0..fields.len()).map(|i| pos_field_name(i, un.span()));
			Some(quote! { ( #(#field_list),* ) })
		},
	}
}
pub fn pos_field_name(i: usize, span: pm2::Span) -> pm2::TokenStream {
	syn::Ident::new(&format!("field_{i}"), span).to_token_stream()
}
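
For context, a quick sketch of what these helpers produce. The struct shapes and the `fields_of` helper below are made up for illustration and are not part of this commit; it assumes `syn` with its default (derive) features.

fn fields_of(input: &syn::DeriveInput) -> &syn::Fields {
	match &input.data {
		syn::Data::Struct(s) => &s.fields,
		_ => unreachable!("this demo only passes structs"),
	}
}
fn demo_destructure() {
	let named: syn::DeriveInput = syn::parse_quote! { struct Named { a: u32, b: String } };
	let tuple: syn::DeriveInput = syn::parse_quote! { struct Tuple(u32, String); };
	let unit: syn::DeriveInput = syn::parse_quote! { struct Unit; };
	// Named fields yield a brace pattern reusing the field names, roughly `{ a, b }`:
	assert!(destructure(fields_of(&named)).is_some());
	// Tuple fields yield a positional pattern via pos_field_name, roughly `( field_0, field_1 )`:
	assert!(destructure(fields_of(&tuple)).is_some());
	// Unit shapes have nothing to destructure:
	assert!(destructure(fields_of(&unit)).is_none());
}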

@@ -4,53 +4,53 @@ use proc_macro2 as pm2;
use crate::common::add_trait_bounds;
pub fn derive(input: TokenStream) -> TokenStream {
	// Parse the input tokens into a syntax tree
	let input = parse_macro_input!(input as syn::DeriveInput);
	let generics = add_trait_bounds(input.generics, parse_quote!(orchid_api_traits::Decode));
	let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
	let name = input.ident;
	let decode = decode_body(&input.data);
	let expanded = quote! {
		impl #impl_generics orchid_api_traits::Decode for #name #ty_generics #where_clause {
			fn decode<R: std::io::Read + ?Sized>(read: &mut R) -> Self { #decode }
		}
	};
	TokenStream::from(expanded)
}
fn decode_fields(fields: &syn::Fields) -> pm2::TokenStream {
	match fields {
		syn::Fields::Unit => quote! {},
		syn::Fields::Named(_) => {
			let names = fields.iter().map(|f| f.ident.as_ref().unwrap());
			quote! { { #( #names: orchid_api_traits::Decode::decode(read), )* } }
		},
		syn::Fields::Unnamed(_) => {
			let exprs = fields.iter().map(|_| quote! { orchid_api_traits::Decode::decode(read), });
			quote! { ( #( #exprs )* ) }
		},
	}
}
fn decode_body(data: &syn::Data) -> proc_macro2::TokenStream {
	match data {
		syn::Data::Union(_) => panic!("Unions can't be deserialized"),
		syn::Data::Struct(str) => {
			let fields = decode_fields(&str.fields);
			quote! { Self #fields }
		},
		syn::Data::Enum(en) => {
			let opts = en.variants.iter().enumerate().map(|(i, v @ syn::Variant { ident, .. })| {
				let fields = decode_fields(&v.fields);
				let id = i as u8;
				quote! { #id => Self::#ident #fields, }
			});
			quote! {
				match <u8 as orchid_api_traits::Decode>::decode(read) {
					#(#opts)*
					x => panic!("Unrecognized enum kind {x}")
				}
			}
		},
	}
}
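
To make the generated shape concrete: for a hypothetical two-variant enum (not taken from this commit), the derive above would expand to roughly the following.

// Hypothetical input:  #[derive(Decode)] enum Msg { Ping, Text { body: String } }
// Approximate expansion produced by decode_body / decode_fields:
impl orchid_api_traits::Decode for Msg {
	fn decode<R: std::io::Read + ?Sized>(read: &mut R) -> Self {
		// A one-byte discriminant picks the variant, then each field decodes in order.
		match <u8 as orchid_api_traits::Decode>::decode(read) {
			0u8 => Self::Ping,
			1u8 => Self::Text { body: orchid_api_traits::Decode::decode(read) },
			x => panic!("Unrecognized enum kind {x}"),
		}
	}
}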

@@ -6,61 +6,61 @@ use syn::spanned::Spanned;
use crate::common::{add_trait_bounds, destructure, pos_field_name};
pub fn derive(input: TokenStream) -> TokenStream {
	// Parse the input tokens into a syntax tree
	let input = parse_macro_input!(input as syn::DeriveInput);
	let e_generics = add_trait_bounds(input.generics, parse_quote!(orchid_api_traits::Encode));
	let (e_impl_generics, e_ty_generics, e_where_clause) = e_generics.split_for_impl();
	let name = input.ident;
	let encode = encode_body(&input.data);
	let expanded = quote! {
		impl #e_impl_generics orchid_api_traits::Encode for #name #e_ty_generics #e_where_clause {
			fn encode<W: std::io::Write + ?Sized>(&self, write: &mut W) { #encode }
		}
	};
	TokenStream::from(expanded)
}
fn encode_body(data: &syn::Data) -> Option<pm2::TokenStream> {
	match data {
		syn::Data::Union(_) => panic!("Unions can't be serialized"),
		syn::Data::Struct(str) => {
			let dest = destructure(&str.fields)?;
			let body = encode_items(&str.fields);
			Some(quote! {
				let Self #dest = &self;
				#body
			})
		},
		syn::Data::Enum(en) => {
			let options = en.variants.iter().enumerate().map(|(i, v @ syn::Variant { ident, .. })| {
				let dest = destructure(&v.fields).unwrap_or_default();
				let body = encode_items(&v.fields);
				quote! {
					Self::#ident #dest => {
						(#i as u8).encode(write);
						#body
					}
				}
			});
			Some(quote! {
				match self {
					#(#options)*
					_ => unreachable!("Autogenerated encode impl for all possible variants"),
				}
			})
		},
	}
}
fn encode_names<T: ToTokens>(names: impl Iterator<Item = T>) -> pm2::TokenStream {
	quote! { #( #names .encode(write); )* }
}
fn encode_items(fields: &syn::Fields) -> Option<pm2::TokenStream> {
	match fields {
		syn::Fields::Unit => None,
		syn::Fields::Named(_) => Some(encode_names(fields.iter().map(|f| f.ident.as_ref().unwrap()))),
		syn::Fields::Unnamed(un) =>
			Some(encode_names((0..fields.len()).map(|i| pos_field_name(i, un.span())))),
	}
}
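
The matching Encode expansion for the same hypothetical Msg enum (again approximate, not from this commit):

// Hypothetical input:  #[derive(Encode)] enum Msg { Ping, Text { body: String } }
// Approximate expansion: each arm destructures, writes the discriminant, then the fields.
impl orchid_api_traits::Encode for Msg {
	fn encode<W: std::io::Write + ?Sized>(&self, write: &mut W) {
		match self {
			Self::Ping => {
				(0usize as u8).encode(write);
			}
			Self::Text { body } => {
				(1usize as u8).encode(write);
				body.encode(write);
			}
			_ => unreachable!("Autogenerated encode impl for all possible variants"),
		}
	}
}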

@@ -7,118 +7,118 @@ use proc_macro2 as pm2;
use syn::DeriveInput;
pub fn derive(input: TokenStream) -> TokenStream {
	// Parse the input tokens into a syntax tree
	let input = parse_macro_input!(input as syn::DeriveInput);
	let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
	let name = &input.ident;
	let extendable = is_extendable(&input);
	let is_leaf_val = if extendable { quote!(TLFalse) } else { quote!(TLTrue) };
	match get_ancestry(&input) {
		None => TokenStream::from(quote! {
			impl #impl_generics orchid_api_traits::InHierarchy for #name #ty_generics #where_clause {
				type IsRoot = orchid_api_traits::TLTrue;
				type IsLeaf = orchid_api_traits:: #is_leaf_val ;
			}
		}),
		Some(ancestry) => {
			let parent = ancestry[0].clone();
			let casts = gen_casts(&ancestry[..], &quote!(#name));
			TokenStream::from(quote! {
				#casts
				impl #impl_generics orchid_api_traits::InHierarchy for #name #ty_generics #where_clause {
					type IsRoot = orchid_api_traits::TLFalse;
					type IsLeaf = orchid_api_traits:: #is_leaf_val ;
				}
				impl #impl_generics orchid_api_traits::Extends for #name #ty_generics #where_clause {
					type Parent = #parent;
				}
			})
		},
	}
}
fn gen_casts(ancestry: &[pm2::TokenStream], this: &pm2::TokenStream) -> pm2::TokenStream {
	// Each ancestor gains a From<#this> impl that wraps the value one variant layer at a time.
	let from_impls = iter::once(this).chain(ancestry.iter()).tuple_windows().map(|(prev, cur)| {
		quote! {
			impl From<#this> for #cur {
				fn from(value: #this) -> Self {
					#cur::#prev(value.into())
				}
			}
		}
	});
	let try_from_impls = (1..=ancestry.len()).map(|len| {
		let (orig, inter) = ancestry[..len].split_last().unwrap();
		// gen_chk builds a nested match arm that tests whether `orig` ultimately holds a #this.
		fn gen_chk(r: &[pm2::TokenStream], last: &pm2::TokenStream) -> pm2::TokenStream {
			match r.split_last() {
				None => quote! { #last (_) => true },
				Some((ty, tail)) => {
					let sub = gen_chk(tail, last);
					quote! {
						#ty ( value ) => match value {
							#ty:: #sub ,
							_ => false
						}
					}
				},
			}
		}
		let chk = gen_chk(inter, this);
		// gen_unpk mirrors gen_chk but moves the wrapped value out instead of testing it.
		fn gen_unpk(r: &[pm2::TokenStream], last: &pm2::TokenStream) -> pm2::TokenStream {
			match r.split_last() {
				None => quote! { #last ( value ) => value },
				Some((ty, tail)) => {
					let sub = gen_unpk(tail, last);
					quote! {
						#ty ( value ) => match value {
							#ty:: #sub ,
							_ => unreachable!("Checked above!"),
						}
					}
				},
			}
		}
		let unpk = gen_unpk(inter, this);
		quote! {
			impl TryFrom<#orig> for #this {
				type Error = #orig;
				fn try_from(value: #orig) -> Result<Self, Self::Error> {
					let can_cast = match &value {
						#orig:: #chk ,
						_ => false
					};
					if !can_cast { return Err(value) }
					Ok ( match value {
						#orig:: #unpk ,
						_ => unreachable!("Checked above!")
					} )
				}
			}
		}
	});
	from_impls.chain(try_from_impls).flatten().collect()
}
fn get_ancestry(input: &DeriveInput) -> Option<Vec<pm2::TokenStream>> {
	input.attrs.iter().find(|a| a.path().get_ident().is_some_and(|i| *i == "extends")).map(|attr| {
		match &attr.meta {
			syn::Meta::List(list) => (list.tokens.clone().into_iter())
				.batching(|it| {
					let grp: pm2::TokenStream =
						it.take_while(|t| {
							if let TokenTree::Punct(punct) = t { punct.as_char() != ',' } else { true }
						})
						.collect();
					(!grp.is_empty()).then_some(grp)
				})
				.collect(),
			_ => panic!("The correct format of the extends attribute is #[extends(SomeParentType)]"),
		}
	})
}
fn is_extendable(input: &DeriveInput) -> bool {
	input.attrs.iter().any(|a| a.path().get_ident().is_some_and(|i| *i == "extendable"))
}
#[test]
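
A usage sketch for the attribute format this derive expects; the type names and the `Hierarchy` derive name are assumptions for illustration, not taken from this commit.

// Hypothetical hierarchy: each parent enum wraps its child in a variant of the same name.
#[derive(Hierarchy)]
#[extendable]
enum Root { Branch(Branch) }

#[derive(Hierarchy)]
#[extendable]
#[extends(Root)]
enum Branch { Leaf(Leaf) }

// Leaves list their ancestry nearest-first in #[extends(...)].
#[derive(Hierarchy)]
#[extends(Branch, Root)]
struct Leaf;

// gen_casts then provides the cast chains:
//   let root: Root = Leaf.into();      // From: wraps as Root::Branch(Branch::Leaf(Leaf))
//   let leaf = Leaf::try_from(root);   // TryFrom: checked downcast, Err(value) on mismatch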

@@ -23,5 +23,5 @@ pub fn hierarchy(input: TokenStream) -> TokenStream { hierarchy::derive(input) }
#[proc_macro_derive(Coding)]
pub fn coding(input: TokenStream) -> TokenStream {
	decode(input.clone()).into_iter().chain(encode(input)).collect()
}
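
Finally, a hedged round-trip sketch using the combined derive; the crate name `orchid_api_derive` and the `Header` type are assumptions for illustration.

use orchid_api_derive::Coding; // assumed crate name
use orchid_api_traits::{Decode, Encode};

#[derive(Coding)] // expands to both the Decode and the Encode impl
struct Header {
	version: u8,
	tag: String,
}

fn roundtrip(h: &Header) -> Header {
	let mut buf = Vec::new();
	h.encode(&mut buf); // Vec<u8> implements std::io::Write
	Header::decode(&mut &buf[..]) // &[u8] implements std::io::Read
}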