In the midst of a refactor
This commit is contained in:
17
orchid-api-derive/Cargo.toml
Normal file
17
orchid-api-derive/Cargo.toml
Normal file
@@ -0,0 +1,17 @@
|
||||
[package]
|
||||
name = "orchid-api-derive"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
quote = "1.0.35"
|
||||
syn = { version = "2.0.52" }
|
||||
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
|
||||
proc-macro2 = "1.0.78"
|
||||
darling = "0.20.8"
|
||||
itertools = "0.12.1"
|
||||
30
orchid-api-derive/src/common.rs
Normal file
30
orchid-api-derive/src/common.rs
Normal file
@@ -0,0 +1,30 @@
|
||||
use proc_macro2 as pm2;
|
||||
use quote::ToTokens;
|
||||
use syn::spanned::Spanned;
|
||||
|
||||
/// Append `bound` to every type parameter of `generics` and return the
/// modified generics. Lifetime and const parameters are left untouched.
pub fn add_trait_bounds(mut generics: syn::Generics, bound: syn::TypeParamBound) -> syn::Generics {
  generics.params.iter_mut().for_each(|param| {
    if let syn::GenericParam::Type(type_param) = param {
      type_param.bounds.push(bound.clone());
    }
  });
  generics
}
|
||||
|
||||
pub fn destructure(fields: &syn::Fields) -> Option<pm2::TokenStream> {
|
||||
match fields {
|
||||
syn::Fields::Unit => None,
|
||||
syn::Fields::Named(_) => {
|
||||
let field_list = fields.iter().map(|f| f.ident.as_ref().unwrap());
|
||||
Some(quote! { { #(#field_list),* } })
|
||||
},
|
||||
syn::Fields::Unnamed(un) => {
|
||||
let field_list = (0..fields.len()).map(|i| pos_field_name(i, un.span()));
|
||||
Some(quote! { ( #(#field_list),* ) })
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn pos_field_name(i: usize, span: pm2::Span) -> pm2::TokenStream {
|
||||
syn::Ident::new(&format!("field_{i}"), span).to_token_stream()
|
||||
}
|
||||
56
orchid-api-derive/src/decode.rs
Normal file
56
orchid-api-derive/src/decode.rs
Normal file
@@ -0,0 +1,56 @@
|
||||
use proc_macro::TokenStream;
|
||||
use proc_macro2 as pm2;
|
||||
|
||||
use crate::common::add_trait_bounds;
|
||||
|
||||
pub fn derive(input: TokenStream) -> TokenStream {
|
||||
// Parse the input tokens into a syntax tree
|
||||
let input = parse_macro_input!(input as syn::DeriveInput);
|
||||
let generics = add_trait_bounds(input.generics, parse_quote!(orchid_api_traits::Decode));
|
||||
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
|
||||
let name = input.ident;
|
||||
let decode = decode_body(&input.data);
|
||||
let expanded = quote! {
|
||||
impl #impl_generics orchid_api_traits::Decode for #name #ty_generics #where_clause {
|
||||
fn decode<R: std::io::Read>(read: &mut R) -> Self { #decode }
|
||||
}
|
||||
};
|
||||
TokenStream::from(expanded)
|
||||
}
|
||||
|
||||
fn decode_fields(fields: &syn::Fields) -> pm2::TokenStream {
|
||||
match fields {
|
||||
syn::Fields::Unit => pm2::TokenStream::new(),
|
||||
syn::Fields::Named(_) => {
|
||||
let names = fields.iter().map(|f| f.ident.as_ref().unwrap());
|
||||
quote! { { #( #names: orchid_api_traits::Decode::decode(read), )* } }
|
||||
},
|
||||
syn::Fields::Unnamed(_) => {
|
||||
let exprs = fields.iter().map(|_| quote! { orchid_api_traits::Decode::decode(read), });
|
||||
quote! { ( #( #exprs )* ) }
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn decode_body(data: &syn::Data) -> proc_macro2::TokenStream {
|
||||
match data {
|
||||
syn::Data::Union(_) => panic!("Unions can't be deserialized"),
|
||||
syn::Data::Struct(str) => {
|
||||
let fields = decode_fields(&str.fields);
|
||||
quote! { Self #fields }
|
||||
},
|
||||
syn::Data::Enum(en) => {
|
||||
let opts = en.variants.iter().enumerate().map(|(i, v @ syn::Variant { ident, .. })| {
|
||||
let fields = decode_fields(&v.fields);
|
||||
let id = i as u8;
|
||||
quote! { #id => Self::#ident #fields, }
|
||||
});
|
||||
quote! {
|
||||
match <u8 as orchid_api_traits::Decode>::decode(read) {
|
||||
#(#opts)*
|
||||
x => panic!("Unrecognized enum kind {x}")
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
68
orchid-api-derive/src/encode.rs
Normal file
68
orchid-api-derive/src/encode.rs
Normal file
@@ -0,0 +1,68 @@
|
||||
|
||||
use proc_macro::TokenStream;
|
||||
use proc_macro2 as pm2;
|
||||
use quote::ToTokens;
|
||||
use syn::spanned::Spanned;
|
||||
|
||||
use crate::common::{add_trait_bounds, destructure, pos_field_name};
|
||||
|
||||
pub fn derive(input: TokenStream) -> TokenStream {
|
||||
// Parse the input tokens into a syntax tree
|
||||
let input = parse_macro_input!(input as syn::DeriveInput);
|
||||
let e_generics = add_trait_bounds(input.generics, parse_quote!(orchid_api_traits::Decode));
|
||||
let (e_impl_generics, e_ty_generics, e_where_clause) = e_generics.split_for_impl();
|
||||
let name = input.ident;
|
||||
let encode = encode_body(&input.data);
|
||||
let expanded = quote! {
|
||||
impl #e_impl_generics orchid_api_traits::Encode for #name #e_ty_generics #e_where_clause {
|
||||
fn encode<W: std::io::Write>(&self, write: &mut W) { #encode }
|
||||
}
|
||||
};
|
||||
TokenStream::from(expanded)
|
||||
}
|
||||
|
||||
fn encode_body(data: &syn::Data) -> Option<pm2::TokenStream> {
|
||||
match data {
|
||||
syn::Data::Union(_) => panic!("Unions can't be deserialized"),
|
||||
syn::Data::Struct(str) => {
|
||||
let dest = destructure(&str.fields)?;
|
||||
let body = encode_items(&str.fields);
|
||||
Some(quote! {
|
||||
let Self #dest = &self;
|
||||
#body
|
||||
})
|
||||
},
|
||||
syn::Data::Enum(en) => {
|
||||
let options = en.variants.iter().enumerate().map(|(i, v @ syn::Variant { ident, .. })| {
|
||||
let dest = destructure(&v.fields).unwrap_or_default();
|
||||
let body = encode_items(&v.fields);
|
||||
quote! {
|
||||
Self::#ident #dest => {
|
||||
(#i as u64).encode(write);
|
||||
#body
|
||||
}
|
||||
}
|
||||
});
|
||||
Some(quote! {
|
||||
match self {
|
||||
#(#options)*
|
||||
_ => unreachable!("Autogenerated encode impl for all possible variants"),
|
||||
}
|
||||
})
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn encode_names<T: ToTokens>(names: impl Iterator<Item = T>) -> pm2::TokenStream {
|
||||
quote! { #( #names .encode(write); )* }
|
||||
}
|
||||
|
||||
fn encode_items(fields: &syn::Fields) -> Option<pm2::TokenStream> {
|
||||
match fields {
|
||||
syn::Fields::Unit => None,
|
||||
syn::Fields::Named(_) => Some(encode_names(fields.iter().map(|f| f.ident.as_ref().unwrap()))),
|
||||
syn::Fields::Unnamed(un) =>
|
||||
Some(encode_names((0..fields.len()).map(|i| pos_field_name(i, un.span())))),
|
||||
}
|
||||
}
|
||||
|
||||
127
orchid-api-derive/src/hierarchy.rs
Normal file
127
orchid-api-derive/src/hierarchy.rs
Normal file
@@ -0,0 +1,127 @@
|
||||
use std::iter;
|
||||
|
||||
use itertools::Itertools;
|
||||
use pm2::TokenTree;
|
||||
use proc_macro::TokenStream;
|
||||
use proc_macro2 as pm2;
|
||||
use syn::DeriveInput;
|
||||
|
||||
pub fn derive(input: TokenStream) -> TokenStream {
|
||||
// Parse the input tokens into a syntax tree
|
||||
let input = parse_macro_input!(input as syn::DeriveInput);
|
||||
let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
|
||||
let name = &input.ident;
|
||||
let extendable = is_extendable(&input);
|
||||
let is_leaf_val = if extendable { quote!(TLFalse) } else { quote!(TLTrue) };
|
||||
match get_ancestry(&input) {
|
||||
None => TokenStream::from(quote! {
|
||||
impl #impl_generics orchid_api_traits::InHierarchy for #name #ty_generics #where_clause {
|
||||
type IsRoot = orchid_api_traits::TLTrue;
|
||||
type IsLeaf = orchid_api_traits:: #is_leaf_val ;
|
||||
}
|
||||
}),
|
||||
Some(ancestry) => {
|
||||
let parent = ancestry[0].clone();
|
||||
let casts = gen_casts(&ancestry[..], "e!(#name));
|
||||
TokenStream::from(quote! {
|
||||
#casts
|
||||
impl #impl_generics orchid_api_traits::InHierarchy for #name #ty_generics #where_clause {
|
||||
type IsRoot = orchid_api_traits::TLFalse;
|
||||
type IsLeaf = orchid_api_traits:: #is_leaf_val ;
|
||||
}
|
||||
impl #impl_generics orchid_api_traits::Extends for #name #ty_generics #where_clause {
|
||||
type Parent = #parent;
|
||||
}
|
||||
})
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate `From` impls that lift `this` up through every ancestor in
/// `ancestry`, and `TryFrom` impls that lower each ancestor back down to
/// `this`.
///
/// NOTE(review): this relies on the convention that every ancestor enum has a
/// variant named after its immediate child type which wraps that child —
/// TODO confirm this holds when ancestry entries are path-qualified
/// (`mod::Type`), since `#cur::#prev` splices the full path as a variant name.
fn gen_casts(ancestry: &[pm2::TokenStream], this: &pm2::TokenStream) -> pm2::TokenStream {
  // For each adjacent (child, ancestor) pair — starting from `this` itself —
  // emit `impl From<this> for ancestor` that wraps one level deeper via the
  // previously generated conversion (`value.into()`).
  let from_impls = iter::once(this).chain(ancestry.iter()).tuple_windows().map(|(prev, cur)| {
    quote! {
      impl From<#this> for #cur {
        fn from(value: #this) -> Self {
          #cur::#prev(value.into())
        }
      }
    }
  });
  // One TryFrom impl per ancestry prefix: TryFrom<ancestry[len-1]> for `this`.
  let try_from_impls = (1..=ancestry.len()).map(|len| {
    // `orig` is the outermost ancestor of this prefix, `inter` the chain
    // strictly between `this` and `orig` (immediate parent first).
    let (orig, inter) = ancestry[..len].split_last().unwrap();
    // Build a nested match arm that evaluates to `true` iff the innermost
    // payload of the value is the `last` (i.e. `this`) variant.
    fn gen_chk(r: &[pm2::TokenStream], last: &pm2::TokenStream) -> pm2::TokenStream {
      match r.split_last() {
        // Base case: reached the variant wrapping `this` directly.
        None => quote! { #last (_) => true },
        // Recursive case: the outermost layer is the last (closest to `orig`)
        // element of `r`; descend into its payload.
        Some((ty, tail)) => {
          let sub = gen_chk(tail, last);
          quote! {
            #ty ( value ) => match value {
              #ty:: #sub ,
              _ => false
            }
          }
        }
      }
    }
    let chk = gen_chk(inter, this);
    // Same nesting as gen_chk, but moves the innermost payload out instead of
    // testing for it. Only reached after `chk` confirmed the shape, hence the
    // unreachable!() arms.
    fn gen_unpk(r: &[pm2::TokenStream], last: &pm2::TokenStream) -> pm2::TokenStream {
      match r.split_last() {
        None => quote! { #last ( value ) => value },
        Some((ty, tail)) => {
          let sub = gen_unpk(tail, last);
          quote! {
            #ty ( value ) => match value {
              #ty:: #sub ,
              _ => unreachable!("Checked above!"),
            }
          }
        }
      }
    }
    let unpk = gen_unpk(inter, this);
    // Two passes over the value: a borrow-only check, then the destructuring
    // unpack. This avoids partially moving out of `value` before knowing the
    // cast succeeds, so the original can be returned as the error.
    quote! {
      impl TryFrom<#orig> for #this {
        type Error = #orig;
        fn try_from(value: #orig) -> Result<Self, Self::Error> {
          let can_cast = match &value {
            #orig:: #chk ,
            _ => false
          };
          if !can_cast { return Err(value) }
          Ok ( match value {
            #orig:: #unpk ,
            _ => unreachable!("Checked above!")
          } )
        }
      }
    }
  });
  // Flatten each generated impl's token stream into one combined stream.
  from_impls.chain(try_from_impls).flatten().collect()
}
|
||||
|
||||
fn get_ancestry(input: &DeriveInput) -> Option<Vec<pm2::TokenStream>> {
|
||||
input.attrs.iter().find(|a| a.path().get_ident().is_some_and(|i| *i == "extends")).map(|attr| {
|
||||
match &attr.meta {
|
||||
syn::Meta::List(list) => (list.tokens.clone().into_iter())
|
||||
.batching(|it| {
|
||||
let grp: pm2::TokenStream = it
|
||||
.take_while(|t| {
|
||||
if let TokenTree::Punct(punct) = t { punct.as_char() != ',' } else { true }
|
||||
})
|
||||
.collect();
|
||||
(!grp.is_empty()).then_some(grp)
|
||||
})
|
||||
.collect(),
|
||||
_ => panic!("The correct format of the parent macro is #[parent(SomeParentType)]"),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn is_extendable(input: &DeriveInput) -> bool {
|
||||
input.attrs.iter().any(|a| a.path().get_ident().is_some_and(|i| *i == "extendable"))
|
||||
}
|
||||
|
||||
// Ad-hoc debugging test: prints the casts generated for a one-deep hierarchy
// (BogusReq extending ExtHostReq) so the output can be inspected manually.
// It has no assertions and always passes; run `cargo test -- --nocapture`
// to see the generated impls.
#[test]
fn test_wtf() {
  eprintln!("{}", gen_casts(&[quote!(ExtHostReq)], &quote!(BogusReq)))
}
|
||||
27
orchid-api-derive/src/lib.rs
Normal file
27
orchid-api-derive/src/lib.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
mod common;
mod decode;
mod encode;
mod hierarchy;

// The submodules use quote!/parse_macro_input!/parse_quote! via these
// macro imports.
#[macro_use]
extern crate quote;
#[macro_use]
extern crate syn;

// NOTE(review): not referenced in this file beyond the allow; presumably kept
// so the trait stays a dependency of this crate — confirm before removing.
#[allow(unused)]
use orchid_api_traits::Coding;
use proc_macro::TokenStream;

/// Derive `orchid_api_traits::Decode` for a struct or enum.
#[proc_macro_derive(Decode)]
pub fn decode(input: TokenStream) -> TokenStream { decode::derive(input) }

/// Derive `orchid_api_traits::Encode` for a struct or enum.
#[proc_macro_derive(Encode)]
pub fn encode(input: TokenStream) -> TokenStream { encode::derive(input) }

/// Derive hierarchy impls, configured by the `#[extends(...)]` and
/// `#[extendable]` helper attributes.
#[proc_macro_derive(Hierarchy, attributes(extends, extendable))]
pub fn hierarchy(input: TokenStream) -> TokenStream { hierarchy::derive(input) }

/// Derive both `Decode` and `Encode` in one pass by concatenating the two
/// generated impls.
#[proc_macro_derive(Coding)]
pub fn coding(input: TokenStream) -> TokenStream {
  decode(input.clone()).into_iter().chain(encode(input)).collect()
}
|
||||
Reference in New Issue
Block a user