Compare commits

10 Commits

Author SHA1 Message Date
1a7230ce9b Traditional route appears to work
Beginnings of dylib extensions, entirely untested
2026-01-12 01:38:10 +01:00
32d6237dc5 task_local context over context objects
- interner impls are logically separate from the API in orchid-base (the default host interner still lives in base for testing)
- error reporting, logging, and a variety of other features are passed down via context in the extension, but not yet in the host, to preserve its library-like profile; the options still need consideration
- no global spawn mechanism: the host has a spawn function, but extensions only get a stash for enqueuing async work from sync callbacks, which is then popped and awaited explicitly, manually, and in strict order (a sketch of this pattern follows the commit list)
- still deadlocks nondeterministically for some ungodly reason
2026-01-01 14:54:29 +00:00
06debb3636 Tests pass for reqnot 2025-12-16 00:02:45 +01:00
0b2b05d44e Orchid-base uses task-local context.
Everything else is broken at the moment.
2025-12-14 17:17:43 +01:00
8753d4c751 Added docs to unsync-pipe 2025-12-14 01:32:24 +01:00
224c4ecca2 Added unsync-pipe with some tests 2025-12-13 02:28:10 +01:00
0f89cde246 Added a binary-safe (hopefully) pipe for upcoming dylib extension support 2025-12-12 17:32:01 +01:00
85d45cf0ef Unboxed whatever I could 2025-12-11 16:33:49 +01:00
d211f3127d Added untested comm impl 2025-12-11 16:25:46 +01:00
4e4dc381ea Fixed match, and enabled macro keywords to share names with constants 2025-11-30 02:30:42 +01:00
124 changed files with 4756 additions and 3612 deletions
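
The second commit (32d6237dc5) describes a "stash" that synchronous callbacks enqueue async work into, which the owning task then pops and awaits in strict order. Below is a minimal, hypothetical sketch of that pattern; none of these names (Stash, enqueue, drain) appear in the codebase, this only illustrates the shape of the mechanism.

```rust
use std::cell::RefCell;
use std::collections::VecDeque;
use std::future::Future;

use futures::future::LocalBoxFuture;

#[derive(Default)]
struct Stash(RefCell<VecDeque<LocalBoxFuture<'static, ()>>>);

impl Stash {
  /// Safe to call from a synchronous callback: the work is only queued.
  fn enqueue(&self, fut: impl Future<Output = ()> + 'static) {
    self.0.borrow_mut().push_back(Box::pin(fut));
  }
  /// Popped and awaited explicitly by the owning task, in insertion order.
  async fn drain(&self) {
    loop {
      let next = self.0.borrow_mut().pop_front(); // release the borrow before awaiting
      match next {
        Some(fut) => fut.await,
        None => break,
      }
    }
  }
}
```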

Cargo.lock (generated, 877 changed lines)

File diff suppressed because it is too large

View File

@@ -11,5 +11,7 @@ members = [
"orchid-api-derive", "orchid-api-derive",
"orchid-api-traits", "orchid-api-traits",
"stdio-perftest", "stdio-perftest",
"xtask", "async-fn-stream", "xtask",
"async-fn-stream",
"unsync-pipe",
] ]

View File

@@ -7,4 +7,4 @@ edition = "2024"
futures = { version = "0.3.31", features = ["std"], default-features = false } futures = { version = "0.3.31", features = ["std"], default-features = false }
[dev-dependencies] [dev-dependencies]
test_executors = "0.3.5" test_executors = "0.4.1"

View File

@@ -1,6 +1,6 @@
let my_tuple = option::some t[1, 2] let my_tuple = option::some t[1, 2]
let main = match my_tuple { let main = match my_tuple {
option::of t[ref head, ..] => head; option::some t[ref head, ..] => head;
option::empty => "foo"; option::none => "foo";
} }

View File

@@ -9,8 +9,8 @@ proc-macro = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
quote = "1.0.40" quote = "1.0.42"
syn = { version = "2.0.106" } syn = { version = "2.0.112" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
proc-macro2 = "1.0.101" proc-macro2 = "1.0.104"
itertools = "0.14.0" itertools = "0.14.0"

View File

@@ -14,8 +14,8 @@ pub fn derive(input: TokenStream) -> TokenStream {
impl #impl_generics orchid_api_traits::Decode for #name #ty_generics #where_clause { impl #impl_generics orchid_api_traits::Decode for #name #ty_generics #where_clause {
async fn decode<R: orchid_api_traits::AsyncRead + ?Sized>( async fn decode<R: orchid_api_traits::AsyncRead + ?Sized>(
mut read: std::pin::Pin<&mut R> mut read: std::pin::Pin<&mut R>
) -> Self { ) -> std::io::Result<Self> {
#decode Ok(#decode)
} }
} }
}; };
@@ -30,7 +30,7 @@ fn decode_fields(fields: &syn::Fields) -> pm2::TokenStream {
let syn::Field { ty, ident, .. } = &f; let syn::Field { ty, ident, .. } = &f;
quote! { quote! {
#ident : (Box::pin(< #ty as orchid_api_traits::Decode>::decode(read.as_mut())) #ident : (Box::pin(< #ty as orchid_api_traits::Decode>::decode(read.as_mut()))
as std::pin::Pin<Box<dyn std::future::Future<Output = _>>>).await as std::pin::Pin<Box<dyn std::future::Future<Output = std::io::Result<_>>>>).await?
} }
}); });
quote! { { #( #exprs, )* } } quote! { { #( #exprs, )* } }
@@ -40,7 +40,7 @@ fn decode_fields(fields: &syn::Fields) -> pm2::TokenStream {
let ty = &field.ty; let ty = &field.ty;
quote! { quote! {
(Box::pin(< #ty as orchid_api_traits::Decode>::decode(read.as_mut())) (Box::pin(< #ty as orchid_api_traits::Decode>::decode(read.as_mut()))
as std::pin::Pin<Box<dyn std::future::Future<Output = _>>>).await, as std::pin::Pin<Box<dyn std::future::Future<Output = std::io::Result<_>>>>).await?,
} }
}); });
quote! { ( #( #exprs )* ) } quote! { ( #( #exprs )* ) }
@@ -62,7 +62,7 @@ fn decode_body(data: &syn::Data) -> proc_macro2::TokenStream {
quote! { #id => Self::#ident #fields, } quote! { #id => Self::#ident #fields, }
}); });
quote! { quote! {
match <u8 as orchid_api_traits::Decode>::decode(read.as_mut()).await { match <u8 as orchid_api_traits::Decode>::decode(read.as_mut()).await? {
#(#opts)* #(#opts)*
x => panic!("Unrecognized enum kind {x}") x => panic!("Unrecognized enum kind {x}")
} }

View File

@@ -17,8 +17,9 @@ pub fn derive(input: TokenStream) -> TokenStream {
async fn encode<W: orchid_api_traits::AsyncWrite + ?Sized>( async fn encode<W: orchid_api_traits::AsyncWrite + ?Sized>(
&self, &self,
mut write: std::pin::Pin<&mut W> mut write: std::pin::Pin<&mut W>
) { ) -> std::io::Result<()> {
#encode #encode;
Ok(())
} }
} }
}; };
@@ -43,7 +44,7 @@ fn encode_body(data: &syn::Data) -> Option<pm2::TokenStream> {
quote! { quote! {
Self::#ident #dest => { Self::#ident #dest => {
(Box::pin((#i as u8).encode(write.as_mut())) (Box::pin((#i as u8).encode(write.as_mut()))
as std::pin::Pin<Box<dyn std::future::Future<Output = _>>>).await; as std::pin::Pin<Box<dyn std::future::Future<Output = std::io::Result<()>>>>).await?;
#body #body
} }
} }
@@ -61,7 +62,7 @@ fn encode_body(data: &syn::Data) -> Option<pm2::TokenStream> {
fn encode_names<T: ToTokens>(names: impl Iterator<Item = T>) -> pm2::TokenStream { fn encode_names<T: ToTokens>(names: impl Iterator<Item = T>) -> pm2::TokenStream {
quote! { #( quote! { #(
(Box::pin(#names .encode(write.as_mut())) (Box::pin(#names .encode(write.as_mut()))
as std::pin::Pin<Box<dyn std::future::Future<Output = _>>>).await; as std::pin::Pin<Box<dyn std::future::Future<Output = std::io::Result<()>>>>).await?;
)* } )* }
} }
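
With both derives above now emitting fallible `io::Result` bodies, a derived type round-trips through the synchronous helpers that appear later in this compare (`enc_vec`, `decode_slice`). A sketch, assuming the combined `Coding` derive covers both traits; the `Greeting` type is illustrative.

```rust
use orchid_api_derive::Coding;
use orchid_api_traits::{Decode, enc_vec};

#[derive(Clone, Debug, PartialEq, Coding)]
struct Greeting {
  id: u64,
  text: String,
}

fn round_trip() {
  let g = Greeting { id: 7, text: "hi".to_string() };
  let bytes = enc_vec(&g); // synchronous encode into a Vec<u8>
  let mut slice = &bytes[..];
  let back = Greeting::decode_slice(&mut slice); // synchronous decode from memory
  assert_eq!(back, g);
  assert!(slice.is_empty()); // every byte was consumed
}
```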

View File

@@ -6,8 +6,7 @@ edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
futures = { version = "0.3.31", features = ["std"], default-features = false } futures = { version = "0.3.31", features = ["std"], default-features = false }
itertools = "0.14.0" itertools = "0.14.0"
never = "0.1.0" never = "0.1.0"
ordered-float = "5.0.0" ordered-float = "5.1.0"

View File

@@ -1,33 +1,44 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::future::Future; use std::future::Future;
use std::hash::Hash; use std::hash::Hash;
use std::io;
use std::num::NonZero; use std::num::NonZero;
use std::ops::{Range, RangeInclusive}; use std::ops::{Range, RangeInclusive};
use std::pin::Pin; use std::pin::Pin;
use std::rc::Rc; use std::rc::Rc;
use std::sync::Arc; use std::sync::Arc;
use async_fn_stream::stream; use futures::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};
use futures::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt, StreamExt};
use never::Never; use never::Never;
use ordered_float::NotNan; use ordered_float::NotNan;
use crate::encode_enum; use crate::{decode_err, decode_err_for, encode_enum, spin_on};
pub trait Decode: 'static { pub trait Decode: 'static + Sized {
/// Decode an instance from the beginning of the buffer. Return the decoded /// Decode an instance from the beginning of the buffer. Return the decoded
/// data and the remaining buffer. /// data and the remaining buffer.
fn decode<R: AsyncRead + ?Sized>(read: Pin<&mut R>) -> impl Future<Output = Self> + '_; fn decode<R: AsyncRead + ?Sized>(
read: Pin<&mut R>,
) -> impl Future<Output = io::Result<Self>> + '_;
fn decode_slice(slc: &mut &[u8]) -> Self {
spin_on(Self::decode(Pin::new(slc) as Pin<&mut _>)).expect("Decode from slice cannot fail")
}
} }
pub trait Encode { pub trait Encode {
/// Append an instance of the struct to the buffer /// Append an instance of the struct to the buffer
fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) -> impl Future<Output = ()>; fn encode<W: AsyncWrite + ?Sized>(
&self,
write: Pin<&mut W>,
) -> impl Future<Output = io::Result<()>>;
fn encode_vec(&self, vec: &mut Vec<u8>) {
spin_on(self.encode(Pin::new(vec) as Pin<&mut _>)).expect("Encode to vector cannot fail")
}
} }
pub trait Coding: Encode + Decode + Clone { pub trait Coding: Encode + Decode + Clone {
fn get_decoder<T: 'static, F: Future<Output = T> + 'static>( fn get_decoder<T: 'static>(
map: impl Fn(Self) -> F + Clone + 'static, map: impl AsyncFn(Self) -> T + Clone + 'static,
) -> impl AsyncFn(Pin<&mut dyn AsyncRead>) -> T { ) -> impl AsyncFn(Pin<&mut dyn AsyncRead>) -> io::Result<T> {
async move |r| map(Self::decode(r).await).await async move |r| Ok(map(Self::decode(r).await?).await)
} }
} }
impl<T: Encode + Decode + Clone> Coding for T {} impl<T: Encode + Decode + Clone> Coding for T {}
@@ -35,15 +46,15 @@ impl<T: Encode + Decode + Clone> Coding for T {}
macro_rules! num_impl { macro_rules! num_impl {
($number:ty) => { ($number:ty) => {
impl Decode for $number { impl Decode for $number {
async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> io::Result<Self> {
let mut bytes = [0u8; (<$number>::BITS / 8) as usize]; let mut bytes = [0u8; (<$number>::BITS / 8) as usize];
read.read_exact(&mut bytes).await.unwrap(); read.read_exact(&mut bytes).await?;
<$number>::from_be_bytes(bytes) Ok(<$number>::from_be_bytes(bytes))
} }
} }
impl Encode for $number { impl Encode for $number {
async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) -> io::Result<()> {
write.write_all(&self.to_be_bytes()).await.expect("Could not write number") write.write_all(&self.to_be_bytes()).await
} }
} }
}; };
@@ -62,12 +73,12 @@ num_impl!(i8);
macro_rules! nonzero_impl { macro_rules! nonzero_impl {
($name:ty) => { ($name:ty) => {
impl Decode for NonZero<$name> { impl Decode for NonZero<$name> {
async fn decode<R: AsyncRead + ?Sized>(read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(read: Pin<&mut R>) -> io::Result<Self> {
Self::new(<$name as Decode>::decode(read).await).unwrap() Self::new(<$name as Decode>::decode(read).await?).ok_or_else(decode_err)
} }
} }
impl Encode for NonZero<$name> { impl Encode for NonZero<$name> {
async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) -> io::Result<()> {
self.get().encode(write).await self.get().encode(write).await
} }
} }
@@ -86,22 +97,22 @@ nonzero_impl!(i64);
nonzero_impl!(i128); nonzero_impl!(i128);
impl<T: Encode + ?Sized> Encode for &T { impl<T: Encode + ?Sized> Encode for &T {
async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) -> io::Result<()> {
(**self).encode(write).await (**self).encode(write).await
} }
} }
macro_rules! float_impl { macro_rules! float_impl {
($t:ty, $size:expr) => { ($t:ty, $size:expr) => {
impl Decode for NotNan<$t> { impl Decode for NotNan<$t> {
async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> io::Result<Self> {
let mut bytes = [0u8; $size]; let mut bytes = [0u8; $size];
read.read_exact(&mut bytes).await.unwrap(); read.read_exact(&mut bytes).await?;
NotNan::new(<$t>::from_be_bytes(bytes)).expect("Float was NaN") NotNan::new(<$t>::from_be_bytes(bytes)).map_err(|_| decode_err())
} }
} }
impl Encode for NotNan<$t> { impl Encode for NotNan<$t> {
async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) -> io::Result<()> {
write.write_all(&self.as_ref().to_be_bytes()).await.expect("Could not write number") write.write_all(&self.as_ref().to_be_bytes()).await
} }
} }
}; };
@@ -111,78 +122,77 @@ float_impl!(f64, 8);
float_impl!(f32, 4); float_impl!(f32, 4);
impl Decode for String { impl Decode for String {
async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> io::Result<Self> {
let len: usize = u64::decode(read.as_mut()).await.try_into().unwrap(); let len: usize = u64::decode(read.as_mut()).await?.try_into().map_err(decode_err_for)?;
let mut data = vec![0u8; len]; let mut data = vec![0u8; len];
read.read_exact(&mut data).await.unwrap(); read.read_exact(&mut data).await?;
std::str::from_utf8(&data).expect("String invalid UTF-8").to_owned() Ok(std::str::from_utf8(&data).map_err(decode_err_for)?.to_owned())
} }
} }
impl Encode for String { impl Encode for String {
async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) -> io::Result<()> {
u64::try_from(self.len()).unwrap().encode(write.as_mut()).await; u64::try_from(self.len()).map_err(decode_err_for)?.encode(write.as_mut()).await?;
write.write_all(self.as_bytes()).await.unwrap() write.write_all(self.as_bytes()).await
} }
} }
impl Encode for str { impl Encode for str {
async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) -> io::Result<()> {
u64::try_from(self.len()).unwrap().encode(write.as_mut()).await; u64::try_from(self.len()).map_err(decode_err_for)?.encode(write.as_mut()).await?;
write.write_all(self.as_bytes()).await.unwrap() write.write_all(self.as_bytes()).await
} }
} }
impl<T: Decode> Decode for Vec<T> { impl<T: Decode> Decode for Vec<T> {
async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> io::Result<Self> {
let len = u64::decode(read.as_mut()).await; let len = u64::decode(read.as_mut()).await?;
stream(async |mut cx| { let mut values = Vec::with_capacity(len.try_into().map_err(decode_err_for)?);
for _ in 0..len { for _ in 0..len {
cx.emit(T::decode(read.as_mut()).await).await values.push(T::decode(read.as_mut()).await?);
} }
}) Ok(values)
.collect()
.await
} }
} }
impl<T: Encode> Encode for Vec<T> { impl<T: Encode> Encode for Vec<T> {
async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) -> io::Result<()> {
self.as_slice().encode(write).await self.as_slice().encode(write).await
} }
} }
impl<T: Encode> Encode for [T] { impl<T: Encode> Encode for [T] {
async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) -> io::Result<()> {
u64::try_from(self.len()).unwrap().encode(write.as_mut()).await; u64::try_from(self.len()).unwrap().encode(write.as_mut()).await?;
for t in self.iter() { for t in self.iter() {
t.encode(write.as_mut()).await t.encode(write.as_mut()).await?
} }
Ok(())
} }
} }
impl<T: Decode> Decode for Option<T> { impl<T: Decode> Decode for Option<T> {
async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> io::Result<Self> {
match u8::decode(read.as_mut()).await { Ok(match bool::decode(read.as_mut()).await? {
0 => None, false => None,
1 => Some(T::decode(read).await), true => Some(T::decode(read).await?),
x => panic!("{x} is not a valid option value"), })
}
} }
} }
impl<T: Encode> Encode for Option<T> { impl<T: Encode> Encode for Option<T> {
async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) -> io::Result<()> {
let t = if let Some(t) = self { t } else { return 0u8.encode(write.as_mut()).await }; self.is_some().encode(write.as_mut()).await?;
1u8.encode(write.as_mut()).await; if let Some(t) = self {
t.encode(write).await; t.encode(write).await?
}
Ok(())
} }
} }
impl<T: Decode, E: Decode> Decode for Result<T, E> { impl<T: Decode, E: Decode> Decode for Result<T, E> {
async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> io::Result<Self> {
match u8::decode(read.as_mut()).await { Ok(match bool::decode(read.as_mut()).await? {
0 => Self::Ok(T::decode(read).await), false => Self::Ok(T::decode(read).await?),
1 => Self::Err(E::decode(read).await), true => Self::Err(E::decode(read).await?),
x => panic!("Invalid Result tag {x}"), })
}
} }
} }
impl<T: Encode, E: Encode> Encode for Result<T, E> { impl<T: Encode, E: Encode> Encode for Result<T, E> {
async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) -> io::Result<()> {
match self { match self {
Ok(t) => encode_enum(write, 0, |w| t.encode(w)).await, Ok(t) => encode_enum(write, 0, |w| t.encode(w)).await,
Err(e) => encode_enum(write, 1, |w| e.encode(w)).await, Err(e) => encode_enum(write, 1, |w| e.encode(w)).await,
@@ -190,36 +200,37 @@ impl<T: Encode, E: Encode> Encode for Result<T, E> {
} }
} }
impl<K: Decode + Eq + Hash, V: Decode> Decode for HashMap<K, V> { impl<K: Decode + Eq + Hash, V: Decode> Decode for HashMap<K, V> {
async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> io::Result<Self> {
let len = u64::decode(read.as_mut()).await; let len = u64::decode(read.as_mut()).await?;
stream(async |mut cx| { let mut map = HashMap::with_capacity(len.try_into().map_err(decode_err_for)?);
for _ in 0..len { for _ in 0..len {
cx.emit(<(K, V)>::decode(read.as_mut()).await).await map.insert(K::decode(read.as_mut()).await?, V::decode(read.as_mut()).await?);
} }
}) Ok(map)
.collect()
.await
} }
} }
impl<K: Encode + Eq + Hash, V: Encode> Encode for HashMap<K, V> { impl<K: Encode + Eq + Hash, V: Encode> Encode for HashMap<K, V> {
async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) -> io::Result<()> {
u64::try_from(self.len()).unwrap().encode(write.as_mut()).await; u64::try_from(self.len()).unwrap().encode(write.as_mut()).await?;
for pair in self.iter() { for (key, value) in self.iter() {
pair.encode(write.as_mut()).await key.encode(write.as_mut()).await?;
value.encode(write.as_mut()).await?;
} }
Ok(())
} }
} }
macro_rules! tuple { macro_rules! tuple {
(($($t:ident)*) ($($T:ident)*)) => { (($($t:ident)*) ($($T:ident)*)) => {
impl<$($T: Decode),*> Decode for ($($T,)*) { impl<$($T: Decode),*> Decode for ($($T,)*) {
async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> io::Result<Self> {
($($T::decode(read.as_mut()).await,)*) Ok(($($T::decode(read.as_mut()).await?,)*))
} }
} }
impl<$($T: Encode),*> Encode for ($($T,)*) { impl<$($T: Encode),*> Encode for ($($T,)*) {
async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) -> io::Result<()> {
let ($($t,)*) = self; let ($($t,)*) = self;
$( $t.encode(write.as_mut()).await; )* $( $t.encode(write.as_mut()).await?; )*
Ok(())
} }
} }
}; };
@@ -243,63 +254,67 @@ tuple!((t u v x y z a b c d e f g h i) (T U V X Y Z A B C D E F G H I));
tuple!((t u v x y z a b c d e f g h i j) (T U V X Y Z A B C D E F G H I J)); // 16 tuple!((t u v x y z a b c d e f g h i j) (T U V X Y Z A B C D E F G H I J)); // 16
impl Decode for () { impl Decode for () {
async fn decode<R: AsyncRead + ?Sized>(_: Pin<&mut R>) -> Self {} async fn decode<R: AsyncRead + ?Sized>(_: Pin<&mut R>) -> io::Result<Self> { Ok(()) }
} }
impl Encode for () { impl Encode for () {
async fn encode<W: AsyncWrite + ?Sized>(&self, _: Pin<&mut W>) {} async fn encode<W: AsyncWrite + ?Sized>(&self, _: Pin<&mut W>) -> io::Result<()> { Ok(()) }
} }
impl Decode for Never { impl Decode for Never {
async fn decode<R: AsyncRead + ?Sized>(_: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(_: Pin<&mut R>) -> io::Result<Self> {
unreachable!("A value of Never cannot exist so it can't have been serialized"); unreachable!("A value of Never cannot exist so it can't have been serialized");
} }
} }
impl Encode for Never { impl Encode for Never {
async fn encode<W: AsyncWrite + ?Sized>(&self, _: Pin<&mut W>) { match *self {} } async fn encode<W: AsyncWrite + ?Sized>(&self, _: Pin<&mut W>) -> io::Result<()> {
match *self {}
}
} }
impl Decode for bool { impl Decode for bool {
async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> io::Result<Self> {
let mut buf = [0]; let mut buf = [0];
read.read_exact(&mut buf).await.unwrap(); read.read_exact(&mut buf).await?;
buf[0] != 0 Ok(buf[0] != 0)
} }
} }
impl Encode for bool { impl Encode for bool {
async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) -> io::Result<()> {
write.write_all(&[if *self { 0xffu8 } else { 0u8 }]).await.unwrap() write.write_all(&[if *self { 0xffu8 } else { 0u8 }]).await
} }
} }
impl<T: Decode, const N: usize> Decode for [T; N] { impl<T: Decode, const N: usize> Decode for [T; N] {
async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> io::Result<Self> {
let v = stream(async |mut cx| { let mut v = Vec::with_capacity(N);
for _ in 0..N { for _ in 0..N {
cx.emit(T::decode(read.as_mut()).await).await v.push(T::decode(read.as_mut()).await?);
}
match v.try_into() {
Err(_) => unreachable!("The length of this stream is statically known"),
Ok(arr) => Ok(arr),
} }
})
.collect::<Vec<_>>()
.await;
v.try_into().unwrap_or_else(|_| unreachable!("The length of this stream is statically known"))
} }
} }
impl<T: Encode, const N: usize> Encode for [T; N] { impl<T: Encode, const N: usize> Encode for [T; N] {
async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) -> io::Result<()> {
for t in self.iter() { for t in self.iter() {
t.encode(write.as_mut()).await t.encode(write.as_mut()).await?
} }
Ok(())
} }
} }
macro_rules! two_end_range { macro_rules! two_end_range {
($this:ident, $name:tt, $op:tt, $start:expr, $end:expr) => { ($this:ident, $name:tt, $op:tt, $start:expr, $end:expr) => {
impl<T: Decode> Decode for $name<T> { impl<T: Decode> Decode for $name<T> {
async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> io::Result<Self> {
T::decode(read.as_mut()).await $op T::decode(read).await Ok(T::decode(read.as_mut()).await? $op T::decode(read).await?)
} }
} }
impl<T: Encode> Encode for $name<T> { impl<T: Encode> Encode for $name<T> {
async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) -> io::Result<()> {
let $this = self; let $this = self;
($start).encode(write.as_mut()).await; ($start).encode(write.as_mut()).await?;
($end).encode(write).await; ($end).encode(write).await?;
Ok(())
} }
} }
} }
@@ -311,12 +326,12 @@ two_end_range!(x, RangeInclusive, ..=, x.start(), x.end());
macro_rules! smart_ptr { macro_rules! smart_ptr {
($name:tt) => { ($name:tt) => {
impl<T: Decode> Decode for $name<T> { impl<T: Decode> Decode for $name<T> {
async fn decode<R: AsyncRead + ?Sized>(read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(read: Pin<&mut R>) -> io::Result<Self> {
$name::new(T::decode(read).await) Ok($name::new(T::decode(read).await?))
} }
} }
impl<T: Encode> Encode for $name<T> { impl<T: Encode> Encode for $name<T> {
async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) -> io::Result<()> {
(**self).encode(write).await (**self).encode(write).await
} }
} }
@@ -328,12 +343,12 @@ smart_ptr!(Rc);
smart_ptr!(Box); smart_ptr!(Box);
impl Decode for char { impl Decode for char {
async fn decode<R: AsyncRead + ?Sized>(read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(read: Pin<&mut R>) -> io::Result<Self> {
char::from_u32(u32::decode(read).await).unwrap() char::from_u32(u32::decode(read).await?).ok_or_else(decode_err)
} }
} }
impl Encode for char { impl Encode for char {
async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) -> io::Result<()> {
(*self as u32).encode(write).await (*self as u32).encode(write).await
} }
} }
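
Since the `Option` impls above now spend their tag byte through the `bool` impl rather than a raw `u8`, the wire bytes are directly predictable from the code shown. A quick sketch using `enc_vec` from the helpers in this crate:

```rust
use orchid_api_traits::enc_vec;

fn option_wire_format() {
  // true encodes as 0xff per the bool impl above, followed by the payload byte
  assert_eq!(enc_vec(&Some(5u8)), vec![0xff, 0x05]);
  // false encodes as 0x00 and no payload follows
  assert_eq!(enc_vec(&None::<u8>), vec![0x00]);
}
```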

View File

@@ -1,24 +1,24 @@
use std::future::Future; use std::error::Error;
use std::pin::Pin; use std::io;
use std::pin::{Pin, pin};
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::task::{Context, Poll, Wake};
use futures::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt}; use futures::{AsyncRead, AsyncReadExt, AsyncWrite};
use itertools::{Chunk, Itertools}; use itertools::{Chunk, Itertools};
use crate::Encode; use crate::Encode;
pub async fn encode_enum<'a, W: AsyncWrite + ?Sized, F: Future<Output = ()>>( pub async fn encode_enum<'a, W: AsyncWrite + ?Sized>(
mut write: Pin<&'a mut W>, mut write: Pin<&'a mut W>,
id: u8, id: u8,
f: impl FnOnce(Pin<&'a mut W>) -> F, f: impl AsyncFnOnce(Pin<&'a mut W>) -> io::Result<()>,
) { ) -> io::Result<()> {
id.encode(write.as_mut()).await; id.encode(write.as_mut()).await?;
f(write).await f(write).await
} }
pub async fn write_exact<W: AsyncWrite + ?Sized>(mut write: Pin<&mut W>, bytes: &'static [u8]) {
write.write_all(bytes).await.expect("Failed to write exact bytes")
}
pub fn print_bytes(b: &[u8]) -> String { pub fn print_bytes(b: &[u8]) -> String {
(b.iter().map(|b| format!("{b:02x}"))) (b.iter().map(|b| format!("{b:02x}")))
.chunks(4) .chunks(4)
@@ -27,16 +27,52 @@ pub fn print_bytes(b: &[u8]) -> String {
.join(" ") .join(" ")
} }
pub async fn read_exact<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>, bytes: &'static [u8]) { pub async fn read_exact<R: AsyncRead + ?Sized>(
mut read: Pin<&mut R>,
bytes: &'static [u8],
) -> io::Result<()> {
let mut data = vec![0u8; bytes.len()]; let mut data = vec![0u8; bytes.len()];
read.read_exact(&mut data).await.expect("Failed to read bytes"); read.read_exact(&mut data).await?;
if data != bytes { if data == bytes {
panic!("Wrong bytes!\nExpected: {}\nFound: {}", print_bytes(bytes), print_bytes(&data)); Ok(())
} else {
let msg =
format!("Wrong bytes!\nExpected: {}\nFound: {}", print_bytes(bytes), print_bytes(&data));
Err(io::Error::new(io::ErrorKind::InvalidData, msg))
} }
} }
pub async fn enc_vec(enc: &impl Encode) -> Vec<u8> { pub fn enc_vec(enc: &impl Encode) -> Vec<u8> {
let mut vec = Vec::new(); let mut vec = Vec::new();
enc.encode(Pin::new(&mut vec)).await; enc.encode_vec(&mut vec);
vec vec
} }
/// Raises a bool flag when called
struct FlagWaker(AtomicBool);
impl Wake for FlagWaker {
fn wake(self: Arc<Self>) { self.0.store(true, Ordering::Relaxed) }
}
pub fn spin_on<F: Future>(fut: F) -> F::Output {
let flag = AtomicBool::new(false);
let flag_waker = Arc::new(FlagWaker(flag));
let mut future = pin!(fut);
loop {
let waker = flag_waker.clone().into();
let mut ctx = Context::from_waker(&waker);
match future.as_mut().poll(&mut ctx) {
// ideally the future should return synchronously
Poll::Ready(res) => break res,
// poorly written futures may yield and immediately wake
Poll::Pending if flag_waker.0.load(Ordering::Relaxed) => (),
// there is no external event to wait for, this has to be a deadlock
Poll::Pending => panic!("Future inside spin_on cannot block"),
};
}
}
pub fn decode_err() -> io::Error { io::Error::new(io::ErrorKind::InvalidData, "Unexpected zero") }
pub fn decode_err_for(e: impl Error) -> io::Error {
io::Error::new(io::ErrorKind::InvalidData, e.to_string())
}
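
A minimal usage note for `spin_on` as defined above, assuming it is re-exported at the crate root like the other helpers: it only drives futures that never genuinely wait on an external event.

```rust
fn spin_on_demo() {
  // Completes immediately: the future never returns Poll::Pending.
  assert_eq!(orchid_api_traits::spin_on(async { 2 + 2 }), 4);
  // A future that parks on an external event (a channel, a timer, real IO)
  // would instead hit the "Future inside spin_on cannot block" panic.
}
```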

View File

@@ -29,25 +29,21 @@ pub trait Extends: InHierarchy<IsRoot = TLFalse> + Into<Self::Parent> {
pub trait UnderRootImpl<IsRoot: TLBool>: Sized { pub trait UnderRootImpl<IsRoot: TLBool>: Sized {
type __Root: UnderRoot<IsRoot = TLTrue, Root = Self::__Root>; type __Root: UnderRoot<IsRoot = TLTrue, Root = Self::__Root>;
fn __into_root(self) -> Self::__Root; fn __into_root(self) -> Self::__Root;
fn __try_from_root(root: Self::__Root) -> Result<Self, Self::__Root>;
} }
pub trait UnderRoot: InHierarchy { pub trait UnderRoot: InHierarchy {
type Root: UnderRoot<IsRoot = TLTrue, Root = Self::Root>; type Root: UnderRoot<IsRoot = TLTrue, Root = Self::Root>;
fn into_root(self) -> Self::Root; fn into_root(self) -> Self::Root;
fn try_from_root(root: Self::Root) -> Result<Self, Self::Root>;
} }
impl<T: InHierarchy + UnderRootImpl<T::IsRoot>> UnderRoot for T { impl<T: InHierarchy + UnderRootImpl<T::IsRoot>> UnderRoot for T {
type Root = <Self as UnderRootImpl<<Self as InHierarchy>::IsRoot>>::__Root; type Root = <Self as UnderRootImpl<<Self as InHierarchy>::IsRoot>>::__Root;
fn into_root(self) -> Self::Root { self.__into_root() } fn into_root(self) -> Self::Root { self.__into_root() }
fn try_from_root(root: Self::Root) -> Result<Self, Self::Root> { Self::__try_from_root(root) }
} }
impl<T: InHierarchy<IsRoot = TLTrue>> UnderRootImpl<TLTrue> for T { impl<T: InHierarchy<IsRoot = TLTrue>> UnderRootImpl<TLTrue> for T {
type __Root = Self; type __Root = Self;
fn __into_root(self) -> Self::__Root { self } fn __into_root(self) -> Self::__Root { self }
fn __try_from_root(root: Self::__Root) -> Result<Self, Self::__Root> { Ok(root) }
} }
impl<T: InHierarchy<IsRoot = TLFalse> + Extends> UnderRootImpl<TLFalse> for T { impl<T: InHierarchy<IsRoot = TLFalse> + Extends> UnderRootImpl<TLFalse> for T {
@@ -57,8 +53,4 @@ impl<T: InHierarchy<IsRoot = TLFalse> + Extends> UnderRootImpl<TLFalse> for T {
fn __into_root(self) -> Self::__Root { fn __into_root(self) -> Self::__Root {
<Self as Into<<Self as Extends>::Parent>>::into(self).into_root() <Self as Into<<Self as Extends>::Parent>>::into(self).into_root()
} }
fn __try_from_root(root: Self::__Root) -> Result<Self, Self::__Root> {
let parent = <Self as Extends>::Parent::try_from_root(root)?;
parent.clone().try_into().map_err(|_| parent.into_root())
}
} }

View File

@@ -1,5 +1,6 @@
use core::fmt; use core::fmt;
use std::future::Future;
use never::Never;
use super::coding::Coding; use super::coding::Coding;
use crate::helpers::enc_vec; use crate::helpers::enc_vec;
@@ -8,20 +9,22 @@ pub trait Request: fmt::Debug + Sized + 'static {
type Response: fmt::Debug + Coding + 'static; type Response: fmt::Debug + Coding + 'static;
} }
pub async fn respond<R: Request>(_: &R, rep: R::Response) -> Vec<u8> { enc_vec(&rep).await } pub fn respond<R: Request>(_: &R, rep: R::Response) -> Vec<u8> { enc_vec(&rep) }
pub async fn respond_with<R: Request, F: Future<Output = R::Response>>(
r: &R,
f: impl FnOnce(&R) -> F,
) -> Vec<u8> {
respond(r, f(r).await).await
}
pub trait Channel: 'static { pub trait Channel: 'static {
type Req: Coding + Sized + 'static; type Req: Coding + Sized + 'static;
type Notif: Coding + Sized + 'static; type Notif: Coding + Sized + 'static;
} }
impl Channel for Never {
type Notif = Never;
type Req = Never;
}
pub trait MsgSet: Sync + 'static { pub trait MsgSet: Sync + 'static {
type In: Channel; type In: Channel;
type Out: Channel; type Out: Channel;
} }
impl MsgSet for Never {
type In = Never;
type Out = Never;
}

View File

@@ -6,11 +6,12 @@ edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
ordered-float = "5.0.0" ordered-float = "5.1.0"
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" } orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
futures = { version = "0.3.31", features = ["std"], default-features = false } futures = { version = "0.3.31", features = ["std"], default-features = false }
itertools = "0.14.0" itertools = "0.14.0"
unsync-pipe = { version = "0.2.0", path = "../unsync-pipe" }
[dev-dependencies] [dev-dependencies]
test_executors = "0.3.5" test_executors = "0.4.1"

orchid-api/src/binary.rs (normal file, 90 added lines)
View File

@@ -0,0 +1,90 @@
//! # Binary extension definition
//!
//! A binary extension is a DLL / shared object / dylib with a symbol called
//! `orchid_extension_main` which accepts a single argument of type
/// [ExtensionContext]. Once that is received, communication continues through
//! the channel with the same protocol outlined in [crate::proto]
use unsync_pipe::{Reader, Writer};
/// !Send !Sync owned waker
///
/// This object is [Clone] for convenience but it has `drop` and no `clone` so
/// interactions must reflect a single logical owner
#[derive(Clone, Copy)]
#[repr(C)]
pub struct OwnedWakerVT {
pub data: *const (),
/// `self`
pub drop: extern "C" fn(*const ()),
/// `self`
pub wake: extern "C" fn(*const ()),
/// `&self`
pub wake_ref: extern "C" fn(*const ()),
}
/// !Send !Sync, equivalent to `&mut Context<'a>`, hence no `drop`.
/// When received in [FutureVT::poll], it must not outlive the call.
///
/// You cannot directly wake using this waker, because such a trampoline would
/// pass through the binary interface twice for no reason. An efficient
/// implementation should implement that trampoline action internally, whereas
/// an inefficient but compliant implementation can clone a fresh waker and use
/// it up.
#[derive(Clone, Copy)]
#[repr(C)]
pub struct FutureContextVT {
pub data: *const (),
/// `&self`
pub waker: extern "C" fn(*const ()) -> OwnedWakerVT,
}
/// ABI-stable `Poll<()>`
#[derive(Clone, Copy)]
#[repr(C)]
pub enum UnitPoll {
Pending,
Ready,
}
/// ABI-stable `Pin<Box<dyn Future<Output = ()>>>`
///
/// This object is [Clone] for convenience, but it has `drop` and no `clone` so
/// interactions must reflect a single logical owner
#[derive(Clone, Copy)]
#[repr(C)]
pub struct FutureVT {
pub data: *const (),
/// `self`
pub drop: extern "C" fn(*const ()),
/// `&mut self` Equivalent to [Future::poll]
pub poll: extern "C" fn(*const (), FutureContextVT) -> UnitPoll,
}
/// Handle for a runtime that allows its holder to spawn futures across dynamic
/// library boundaries
#[derive(Clone, Copy)]
#[repr(C)]
pub struct Spawner {
pub data: *const (),
/// `self`
pub drop: extern "C" fn(*const ()),
/// `&self` Add a future to this extension's task
pub spawn: extern "C" fn(*const (), FutureVT),
}
/// Extension context.
///
/// This struct is a plain old value, all of the contained values have a
/// distinct `drop` member
#[repr(C)]
pub struct ExtensionContext {
/// Spawns tasks associated with this extension
pub spawner: Spawner,
/// serialized [crate::HostExtChannel]
pub input: Reader,
/// serialized [crate::ExtHostChannel]
pub output: Writer,
/// UTF-8 log stream directly to log service.
pub log: Writer,
}
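
A hedged sketch of what a dylib's entry point could look like under the module doc above. Only the symbol name and the `ExtensionContext` argument come from the doc; the `extern "C"` calling convention and the body are assumptions.

```rust
use orchid_api::binary::ExtensionContext;

// Exported unmangled so the host can locate the symbol in the loaded dylib.
#[unsafe(no_mangle)]
pub extern "C" fn orchid_extension_main(ctx: ExtensionContext) {
  let ExtensionContext { spawner, input, output, log } = ctx;
  // A real extension would speak the crate::proto protocol over `input` and
  // `output`, write UTF-8 log text to `log`, and hand long-running futures
  // to `spawner` as FutureVT values.
  let _ = (spawner, input, output, log);
}
```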

View File

@@ -3,7 +3,7 @@ use std::num::NonZeroU64;
use orchid_api_derive::{Coding, Hierarchy}; use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request; use orchid_api_traits::Request;
use crate::{ExtHostReq, HostExtReq}; use crate::{ExtHostNotif, ExtHostReq, HostExtReq};
/// Intern requests sent by the replica to the master. These requests are /// Intern requests sent by the replica to the master. These requests are
/// repeatable. /// repeatable.
@@ -71,18 +71,21 @@ pub struct TStr(pub NonZeroU64);
pub struct TStrv(pub NonZeroU64); pub struct TStrv(pub NonZeroU64);
/// A request to sweep the replica. The master will not be sweeped until all /// A request to sweep the replica. The master will not be sweeped until all
/// replicas respond, as it must retain everything the replicas retained /// replicas respond. For efficiency, replicas should make sure to send the
/// [Sweeped] notif before returning.
#[derive(Clone, Copy, Debug, Coding, Hierarchy)] #[derive(Clone, Copy, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)] #[extends(HostExtReq)]
pub struct Sweep; pub struct Sweep;
impl Request for Sweep { impl Request for Sweep {
type Response = Retained; type Response = ();
} }
/// List of keys in this replica that couldn't be sweeped because local /// List of keys in this replica that were removed during a sweep. This may have
/// datastructures reference their value. /// been initiated via a [Sweep] request, but can also be triggered by the
#[derive(Clone, Debug, Coding)] /// replica autonomously.
pub struct Retained { #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(ExtHostNotif)]
pub struct Sweeped {
pub strings: Vec<TStr>, pub strings: Vec<TStr>,
pub vecs: Vec<TStrv>, pub vecs: Vec<TStrv>,
} }

View File

@@ -1,3 +1,4 @@
pub mod binary;
mod lexer; mod lexer;
pub use lexer::*; pub use lexer::*;
mod format; mod format;

View File

@@ -1,14 +1,30 @@
use std::collections::HashMap;
use orchid_api_derive::{Coding, Hierarchy}; use orchid_api_derive::{Coding, Hierarchy};
use crate::ExtHostNotif; use crate::{ExtHostNotif, TStr};
/// Describes what to do with a log stream.
/// Log streams are unstructured utf8 text unless otherwise stated.
#[derive(Clone, Debug, Coding, PartialEq, Eq, Hash)] #[derive(Clone, Debug, Coding, PartialEq, Eq, Hash)]
pub enum LogStrategy { pub enum LogStrategy {
StdErr, /// Context-dependent default stream, often stderr
File(String), Default,
/// A file on the local filesystem
File { path: String, append: bool },
/// Discard any log output
Discard, Discard,
} }
#[derive(Clone, Debug, Coding)]
pub struct Logger {
pub routing: HashMap<String, LogStrategy>,
pub default: Option<LogStrategy>,
}
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(ExtHostNotif)] #[extends(ExtHostNotif)]
pub struct Log(pub String); pub struct Log {
pub category: TStr,
pub message: String,
}
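
A sketch of building the new `Logger` value; the module path, category name, and file path are illustrative assumptions.

```rust
use std::collections::HashMap;

use orchid_api::logging::{LogStrategy, Logger};

fn example_logger() -> Logger {
  let mut routing = HashMap::new();
  // Route one category to an append-mode file...
  routing.insert(
    "macros".to_string(),
    LogStrategy::File { path: "macros.log".to_string(), append: true },
  );
  // ...and silently drop everything else.
  Logger { routing, default: Some(LogStrategy::Discard) }
}
```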

View File

@@ -22,51 +22,49 @@
//! be preserved. Toolkits must ensure that the client code is able to observe //! be preserved. Toolkits must ensure that the client code is able to observe
//! the ordering of messages. //! the ordering of messages.
use std::io;
use std::pin::Pin; use std::pin::Pin;
use futures::{AsyncRead, AsyncWrite}; use futures::{AsyncRead, AsyncWrite, AsyncWriteExt};
use orchid_api_derive::{Coding, Hierarchy}; use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::{Channel, Decode, Encode, MsgSet, Request, read_exact, write_exact}; use orchid_api_traits::{Channel, Decode, Encode, MsgSet, Request, read_exact};
use crate::{atom, expr, interner, lexer, logging, parser, system, tree}; use crate::{Sweeped, atom, expr, interner, lexer, logging, parser, system, tree};
static HOST_INTRO: &[u8] = b"Orchid host, binary API v0\n"; static HOST_INTRO: &[u8] = b"Orchid host, binary API v0\n";
#[derive(Clone, Debug)]
pub struct HostHeader { pub struct HostHeader {
pub log_strategy: logging::LogStrategy, pub logger: logging::Logger,
pub msg_logs: logging::LogStrategy,
} }
impl Decode for HostHeader { impl Decode for HostHeader {
async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> io::Result<Self> {
read_exact(read.as_mut(), HOST_INTRO).await; read_exact(read.as_mut(), HOST_INTRO).await?;
Self { Ok(Self { logger: logging::Logger::decode(read).await? })
log_strategy: logging::LogStrategy::decode(read.as_mut()).await,
msg_logs: logging::LogStrategy::decode(read.as_mut()).await,
}
} }
} }
impl Encode for HostHeader { impl Encode for HostHeader {
async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) -> io::Result<()> {
write_exact(write.as_mut(), HOST_INTRO).await; write.write_all(HOST_INTRO).await?;
self.log_strategy.encode(write.as_mut()).await; self.logger.encode(write.as_mut()).await
self.msg_logs.encode(write.as_mut()).await
} }
} }
static EXT_INTRO: &[u8] = b"Orchid extension, binary API v0\n"; static EXT_INTRO: &[u8] = b"Orchid extension, binary API v0\n";
#[derive(Clone, Debug)]
pub struct ExtensionHeader { pub struct ExtensionHeader {
pub name: String, pub name: String,
pub systems: Vec<system::SystemDecl>, pub systems: Vec<system::SystemDecl>,
} }
impl Decode for ExtensionHeader { impl Decode for ExtensionHeader {
async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> io::Result<Self> {
read_exact(read.as_mut(), EXT_INTRO).await; read_exact(read.as_mut(), EXT_INTRO).await?;
Self { name: String::decode(read.as_mut()).await, systems: Vec::decode(read).await } Ok(Self { name: String::decode(read.as_mut()).await?, systems: Vec::decode(read).await? })
} }
} }
impl Encode for ExtensionHeader { impl Encode for ExtensionHeader {
async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) -> io::Result<()> {
write_exact(write.as_mut(), EXT_INTRO).await; write.write_all(EXT_INTRO).await?;
self.name.encode(write.as_mut()).await; self.name.encode(write.as_mut()).await?;
self.systems.encode(write).await self.systems.encode(write).await
} }
} }
@@ -99,6 +97,7 @@ pub enum ExtHostReq {
pub enum ExtHostNotif { pub enum ExtHostNotif {
ExprNotif(expr::ExprNotif), ExprNotif(expr::ExprNotif),
Log(logging::Log), Log(logging::Log),
Sweeped(Sweeped),
} }
pub struct ExtHostChannel; pub struct ExtHostChannel;
@@ -155,22 +154,22 @@ impl MsgSet for HostMsgSet {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::collections::HashMap;
use orchid_api_traits::enc_vec; use orchid_api_traits::enc_vec;
use ordered_float::NotNan; use ordered_float::NotNan;
use test_executors::spin_on; use test_executors::spin_on;
use super::*; use super::*;
use crate::Logger;
#[test] #[test]
fn host_header_enc() { fn host_header_enc() {
spin_on(async { spin_on(async {
let hh = HostHeader { let hh = HostHeader { logger: Logger { routing: HashMap::new(), default: None } };
log_strategy: logging::LogStrategy::File("SomeFile".to_string()), let mut enc = &enc_vec(&hh)[..];
msg_logs: logging::LogStrategy::File("SomeFile".to_string()),
};
let mut enc = &enc_vec(&hh).await[..];
eprintln!("Encoded to {enc:?}"); eprintln!("Encoded to {enc:?}");
HostHeader::decode(Pin::new(&mut enc)).await; HostHeader::decode(Pin::new(&mut enc)).await.unwrap();
assert_eq!(enc, []); assert_eq!(enc, []);
}) })
} }
@@ -187,9 +186,9 @@ mod tests {
priority: NotNan::new(1f64).unwrap(), priority: NotNan::new(1f64).unwrap(),
}], }],
}; };
let mut enc = &enc_vec(&eh).await[..]; let mut enc = &enc_vec(&eh)[..];
eprintln!("Encoded to {enc:?}"); eprintln!("Encoded to {enc:?}");
ExtensionHeader::decode(Pin::new(&mut enc)).await; ExtensionHeader::decode(Pin::new(&mut enc)).await.unwrap();
assert_eq!(enc, []) assert_eq!(enc, [])
}) })
} }

View File

@@ -2,7 +2,6 @@ use std::collections::HashMap;
use std::fmt; use std::fmt;
use std::num::NonZeroU64; use std::num::NonZeroU64;
use std::ops::Range; use std::ops::Range;
use std::rc::Rc;
use orchid_api_derive::{Coding, Hierarchy}; use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request; use orchid_api_traits::Request;
@@ -48,7 +47,7 @@ pub enum Token {
/// NewExpr(Bottom) because it fails in dead branches too. /// NewExpr(Bottom) because it fails in dead branches too.
Bottom(Vec<OrcError>), Bottom(Vec<OrcError>),
/// A comment /// A comment
Comment(Rc<String>), Comment(TStr),
} }
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, Coding)] #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, Coding)]

View File

@@ -6,12 +6,14 @@ edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
unsync-pipe = { version = "0.2.0", path = "../unsync-pipe" }
async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" } async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
async-once-cell = "0.5.4" async-once-cell = "0.5.4"
bound = "0.6.0"
derive_destructure = "1.0.0" derive_destructure = "1.0.0"
dyn-clone = "1.0.20" dyn-clone = "1.0.20"
futures = { version = "0.3.31", features = ["std"], default-features = false } futures = { version = "0.3.31", features = ["std"], default-features = false }
hashbrown = "0.16.0" hashbrown = "0.16.1"
itertools = "0.14.0" itertools = "0.14.0"
lazy_static = "1.5.0" lazy_static = "1.5.0"
never = "0.1.0" never = "0.1.0"
@@ -19,10 +21,13 @@ num-traits = "0.2.19"
orchid-api = { version = "0.1.0", path = "../orchid-api" } orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" } orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
ordered-float = "5.0.0" ordered-float = "5.1.0"
regex = "1.11.2" regex = "1.12.2"
rust-embed = "8.7.2" rust-embed = "8.9.0"
some_executor = "0.6.1"
substack = "1.1.1" substack = "1.1.1"
test_executors = "0.3.5"
trait-set = "0.3.0" trait-set = "0.3.0"
task-local = "0.1.0"
[dev-dependencies]
futures = "0.3.31"
test_executors = "0.4.1"

orchid-base/src/binary.rs (normal file, 118 added lines)
View File

@@ -0,0 +1,118 @@
use std::pin::Pin;
use std::rc::Rc;
use std::task::{Context, Poll, RawWaker, RawWakerVTable, Waker};
use orchid_api::binary::{FutureContextVT, FutureVT, OwnedWakerVT, UnitPoll};
type WideBox = Box<dyn Future<Output = ()>>;
static OWNED_VTABLE: RawWakerVTable = RawWakerVTable::new(
|data| {
let data = unsafe { Rc::<OwnedWakerVT>::from_raw(data as *const _) };
let val = RawWaker::new(Rc::into_raw(data.clone()) as *const (), &OWNED_VTABLE);
// Clone must create a duplicate of the Rc, so it has to be un-leaked, cloned,
// then leaked again.
let _ = Rc::into_raw(data);
val
},
|data| {
// Wake must awaken the task and then clean up the state, so the waker must be
// un-leaked
let data = unsafe { Rc::<OwnedWakerVT>::from_raw(data as *const _) };
(data.wake)(data.data);
},
|data| {
// Wake-by-ref must awaken the task while preserving the future, so the Rc is
// untouched
let data = unsafe { (data as *const OwnedWakerVT).as_ref() }.unwrap();
(data.wake_ref)(data.data);
},
|data| {
// Drop must clean up the state, so the waker must be un-leaked
let data = unsafe { Rc::<OwnedWakerVT>::from_raw(data as *const _) };
(data.drop)(data.data);
},
);
struct BorrowedWakerData<'a> {
go_around: &'a mut bool,
cx: FutureContextVT,
}
static BORROWED_VTABLE: RawWakerVTable = RawWakerVTable::new(
|data| {
let data = unsafe { (data as *mut BorrowedWakerData).as_mut() }.unwrap();
let owned_data = Rc::<OwnedWakerVT>::new((data.cx.waker)(data.cx.data));
RawWaker::new(Rc::into_raw(owned_data) as *const (), &OWNED_VTABLE)
},
|data| *unsafe { (data as *mut BorrowedWakerData).as_mut() }.unwrap().go_around = true,
|data| *unsafe { (data as *mut BorrowedWakerData).as_mut() }.unwrap().go_around = true,
|_data| {},
);
/// Convert a future to a binary-compatible format that can be sent across
/// dynamic library boundaries
pub fn future_to_vt<Fut: Future<Output = ()> + 'static>(fut: Fut) -> FutureVT {
let wide_box = Box::new(fut) as WideBox;
let data = Box::into_raw(Box::new(wide_box));
extern "C" fn drop(raw: *const ()) {
std::mem::drop(unsafe { Box::<WideBox>::from_raw(raw as *mut _) })
}
extern "C" fn poll(raw: *const (), cx: FutureContextVT) -> UnitPoll {
let mut this = unsafe { Pin::new_unchecked(&mut **(raw as *mut WideBox).as_mut().unwrap()) };
loop {
let mut go_around = false;
let borrowed_waker = unsafe {
Waker::from_raw(RawWaker::new(
&mut BorrowedWakerData { go_around: &mut go_around, cx } as *mut _ as *const (),
&BORROWED_VTABLE,
))
};
let mut ctx = Context::from_waker(&borrowed_waker);
let result = this.as_mut().poll(&mut ctx);
if matches!(result, Poll::Ready(())) {
break UnitPoll::Ready;
}
if !go_around {
break UnitPoll::Pending;
}
}
}
FutureVT { data: data as *const _, drop, poll }
}
struct VirtualFuture {
vt: FutureVT,
}
impl Unpin for VirtualFuture {}
impl Future for VirtualFuture {
type Output = ();
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
extern "C" fn waker(raw: *const ()) -> OwnedWakerVT {
let waker = unsafe { (raw as *mut Context).as_mut() }.unwrap().waker().clone();
let data = Box::into_raw(Box::<Waker>::new(waker)) as *const ();
return OwnedWakerVT { data, drop, wake, wake_ref };
extern "C" fn drop(raw: *const ()) {
std::mem::drop(unsafe { Box::<Waker>::from_raw(raw as *mut Waker) })
}
extern "C" fn wake(raw: *const ()) {
unsafe { Box::<Waker>::from_raw(raw as *mut Waker) }.wake();
}
extern "C" fn wake_ref(raw: *const ()) {
unsafe { (raw as *mut Waker).as_mut() }.unwrap().wake_by_ref();
}
}
let cx = FutureContextVT { data: cx as *mut Context as *const (), waker };
let result = (self.vt.poll)(self.vt.data, cx);
match result {
UnitPoll::Pending => Poll::Pending,
UnitPoll::Ready => Poll::Ready(()),
}
}
}
impl Drop for VirtualFuture {
fn drop(&mut self) { (self.vt.drop)(self.vt.data) }
}
/// Receive a future sent across dynamic library boundaries and convert it into
/// an owned object
pub fn vt_to_future(vt: FutureVT) -> impl Future<Output = ()> { VirtualFuture { vt } }
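
A round-trip sketch for the two conversions above: a future erased into the ABI-stable vtable form and rebuilt on the other side should still drive to completion. The module path and the use of `test_executors::spin_on` (the executor the tests elsewhere in this compare use) are assumptions.

```rust
use std::cell::Cell;
use std::rc::Rc;

use orchid_base::binary::{future_to_vt, vt_to_future};

fn ffi_future_round_trip() {
  let hit = Rc::new(Cell::new(false));
  let hit2 = hit.clone();
  // Erase the future into the #[repr(C)] vtable form...
  let vt = future_to_vt(async move { hit2.set(true) });
  // ...and rebuild an owned future from the handle.
  let fut = vt_to_future(vt);
  test_executors::spin_on(fut);
  assert!(hit.get());
}
```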

View File

@@ -1,34 +0,0 @@
use std::ops::Deref;
use std::rc::Rc;
use futures::future::LocalBoxFuture;
use crate::api;
pub type Spawner = Rc<dyn Fn(LocalBoxFuture<'static, ()>)>;
/// The 3 primary contact points with an extension are
/// - send a message
/// - wait for a message to arrive
/// - wait for the extension to stop after exit (this is the implicit Drop)
///
/// There are no ordering guarantees about these
pub trait ExtPort {
#[must_use]
fn send<'a>(&'a self, msg: &'a [u8]) -> LocalBoxFuture<'a, ()>;
#[must_use]
fn recv(&self) -> LocalBoxFuture<'_, Option<Vec<u8>>>;
}
pub struct ExtInit {
pub header: api::ExtensionHeader,
pub port: Box<dyn ExtPort>,
}
impl ExtInit {
pub async fn send(&self, msg: &[u8]) { self.port.send(msg).await }
pub async fn recv(&self) -> Option<Vec<u8>> { self.port.recv().await }
}
impl Deref for ExtInit {
type Target = api::ExtensionHeader;
fn deref(&self) -> &Self::Target { &self.header }
}

View File

@@ -2,13 +2,16 @@ use std::cell::RefCell;
use std::ffi::OsStr; use std::ffi::OsStr;
use std::fmt; use std::fmt;
use std::ops::Add; use std::ops::Add;
use std::rc::Rc;
use std::sync::Arc; use std::sync::Arc;
use futures::FutureExt;
use futures::future::join_all; use futures::future::join_all;
use itertools::Itertools; use itertools::Itertools;
use task_local::task_local;
use crate::api; use crate::api;
use crate::interner::{Interner, Tok}; use crate::interner::{IStr, es, is};
use crate::location::Pos; use crate::location::Pos;
/// A point of interest in resolving the error, such as the point where /// A point of interest in resolving the error, such as the point where
@@ -24,10 +27,10 @@ impl ErrPos {
pub fn new(msg: &str, position: Pos) -> Self { pub fn new(msg: &str, position: Pos) -> Self {
Self { message: Some(Arc::new(msg.to_string())), position } Self { message: Some(Arc::new(msg.to_string())), position }
} }
async fn from_api(api: &api::ErrLocation, i: &Interner) -> Self { async fn from_api(api: &api::ErrLocation) -> Self {
Self { Self {
message: Some(api.message.clone()).filter(|s| !s.is_empty()), message: Some(api.message.clone()).filter(|s| !s.is_empty()),
position: Pos::from_api(&api.location, i).await, position: Pos::from_api(&api.location).await,
} }
} }
fn to_api(&self) -> api::ErrLocation { fn to_api(&self) -> api::ErrLocation {
@@ -51,7 +54,7 @@ impl fmt::Display for ErrPos {
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct OrcErr { pub struct OrcErr {
pub description: Tok<String>, pub description: IStr,
pub message: Arc<String>, pub message: Arc<String>,
pub positions: Vec<ErrPos>, pub positions: Vec<ErrPos>,
} }
@@ -63,16 +66,16 @@ impl OrcErr {
locations: self.positions.iter().map(ErrPos::to_api).collect(), locations: self.positions.iter().map(ErrPos::to_api).collect(),
} }
} }
async fn from_api(api: &api::OrcError, i: &Interner) -> Self { async fn from_api(api: &api::OrcError) -> Self {
Self { Self {
description: Tok::from_api(api.description, i).await, description: es(api.description).await,
message: api.message.clone(), message: api.message.clone(),
positions: join_all(api.locations.iter().map(|e| ErrPos::from_api(e, i))).await, positions: join_all(api.locations.iter().map(ErrPos::from_api)).await,
} }
} }
} }
impl PartialEq<Tok<String>> for OrcErr { impl PartialEq<IStr> for OrcErr {
fn eq(&self, other: &Tok<String>) -> bool { self.description == *other } fn eq(&self, other: &IStr) -> bool { self.description == *other }
} }
impl From<OrcErr> for Vec<OrcErr> { impl From<OrcErr> for Vec<OrcErr> {
fn from(value: OrcErr) -> Self { vec![value] } fn from(value: OrcErr) -> Self { vec![value] }
@@ -122,12 +125,10 @@ impl OrcErrv {
self.0.iter().flat_map(|e| e.positions.iter().cloned()) self.0.iter().flat_map(|e| e.positions.iter().cloned())
} }
pub fn to_api(&self) -> Vec<api::OrcError> { self.0.iter().map(OrcErr::to_api).collect() } pub fn to_api(&self) -> Vec<api::OrcError> { self.0.iter().map(OrcErr::to_api).collect() }
pub async fn from_api<'a>( pub async fn from_api<'a>(api: impl IntoIterator<Item = &'a api::OrcError>) -> Self {
api: impl IntoIterator<Item = &'a api::OrcError>, Self(join_all(api.into_iter().map(OrcErr::from_api)).await)
i: &Interner,
) -> Self {
Self(join_all(api.into_iter().map(|e| OrcErr::from_api(e, i))).await)
} }
pub fn iter(&self) -> impl Iterator<Item = OrcErr> + '_ { self.0.iter().cloned() }
} }
impl From<OrcErr> for OrcErrv { impl From<OrcErr> for OrcErrv {
fn from(value: OrcErr) -> Self { Self(vec![value]) } fn from(value: OrcErr) -> Self { Self(vec![value]) }
@@ -191,12 +192,12 @@ macro_rules! join_ok {
(@VALUES) => { Ok(()) }; (@VALUES) => { Ok(()) };
} }
pub fn mk_errv_floating(description: Tok<String>, message: impl AsRef<str>) -> OrcErrv { pub fn mk_errv_floating(description: IStr, message: impl AsRef<str>) -> OrcErrv {
mk_errv::<Pos>(description, message, []) mk_errv::<Pos>(description, message, [])
} }
pub fn mk_errv<I: Into<ErrPos>>( pub fn mk_errv<I: Into<ErrPos>>(
description: Tok<String>, description: IStr,
message: impl AsRef<str>, message: impl AsRef<str>,
posv: impl IntoIterator<Item = I>, posv: impl IntoIterator<Item = I>,
) -> OrcErrv { ) -> OrcErrv {
@@ -210,45 +211,71 @@ pub fn mk_errv<I: Into<ErrPos>>(
pub async fn async_io_err<I: Into<ErrPos>>( pub async fn async_io_err<I: Into<ErrPos>>(
err: std::io::Error, err: std::io::Error,
i: &Interner,
posv: impl IntoIterator<Item = I>, posv: impl IntoIterator<Item = I>,
) -> OrcErrv { ) -> OrcErrv {
mk_errv(i.i(&err.kind().to_string()).await, err.to_string(), posv) mk_errv(is(&err.kind().to_string()).await, err.to_string(), posv)
} }
pub async fn os_str_to_string<'a, I: Into<ErrPos>>( pub async fn os_str_to_string<I: Into<ErrPos>>(
str: &'a OsStr, str: &OsStr,
i: &Interner,
posv: impl IntoIterator<Item = I>, posv: impl IntoIterator<Item = I>,
) -> OrcRes<&'a str> { ) -> OrcRes<&str> {
match str.to_str() { match str.to_str() {
Some(str) => Ok(str), Some(str) => Ok(str),
None => Err(mk_errv( None => Err(mk_errv(
i.i("Non-unicode string").await, is("Non-unicode string").await,
format!("{str:?} is not representable as unicode"), format!("{str:?} is not representable as unicode"),
posv, posv,
)), )),
} }
} }
pub struct Reporter { #[derive(Clone, Default)]
errors: RefCell<Vec<OrcErr>>, struct Reporter {
errors: Rc<RefCell<Vec<OrcErr>>>,
} }
impl Reporter { task_local! {
pub fn report(&self, e: impl Into<OrcErrv>) { self.errors.borrow_mut().extend(e.into()) } static REPORTER: Reporter;
pub fn new() -> Self { Self { errors: RefCell::new(vec![]) } }
pub fn errv(self) -> Option<OrcErrv> { OrcErrv::new(self.errors.into_inner()).ok() }
pub fn merge<T>(self, res: OrcRes<T>) -> OrcRes<T> {
match (res, self.errv()) {
(res, None) => res,
(Ok(_), Some(errv)) => Err(errv),
(Err(e), Some(errv)) => Err(e + errv),
}
}
pub fn is_empty(&self) -> bool { self.errors.borrow().is_empty() }
} }
impl Default for Reporter { /// Run the future with a new reporter, and return all errors reported within.
fn default() -> Self { Self::new() } ///
/// If your future returns [OrcRes], see [try_with_reporter]
pub async fn with_reporter<T>(fut: impl Future<Output = T>) -> OrcRes<T> {
try_with_reporter(fut.map(Ok)).await
}
/// Run the future with a new reporter, and return all errors either returned or
/// reported by it
///
/// If your future may report errors but always returns an approximate value,
/// see [with_reporter]
pub async fn try_with_reporter<T>(fut: impl Future<Output = OrcRes<T>>) -> OrcRes<T> {
let rep = Reporter::default();
let res = REPORTER.scope(rep.clone(), fut).await;
let errors = rep.errors.take();
match (res, &errors[..]) {
(Ok(t), []) => Ok(t),
(Ok(_), [_, ..]) => Err(OrcErrv::new(errors).unwrap()),
(Err(e), _) => Err(e.extended(errors)),
}
}
pub async fn is_erroring() -> bool {
(REPORTER.try_with(|r| !r.errors.borrow().is_empty()))
.expect("Sidechannel errors must be caught by a reporter")
}
/// Report an error that is fatal and prevents a correct output, but
/// still allows the current task to continue and produce an approximate output.
/// This can be used for collecting several diagnostics in a single pass instead of aborting on the first one.
pub fn report(e: impl Into<OrcErrv>) {
let errv = e.into();
REPORTER.try_with(|r| r.errors.borrow_mut().extend(errv.clone())).unwrap_or_else(|_| {
panic!(
"Unhandled error! Sidechannel errors must be caught by an enclosing call to with_reporter.\n\
Error: {errv}"
)
})
} }
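The reporter now travels as task-local state instead of an explicit `Reporter` argument. A minimal usage sketch, assuming only `try_with_reporter`, `report`, and `OrcRes` from the hunk above; `parse_one` and `Ast` are hypothetical stand-ins:

```rust
// Sketch: collect every diagnostic from one pass, fail only at the end.
// `parse_one` and `Ast` are placeholders, not part of the codebase.
async fn parse_all(sources: Vec<String>) -> OrcRes<Vec<Ast>> {
  try_with_reporter(async {
    let mut out = Vec::new();
    for src in sources {
      match parse_one(&src).await {
        Ok(ast) => out.push(ast),
        // Non-fatal: record the error and keep producing an approximate result.
        Err(e) => report(e),
      }
    }
    Ok(out)
  })
  .await
}
```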

View File

@@ -3,6 +3,7 @@ use std::cmp::Ordering;
use std::convert::Infallible; use std::convert::Infallible;
use std::future::Future; use std::future::Future;
use std::iter; use std::iter;
use std::marker::PhantomData;
use std::rc::Rc; use std::rc::Rc;
use std::str::FromStr; use std::str::FromStr;
@@ -11,7 +12,6 @@ use itertools::{Itertools, chain};
use never::Never; use never::Never;
use regex::Regex; use regex::Regex;
use crate::interner::Interner;
use crate::{api, match_mapping}; use crate::{api, match_mapping};
#[derive(Clone, Debug, Hash, PartialEq, Eq)] #[derive(Clone, Debug, Hash, PartialEq, Eq)]
@@ -300,16 +300,16 @@ pub fn take_first(unit: &FmtUnit, bounded: bool) -> String {
fill_slots(&first.elements, &unit.subs, 0, bounded) fill_slots(&first.elements, &unit.subs, 0, bounded)
} }
pub async fn take_first_fmt(v: &(impl Format + ?Sized), i: &Interner) -> String { pub async fn take_first_fmt(v: &(impl Format + ?Sized)) -> String {
take_first(&v.print(&FmtCtxImpl { i }).await, false) take_first(&v.print(&FmtCtxImpl { _foo: PhantomData }).await, false)
} }
#[derive(Default)]
pub struct FmtCtxImpl<'a> { pub struct FmtCtxImpl<'a> {
pub i: &'a Interner, _foo: PhantomData<&'a ()>,
} }
pub trait FmtCtx { pub trait FmtCtx {
fn i(&self) -> &Interner;
// fn print_as(&self, p: &(impl Format + ?Sized)) -> impl Future<Output = // fn print_as(&self, p: &(impl Format + ?Sized)) -> impl Future<Output =
// String> where Self: Sized { // String> where Self: Sized {
// async { // async {
@@ -319,9 +319,7 @@ pub trait FmtCtx {
// } // }
// } // }
} }
impl FmtCtx for FmtCtxImpl<'_> { impl FmtCtx for FmtCtxImpl<'_> {}
fn i(&self) -> &Interner { self.i }
}
pub trait Format { pub trait Format {
#[must_use] #[must_use]
@@ -332,13 +330,10 @@ impl Format for Never {
} }
/// Format with default strategy. Currently equal to [take_first_fmt] /// Format with default strategy. Currently equal to [take_first_fmt]
pub async fn fmt(v: &(impl Format + ?Sized), i: &Interner) -> String { take_first_fmt(v, i).await } pub async fn fmt(v: &(impl Format + ?Sized)) -> String { take_first_fmt(v).await }
/// Format a sequence with default strategy. Currently equal to [take_first_fmt] /// Format a sequence with default strategy. Currently equal to [take_first_fmt]
pub async fn fmt_v<F: Format + ?Sized, R: Borrow<F>>( pub async fn fmt_v<F: Format + ?Sized>(
v: impl IntoIterator<Item = R>, v: impl IntoIterator<Item: Borrow<F>>,
i: &Interner,
) -> impl Iterator<Item = String> { ) -> impl Iterator<Item = String> {
join_all(v.into_iter().map(|f| async move { take_first_fmt(f.borrow(), i).await })) join_all(v.into_iter().map(|f| async move { take_first_fmt(f.borrow()).await })).await.into_iter()
.await
.into_iter()
} }

View File

@@ -1,310 +1,382 @@
use std::borrow::Borrow; use std::fmt::{Debug, Display};
use std::future::Future; use std::future::Future;
use std::hash::BuildHasher as _; use std::hash::Hash;
use std::num::NonZeroU64;
use std::ops::Deref; use std::ops::Deref;
use std::rc::Rc; use std::rc::Rc;
use std::sync::atomic;
use std::{fmt, hash}; use std::{fmt, hash};
use futures::lock::Mutex; use futures::future::LocalBoxFuture;
use hashbrown::{HashMap, HashSet}; use task_local::task_local;
use itertools::Itertools as _;
use orchid_api_traits::Request;
use crate::api; use crate::api;
use crate::reqnot::{DynRequester, Requester};
/// Clippy crashes while verifying `Tok: Sized` without this and I cba to create pub trait IStrHandle: AsRef<str> {
/// a minimal example fn rc(&self) -> Rc<String>;
#[derive(Clone)] }
struct ForceSized<T>(T); pub trait IStrvHandle: AsRef<[IStr]> {
fn rc(&self) -> Rc<Vec<IStr>>;
}
#[derive(Clone)] #[derive(Clone)]
pub struct Tok<T: Interned> { pub struct IStr(pub api::TStr, pub Rc<dyn IStrHandle>);
data: Rc<T>, impl IStr {
marker: ForceSized<T::Marker>, /// Obtain a unique ID for this interned data.
///
/// NOTICE: the ID is guaranteed to be the same for any interned instance of
/// the same value only as long as at least one instance exists. If a value is
/// no longer interned, the interner is free to forget about it.
pub fn to_api(&self) -> api::TStr { self.0 }
pub fn rc(&self) -> Rc<String> { self.1.rc() }
} }
impl<T: Interned> Tok<T> { impl Deref for IStr {
pub fn new(data: Rc<T>, marker: T::Marker) -> Self { Self { data, marker: ForceSized(marker) } } type Target = str;
pub fn to_api(&self) -> T::Marker { self.marker.0 } fn deref(&self) -> &Self::Target { self.1.as_ref().as_ref() }
pub async fn from_api<M>(marker: M, i: &Interner) -> Self
where M: InternMarker<Interned = T> {
i.ex(marker).await
}
pub fn rc(&self) -> Rc<T> { self.data.clone() }
} }
impl<T: Interned> Deref for Tok<T> { impl Eq for IStr {}
type Target = T; impl PartialEq for IStr {
fn eq(&self, other: &Self) -> bool { self.0 == other.0 }
fn deref(&self) -> &Self::Target { self.data.as_ref() }
} }
impl<T: Interned> Ord for Tok<T> { impl Hash for IStr {
fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.to_api().cmp(&other.to_api()) } fn hash<H: hash::Hasher>(&self, state: &mut H) { self.0.hash(state) }
} }
impl<T: Interned> PartialOrd for Tok<T> { impl Display for IStr {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { Some(self.cmp(other)) } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.deref()) }
} }
impl<T: Interned> Eq for Tok<T> {} impl Debug for IStr {
impl<T: Interned> PartialEq for Tok<T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "IStr({self})") }
fn eq(&self, other: &Self) -> bool { self.cmp(other).is_eq() }
} }
impl<T: Interned> hash::Hash for Tok<T> { #[derive(Clone)]
fn hash<H: hash::Hasher>(&self, state: &mut H) { self.to_api().hash(state) } pub struct IStrv(pub api::TStrv, pub Rc<dyn IStrvHandle>);
impl IStrv {
/// Obtain a unique ID for this interned data.
///
/// NOTICE: the ID is guaranteed to be the same for any interned instance of
/// the same value only as long as at least one instance exists. If a value is
/// no longer interned, the interner is free to forget about it.
pub fn to_api(&self) -> api::TStrv { self.0 }
pub fn rc(&self) -> Rc<Vec<IStr>> { self.1.rc() }
} }
impl<T: Interned + fmt::Display> fmt::Display for Tok<T> { impl Deref for IStrv {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { type Target = [IStr];
write!(f, "{}", &*self.data) fn deref(&self) -> &Self::Target { self.1.as_ref().as_ref() }
}
} }
impl<T: Interned + fmt::Debug> fmt::Debug for Tok<T> { impl Eq for IStrv {}
impl PartialEq for IStrv {
fn eq(&self, other: &Self) -> bool { self.0 == other.0 }
}
impl Hash for IStrv {
fn hash<H: hash::Hasher>(&self, state: &mut H) { self.0.0.hash(state) }
}
impl Display for IStrv {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Token({} -> {:?})", self.to_api().get_id(), self.data.as_ref()) let mut iter = self.deref().iter();
match iter.next() {
None => return Ok(()),
Some(s) => write!(f, "{s}")?,
}
for s in iter {
write!(f, "::{s}")?
}
Ok(())
} }
} }
impl Debug for IStrv {
pub trait Interned: Eq + hash::Hash + Clone + fmt::Debug + Internable<Interned = Self> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "IStrv({self})") }
type Marker: InternMarker<Interned = Self> + Sized;
fn intern(
self: Rc<Self>,
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
) -> impl Future<Output = Self::Marker>;
fn bimap(interner: &mut TypedInterners) -> &mut Bimap<Self>;
} }
pub trait Internable: fmt::Debug { pub trait InternerSrv {
type Interned: Interned; fn is<'a>(&'a self, v: &'a str) -> LocalBoxFuture<'a, IStr>;
fn get_owned(&self) -> Rc<Self::Interned>; fn es(&self, t: api::TStr) -> LocalBoxFuture<'_, IStr>;
fn iv<'a>(&'a self, v: &'a [IStr]) -> LocalBoxFuture<'a, IStrv>;
fn ev(&self, t: api::TStrv) -> LocalBoxFuture<'_, IStrv>;
} }
pub trait InternMarker: Copy + PartialEq + Eq + PartialOrd + Ord + hash::Hash + Sized { task_local! {
type Interned: Interned<Marker = Self>; static INTERNER: Rc<dyn InternerSrv>;
/// Only called on replicas
fn resolve(self, i: &Interner) -> impl Future<Output = Tok<Self::Interned>>;
fn get_id(self) -> NonZeroU64;
fn from_id(id: NonZeroU64) -> Self;
} }
impl Interned for String { pub async fn with_interner<F: Future>(val: Rc<dyn InternerSrv>, fut: F) -> F::Output {
type Marker = api::TStr; INTERNER.scope(val, fut).await
async fn intern(
self: Rc<Self>,
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
) -> Self::Marker {
req.request(api::InternStr(self.to_string())).await
}
fn bimap(interners: &mut TypedInterners) -> &mut Bimap<Self> { &mut interners.strings }
}
impl InternMarker for api::TStr {
type Interned = String;
async fn resolve(self, i: &Interner) -> Tok<Self::Interned> {
Tok::new(Rc::new(i.0.master.as_ref().unwrap().request(api::ExternStr(self)).await), self)
}
fn get_id(self) -> NonZeroU64 { self.0 }
fn from_id(id: NonZeroU64) -> Self { Self(id) }
}
impl Internable for str {
type Interned = String;
fn get_owned(&self) -> Rc<Self::Interned> { Rc::new(self.to_string()) }
}
impl Internable for String {
type Interned = String;
fn get_owned(&self) -> Rc<Self::Interned> { Rc::new(self.to_string()) }
} }
impl Interned for Vec<Tok<String>> { fn get_interner() -> Rc<dyn InternerSrv> {
type Marker = api::TStrv; INTERNER.try_with(|i| i.clone()).expect("Interner not initialized")
async fn intern(
self: Rc<Self>,
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
) -> Self::Marker {
req.request(api::InternStrv(self.iter().map(|t| t.to_api()).collect())).await
}
fn bimap(interners: &mut TypedInterners) -> &mut Bimap<Self> { &mut interners.vecs }
}
impl InternMarker for api::TStrv {
type Interned = Vec<Tok<String>>;
async fn resolve(self, i: &Interner) -> Tok<Self::Interned> {
let rep = i.0.master.as_ref().unwrap().request(api::ExternStrv(self)).await;
let data = futures::future::join_all(rep.into_iter().map(|m| i.ex(m))).await;
Tok::new(Rc::new(data), self)
}
fn get_id(self) -> NonZeroU64 { self.0 }
fn from_id(id: NonZeroU64) -> Self { Self(id) }
}
impl Internable for [Tok<String>] {
type Interned = Vec<Tok<String>>;
fn get_owned(&self) -> Rc<Self::Interned> { Rc::new(self.to_vec()) }
}
impl<const N: usize> Internable for [Tok<String>; N] {
type Interned = Vec<Tok<String>>;
fn get_owned(&self) -> Rc<Self::Interned> { Rc::new(self.to_vec()) }
}
impl Internable for Vec<Tok<String>> {
type Interned = Vec<Tok<String>>;
fn get_owned(&self) -> Rc<Self::Interned> { Rc::new(self.to_vec()) }
}
// impl Internable for Vec<api::TStr> {
// type Interned = Vec<Tok<String>>;
// fn get_owned(&self) -> Arc<Self::Interned> {
// Arc::new(self.iter().map(|ts| deintern(*ts)).collect())
// }
// }
// impl Internable for [api::TStr] {
// type Interned = Vec<Tok<String>>;
// fn get_owned(&self) -> Arc<Self::Interned> {
// Arc::new(self.iter().map(|ts| deintern(*ts)).collect())
// }
// }
/// The number of references held to any token by the interner.
const BASE_RC: usize = 3;
#[test]
fn base_rc_correct() {
let tok = Tok::new(Rc::new("foo".to_string()), api::TStr(1.try_into().unwrap()));
let mut bimap = Bimap::default();
bimap.insert(tok.clone());
assert_eq!(Rc::strong_count(&tok.data), BASE_RC + 1, "the bimap plus the current instance");
} }
pub struct Bimap<T: Interned> { pub async fn is(v: &str) -> IStr { get_interner().is(v).await }
intern: HashMap<Rc<T>, Tok<T>>, pub async fn iv(v: &[IStr]) -> IStrv { get_interner().iv(v).await }
by_id: HashMap<T::Marker, Tok<T>>, pub async fn es(v: api::TStr) -> IStr { get_interner().es(v).await }
} pub async fn ev(v: api::TStrv) -> IStrv { get_interner().ev(v).await }
impl<T: Interned> Bimap<T> {
pub fn insert(&mut self, token: Tok<T>) {
self.intern.insert(token.data.clone(), token.clone());
self.by_id.insert(token.to_api(), token);
}
pub fn by_marker(&self, marker: T::Marker) -> Option<Tok<T>> { self.by_id.get(&marker).cloned() } pub mod local_interner {
use std::borrow::Borrow;
use std::cell::RefCell;
use std::fmt::Debug;
use std::future;
use std::hash::{BuildHasher, Hash};
use std::num::NonZeroU64;
use std::rc::{Rc, Weak};
pub fn by_value<Q: Eq + hash::Hash>(&self, q: &Q) -> Option<Tok<T>> use futures::future::LocalBoxFuture;
where T: Borrow<Q> { use hashbrown::hash_table::{Entry, OccupiedEntry, VacantEntry};
(self.intern.raw_entry()) use hashbrown::{DefaultHashBuilder, HashTable};
.from_hash(self.intern.hasher().hash_one(q), |k| k.as_ref().borrow() == q) use orchid_api_traits::Coding;
.map(|p| p.1.clone())
}
pub fn sweep_replica(&mut self) -> Vec<T::Marker> { use super::{IStr, IStrHandle, IStrv, IStrvHandle, InternerSrv};
(self.intern)
.extract_if(|k, _| Rc::strong_count(k) == BASE_RC)
.map(|(_, v)| {
self.by_id.remove(&v.to_api());
v.to_api()
})
.collect()
}
pub fn sweep_master(&mut self, retained: HashSet<T::Marker>) {
self.intern.retain(|k, v| BASE_RC < Rc::strong_count(k) || retained.contains(&v.to_api()))
}
}
impl<T: Interned> Default for Bimap<T> {
fn default() -> Self { Self { by_id: HashMap::new(), intern: HashMap::new() } }
}
pub trait UpComm {
fn up<R: Request>(&self, req: R) -> R::Response;
}
#[derive(Default)]
pub struct TypedInterners {
strings: Bimap<String>,
vecs: Bimap<Vec<Tok<String>>>,
}
#[derive(Default)]
pub struct InternerData {
interners: Mutex<TypedInterners>,
master: Option<Box<dyn DynRequester<Transfer = api::IntReq>>>,
}
#[derive(Clone, Default)]
pub struct Interner(Rc<InternerData>);
impl Interner {
pub fn new_master() -> Self { Self::default() }
pub fn new_replica(req: impl DynRequester<Transfer = api::IntReq> + 'static) -> Self {
Self(Rc::new(InternerData { master: Some(Box::new(req)), interners: Mutex::default() }))
}
/// Intern some data; query its identifier if not known locally
pub async fn i<T: Interned>(&self, t: &(impl Internable<Interned = T> + ?Sized)) -> Tok<T> {
let data = t.get_owned();
let mut g = self.0.interners.lock().await;
let typed = T::bimap(&mut g);
if let Some(tok) = typed.by_value(&data) {
return tok;
}
let marker = match &self.0.master {
Some(c) => data.clone().intern(&**c).await,
None =>
T::Marker::from_id(NonZeroU64::new(ID.fetch_add(1, atomic::Ordering::Relaxed)).unwrap()),
};
let tok = Tok::new(data, marker);
T::bimap(&mut g).insert(tok.clone());
tok
}
/// Extern an identifier; query the data it represents if not known locally
pub async fn ex<M: InternMarker>(&self, marker: M) -> Tok<M::Interned> {
if let Some(tok) = M::Interned::bimap(&mut *self.0.interners.lock().await).by_marker(marker) {
return tok;
}
assert!(self.0.master.is_some(), "ID not in local interner and this is master");
let token = marker.resolve(self).await;
M::Interned::bimap(&mut *self.0.interners.lock().await).insert(token.clone());
token
}
pub async fn sweep_replica(&self) -> api::Retained {
assert!(self.0.master.is_some(), "Not a replica");
let mut g = self.0.interners.lock().await;
api::Retained { strings: g.strings.sweep_replica(), vecs: g.vecs.sweep_replica() }
}
pub async fn sweep_master(&self, retained: api::Retained) {
assert!(self.0.master.is_none(), "Not master");
let mut g = self.0.interners.lock().await;
g.strings.sweep_master(retained.strings.into_iter().collect());
g.vecs.sweep_master(retained.vecs.into_iter().collect());
}
}
impl fmt::Debug for Interner {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Interner{{ replica: {} }}", self.0.master.is_none())
}
}
static ID: atomic::AtomicU64 = atomic::AtomicU64::new(1);
pub fn merge_retained(into: &mut api::Retained, from: &api::Retained) {
into.strings = into.strings.iter().chain(&from.strings).copied().unique().collect();
into.vecs = into.vecs.iter().chain(&from.vecs).copied().unique().collect();
}
#[cfg(test)]
mod test {
use std::num::NonZero;
use std::pin::Pin;
use orchid_api_traits::{Decode, enc_vec};
use test_executors::spin_on;
use super::*;
use crate::api; use crate::api;
#[test] /// Associated types and methods for parallel concepts between scalar and
fn test_i() { /// vector interning
let i = Interner::new_master(); pub trait InternableCard: 'static + Sized + Default + Debug {
let _: Tok<String> = spin_on(i.i("foo")); /// API representation of an interner key
let _: Tok<Vec<Tok<String>>> = spin_on(i.i(&[spin_on(i.i("bar")), spin_on(i.i("baz"))])); type Token: Clone + Copy + Debug + Hash + Eq + PartialOrd + Ord + Coding + 'static;
/// Owned version of interned value physically held by `'static` interner
/// and token
type Data: 'static + Borrow<Self::Borrow> + Eq + Hash + Debug;
/// Borrowed version of interned value placed in intern queries to avoid a
/// copy
type Borrow: ToOwned<Owned = Self::Data> + ?Sized + Eq + Hash + Debug;
/// Smart object handed out by the interner for storage and comparison in
/// third party code. [IStr] or [IStrv]
type Interned: Clone + Debug;
/// Create smart object from token for fast comparison and a handle for
/// everything else incl. virtual drop
fn new_interned(token: Self::Token, handle: Rc<Handle<Self>>) -> Self::Interned;
} }
#[test] #[derive(Default, Debug)]
fn test_coding() { pub struct StrBranch;
spin_on(async { impl InternableCard for StrBranch {
let coded = api::TStr(NonZero::new(3u64).unwrap()); type Data = String;
let mut enc = &enc_vec(&coded).await[..]; type Token = api::TStr;
api::TStr::decode(Pin::new(&mut enc)).await; type Borrow = str;
assert_eq!(enc, [], "Did not consume all of {enc:?}") type Interned = IStr;
}) fn new_interned(t: Self::Token, h: Rc<Handle<Self>>) -> Self::Interned { IStr(t, h) }
} }
#[derive(Default, Debug)]
pub struct StrvBranch;
impl InternableCard for StrvBranch {
type Data = Vec<IStr>;
type Token = api::TStrv;
type Borrow = [IStr];
type Interned = IStrv;
fn new_interned(t: Self::Token, h: Rc<Handle<Self>>) -> Self::Interned { IStrv(t, h) }
}
/// Pairs interned data with its internment key
#[derive(Debug)]
struct Data<B: InternableCard> {
token: B::Token,
data: Rc<B::Data>,
}
impl<B: InternableCard> Clone for Data<B> {
fn clone(&self) -> Self { Self { token: self.token, data: self.data.clone() } }
}
/// Implementor for the trait objects held by [IStr] and [IStrv]
pub struct Handle<B: InternableCard> {
data: Data<B>,
parent: Weak<RefCell<IntData<B>>>,
}
impl IStrHandle for Handle<StrBranch> {
fn rc(&self) -> Rc<String> { self.data.data.clone() }
}
impl AsRef<str> for Handle<StrBranch> {
fn as_ref(&self) -> &str { self.data.data.as_ref().as_ref() }
}
impl IStrvHandle for Handle<StrvBranch> {
fn rc(&self) -> Rc<Vec<IStr>> { self.data.data.clone() }
}
impl AsRef<[IStr]> for Handle<StrvBranch> {
fn as_ref(&self) -> &[IStr] { self.data.data.as_ref().as_ref() }
}
impl<B: InternableCard> Drop for Handle<B> {
fn drop(&mut self) {
let Some(parent) = self.parent.upgrade() else { return };
if let Entry::Occupied(ent) =
parent.borrow_mut().entry_by_data(self.data.data.as_ref().borrow())
{
ent.remove();
}
if let Entry::Occupied(ent) = parent.borrow_mut().entry_by_tok(self.data.token) {
ent.remove();
}
}
}
/// Information retained about an interned token indexed both by key and
/// value.
struct Rec<B: InternableCard> {
/// This reference is weak, but the [Drop] handler of [Handle] removes all
/// [Rec]s from the interner so it is guaranteed to be live.
handle: Weak<Handle<B>>,
/// Keys for indexing from either table
data: Data<B>,
}
/// Read data from an occupied entry in an interner. The equivalent insert
/// command is [insert]
fn read<B: InternableCard>(entry: OccupiedEntry<'_, Rec<B>>) -> B::Interned {
let hand = entry.get().handle.upgrade().expect("Found entry but handle already dropped");
B::new_interned(entry.get().data.token, hand)
}
/// Insert some data into an entry borrowed from this same interner.
/// The equivalent read command is [read]
fn insert<B: InternableCard>(entry: VacantEntry<'_, Rec<B>>, handle: Rc<Handle<B>>) {
entry.insert(Rec { data: handle.data.clone(), handle: Rc::downgrade(&handle) });
}
#[derive(Default)]
struct IntData<B: InternableCard> {
by_tok: HashTable<Rec<B>>,
by_data: HashTable<Rec<B>>,
hasher: DefaultHashBuilder,
}
impl<B: InternableCard> IntData<B> {
fn entry_by_data(&mut self, query: &B::Borrow) -> Entry<'_, Rec<B>> {
self.by_data.entry(
self.hasher.hash_one(query),
|rec| rec.data.data.as_ref().borrow() == query,
|rec| self.hasher.hash_one(rec.data.data.as_ref().borrow()),
)
}
fn entry_by_tok(&mut self, token: B::Token) -> Entry<'_, Rec<B>> {
self.by_tok.entry(
self.hasher.hash_one(token),
|rec| rec.data.token == token,
|rec| self.hasher.hash_one(rec.data.token),
)
}
}
/// Failing intern command that can be recovered if the value is found
/// elsewhere
pub struct InternError<'a, B: InternableCard> {
int: &'a Int<B>,
query: &'a B::Borrow,
}
impl<B: InternableCard> InternError<'_, B> {
/// If a racing write populates the entry, the continuation returns that
/// value and discards its argument
pub fn set_if_empty(self, token: B::Token) -> B::Interned {
let mut int_data = self.int.0.borrow_mut();
match int_data.entry_by_data(self.query) {
Entry::Occupied(ent) => read(ent),
Entry::Vacant(ent) => {
let hand = self.int.mk_handle(Data { token, data: Rc::new(self.query.to_owned()) });
insert(ent, hand.clone());
let Entry::Vacant(other_ent) = int_data.entry_by_tok(token) else {
panic!("Data and key tables out of sync")
};
insert(other_ent, hand.clone());
B::new_interned(token, hand)
},
}
}
}
impl<B: InternableCard> Debug for InternError<'_, B> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("InternEntry").field(&self.query).finish()
}
}
/// Failing extern command that can be recovered if the value is found
/// elsewhere
pub struct ExternError<'a, B: InternableCard> {
int: &'a Int<B>,
token: B::Token,
}
impl<B: InternableCard> ExternError<'_, B> {
/// If a racing write populates the entry, the continuation returns that
/// value and discards its argument
pub fn set_if_empty(&self, data: Rc<B::Data>) -> B::Interned {
let mut int_data = self.int.0.borrow_mut();
match int_data.entry_by_tok(self.token) {
Entry::Occupied(ent) => read(ent),
Entry::Vacant(ent) => {
let hand = self.int.mk_handle(Data { token: self.token, data: data.clone() });
insert(ent, hand.clone());
let Entry::Vacant(other_ent) = int_data.entry_by_data(data.as_ref().borrow()) else {
panic!("Data and key tables out of sync")
};
insert(other_ent, hand.clone());
B::new_interned(self.token, hand)
},
}
}
}
impl<B: InternableCard> Debug for ExternError<'_, B> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("ExternEntry").field(&self.token).finish()
}
}
#[derive(Default)]
pub struct Int<B: InternableCard>(Rc<RefCell<IntData<B>>>);
impl<B: InternableCard> Int<B> {
fn mk_handle(&self, data: Data<B>) -> Rc<Handle<B>> {
Rc::new(Handle { data: data.clone(), parent: Rc::downgrade(&self.0.clone()) })
}
/// Look up by value, or yield to figure out its ID from elsewhere
pub fn i<'a>(&'a self, query: &'a B::Borrow) -> Result<B::Interned, InternError<'a, B>> {
if let Entry::Occupied(val) = self.0.borrow_mut().entry_by_data(query) {
return Ok(read(val));
}
Err(InternError { int: self, query })
}
/// Look up by key or yield to figure out its value from elsewhere
pub fn e(&self, token: B::Token) -> Result<B::Interned, ExternError<'_, B>> {
if let Entry::Occupied(ent) = self.0.borrow_mut().entry_by_tok(token) {
return Ok(read(ent));
}
Err(ExternError { int: self, token })
}
}
thread_local! {
static NEXT_ID: RefCell<u64> = 0.into();
}
fn with_new_id<T>(fun: impl FnOnce(NonZeroU64) -> T) -> T {
fun(
NonZeroU64::new(NEXT_ID.with_borrow_mut(|id| {
*id += 1;
*id
}))
.unwrap(),
)
}
#[derive(Default)]
struct LocalInterner {
str: Int<StrBranch>,
strv: Int<StrvBranch>,
}
impl InternerSrv for LocalInterner {
fn is<'a>(&'a self, v: &'a str) -> LocalBoxFuture<'a, IStr> {
match self.str.i(v) {
Ok(int) => Box::pin(future::ready(int)),
Err(e) => with_new_id(|id| Box::pin(future::ready(e.set_if_empty(api::TStr(id))))),
}
}
fn es(&self, t: api::TStr) -> LocalBoxFuture<'_, IStr> {
Box::pin(future::ready(self.str.e(t).expect("Unrecognized token cannot be externed")))
}
fn iv<'a>(&'a self, v: &'a [IStr]) -> LocalBoxFuture<'a, IStrv> {
match self.strv.i(v) {
Ok(int) => Box::pin(future::ready(int)),
Err(e) => with_new_id(|id| Box::pin(future::ready(e.set_if_empty(api::TStrv(id))))),
}
}
fn ev(&self, t: orchid_api::TStrv) -> LocalBoxFuture<'_, IStrv> {
Box::pin(future::ready(self.strv.e(t).expect("Unrecognized token cannot be externed")))
}
}
/// Creates a basic thread-local interner for testing and root role.
pub fn local_interner() -> Rc<dyn InternerSrv> { Rc::<LocalInterner>::default() }
} }
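The interner is likewise ambient: code interns through the free functions `is`/`iv` and resolves tokens with `es`/`ev` once an `InternerSrv` has been installed via `with_interner`. A sketch using the `local_interner` constructor defined above; the `use` paths and the `test_executors` runner are assumptions, not taken from this diff:

```rust
// Sketch: install the thread-local interner for the scope of one future.
use orchid_base::interner::{is, iv, with_interner};        // paths assumed
use orchid_base::interner::local_interner::local_interner; // path assumed

#[test]
fn intern_roundtrip() {
  test_executors::spin_on(with_interner(local_interner(), async {
    let foo1 = is("foo").await;
    let foo2 = is("foo").await;
    assert_eq!(foo1, foo2); // the same string maps to the same token
    let path1 = iv(&[foo1.clone(), is("bar").await]).await;
    let path2 = iv(&[foo2, is("bar").await]).await;
    assert_eq!(path1, path2); // and so do equal segment sequences
  }));
}
```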

View File

@@ -1,9 +1,9 @@
pub use async_once_cell; pub use async_once_cell;
use orchid_api as api; use orchid_api as api;
pub mod binary;
pub mod box_cow; pub mod box_cow;
pub mod boxed_iter; pub mod boxed_iter;
pub mod builtin;
pub mod char_filter; pub mod char_filter;
pub mod clone; pub mod clone;
pub mod combine; pub mod combine;
@@ -14,6 +14,7 @@ pub mod id_store;
pub mod interner; pub mod interner;
pub mod iter_utils; pub mod iter_utils;
pub mod join; pub mod join;
mod localset;
pub mod location; pub mod location;
pub mod logging; pub mod logging;
mod match_mapping; mod match_mapping;
@@ -25,6 +26,7 @@ pub mod pure_seq;
pub mod reqnot; pub mod reqnot;
pub mod sequence; pub mod sequence;
pub mod side; pub mod side;
pub mod stash;
mod tl_cache; mod tl_cache;
pub mod tokens; pub mod tokens;
pub mod tree; pub mod tree;

View File

@@ -0,0 +1,48 @@
use std::collections::VecDeque;
use std::pin::Pin;
use std::task::Poll;
use futures::StreamExt;
use futures::channel::mpsc::{UnboundedReceiver, UnboundedSender, unbounded};
use futures::future::LocalBoxFuture;
pub struct LocalSet<'a, E> {
receiver: UnboundedReceiver<LocalBoxFuture<'a, Result<(), E>>>,
pending: VecDeque<LocalBoxFuture<'a, Result<(), E>>>,
}
impl<'a, E> LocalSet<'a, E> {
pub fn new() -> (UnboundedSender<LocalBoxFuture<'a, Result<(), E>>>, Self) {
let (sender, receiver) = unbounded();
(sender, Self { receiver, pending: VecDeque::new() })
}
}
impl<E> Future for LocalSet<'_, E> {
type Output = Result<(), E>;
fn poll(self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll<Self::Output> {
let this = self.get_mut();
let mut any_pending = false;
loop {
match this.receiver.poll_next_unpin(cx) {
Poll::Pending => {
any_pending = true;
break;
},
Poll::Ready(None) => break,
Poll::Ready(Some(fut)) => this.pending.push_back(fut),
}
}
let count = this.pending.len();
for _ in 0..count {
let mut req = this.pending.pop_front().unwrap();
match req.as_mut().poll(cx) {
Poll::Ready(Ok(())) => (),
Poll::Ready(Err(e)) => return Poll::Ready(Err(e)),
Poll::Pending => {
any_pending = true;
this.pending.push_back(req)
},
}
}
if any_pending { Poll::Pending } else { Poll::Ready(Ok(())) }
}
}
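`LocalSet` is the queue behind the no-global-spawn design: synchronous callbacks push boxed futures into the sender, and whoever owns the set later drives them by awaiting it. A rough sketch; `MyErr` and `step` are placeholders, only `LocalSet` itself comes from this file:

```rust
// Sketch: stash async work from sync code, then drain it explicitly.
use futures::FutureExt as _;

#[derive(Debug)]
struct MyErr;

async fn step(n: u32) -> Result<(), MyErr> {
  println!("step {n}");
  Ok(())
}

async fn drain_queued_work() -> Result<(), MyErr> {
  let (sender, set) = LocalSet::new();
  // From a sync callback: enqueue work without awaiting it.
  sender.unbounded_send(step(1).boxed_local()).ok();
  sender.unbounded_send(step(2).boxed_local()).ok();
  drop(sender); // close the channel so the set can terminate
  set.await // polls every queued future; the first Err short-circuits
}
```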

View File

@@ -8,12 +8,12 @@ use futures::future::join_all;
use trait_set::trait_set; use trait_set::trait_set;
use crate::error::ErrPos; use crate::error::ErrPos;
use crate::interner::{Interner, Tok}; use crate::interner::{IStr, es, is};
use crate::name::Sym; use crate::name::Sym;
use crate::{api, match_mapping, sym}; use crate::{api, match_mapping, sym};
trait_set! { trait_set! {
pub trait GetSrc = FnMut(&Sym) -> Tok<String>; pub trait GetSrc = FnMut(&Sym) -> IStr;
} }
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
@@ -37,13 +37,13 @@ impl Pos {
other => format!("{other:?}"), other => format!("{other:?}"),
} }
} }
pub async fn from_api(api: &api::Location, i: &Interner) -> Self { pub async fn from_api(api: &api::Location) -> Self {
match_mapping!(api, api::Location => Pos { match_mapping!(api, api::Location => Pos {
None, Inherit, SlotTarget, None, Inherit, SlotTarget,
Gen(cgi => CodeGenInfo::from_api(cgi, i).await), Gen(cgi => CodeGenInfo::from_api(cgi).await),
Multi(v => join_all(v.iter().map(|l| Pos::from_api(l, i))).await) Multi(v => join_all(v.iter().map(Pos::from_api)).await)
} { } {
api::Location::SourceRange(sr) => Self::SrcRange(SrcRange::from_api(sr, i).await) api::Location::SourceRange(sr) => Self::SrcRange(SrcRange::from_api(sr).await)
}) })
} }
pub fn to_api(&self) -> api::Location { pub fn to_api(&self) -> api::Location {
@@ -108,7 +108,7 @@ impl SrcRange {
} }
/// Create a dud [SourceRange] for testing. Its value is unspecified and /// Create a dud [SourceRange] for testing. Its value is unspecified and
/// volatile. /// volatile.
pub async fn mock(i: &Interner) -> Self { Self { range: 0..1, path: sym!(test; i) } } pub async fn mock() -> Self { Self { range: 0..1, path: sym!(test) } }
/// Path the source text was loaded from /// Path the source text was loaded from
pub fn path(&self) -> Sym { self.path.clone() } pub fn path(&self) -> Sym { self.path.clone() }
/// Byte range /// Byte range
@@ -133,8 +133,8 @@ impl SrcRange {
} }
} }
pub fn zw(path: Sym, pos: u32) -> Self { Self { path, range: pos..pos } } pub fn zw(path: Sym, pos: u32) -> Self { Self { path, range: pos..pos } }
pub async fn from_api(api: &api::SourceRange, i: &Interner) -> Self { pub async fn from_api(api: &api::SourceRange) -> Self {
Self { path: Sym::from_api(api.path, i).await, range: api.range.clone() } Self { path: Sym::from_api(api.path).await, range: api.range.clone() }
} }
pub fn to_api(&self) -> api::SourceRange { pub fn to_api(&self) -> api::SourceRange {
api::SourceRange { path: self.path.to_api(), range: self.range.clone() } api::SourceRange { path: self.path.to_api(), range: self.range.clone() }
@@ -162,24 +162,19 @@ pub struct CodeGenInfo {
/// formatted like a Rust namespace /// formatted like a Rust namespace
pub generator: Sym, pub generator: Sym,
/// Unformatted user message with relevant circumstances and parameters /// Unformatted user message with relevant circumstances and parameters
pub details: Tok<String>, pub details: IStr,
} }
impl CodeGenInfo { impl CodeGenInfo {
/// A codegen marker with no user message and parameters /// A codegen marker with no user message and parameters
pub async fn new_short(generator: Sym, i: &Interner) -> Self { pub async fn new_short(generator: Sym) -> Self { Self { generator, details: is("").await } }
Self { generator, details: i.i("").await }
}
/// A codegen marker with a user message or parameters /// A codegen marker with a user message or parameters
pub async fn new_details(generator: Sym, details: impl AsRef<str>, i: &Interner) -> Self { pub async fn new_details(generator: Sym, details: impl AsRef<str>) -> Self {
Self { generator, details: i.i(details.as_ref()).await } Self { generator, details: is(details.as_ref()).await }
} }
/// Syntactic location /// Syntactic location
pub fn pos(&self) -> Pos { Pos::Gen(self.clone()) } pub fn pos(&self) -> Pos { Pos::Gen(self.clone()) }
pub async fn from_api(api: &api::CodeGenInfo, i: &Interner) -> Self { pub async fn from_api(api: &api::CodeGenInfo) -> Self {
Self { Self { generator: Sym::from_api(api.generator).await, details: es(api.details).await }
generator: Sym::from_api(api.generator, i).await,
details: Tok::from_api(api.details, i).await,
}
} }
pub fn to_api(&self) -> api::CodeGenInfo { pub fn to_api(&self) -> api::CodeGenInfo {
api::CodeGenInfo { generator: self.generator.to_api(), details: self.details.to_api() } api::CodeGenInfo { generator: self.generator.to_api(), details: self.details.to_api() }

View File

@@ -1,36 +1,74 @@
use std::any::Any;
use std::cell::RefCell;
use std::fmt::Arguments; use std::fmt::Arguments;
use std::fs::File; use std::io::Write;
use std::io::{Write, stderr}; use std::rc::Rc;
pub use api::LogStrategy; use futures::future::LocalBoxFuture;
use itertools::Itertools; use task_local::task_local;
use crate::api; use crate::api;
#[derive(Clone)] task_local! {
pub struct Logger(api::LogStrategy); static DEFAULT_WRITER: RefCell<Box<dyn Write>>
impl Logger { }
pub fn new(strat: api::LogStrategy) -> Self { Self(strat) }
pub fn log(&self, msg: impl AsRef<str>) { writeln!(self, "{}", msg.as_ref()) } /// Set the stream used for [api::LogStrategy::Default]. If not set,
pub fn strat(&self) -> api::LogStrategy { self.0.clone() } /// [std::io::stderr] will be used.
pub fn is_active(&self) -> bool { !matches!(self.0, api::LogStrategy::Discard) } pub async fn with_default_stream<F: Future>(stderr: impl Write + 'static, fut: F) -> F::Output {
pub fn log_buf(&self, event: impl AsRef<str>, buf: &[u8]) { DEFAULT_WRITER.scope(RefCell::new(Box::new(stderr)), fut).await
if std::env::var("ORCHID_LOG_BUFFERS").is_ok_and(|v| !v.is_empty()) { }
writeln!(self, "{}: [{}]", event.as_ref(), buf.iter().map(|b| format!("{b:02x}")).join(" "))
pub trait LogWriter {
fn write_fmt<'a>(&'a self, fmt: Arguments<'a>) -> LocalBoxFuture<'a, ()>;
}
pub trait Logger: Any {
fn writer(&self, category: &str) -> Rc<dyn LogWriter>;
fn strat(&self, category: &str) -> api::LogStrategy;
fn is_active(&self, category: &str) -> bool {
!matches!(self.strat(category), api::LogStrategy::Discard)
}
}
task_local! {
static LOGGER: Rc<dyn Logger>;
}
pub async fn with_logger<F: Future>(logger: impl Logger + 'static, fut: F) -> F::Output {
LOGGER.scope(Rc::new(logger), fut).await
}
pub fn log(category: &str) -> Rc<dyn LogWriter> {
LOGGER.try_with(|l| l.writer(category)).expect("Logger not set!")
}
pub fn get_logger() -> Rc<dyn Logger> { LOGGER.try_with(|l| l.clone()).expect("Logger not set!") }
pub mod test {
use std::fmt::Arguments;
use std::rc::Rc;
use futures::future::LocalBoxFuture;
use crate::clone;
use crate::logging::{LogWriter, Logger};
#[derive(Clone)]
pub struct TestLogger(Rc<dyn Fn(String) -> LocalBoxFuture<'static, ()>>);
impl LogWriter for TestLogger {
fn write_fmt<'a>(&'a self, fmt: Arguments<'a>) -> LocalBoxFuture<'a, ()> {
(self.0)(fmt.to_string())
} }
} }
pub fn write_fmt(&self, fmt: Arguments) { impl Logger for TestLogger {
match &self.0 { fn strat(&self, _category: &str) -> orchid_api::LogStrategy { orchid_api::LogStrategy::Default }
api::LogStrategy::Discard => (), fn writer(&self, _category: &str) -> std::rc::Rc<dyn LogWriter> { Rc::new(self.clone()) }
api::LogStrategy::StdErr => { }
stderr().write_fmt(fmt).expect("Could not write to stderr!"); impl TestLogger {
stderr().flush().expect("Could not flush stderr") pub fn new(f: impl AsyncFn(String) + 'static) -> Self {
}, let f = Rc::new(f);
api::LogStrategy::File(f) => { Self(Rc::new(move |s| clone!(f; Box::pin(async move { f(s).await }))))
let mut file = (File::options().write(true).create(true).truncate(true).open(f))
.expect("Could not open logfile");
file.write_fmt(fmt).expect("Could not write to logfile");
},
} }
} }
} }
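Logging follows the same pattern: install a `Logger` with `with_logger`, then fetch a writer per category through `log`. A small sketch using the `TestLogger` helper above; the import paths are assumptions and the category string is arbitrary:

```rust
// Sketch: capture log output in memory. Paths are guessed, not from the diff.
use std::cell::RefCell;
use std::rc::Rc;
use orchid_base::logging::{log, with_logger, test::TestLogger}; // paths assumed

async fn capture_logs() {
  let lines: Rc<RefCell<Vec<String>>> = Rc::default();
  let sink = lines.clone();
  let logger = TestLogger::new(move |line: String| {
    let sink = sink.clone();
    async move { sink.borrow_mut().push(line) }
  });
  with_logger(logger, async {
    log("parser").write_fmt(format_args!("starting pass")).await;
  })
  .await;
  assert_eq!(lines.borrow().len(), 1);
  assert_eq!(lines.borrow()[0], "starting pass");
}
```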

View File

@@ -6,7 +6,8 @@ use orchid_api_traits::{Decode, Encode};
pub async fn send_msg(mut write: Pin<&mut impl AsyncWrite>, msg: &[u8]) -> io::Result<()> { pub async fn send_msg(mut write: Pin<&mut impl AsyncWrite>, msg: &[u8]) -> io::Result<()> {
let mut len_buf = vec![]; let mut len_buf = vec![];
u32::try_from(msg.len()).unwrap().encode(Pin::new(&mut len_buf)).await; let len_prefix = u32::try_from(msg.len()).expect("Message over 4GB not permitted on channel");
len_prefix.encode_vec(&mut len_buf);
write.write_all(&len_buf).await?; write.write_all(&len_buf).await?;
write.write_all(msg).await?; write.write_all(msg).await?;
write.flush().await write.flush().await
@@ -15,7 +16,7 @@ pub async fn send_msg(mut write: Pin<&mut impl AsyncWrite>, msg: &[u8]) -> io::R
pub async fn recv_msg(mut read: Pin<&mut impl AsyncRead>) -> io::Result<Vec<u8>> { pub async fn recv_msg(mut read: Pin<&mut impl AsyncRead>) -> io::Result<Vec<u8>> {
let mut len_buf = [0u8; (u32::BITS / 8) as usize]; let mut len_buf = [0u8; (u32::BITS / 8) as usize];
read.read_exact(&mut len_buf).await?; read.read_exact(&mut len_buf).await?;
let len = u32::decode(Pin::new(&mut &len_buf[..])).await; let len = u32::decode(Pin::new(&mut &len_buf[..])).await?;
let mut msg = vec![0u8; len as usize]; let mut msg = vec![0u8; len as usize];
read.read_exact(&mut msg).await?; read.read_exact(&mut msg).await?;
Ok(msg) Ok(msg)
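Each frame is a `u32` length prefix (encoded with the crate's `Encode` impl, now written through `encode_vec`) followed by the payload, which lets `recv_msg` restore message boundaries on a byte stream. A round-trip sketch over an in-memory cursor; it assumes the `AsyncRead`/`AsyncWrite` traits in play are the `futures::io` ones:

```rust
// Sketch: write one frame into a buffer, then read it back.
use std::pin::Pin;
use futures::io::Cursor;

async fn frame_roundtrip() -> std::io::Result<()> {
  let mut buf = Cursor::new(Vec::new());
  send_msg(Pin::new(&mut buf), b"hello").await?; // 4-byte length, then payload
  buf.set_position(0);
  let msg = recv_msg(Pin::new(&mut buf)).await?;
  assert_eq!(msg, b"hello".to_vec());
  Ok(())
}
```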

View File

@@ -12,65 +12,60 @@ use itertools::Itertools;
use trait_set::trait_set; use trait_set::trait_set;
use crate::api; use crate::api;
use crate::interner::{InternMarker, Interner, Tok}; use crate::interner::{IStr, IStrv, es, ev, is, iv};
trait_set! { trait_set! {
/// Traits that all name iterators should implement /// Traits that all name iterators should implement
pub trait NameIter = Iterator<Item = Tok<String>> + DoubleEndedIterator + ExactSizeIterator; pub trait NameIter = Iterator<Item = IStr> + DoubleEndedIterator + ExactSizeIterator;
} }
/// A token path which may be empty. [VName] is the non-empty version /// A token path which may be empty. [VName] is the non-empty version
#[derive(Clone, Default, Hash, PartialEq, Eq)] #[derive(Clone, Default, Hash, PartialEq, Eq)]
pub struct VPath(Vec<Tok<String>>); pub struct VPath(Vec<IStr>);
impl VPath { impl VPath {
/// Collect segments into a vector /// Collect segments into a vector
pub fn new(items: impl IntoIterator<Item = Tok<String>>) -> Self { pub fn new(items: impl IntoIterator<Item = IStr>) -> Self { Self(items.into_iter().collect()) }
Self(items.into_iter().collect())
}
/// Number of path segments /// Number of path segments
pub fn len(&self) -> usize { self.0.len() } pub fn len(&self) -> usize { self.0.len() }
/// Whether there are any path segments. In other words, whether this is a /// Whether there are any path segments. In other words, whether this is a
/// valid name /// valid name
pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn is_empty(&self) -> bool { self.len() == 0 }
/// Prepend some tokens to the path /// Prepend some tokens to the path
pub fn prefix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self { pub fn prefix(self, items: impl IntoIterator<Item = IStr>) -> Self {
Self(items.into_iter().chain(self.0).collect()) Self(items.into_iter().chain(self.0).collect())
} }
/// Append some tokens to the path /// Append some tokens to the path
pub fn suffix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self { pub fn suffix(self, items: impl IntoIterator<Item = IStr>) -> Self {
Self(self.0.into_iter().chain(items).collect()) Self(self.0.into_iter().chain(items).collect())
} }
/// Partition the string by `::` namespace separators /// Partition the string by `::` namespace separators
pub async fn parse(s: &str, i: &Interner) -> Self { pub async fn parse(s: &str) -> Self {
Self(if s.is_empty() { vec![] } else { join_all(s.split("::").map(|s| i.i(s))).await }) Self(if s.is_empty() { vec![] } else { join_all(s.split("::").map(is)).await })
} }
/// Walk over the segments /// Walk over the segments
pub fn str_iter(&self) -> impl Iterator<Item = &'_ str> { pub fn str_iter(&self) -> impl Iterator<Item = &'_ str> { Box::new(self.0.iter().map(|s| &**s)) }
Box::new(self.0.iter().map(|s| s.as_str()))
}
/// Try to convert into non-empty version /// Try to convert into non-empty version
pub fn into_name(self) -> Result<VName, EmptyNameError> { VName::new(self.0) } pub fn into_name(self) -> Result<VName, EmptyNameError> { VName::new(self.0) }
/// Add a token to the path. Since now we know that it can't be empty, turn it /// Add a token to the path. Since now we know that it can't be empty, turn it
/// into a name. /// into a name.
pub fn name_with_suffix(self, name: Tok<String>) -> VName { pub fn name_with_suffix(self, name: IStr) -> VName {
VName(self.into_iter().chain([name]).collect()) VName(self.into_iter().chain([name]).collect())
} }
/// Add a token to the beginning of the path. Since now we know that it can't be /// Add a token to the beginning of the path. Since now we know that it can't be
/// empty, turn it into a name. /// empty, turn it into a name.
pub fn name_with_prefix(self, name: Tok<String>) -> VName { pub fn name_with_prefix(self, name: IStr) -> VName {
VName([name].into_iter().chain(self).collect()) VName([name].into_iter().chain(self).collect())
} }
/// Convert a fs path to a vpath /// Convert a fs path to a vpath
pub async fn from_path(path: &Path, ext: &str, i: &Interner) -> Option<(Self, bool)> { pub async fn from_path(path: &Path, ext: &str) -> Option<(Self, bool)> {
async fn to_vpath(p: &Path, i: &Interner) -> Option<VPath> { async fn to_vpath(p: &Path) -> Option<VPath> {
let tok_opt_v = let tok_opt_v = join_all(p.iter().map(|c| OptionFuture::from(c.to_str().map(is)))).await;
join_all(p.iter().map(|c| OptionFuture::from(c.to_str().map(|s| i.i(s))))).await;
tok_opt_v.into_iter().collect::<Option<_>>().map(VPath) tok_opt_v.into_iter().collect::<Option<_>>().map(VPath)
} }
match path.extension().map(|s| s.to_str()) { match path.extension().map(|s| s.to_str()) {
Some(Some(s)) if s == ext => Some((to_vpath(&path.with_extension(""), i).await?, true)), Some(Some(s)) if s == ext => Some((to_vpath(&path.with_extension("")).await?, true)),
None => Some((to_vpath(path, i).await?, false)), None => Some((to_vpath(path).await?, false)),
Some(_) => None, Some(_) => None,
} }
} }
@@ -83,30 +78,28 @@ impl fmt::Display for VPath {
write!(f, "{}", self.str_iter().join("::")) write!(f, "{}", self.str_iter().join("::"))
} }
} }
impl FromIterator<Tok<String>> for VPath { impl FromIterator<IStr> for VPath {
fn from_iter<T: IntoIterator<Item = Tok<String>>>(iter: T) -> Self { fn from_iter<T: IntoIterator<Item = IStr>>(iter: T) -> Self { Self(iter.into_iter().collect()) }
Self(iter.into_iter().collect())
}
} }
impl IntoIterator for VPath { impl IntoIterator for VPath {
type Item = Tok<String>; type Item = IStr;
type IntoIter = vec::IntoIter<Self::Item>; type IntoIter = vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() } fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
} }
impl Borrow<[Tok<String>]> for VPath { impl Borrow<[IStr]> for VPath {
fn borrow(&self) -> &[Tok<String>] { &self.0[..] } fn borrow(&self) -> &[IStr] { &self.0[..] }
} }
impl Deref for VPath { impl Deref for VPath {
type Target = [Tok<String>]; type Target = [IStr];
fn deref(&self) -> &Self::Target { self.borrow() } fn deref(&self) -> &Self::Target { self.borrow() }
} }
impl<T> Index<T> for VPath impl<T> Index<T> for VPath
where [Tok<String>]: Index<T> where [IStr]: Index<T>
{ {
type Output = <[Tok<String>] as Index<T>>::Output; type Output = <[IStr] as Index<T>>::Output;
fn index(&self, index: T) -> &Self::Output { &Borrow::<[Tok<String>]>::borrow(self)[index] } fn index(&self, index: T) -> &Self::Output { &Borrow::<[IStr]>::borrow(self)[index] }
} }
/// A mutable representation of a namespaced identifier of at least one segment. /// A mutable representation of a namespaced identifier of at least one segment.
@@ -116,50 +109,43 @@ where [Tok<String>]: Index<T>
/// See also [Sym] for the immutable representation, and [VPath] for possibly /// See also [Sym] for the immutable representation, and [VPath] for possibly
/// empty values /// empty values
#[derive(Clone, Hash, PartialEq, Eq)] #[derive(Clone, Hash, PartialEq, Eq)]
pub struct VName(Vec<Tok<String>>); pub struct VName(Vec<IStr>);
impl VName { impl VName {
/// Assert that the sequence isn't empty and wrap it in [VName] to represent /// Assert that the sequence isn't empty and wrap it in [VName] to represent
/// this invariant /// this invariant
pub fn new(items: impl IntoIterator<Item = Tok<String>>) -> Result<Self, EmptyNameError> { pub fn new(items: impl IntoIterator<Item = IStr>) -> Result<Self, EmptyNameError> {
let data: Vec<_> = items.into_iter().collect(); let data: Vec<_> = items.into_iter().collect();
if data.is_empty() { Err(EmptyNameError) } else { Ok(Self(data)) } if data.is_empty() { Err(EmptyNameError) } else { Ok(Self(data)) }
} }
pub async fn deintern( pub async fn deintern(name: impl IntoIterator<Item = api::TStr>) -> Result<Self, EmptyNameError> {
name: impl IntoIterator<Item = api::TStr>, Self::new(join_all(name.into_iter().map(es)).await)
i: &Interner,
) -> Result<Self, EmptyNameError> {
Self::new(join_all(name.into_iter().map(|m| Tok::from_api(m, i))).await)
} }
/// Unwrap the enclosed vector /// Unwrap the enclosed vector
pub fn into_vec(self) -> Vec<Tok<String>> { self.0 } pub fn into_vec(self) -> Vec<IStr> { self.0 }
/// Get a reference to the enclosed vector /// Get a reference to the enclosed vector
pub fn vec(&self) -> &Vec<Tok<String>> { &self.0 } pub fn vec(&self) -> &Vec<IStr> { &self.0 }
/// Mutable access to the underlying vector. To ensure correct results, this /// Mutable access to the underlying vector. To ensure correct results, this
/// must never be empty. /// must never be empty.
pub fn vec_mut(&mut self) -> &mut Vec<Tok<String>> { &mut self.0 } pub fn vec_mut(&mut self) -> &mut Vec<IStr> { &mut self.0 }
/// Intern the name and return a [Sym] /// Intern the name and return a [Sym]
pub async fn to_sym(&self, i: &Interner) -> Sym { Sym(i.i(&self.0[..]).await) } pub async fn to_sym(&self) -> Sym { Sym(iv(&self.0[..]).await) }
/// If this name has only one segment, return it /// If this name has only one segment, return it
pub fn as_root(&self) -> Option<Tok<String>> { self.0.iter().exactly_one().ok().cloned() } pub fn as_root(&self) -> Option<IStr> { self.0.iter().exactly_one().ok().cloned() }
/// Prepend the segments to this name /// Prepend the segments to this name
#[must_use = "This is a pure function"] #[must_use = "This is a pure function"]
pub fn prefix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self { pub fn prefix(self, items: impl IntoIterator<Item = IStr>) -> Self {
Self(items.into_iter().chain(self.0).collect()) Self(items.into_iter().chain(self.0).collect())
} }
/// Append the segments to this name /// Append the segments to this name
#[must_use = "This is a pure function"] #[must_use = "This is a pure function"]
pub fn suffix(self, items: impl IntoIterator<Item = Tok<String>>) -> Self { pub fn suffix(self, items: impl IntoIterator<Item = IStr>) -> Self {
Self(self.0.into_iter().chain(items).collect()) Self(self.0.into_iter().chain(items).collect())
} }
/// Read a `::` separated namespaced name /// Read a `::` separated namespaced name
pub async fn parse(s: &str, i: &Interner) -> Result<Self, EmptyNameError> { pub async fn parse(s: &str) -> Result<Self, EmptyNameError> { Self::new(VPath::parse(s).await) }
Self::new(VPath::parse(s, i).await) pub async fn literal(s: &'static str) -> Self { Self::parse(s).await.expect("empty literal !?") }
}
pub async fn literal(s: &'static str, i: &Interner) -> Self {
Self::parse(s, i).await.expect("empty literal !?")
}
/// Obtain an iterator over the segments of the name /// Obtain an iterator over the segments of the name
pub fn iter(&self) -> impl Iterator<Item = Tok<String>> + '_ { self.0.iter().cloned() } pub fn iter(&self) -> impl Iterator<Item = IStr> + '_ { self.0.iter().cloned() }
} }
impl fmt::Debug for VName { impl fmt::Debug for VName {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "VName({self})") }
@@ -170,22 +156,22 @@ impl fmt::Display for VName {
} }
} }
impl IntoIterator for VName { impl IntoIterator for VName {
type Item = Tok<String>; type Item = IStr;
type IntoIter = vec::IntoIter<Self::Item>; type IntoIter = vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter { self.0.into_iter() } fn into_iter(self) -> Self::IntoIter { self.0.into_iter() }
} }
impl<T> Index<T> for VName impl<T> Index<T> for VName
where [Tok<String>]: Index<T> where [IStr]: Index<T>
{ {
type Output = <[Tok<String>] as Index<T>>::Output; type Output = <[IStr] as Index<T>>::Output;
fn index(&self, index: T) -> &Self::Output { &self.deref()[index] } fn index(&self, index: T) -> &Self::Output { &self.deref()[index] }
} }
impl Borrow<[Tok<String>]> for VName { impl Borrow<[IStr]> for VName {
fn borrow(&self) -> &[Tok<String>] { self.0.borrow() } fn borrow(&self) -> &[IStr] { self.0.borrow() }
} }
impl Deref for VName { impl Deref for VName {
type Target = [Tok<String>]; type Target = [IStr];
fn deref(&self) -> &Self::Target { self.borrow() } fn deref(&self) -> &Self::Target { self.borrow() }
} }
@@ -193,11 +179,9 @@ impl Deref for VName {
/// empty sequence /// empty sequence
#[derive(Debug, Copy, Clone, Default, Hash, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, Copy, Clone, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct EmptyNameError; pub struct EmptyNameError;
impl TryFrom<&[Tok<String>]> for VName { impl TryFrom<&[IStr]> for VName {
type Error = EmptyNameError; type Error = EmptyNameError;
fn try_from(value: &[Tok<String>]) -> Result<Self, Self::Error> { fn try_from(value: &[IStr]) -> Result<Self, Self::Error> { Self::new(value.iter().cloned()) }
Self::new(value.iter().cloned())
}
} }
/// An interned representation of a namespaced identifier. /// An interned representation of a namespaced identifier.
@@ -206,37 +190,34 @@ impl TryFrom<&[Tok<String>]> for VName {
/// ///
/// See also [VName] /// See also [VName]
#[derive(Clone, Hash, PartialEq, Eq)] #[derive(Clone, Hash, PartialEq, Eq)]
pub struct Sym(Tok<Vec<Tok<String>>>); pub struct Sym(IStrv);
impl Sym { impl Sym {
/// Assert that the sequence isn't empty, intern it and wrap it in a [Sym] to /// Assert that the sequence isn't empty, intern it and wrap it in a [Sym] to
/// represent this invariant /// represent this invariant
pub async fn new( pub async fn new(v: impl IntoIterator<Item = IStr>) -> Result<Self, EmptyNameError> {
v: impl IntoIterator<Item = Tok<String>>,
i: &Interner,
) -> Result<Self, EmptyNameError> {
let items = v.into_iter().collect_vec(); let items = v.into_iter().collect_vec();
Self::from_tok(i.i(&items).await) Self::from_tok(iv(&items).await)
} }
/// Read a `::` separated namespaced name. /// Read a `::` separated namespaced name.
pub async fn parse(s: &str, i: &Interner) -> Result<Self, EmptyNameError> { pub async fn parse(s: &str) -> Result<Self, EmptyNameError> {
Ok(Sym(i.i(&VName::parse(s, i).await?.into_vec()).await)) Ok(Sym(iv(&VName::parse(s).await?.into_vec()).await))
} }
/// Assert that a token isn't empty, and wrap it in a [Sym] /// Assert that a token isn't empty, and wrap it in a [Sym]
pub fn from_tok(t: Tok<Vec<Tok<String>>>) -> Result<Self, EmptyNameError> { pub fn from_tok(t: IStrv) -> Result<Self, EmptyNameError> {
if t.is_empty() { Err(EmptyNameError) } else { Ok(Self(t)) } if t.is_empty() { Err(EmptyNameError) } else { Ok(Self(t)) }
} }
/// Grab the interner token /// Grab the interner token
pub fn tok(&self) -> Tok<Vec<Tok<String>>> { self.0.clone() } pub fn tok(&self) -> IStrv { self.0.clone() }
/// Get a number unique to this name suitable for arbitrary ordering. /// Get a number unique to this name suitable for arbitrary ordering.
pub fn id(&self) -> NonZeroU64 { self.0.to_api().get_id() } pub fn id(&self) -> NonZeroU64 { self.0.to_api().0 }
/// Extern the sym for editing /// Extern the sym for editing
pub fn to_vname(&self) -> VName { VName(self[..].to_vec()) } pub fn to_vname(&self) -> VName { VName(self[..].to_vec()) }
pub async fn from_api(marker: api::TStrv, i: &Interner) -> Sym { pub async fn from_api(marker: api::TStrv) -> Sym {
Self::from_tok(Tok::from_api(marker, i).await).expect("Empty sequence found for serialized Sym") Self::from_tok(ev(marker).await).expect("Empty sequence found for serialized Sym")
} }
pub fn to_api(&self) -> api::TStrv { self.tok().to_api() } pub fn to_api(&self) -> api::TStrv { self.tok().to_api() }
pub async fn suffix(&self, tokv: impl IntoIterator<Item = Tok<String>>, i: &Interner) -> Sym { pub async fn suffix(&self, tokv: impl IntoIterator<Item = IStr>) -> Sym {
Self::new(self.0.iter().cloned().chain(tokv), i).await.unwrap() Self::new(self.0.iter().cloned().chain(tokv)).await.unwrap()
} }
} }
impl fmt::Debug for Sym { impl fmt::Debug for Sym {
@@ -248,17 +229,17 @@ impl fmt::Display for Sym {
} }
} }
impl<T> Index<T> for Sym impl<T> Index<T> for Sym
where [Tok<String>]: Index<T> where [IStr]: Index<T>
{ {
type Output = <[Tok<String>] as Index<T>>::Output; type Output = <[IStr] as Index<T>>::Output;
fn index(&self, index: T) -> &Self::Output { &self.deref()[index] } fn index(&self, index: T) -> &Self::Output { &self.deref()[index] }
} }
impl Borrow<[Tok<String>]> for Sym { impl Borrow<[IStr]> for Sym {
fn borrow(&self) -> &[Tok<String>] { &self.0[..] } fn borrow(&self) -> &[IStr] { &self.0[..] }
} }
impl Deref for Sym { impl Deref for Sym {
type Target = [Tok<String>]; type Target = [IStr];
fn deref(&self) -> &Self::Target { self.borrow() } fn deref(&self) -> &Self::Target { self.borrow() }
} }
@@ -266,16 +247,14 @@ impl Deref for Sym {
/// handled together in datastructures. The names can never be empty /// handled together in datastructures. The names can never be empty
#[allow(clippy::len_without_is_empty)] // never empty #[allow(clippy::len_without_is_empty)] // never empty
pub trait NameLike: pub trait NameLike:
'static + Clone + Eq + Hash + fmt::Debug + fmt::Display + Borrow<[Tok<String>]> 'static + Clone + Eq + Hash + fmt::Debug + fmt::Display + Borrow<[IStr]>
{ {
/// Convert into held slice /// Convert into held slice
fn as_slice(&self) -> &[Tok<String>] { Borrow::<[Tok<String>]>::borrow(self) } fn as_slice(&self) -> &[IStr] { Borrow::<[IStr]>::borrow(self) }
/// Get iterator over tokens /// Get iterator over tokens
fn segs(&self) -> impl NameIter + '_ { self.as_slice().iter().cloned() } fn segs(&self) -> impl NameIter + '_ { self.as_slice().iter().cloned() }
/// Get iterator over string segments /// Get iterator over string segments
fn str_iter(&self) -> impl Iterator<Item = &'_ str> + '_ { fn str_iter(&self) -> impl Iterator<Item = &'_ str> + '_ { self.as_slice().iter().map(|t| &**t) }
self.as_slice().iter().map(|t| t.as_str())
}
/// Fully resolve the name for printing /// Fully resolve the name for printing
#[must_use] #[must_use]
fn to_strv(&self) -> Vec<String> { self.segs().map(|s| s.to_string()).collect() } fn to_strv(&self) -> Vec<String> { self.segs().map(|s| s.to_string()).collect() }
@@ -286,19 +265,19 @@ pub trait NameLike:
NonZeroUsize::try_from(self.segs().count()).expect("NameLike never empty") NonZeroUsize::try_from(self.segs().count()).expect("NameLike never empty")
} }
/// Like slice's `split_first` except we know that it always returns Some /// Like slice's `split_first` except we know that it always returns Some
fn split_first_seg(&self) -> (Tok<String>, &[Tok<String>]) { fn split_first_seg(&self) -> (IStr, &[IStr]) {
let (head, tail) = self.as_slice().split_first().expect("NameLike never empty"); let (head, tail) = self.as_slice().split_first().expect("NameLike never empty");
(head.clone(), tail) (head.clone(), tail)
} }
/// Like slice's `split_last` except we know that it always returns Some /// Like slice's `split_last` except we know that it always returns Some
fn split_last_seg(&self) -> (Tok<String>, &[Tok<String>]) { fn split_last_seg(&self) -> (IStr, &[IStr]) {
let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty"); let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty");
(foot.clone(), torso) (foot.clone(), torso)
} }
/// Get the first element /// Get the first element
fn first_seg(&self) -> Tok<String> { self.split_first_seg().0 } fn first_seg(&self) -> IStr { self.split_first_seg().0 }
/// Get the last element /// Get the last element
fn last_seg(&self) -> Tok<String> { self.split_last_seg().0 } fn last_seg(&self) -> IStr { self.split_last_seg().0 }
} }
impl NameLike for Sym {} impl NameLike for Sym {}
@@ -311,11 +290,11 @@ impl NameLike for VName {}
/// cloning the token. /// cloning the token.
#[macro_export] #[macro_export]
macro_rules! sym { macro_rules! sym {
($seg1:tt $( :: $seg:tt)* ; $i:expr) => { ($seg1:tt $( :: $seg:tt)*) => {
$crate::name::Sym::from_tok( $crate::name::Sym::from_tok(
$i.i(&[ $crate::interner::iv(&[
$i.i(stringify!($seg1)).await $crate::interner::is(stringify!($seg1)).await
$( , $i.i(stringify!($seg)).await )* $( , $crate::interner::is(stringify!($seg)).await )*
]) ])
.await .await
).unwrap() ).unwrap()
@@ -327,10 +306,10 @@ macro_rules! sym {
/// The components are interned much like in [sym]. /// The components are interned much like in [sym].
#[macro_export] #[macro_export]
macro_rules! vname { macro_rules! vname {
($seg1:tt $( :: $seg:tt)* ; $i:expr) => { ($seg1:tt $( :: $seg:tt)*) => {
$crate::name::VName::new([ $crate::name::VName::new([
$i.i(stringify!($seg1)).await $crate::interner::is(stringify!($seg1)).await
$( , $i.i(stringify!($seg)).await )* $( , $crate::interner::is(stringify!($seg)).await )*
]).unwrap() ]).unwrap()
}; };
} }
@@ -340,10 +319,10 @@ macro_rules! vname {
/// The components are interned much like in [sym]. /// The components are interned much like in [sym].
#[macro_export] #[macro_export]
macro_rules! vpath { macro_rules! vpath {
($seg1:tt $( :: $seg:tt)+ ; $i:expr) => { ($seg1:tt $( :: $seg:tt)*) => {
$crate::name::VPath(vec![ $crate::name::VPath(vec![
$i.i(stringify!($seg1)).await $crate::interner::is(stringify!($seg1)).await
$( , $i.i(stringify!($seg)).await )+ $( , $crate::interner::is(stringify!($seg)).await )*
]) ])
}; };
() => { () => {
@@ -352,42 +331,33 @@ macro_rules! vpath {
} }
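With the interner moved into task-local context, these macros no longer take an interner handle. A minimal usage sketch, assuming an async task with the interner context installed and an orchid_base crate path; the names themselves are illustrative:

use orchid_base::name::{Sym, VName, VPath};
use orchid_base::{sym, vname, vpath};

// Each macro awaits the task-local interner internally, so this only works in async code.
async fn well_known_names() -> (Sym, VName, VPath) {
  (sym!(std::proc::main), vname!(std::prelude), vpath!(std::io))
}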
#[cfg(test)] #[cfg(test)]
mod test { pub mod test {
use std::borrow::Borrow; use std::borrow::Borrow;
use test_executors::spin_on;
use super::{NameLike, Sym, VName}; use super::{NameLike, Sym, VName};
use crate::interner::{Interner, Tok}; use crate::interner::{IStr, is};
use crate::name::VPath; use crate::name::VPath;
#[test] pub async fn recur() {
fn recur() { let myname = vname!(foo::bar);
spin_on(async { let _borrowed_slice: &[IStr] = myname.borrow();
let i = Interner::new_master(); let _deref_pathslice: &[IStr] = &myname;
let myname = vname!(foo::bar; i); let _as_slice_out: &[IStr] = myname.as_slice();
let _borrowed_slice: &[Tok<String>] = myname.borrow();
let _deref_pathslice: &[Tok<String>] = &myname;
let _as_slice_out: &[Tok<String>] = myname.as_slice();
})
} }
#[test] /// Tests that literals are correctly interned as equal
fn literals() { pub async fn literals() {
spin_on(async {
let i = Interner::new_master();
assert_eq!( assert_eq!(
sym!(foo::bar::baz; i), sym!(foo::bar::baz),
Sym::new([i.i("foo").await, i.i("bar").await, i.i("baz").await], &i).await.unwrap() Sym::new([is("foo").await, is("bar").await, is("baz").await]).await.unwrap()
); );
assert_eq!( assert_eq!(
vname!(foo::bar::baz; i), vname!(foo::bar::baz),
VName::new([i.i("foo").await, i.i("bar").await, i.i("baz").await]).unwrap() VName::new([is("foo").await, is("bar").await, is("baz").await]).unwrap()
); );
assert_eq!( assert_eq!(
vpath!(foo::bar::baz; i), vpath!(foo::bar::baz),
VPath::new([i.i("foo").await, i.i("bar").await, i.i("baz").await]) VPath::new([is("foo").await, is("bar").await, is("baz").await])
); );
})
} }
} }

View File

@@ -4,7 +4,7 @@ use std::ops::Range;
use ordered_float::NotNan; use ordered_float::NotNan;
use crate::error::{OrcErrv, mk_errv}; use crate::error::{OrcErrv, mk_errv};
use crate::interner::Interner; use crate::interner::is;
use crate::location::SrcRange; use crate::location::SrcRange;
use crate::name::Sym; use crate::name::Sym;
@@ -55,14 +55,9 @@ pub struct NumError {
pub kind: NumErrorKind, pub kind: NumErrorKind,
} }
pub async fn num_to_errv( pub async fn num_to_errv(NumError { kind, range }: NumError, offset: u32, source: &Sym) -> OrcErrv {
NumError { kind, range }: NumError,
offset: u32,
source: &Sym,
i: &Interner,
) -> OrcErrv {
mk_errv( mk_errv(
i.i("Failed to parse number").await, is("Failed to parse number").await,
match kind { match kind {
NumErrorKind::NaN => "NaN emerged during parsing", NumErrorKind::NaN => "NaN emerged during parsing",
NumErrorKind::InvalidDigit => "non-digit character encountered", NumErrorKind::InvalidDigit => "non-digit character encountered",

View File

@@ -7,28 +7,13 @@ use futures::future::join_all;
use itertools::Itertools; use itertools::Itertools;
use crate::api; use crate::api;
use crate::error::{OrcErrv, OrcRes, Reporter, mk_errv}; use crate::error::{OrcErrv, OrcRes, mk_errv, report};
use crate::format::{FmtCtx, FmtUnit, Format, fmt}; use crate::format::{FmtCtx, FmtUnit, Format, fmt};
use crate::interner::{Interner, Tok}; use crate::interner::{IStr, es, is};
use crate::location::SrcRange; use crate::location::SrcRange;
use crate::name::{Sym, VName, VPath}; use crate::name::{Sym, VName, VPath};
use crate::tree::{ExprRepr, ExtraTok, Paren, TokTree, Token, ttv_fmt, ttv_range}; use crate::tree::{ExprRepr, ExtraTok, Paren, TokTree, Token, ttv_fmt, ttv_range};
pub trait ParseCtx {
#[must_use]
fn i(&self) -> &Interner;
#[must_use]
fn rep(&self) -> &Reporter;
}
pub struct ParseCtxImpl<'a> {
pub i: &'a Interner,
pub r: &'a Reporter,
}
impl ParseCtx for ParseCtxImpl<'_> {
fn i(&self) -> &Interner { self.i }
fn rep(&self) -> &Reporter { self.r }
}
pub fn name_start(c: char) -> bool { c.is_alphabetic() || c == '_' } pub fn name_start(c: char) -> bool { c.is_alphabetic() || c == '_' }
pub fn name_char(c: char) -> bool { name_start(c) || c.is_numeric() } pub fn name_char(c: char) -> bool { name_start(c) || c.is_numeric() }
pub fn op_char(c: char) -> bool { !name_char(c) && !c.is_whitespace() && !"()[]{}\\".contains(c) } pub fn op_char(c: char) -> bool { !name_char(c) && !c.is_whitespace() && !"()[]{}\\".contains(c) }
@@ -103,22 +88,22 @@ impl<A: ExprRepr, X: ExtraTok> Format for Snippet<'_, A, X> {
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct Comment { pub struct Comment {
pub text: Tok<String>, pub text: IStr,
pub sr: SrcRange, pub sr: SrcRange,
} }
impl Comment { impl Comment {
// XXX: which of these four are actually used? // XXX: which of these four are actually used?
pub async fn from_api(c: &api::Comment, src: Sym, i: &Interner) -> Self { pub async fn from_api(c: &api::Comment, src: Sym) -> Self {
Self { text: i.ex(c.text).await, sr: SrcRange::new(c.range.clone(), &src) } Self { text: es(c.text).await, sr: SrcRange::new(c.range.clone(), &src) }
} }
pub async fn from_tk(tk: &TokTree<impl ExprRepr, impl ExtraTok>, i: &Interner) -> Option<Self> { pub async fn from_tk(tk: &TokTree<impl ExprRepr, impl ExtraTok>) -> Option<Self> {
match &tk.tok { match &tk.tok {
Token::Comment(text) => Some(Self { text: i.i(&**text).await, sr: tk.sr.clone() }), Token::Comment(text) => Some(Self { text: text.clone(), sr: tk.sr.clone() }),
_ => None, _ => None,
} }
} }
pub fn to_tk<R: ExprRepr, X: ExtraTok>(&self) -> TokTree<R, X> { pub fn to_tk<R: ExprRepr, X: ExtraTok>(&self) -> TokTree<R, X> {
TokTree { tok: Token::Comment(self.text.rc().clone()), sr: self.sr.clone() } TokTree { tok: Token::Comment(self.text.clone()), sr: self.sr.clone() }
} }
pub fn to_api(&self) -> api::Comment { pub fn to_api(&self) -> api::Comment {
api::Comment { range: self.sr.range(), text: self.text.to_api() } api::Comment { range: self.sr.range(), text: self.text.to_api() }
@@ -130,7 +115,6 @@ impl fmt::Display for Comment {
} }
pub async fn line_items<'a, A: ExprRepr, X: ExtraTok>( pub async fn line_items<'a, A: ExprRepr, X: ExtraTok>(
ctx: &impl ParseCtx,
snip: Snippet<'a, A, X>, snip: Snippet<'a, A, X>,
) -> Vec<Parsed<'a, Vec<Comment>, A, X>> { ) -> Vec<Parsed<'a, Vec<Comment>, A, X>> {
let mut items = Vec::new(); let mut items = Vec::new();
@@ -145,9 +129,10 @@ pub async fn line_items<'a, A: ExprRepr, X: ExtraTok>(
None => comments.extend(line.cur), None => comments.extend(line.cur),
Some(i) => { Some(i) => {
let (cmts, tail) = line.split_at(i); let (cmts, tail) = line.split_at(i);
let comments = join_all(comments.drain(..).chain(cmts.cur).map(|t| async { let comments = join_all(
Comment::from_tk(t, ctx.i()).await.expect("All are comments checked above") (comments.drain(..).chain(cmts.cur))
})) .map(|t| async { Comment::from_tk(t).await.expect("All are comments checked above") }),
)
.await; .await;
items.push(Parsed { output: comments, tail }); items.push(Parsed { output: comments, tail });
}, },
@@ -157,26 +142,21 @@ pub async fn line_items<'a, A: ExprRepr, X: ExtraTok>(
} }
pub async fn try_pop_no_fluff<'a, A: ExprRepr, X: ExtraTok>( pub async fn try_pop_no_fluff<'a, A: ExprRepr, X: ExtraTok>(
ctx: &impl ParseCtx,
snip: Snippet<'a, A, X>, snip: Snippet<'a, A, X>,
) -> ParseRes<'a, &'a TokTree<A, X>, A, X> { ) -> ParseRes<'a, &'a TokTree<A, X>, A, X> {
match snip.skip_fluff().pop_front() { match snip.skip_fluff().pop_front() {
Some((output, tail)) => Ok(Parsed { output, tail }), Some((output, tail)) => Ok(Parsed { output, tail }),
None => Err(mk_errv( None =>
ctx.i().i("Unexpected end").await, Err(mk_errv(is("Unexpected end").await, "Line ends abruptly; more tokens were expected", [
"Line ends abruptly; more tokens were expected", snip.sr(),
[snip.sr()], ])),
)),
} }
} }
pub async fn expect_end( pub async fn expect_end(snip: Snippet<'_, impl ExprRepr, impl ExtraTok>) -> OrcRes<()> {
ctx: &impl ParseCtx,
snip: Snippet<'_, impl ExprRepr, impl ExtraTok>,
) -> OrcRes<()> {
match snip.skip_fluff().get(0) { match snip.skip_fluff().get(0) {
Some(surplus) => Err(mk_errv( Some(surplus) => Err(mk_errv(
ctx.i().i("Extra code after end of line").await, is("Extra code after end of line").await,
"Code found after the end of the line", "Code found after the end of the line",
[surplus.sr.pos()], [surplus.sr.pos()],
)), )),
@@ -185,28 +165,26 @@ pub async fn expect_end(
} }
pub async fn expect_tok<'a, A: ExprRepr, X: ExtraTok>( pub async fn expect_tok<'a, A: ExprRepr, X: ExtraTok>(
ctx: &impl ParseCtx,
snip: Snippet<'a, A, X>, snip: Snippet<'a, A, X>,
tok: Tok<String>, tok: IStr,
) -> ParseRes<'a, (), A, X> { ) -> ParseRes<'a, (), A, X> {
let Parsed { output: head, tail } = try_pop_no_fluff(ctx, snip).await?; let Parsed { output: head, tail } = try_pop_no_fluff(snip).await?;
match &head.tok { match &head.tok {
Token::Name(n) if *n == tok => Ok(Parsed { output: (), tail }), Token::Name(n) if *n == tok => Ok(Parsed { output: (), tail }),
t => Err(mk_errv( t => Err(mk_errv(
ctx.i().i("Expected specific keyword").await, is("Expected specific keyword").await,
format!("Expected {tok} but found {:?}", fmt(t, ctx.i()).await), format!("Expected {tok} but found {:?}", fmt(t).await),
[head.sr()], [head.sr()],
)), )),
} }
} }
pub async fn token_errv<A: ExprRepr, X: ExtraTok>( pub async fn token_errv<A: ExprRepr, X: ExtraTok>(
ctx: &impl ParseCtx,
tok: &TokTree<A, X>, tok: &TokTree<A, X>,
description: &'static str, description: &'static str,
message: impl FnOnce(&str) -> String, message: impl FnOnce(&str) -> String,
) -> OrcErrv { ) -> OrcErrv {
mk_errv(ctx.i().i(description).await, message(&fmt(tok, ctx.i()).await), [tok.sr.pos()]) mk_errv(is(description).await, message(&fmt(tok).await), [tok.sr.pos()])
} }
pub struct Parsed<'a, T, H: ExprRepr, X: ExtraTok> { pub struct Parsed<'a, T, H: ExprRepr, X: ExtraTok> {
@@ -217,33 +195,27 @@ pub struct Parsed<'a, T, H: ExprRepr, X: ExtraTok> {
pub type ParseRes<'a, T, H, X> = OrcRes<Parsed<'a, T, H, X>>; pub type ParseRes<'a, T, H, X> = OrcRes<Parsed<'a, T, H, X>>;
pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>( pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
ctx: &impl ParseCtx,
tail: Snippet<'a, A, X>, tail: Snippet<'a, A, X>,
) -> ParseRes<'a, Vec<Import>, A, X> { ) -> ParseRes<'a, Vec<Import>, A, X> {
let Some((tt, tail)) = tail.skip_fluff().pop_front() else { let Some((tt, tail)) = tail.skip_fluff().pop_front() else {
return Err(mk_errv( return Err(mk_errv(
ctx.i().i("Expected token").await, is("Expected token").await,
"Expected a name, a parenthesized list of names, or a globstar.", "Expected a name, a parenthesized list of names, or a globstar.",
[tail.sr().pos()], [tail.sr().pos()],
)); ));
}; };
let ret = rec(tt, ctx).await; let ret = rec(tt).await;
#[allow(clippy::type_complexity)] // it's an internal function #[allow(clippy::type_complexity)] // it's an internal function
pub async fn rec<A: ExprRepr, X: ExtraTok>( pub async fn rec<A: ExprRepr, X: ExtraTok>(
tt: &TokTree<A, X>, tt: &TokTree<A, X>,
ctx: &impl ParseCtx, ) -> OrcRes<Vec<(Vec<IStr>, Option<IStr>, SrcRange)>> {
) -> OrcRes<Vec<(Vec<Tok<String>>, Option<Tok<String>>, SrcRange)>> {
let ttpos = tt.sr.pos(); let ttpos = tt.sr.pos();
match &tt.tok { match &tt.tok {
Token::NS(ns, body) => { Token::NS(ns, body) => {
if !ns.starts_with(name_start) { if !ns.starts_with(name_start) {
ctx.rep().report(mk_errv( report(mk_errv(is("Unexpected name prefix").await, "Only names can precede ::", [ttpos]))
ctx.i().i("Unexpected name prefix").await,
"Only names can precede ::",
[ttpos],
))
}; };
let out = Box::pin(rec(body, ctx)).await?; let out = Box::pin(rec(body)).await?;
Ok(out.into_iter().update(|i| i.0.push(ns.clone())).collect_vec()) Ok(out.into_iter().update(|i| i.0.push(ns.clone())).collect_vec())
}, },
Token::Name(ntok) => { Token::Name(ntok) => {
@@ -255,21 +227,19 @@ pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
let mut o = Vec::new(); let mut o = Vec::new();
let mut body = Snippet::new(tt, b); let mut body = Snippet::new(tt, b);
while let Some((output, tail)) = body.pop_front() { while let Some((output, tail)) = body.pop_front() {
match rec(output, ctx).boxed_local().await { match rec(output).boxed_local().await {
Ok(names) => o.extend(names), Ok(names) => o.extend(names),
Err(e) => ctx.rep().report(e), Err(e) => report(e),
} }
body = tail; body = tail;
} }
Ok(o) Ok(o)
}, },
t => { t => Err(mk_errv(
return Err(mk_errv( is("Unrecognized name end").await,
ctx.i().i("Unrecognized name end").await, format!("Names cannot end with {:?} tokens", fmt(t).await),
format!("Names cannot end with {:?} tokens", fmt(t, ctx.i()).await),
[ttpos], [ttpos],
)); )),
},
} }
} }
ret.map(|output| { ret.map(|output| {
@@ -285,7 +255,7 @@ pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Import { pub struct Import {
pub path: VPath, pub path: VPath,
pub name: Option<Tok<String>>, pub name: Option<IStr>,
pub sr: SrcRange, pub sr: SrcRange,
} }
impl Import { impl Import {
@@ -296,14 +266,14 @@ impl Import {
None => self.path.into_name().expect("Import cannot be empty"), None => self.path.into_name().expect("Import cannot be empty"),
} }
} }
pub fn new(sr: SrcRange, path: VPath, name: Tok<String>) -> Self { pub fn new(sr: SrcRange, path: VPath, name: IStr) -> Self {
Import { path, name: Some(name), sr } Import { path, name: Some(name), sr }
} }
pub fn new_glob(sr: SrcRange, path: VPath) -> Self { Import { path, name: None, sr } } pub fn new_glob(sr: SrcRange, path: VPath) -> Self { Import { path, name: None, sr } }
} }
impl Display for Import { impl Display for Import {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}::{}", self.path.iter().join("::"), self.name.as_ref().map_or("*", |t| t.as_str())) write!(f, "{}::{}", self.path.iter().join("::"), self.name.as_ref().map_or("*", |t| &**t))
} }
} }

View File

@@ -1,357 +1,573 @@
use std::cell::RefCell; use std::cell::RefCell;
use std::future::Future;
use std::marker::PhantomData; use std::marker::PhantomData;
use std::mem; use std::pin::{Pin, pin};
use std::ops::{BitAnd, Deref}; use std::rc::Rc;
use std::pin::Pin; use std::{io, mem};
use std::sync::Arc;
use std::thread::panicking;
use async_fn_stream::try_stream;
use bound::Bound;
use derive_destructure::destructure; use derive_destructure::destructure;
use dyn_clone::{DynClone, clone_box}; use futures::channel::mpsc::{self, Receiver, Sender, channel};
use futures::channel::mpsc; use futures::channel::oneshot;
use futures::future::LocalBoxFuture; use futures::future::LocalBoxFuture;
use futures::lock::Mutex; use futures::lock::{Mutex, MutexGuard};
use futures::{SinkExt, StreamExt}; use futures::{
AsyncRead, AsyncWrite, AsyncWriteExt, FutureExt, SinkExt, Stream, StreamExt, stream_select,
};
use hashbrown::HashMap; use hashbrown::HashMap;
use orchid_api_traits::{Channel, Coding, Decode, Encode, MsgSet, Request, enc_vec}; use orchid_api_traits::{Decode, Encode, Request, UnderRoot};
use trait_set::trait_set;
use crate::clone; use crate::localset::LocalSet;
use crate::logging::Logger;
#[must_use = "Receipts indicate that a required action has been performed within a function. \
Most likely this should be returned somewhere."]
pub struct Receipt<'a>(PhantomData<&'a mut ()>); pub struct Receipt<'a>(PhantomData<&'a mut ()>);
impl Receipt<'_> {
trait_set! { /// Only call this function from a custom implementation of [RepWriter]
pub trait SendFn<T: MsgSet> = pub fn _new() -> Self { Self(PhantomData) }
for<'a> FnMut(&'a [u8], ReqNot<T>) -> LocalBoxFuture<'a, ()>
+ DynClone + 'static;
pub trait ReqFn<T: MsgSet> =
for<'a> FnMut(RequestHandle<'a, T>, <T::In as Channel>::Req)
-> LocalBoxFuture<'a, Receipt<'a>>
+ DynClone + 'static;
pub trait NotifFn<T: MsgSet> =
FnMut(<T::In as Channel>::Notif, ReqNot<T>) -> LocalBoxFuture<'static, ()>
+ DynClone + 'static;
} }
fn get_id(message: &[u8]) -> (u64, &[u8]) { /// Write guard to outbound for the purpose of serializing a request. Only one
(u64::from_be_bytes(message[..8].to_vec().try_into().unwrap()), &message[8..]) /// can exist at a time. Dropping this object should panic.
pub trait ReqWriter<'a> {
/// Access to the underlying channel. This may be buffered.
fn writer(&mut self) -> Pin<&mut dyn AsyncWrite>;
/// Finalize the request, release the outbound channel, then queue for the
/// reply on the inbound channel.
fn send(self: Box<Self>) -> LocalBoxFuture<'a, io::Result<Box<dyn RepReader<'a> + 'a>>>;
} }
pub trait ReqHandlish { /// Write guard to inbound for the purpose of deserializing a reply. While held,
fn defer(&self, cb: impl Future<Output = ()> + 'static) /// no inbound requests or other replies can be processed.
where Self: Sized { ///
self.defer_objsafe(Box::pin(cb)); /// Dropping this object should panic even if [RepReader::finish] returns
} /// synchronously, because the API isn't cancellation safe in general, so it is a
fn defer_objsafe(&self, val: Pin<Box<dyn Future<Output = ()>>>); /// programmer error in all cases to drop an object related to it without proper
} /// cleanup.
impl ReqHandlish for &'_ dyn ReqHandlish { pub trait RepReader<'a> {
fn defer_objsafe(&self, val: Pin<Box<dyn Future<Output = ()>>>) { (**self).defer_objsafe(val) } /// Access to the underlying channel. The length of the message is inferred
/// from the number of bytes read so this must not be buffered.
fn reader(&mut self) -> Pin<&mut dyn AsyncRead>;
/// Finish reading the request
fn finish(self: Box<Self>) -> LocalBoxFuture<'a, ()>;
} }
type LocalAsyncFnOnceBox = Box<dyn FnOnce(Vec<u8>) -> LocalBoxFuture<'static, ()>>; /// Write guard to outbound for the purpose of serializing a notification.
///
/// Dropping this object should panic for the same reason [RepReader] panics
pub trait MsgWriter<'a> {
/// Access to the underlying channel. This may be buffered.
fn writer(&mut self) -> Pin<&mut dyn AsyncWrite>;
/// Send the notification
fn finish(self: Box<Self>) -> LocalBoxFuture<'a, io::Result<()>>;
}
#[derive(destructure)] /// For initiating outbound requests and notifications
pub struct RequestHandle<'a, MS: MsgSet> { pub trait Client {
defer: RefCell<Vec<Pin<Box<dyn Future<Output = ()>>>>>, fn start_request(&self) -> LocalBoxFuture<'_, io::Result<Box<dyn ReqWriter<'_> + '_>>>;
_reqlt: PhantomData<&'a mut ()>, fn start_notif(&self) -> LocalBoxFuture<'_, io::Result<Box<dyn MsgWriter<'_> + '_>>>;
parent: ReqNot<MS>,
raw_reply: RefCell<Option<LocalAsyncFnOnceBox>>,
} }
impl<'a, MS: MsgSet + 'static> RequestHandle<'a, MS> {
pub fn new(parent: ReqNot<MS>, raw_reply: impl AsyncFnOnce(Vec<u8>) + 'static) -> Self { impl<T: Client + ?Sized> ClientExt for T {}
Self { /// Extension trait with convenience methods that handle outbound request and
defer: RefCell::default(), /// notif lifecycle and typing
_reqlt: PhantomData, #[allow(async_fn_in_trait)]
parent, pub trait ClientExt: Client {
raw_reply: RefCell::new(Some(Box::new(|v| Box::pin(raw_reply(v))))), async fn request<T: Request + UnderRoot<Root: Encode>>(&self, t: T) -> io::Result<T::Response> {
} let mut req = self.start_request().await?;
} t.into_root().encode(req.writer().as_mut()).await?;
pub fn reqnot(&self) -> ReqNot<MS> { self.parent.clone() } let mut rep = req.send().await?;
pub async fn handle<U: Request>(&self, _: &U, rep: &U::Response) -> Receipt<'a> { let response = T::Response::decode(rep.reader()).await;
self.respond(rep).await rep.finish().await;
} response
pub fn will_handle_as<U: Request>(&self, _: &U) -> ReqTypToken<U> { ReqTypToken(PhantomData) }
pub async fn handle_as<U: Request>(&self, _: ReqTypToken<U>, rep: &U::Response) -> Receipt<'a> {
self.respond(rep).await
}
pub async fn respond(&self, response: &impl Encode) -> Receipt<'a> {
let replier = self.raw_reply.borrow_mut().take().expect("Already responded to request");
let buf = enc_vec(response).await;
(replier)(buf).await;
let deferred = mem::take(&mut *self.defer.borrow_mut());
for item in deferred {
item.await
}
Receipt(PhantomData)
}
}
impl<MS: MsgSet> ReqHandlish for RequestHandle<'_, MS> {
fn defer_objsafe(&self, val: Pin<Box<dyn Future<Output = ()>>>) {
self.defer.borrow_mut().push(val)
}
}
impl<MS: MsgSet> Drop for RequestHandle<'_, MS> {
fn drop(&mut self) {
if !panicking() {
debug_assert!(self.raw_reply.borrow().is_none(), "Request dropped without response")
} }
async fn notify<T: UnderRoot<Root: Encode>>(&self, t: T) -> io::Result<()> {
let mut notif = self.start_notif().await?;
t.into_root().encode(notif.writer().as_mut()).await?;
notif.finish().await?;
Ok(())
} }
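A minimal sketch of the calling side; the Ping message and the orchid_base paths are assumptions for illustration, declared the same way as the test types further down:

use std::io;

use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request;
use orchid_base::reqnot::ClientExt;

// Hypothetical request type; real message families live in orchid-api.
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extendable]
struct Ping(u64);
impl Request for Ping {
  type Response = u64;
}

// Round-trips one request; everything about the transport stays behind the Client trait.
async fn ping(client: &impl ClientExt) -> io::Result<u64> {
  client.request(Ping(9)).await
}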
} }
pub struct ReqTypToken<T>(PhantomData<T>); pub trait ReqReader<'a> {
fn reader(&mut self) -> Pin<&mut dyn AsyncRead>;
pub struct ReqNotData<T: MsgSet> { fn finish(self: Box<Self>) -> LocalBoxFuture<'a, Box<dyn ReqHandle<'a> + 'a>>;
id: u64,
send: Box<dyn SendFn<T>>,
notif: Box<dyn NotifFn<T>>,
req: Box<dyn ReqFn<T>>,
responses: HashMap<u64, mpsc::Sender<Vec<u8>>>,
} }
impl<'a, T: ReqReader<'a> + ?Sized> ReqReaderExt<'a> for T {}
/// Wraps a raw message buffer to save on copying. #[allow(async_fn_in_trait)]
/// Dereferences to the tail of the message buffer, cutting off the ID pub trait ReqReaderExt<'a>: ReqReader<'a> {
#[derive(Debug, Clone)] async fn read_req<R: Decode>(&mut self) -> io::Result<R> { R::decode(self.reader()).await }
pub struct RawReply(Vec<u8>); async fn reply<R: Request>(
impl Deref for RawReply { self: Box<Self>,
type Target = [u8]; req: impl Evidence<R>,
fn deref(&self) -> &Self::Target { get_id(&self.0[..]).1 } rep: &R::Response,
} ) -> io::Result<Receipt<'a>> {
self.finish().await.reply(req, rep).await
pub struct ReqNot<T: MsgSet>(Arc<Mutex<ReqNotData<T>>>, Logger);
impl<T: MsgSet> ReqNot<T> {
pub fn new(
logger: Logger,
send: impl SendFn<T>,
notif: impl NotifFn<T>,
req: impl ReqFn<T>,
) -> Self {
Self(
Arc::new(Mutex::new(ReqNotData {
id: 1,
send: Box::new(send),
notif: Box::new(notif),
req: Box::new(req),
responses: HashMap::new(),
})),
logger,
)
} }
async fn start_reply(self: Box<Self>) -> io::Result<Box<dyn RepWriter<'a> + 'a>> {
/// Can be called from a polling thread or dispatched in any other way self.finish().await.start_reply().await
pub async fn receive(&self, message: &[u8]) {
let mut g = self.0.lock().await;
let (id, payload) = get_id(message);
if id == 0 {
let mut notif_cb = clone_box(&*g.notif);
mem::drop(g);
let notif_val = <T::In as Channel>::Notif::decode(Pin::new(&mut &payload[..])).await;
notif_cb(notif_val, self.clone()).await
} else if 0 < id.bitand(1 << 63) {
let mut sender = g.responses.remove(&!id).expect("Received response for invalid message");
let _ = sender.send(message.to_vec()).await;
} else {
let message = <T::In as Channel>::Req::decode(Pin::new(&mut &payload[..])).await;
let mut req_cb = clone_box(&*g.req);
mem::drop(g);
let rn = self.clone();
let rn2 = self.clone();
req_cb(
RequestHandle::new(rn, async move |vec| {
let mut buf = (!id).to_be_bytes().to_vec();
buf.extend(vec);
let mut send = clone_box(&*rn2.0.lock().await.send);
(send)(&buf, rn2.clone()).await;
}),
message,
)
.await;
}
}
pub async fn notify<N: Coding + Into<<T::Out as Channel>::Notif>>(&self, notif: N) {
let mut send = clone_box(&*self.0.lock().await.send);
let mut buf = vec![0; 8];
let msg: <T::Out as Channel>::Notif = notif.into();
msg.encode(Pin::new(&mut buf)).await;
send(&buf, self.clone()).await
} }
} }
pub trait DynRequester { pub trait ReqHandle<'a> {
type Transfer; fn start_reply(self: Box<Self>) -> LocalBoxFuture<'a, io::Result<Box<dyn RepWriter<'a> + 'a>>>;
fn logger(&self) -> &Logger;
/// Encode and send a request, then receive the response buffer.
fn raw_request(&self, data: Self::Transfer) -> LocalBoxFuture<'_, RawReply>;
} }
impl<'a, T: ReqHandle<'a> + ?Sized> ReqHandleExt<'a> for T {}
pub struct MappedRequester<'a, T: 'a>(Box<dyn Fn(T) -> LocalBoxFuture<'a, RawReply> + 'a>, Logger); #[allow(async_fn_in_trait)]
impl<'a, T> MappedRequester<'a, T> { pub trait ReqHandleExt<'a>: ReqHandle<'a> {
fn new<U: DynRequester + 'a, F: Fn(T) -> U::Transfer + 'a>( async fn reply<Req: Request>(
req: U, self: Box<Self>,
cb: F, _: impl Evidence<Req>,
logger: Logger, rep: &Req::Response,
) -> Self { ) -> io::Result<Receipt<'a>> {
let req_arc = Arc::new(req); let mut reply = self.start_reply().await?;
let cb_arc = Arc::new(cb); rep.encode(reply.writer()).await?;
MappedRequester( reply.finish().await
Box::new(move |t| {
Box::pin(clone!(req_arc, cb_arc; async move { req_arc.raw_request(cb_arc(t)).await}))
}),
logger,
)
} }
} }
impl<T> DynRequester for MappedRequester<'_, T> { pub trait RepWriter<'a> {
type Transfer = T; fn writer(&mut self) -> Pin<&mut dyn AsyncWrite>;
fn logger(&self) -> &Logger { &self.1 } fn finish(self: Box<Self>) -> LocalBoxFuture<'a, io::Result<Receipt<'a>>>;
fn raw_request(&self, data: Self::Transfer) -> LocalBoxFuture<'_, RawReply> { self.0(data) }
} }
impl<T: MsgSet> DynRequester for ReqNot<T> { pub trait MsgReader<'a> {
type Transfer = <T::Out as Channel>::Req; fn reader(&mut self) -> Pin<&mut dyn AsyncRead>;
fn logger(&self) -> &Logger { &self.1 } fn finish(self: Box<Self>) -> LocalBoxFuture<'a, ()>;
fn raw_request(&self, req: Self::Transfer) -> LocalBoxFuture<'_, RawReply> { }
impl<'a, T: ?Sized + MsgReader<'a>> MsgReaderExt<'a> for T {}
#[allow(async_fn_in_trait)]
pub trait MsgReaderExt<'a>: MsgReader<'a> {
async fn read<N: Decode>(mut self: Box<Self>) -> io::Result<N> {
let n = N::decode(self.reader()).await;
self.finish().await;
n
}
}
/// A form of [Evidence] that doesn't require the value to be kept around
pub struct Witness<T>(PhantomData<T>);
impl<T> Witness<T> {
pub fn of(_: &T) -> Self { Self(PhantomData) }
}
impl<T> Copy for Witness<T> {}
impl<T> Clone for Witness<T> {
fn clone(&self) -> Self { *self }
}
/// A proxy for the type of a value either previously saved into a [Witness] or
/// still available.
pub trait Evidence<T> {}
impl<T> Evidence<T> for &'_ T {}
impl<T> Evidence<T> for Witness<T> {}
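A sketch of what Witness buys a handler: the decoded request can be moved into other code, and the witness still pins down the response type for the reply. The Job type, the execute stub, and the orchid_base paths are illustrative:

use std::io;

use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request;
use orchid_base::reqnot::{Receipt, ReqReader, ReqReaderExt, Witness};

#[derive(Clone, Debug, Coding, Hierarchy)]
#[extendable]
struct Job(u64);
impl Request for Job {
  type Response = u64;
}

// Stand-in for work that takes the request by value.
async fn execute(job: Job) -> u64 { job.0 + 1 }

async fn handle<'a>(mut req: Box<dyn ReqReader<'a> + 'a>) -> io::Result<Receipt<'a>> {
  let job: Job = req.read_req().await?;
  let proof = Witness::of(&job); // remembers the request type without borrowing the value
  let answer = execute(job).await; // `job` is consumed here
  req.reply(proof, &answer).await // `proof` still selects Job::Response for encoding
}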
type IoRef<T> = Pin<Box<T>>;
type IoLock<T> = Rc<Mutex<Pin<Box<T>>>>;
type IoGuard<T> = Bound<MutexGuard<'static, Pin<Box<T>>>, IoLock<T>>;
/// An incoming request. This holds a lock on the ingress channel.
pub struct IoReqReader<'a> {
prefix: &'a [u8],
read: IoGuard<dyn AsyncRead>,
write: &'a Mutex<IoRef<dyn AsyncWrite>>,
}
impl<'a> ReqReader<'a> for IoReqReader<'a> {
fn reader(&mut self) -> Pin<&mut dyn AsyncRead> { self.read.as_mut() }
fn finish(self: Box<Self>) -> LocalBoxFuture<'a, Box<dyn ReqHandle<'a> + 'a>> {
Box::pin(async {
Box::new(IoReqHandle { prefix: self.prefix, write: self.write }) as Box<dyn ReqHandle<'a>>
})
}
}
pub struct IoReqHandle<'a> {
prefix: &'a [u8],
write: &'a Mutex<IoRef<dyn AsyncWrite>>,
}
impl<'a> ReqHandle<'a> for IoReqHandle<'a> {
fn start_reply(self: Box<Self>) -> LocalBoxFuture<'a, io::Result<Box<dyn RepWriter<'a> + 'a>>> {
Box::pin(async move { Box::pin(async move {
let mut g = self.0.lock().await; let mut write = self.write.lock().await;
let id = g.id; write.as_mut().write_all(self.prefix).await?;
g.id += 1; Ok(Box::new(IoRepWriter { write }) as Box<dyn RepWriter<'a>>)
let mut buf = id.to_be_bytes().to_vec(); })
req.encode(Pin::new(&mut buf)).await; }
let (send, mut recv) = mpsc::channel(1); }
g.responses.insert(id, send); pub struct IoRepWriter<'a> {
let mut send = clone_box(&*g.send); write: MutexGuard<'a, IoRef<dyn AsyncWrite>>,
mem::drop(g); }
let rn = self.clone(); impl<'a> RepWriter<'a> for IoRepWriter<'a> {
send(&buf, rn).await; fn writer(&mut self) -> Pin<&mut dyn AsyncWrite> { self.write.as_mut() }
let items = recv.next().await; fn finish(mut self: Box<Self>) -> LocalBoxFuture<'a, io::Result<Receipt<'a>>> {
RawReply(items.unwrap()) Box::pin(async move {
self.writer().flush().await?;
Ok(Receipt(PhantomData))
}) })
} }
} }
pub trait Requester: DynRequester { pub struct IoMsgReader<'a> {
#[must_use = "These types are subject to change with protocol versions. \ _pd: PhantomData<&'a mut ()>,
If you don't want to use the return value, At a minimum, force the type."] read: IoGuard<dyn AsyncRead>,
fn request<R: Request + Into<Self::Transfer>>( }
&self, impl<'a> MsgReader<'a> for IoMsgReader<'a> {
data: R, fn reader(&mut self) -> Pin<&mut dyn AsyncRead> { self.read.as_mut() }
) -> impl Future<Output = R::Response>; fn finish(self: Box<Self>) -> LocalBoxFuture<'static, ()> { Box::pin(async {}) }
fn map<'a, U>(self, cb: impl Fn(U) -> Self::Transfer + 'a) -> MappedRequester<'a, U> }
where Self: Sized + 'a {
let logger = self.logger().clone(); #[derive(Debug)]
MappedRequester::new(self, cb, logger) struct ReplySub {
id: u64,
ack: oneshot::Sender<()>,
cb: oneshot::Sender<IoGuard<dyn AsyncRead>>,
}
struct IoClient {
output: IoLock<dyn AsyncWrite>,
id: Rc<RefCell<u64>>,
subscribe: Rc<Sender<ReplySub>>,
}
impl IoClient {
fn new(output: IoLock<dyn AsyncWrite>) -> (Receiver<ReplySub>, Self) {
let (req, rep) = mpsc::channel(0);
(rep, Self { output, id: Rc::new(RefCell::new(0)), subscribe: Rc::new(req) })
}
async fn lock_out(&self) -> IoGuard<dyn AsyncWrite> {
Bound::async_new(self.output.clone(), async |o| o.lock().await).await
}
}
impl Client for IoClient {
fn start_notif(&self) -> LocalBoxFuture<'_, io::Result<Box<dyn MsgWriter<'_> + '_>>> {
Box::pin(async {
let mut o = self.lock_out().await;
0u64.encode(o.as_mut()).await?;
Ok(Box::new(IoNotifWriter { o }) as Box<dyn MsgWriter>)
})
}
fn start_request(&self) -> LocalBoxFuture<'_, io::Result<Box<dyn ReqWriter<'_> + '_>>> {
Box::pin(async {
let id = {
let mut id_g = self.id.borrow_mut();
*id_g += 1;
*id_g
};
let (cb, reply) = oneshot::channel();
let (ack, got_ack) = oneshot::channel();
self.subscribe.as_ref().clone().send(ReplySub { id, ack, cb }).await.unwrap();
got_ack.await.unwrap();
let mut w = self.lock_out().await;
id.encode(w.as_mut()).await?;
Ok(Box::new(IoReqWriter { reply, w }) as Box<dyn ReqWriter>)
})
} }
} }
impl<This: DynRequester + ?Sized> Requester for This { struct IoReqWriter {
async fn request<R: Request + Into<Self::Transfer>>(&self, data: R) -> R::Response { reply: oneshot::Receiver<IoGuard<dyn AsyncRead>>,
let req = format!("{data:?}"); w: IoGuard<dyn AsyncWrite>,
let rep = R::Response::decode(Pin::new(&mut &self.raw_request(data.into()).await[..])).await; }
let req_str = req.to_string(); impl<'a> ReqWriter<'a> for IoReqWriter {
if !req_str.starts_with("AtomPrint") && !req_str.starts_with("ExtAtomPrint") { fn writer(&mut self) -> Pin<&mut dyn AsyncWrite> { self.w.as_mut() }
writeln!(self.logger(), "Request {req} got response {rep:?}"); fn send(self: Box<Self>) -> LocalBoxFuture<'a, io::Result<Box<dyn RepReader<'a> + 'a>>> {
} Box::pin(async {
rep let Self { reply, mut w } = *self;
w.flush().await?;
mem::drop(w);
let i = reply.await.expect("Client dropped before reply received");
Ok(Box::new(IoRepReader { i }) as Box<dyn RepReader>)
})
} }
} }
impl<T: MsgSet> Clone for ReqNot<T> { struct IoRepReader {
fn clone(&self) -> Self { Self(self.0.clone(), self.1.clone()) } i: IoGuard<dyn AsyncRead>,
}
impl<'a> RepReader<'a> for IoRepReader {
fn reader(&mut self) -> Pin<&mut dyn AsyncRead> { self.i.as_mut() }
fn finish(self: Box<Self>) -> LocalBoxFuture<'static, ()> { Box::pin(async {}) }
}
#[derive(destructure)]
struct IoNotifWriter {
o: IoGuard<dyn AsyncWrite>,
}
impl<'a> MsgWriter<'a> for IoNotifWriter {
fn writer(&mut self) -> Pin<&mut dyn AsyncWrite> { self.o.as_mut() }
fn finish(mut self: Box<Self>) -> LocalBoxFuture<'static, io::Result<()>> {
Box::pin(async move { self.o.flush().await })
}
}
pub struct CommCtx {
exit: Sender<()>,
}
impl CommCtx {
pub async fn exit(self) { self.exit.clone().send(()).await.expect("quit channel dropped"); }
}
/// Establish bidirectional request-notification communication over a duplex
/// channel. The returned [IoClient] can be used for notifications immediately,
/// but requests can only be received while the future is running. The future
/// will only resolve when [CommCtx::exit] is called. The generic type /// will only resolve when [CommCtx::exit] is called. The generic type
/// parameters are associated with the client and serve to ensure with a runtime
/// check that the correct message families are sent in the correct directions
/// across the channel.
pub fn io_comm(
o: Rc<Mutex<Pin<Box<dyn AsyncWrite>>>>,
i: Mutex<Pin<Box<dyn AsyncRead>>>,
) -> (impl Client + 'static, CommCtx, IoCommServer) {
let i = Rc::new(i);
let (onsub, client) = IoClient::new(o.clone());
let (exit, onexit) = channel(1);
(client, CommCtx { exit }, IoCommServer { o, i, onsub, onexit })
}
pub struct IoCommServer {
o: Rc<Mutex<Pin<Box<dyn AsyncWrite>>>>,
i: Rc<Mutex<Pin<Box<dyn AsyncRead>>>>,
onsub: Receiver<ReplySub>,
onexit: Receiver<()>,
}
impl IoCommServer {
pub async fn listen(
self,
notif: impl for<'a> AsyncFn(Box<dyn MsgReader<'a> + 'a>) -> io::Result<()>,
req: impl for<'a> AsyncFn(Box<dyn ReqReader<'a> + 'a>) -> io::Result<Receipt<'a>>,
) -> io::Result<()> {
let Self { o, i, onexit, onsub } = self;
enum Event {
Input(u64, IoGuard<dyn AsyncRead>),
Sub(ReplySub),
Exit,
}
let exiting = RefCell::new(false);
let input_stream = try_stream(async |mut h| {
loop {
let mut g = Bound::async_new(i.clone(), async |i| i.lock().await).await;
match u64::decode(g.as_mut()).await {
Ok(id) => h.emit(Event::Input(id, g)).await,
Err(e)
if matches!(
e.kind(),
io::ErrorKind::BrokenPipe
| io::ErrorKind::ConnectionAborted
| io::ErrorKind::UnexpectedEof
) =>
h.emit(Event::Exit).await,
Err(e) => return Err(e),
}
}
});
let (mut add_pending_req, fork_future) = LocalSet::new();
let mut fork_stream = pin!(fork_future.fuse().into_stream());
let mut pending_replies = HashMap::new();
'body: {
let mut shared = pin!(stream_select!(
pin!(input_stream) as Pin<&mut dyn Stream<Item = io::Result<Event>>>,
onsub.map(|sub| Ok(Event::Sub(sub))),
fork_stream.as_mut().map(|res| {
res.map(|()| panic!("this substream cannot exit while the loop is running"))
}),
onexit.map(|()| Ok(Event::Exit)),
));
while let Some(next) = shared.next().await {
match next {
Err(e) => break 'body Err(e),
Ok(Event::Exit) => {
*exiting.borrow_mut() = true;
let mut out = o.lock().await;
out.as_mut().flush().await?;
out.as_mut().close().await?;
break;
},
Ok(Event::Sub(ReplySub { id, ack, cb })) => {
pending_replies.insert(id, cb);
ack.send(()).unwrap();
},
Ok(Event::Input(0, read)) => {
let notif = &notif;
let notif_job =
async move { notif(Box::new(IoMsgReader { _pd: PhantomData, read })).await };
add_pending_req.send(Box::pin(notif_job)).await.unwrap();
},
// MSB == 0 is a request, !id where MSB == 1 is the corresponding response
Ok(Event::Input(id, read)) if (id & (1 << (u64::BITS - 1))) == 0 => {
let (o, req) = (o.clone(), &req);
let req_job = async move {
let mut prefix = Vec::new();
(!id).encode_vec(&mut prefix);
let _ = req(Box::new(IoReqReader { prefix: &pin!(prefix), read, write: &o })).await;
Ok(())
};
add_pending_req.send(Box::pin(req_job)).await.unwrap();
},
Ok(Event::Input(id, read)) => {
let cb = pending_replies.remove(&!id).expect("Reply to unrecognized request");
cb.send(read).unwrap_or_else(|_| panic!("Failed to send reply"));
},
}
}
Ok(())
}?;
mem::drop(add_pending_req);
while let Some(next) = fork_stream.next().await {
next?
}
Ok(())
}
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use std::cell::RefCell;
use std::rc::Rc; use std::rc::Rc;
use std::sync::Arc;
use futures::FutureExt; use futures::channel::mpsc;
use futures::lock::Mutex; use futures::lock::Mutex;
use orchid_api_derive::Coding; use futures::{SinkExt, StreamExt, join};
use orchid_api_traits::{Channel, Request}; use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request;
use test_executors::spin_on; use test_executors::spin_on;
use unsync_pipe::pipe;
use super::{MsgSet, ReqNot}; use crate::logging::test::TestLogger;
use crate::logging::Logger; use crate::logging::with_logger;
use crate::reqnot::Requester as _; use crate::reqnot::{ClientExt, MsgReaderExt, ReqReaderExt, io_comm};
use crate::{api, clone};
#[derive(Clone, Debug, Coding, PartialEq)] #[derive(Clone, Debug, PartialEq, Coding, Hierarchy)]
pub struct TestReq(u8); #[extendable]
impl Request for TestReq { struct TestNotif(u64);
type Response = u8;
}
pub struct TestChan;
impl Channel for TestChan {
type Notif = u8;
type Req = TestReq;
}
pub struct TestMsgSet;
impl MsgSet for TestMsgSet {
type In = TestChan;
type Out = TestChan;
}
#[test] #[test]
fn notification() { fn notification() {
spin_on(async { let logger = TestLogger::new(async |s| eprint!("{s}"));
let logger = Logger::new(api::LogStrategy::StdErr); spin_on(with_logger(logger, async {
let received = Arc::new(Mutex::new(None)); let (in1, out2) = pipe(1024);
let receiver = ReqNot::<TestMsgSet>::new( let (in2, out1) = pipe(1024);
logger.clone(), let (received, mut on_receive) = mpsc::channel(2);
|_, _| panic!("Should not send anything"), let (_, recv_ctx, recv_srv) =
clone!(received; move |notif, _| clone!(received; async move { io_comm(Rc::new(Mutex::new(Box::pin(in2))), Mutex::new(Box::pin(out2)));
*received.lock().await = Some(notif); let (sender, ..) = io_comm(Rc::new(Mutex::new(Box::pin(in1))), Mutex::new(Box::pin(out1)));
}.boxed_local())), join!(
|_, _| panic!("Not receiving a request"), async {
recv_srv
.listen(
async |notif| {
received.clone().send(notif.read::<TestNotif>().await?).await.unwrap();
Ok(())
},
async |_| panic!("Should receive notif, not request"),
)
.await
.unwrap()
},
async {
sender.notify(TestNotif(3)).await.unwrap();
assert_eq!(on_receive.next().await, Some(TestNotif(3)));
sender.notify(TestNotif(4)).await.unwrap();
assert_eq!(on_receive.next().await, Some(TestNotif(4)));
recv_ctx.exit().await;
}
); );
let sender = ReqNot::<TestMsgSet>::new( }))
logger, }
clone!(receiver; move |d, _| clone!(receiver; Box::pin(async move {
receiver.receive(d).await #[derive(Clone, Debug, Coding, Hierarchy)]
}))), #[extendable]
|_, _| panic!("Should not receive notif"), struct DummyRequest(u64);
|_, _| panic!("Should not receive request"), impl Request for DummyRequest {
); type Response = u64;
sender.notify(3).await;
assert_eq!(*received.lock().await, Some(3));
sender.notify(4).await;
assert_eq!(*received.lock().await, Some(4));
})
} }
#[test] #[test]
fn request() { fn request() {
spin_on(async { let logger = TestLogger::new(async |s| eprint!("{s}"));
let logger = Logger::new(api::LogStrategy::StdErr); spin_on(with_logger(logger, async {
let receiver = Rc::new(Mutex::<Option<ReqNot<TestMsgSet>>>::new(None)); let (in1, out2) = pipe(1024);
let sender = Rc::new(ReqNot::<TestMsgSet>::new( let (in2, out1) = pipe(1024);
logger.clone(), let (_, srv_ctx, srv) =
clone!(receiver; move |d, _| clone!(receiver; Box::pin(async move { io_comm(Rc::new(Mutex::new(Box::pin(in2))), Mutex::new(Box::pin(out2)));
receiver.lock().await.as_ref().unwrap().receive(d).await let (client, client_ctx, client_srv) =
}))), io_comm(Rc::new(Mutex::new(Box::pin(in1))), Mutex::new(Box::pin(out1)));
|_, _| panic!("Should not receive notif"), join!(
|_, _| panic!("Should not receive request"), async {
)); srv
*receiver.lock().await = Some(ReqNot::new( .listen(
logger, async |_| panic!("No notifs expected"),
clone!(sender; move |d, _| clone!(sender; Box::pin(async move { async |mut req| {
sender.receive(d).await let val = req.read_req::<DummyRequest>().await?;
}))), req.reply(&val, &(val.0 + 1)).await
|_, _| panic!("Not receiving notifs"),
|hand, req| {
Box::pin(async move {
assert_eq!(req, TestReq(5));
hand.respond(&6u8).await
})
}, },
)); )
let response = sender.request(TestReq(5)).await; .await
.unwrap()
},
async {
client_srv
.listen(
async |_| panic!("Not expecting ingress notif"),
async |_| panic!("Not expecting ingress req"),
)
.await
.unwrap()
},
async {
let response = client.request(DummyRequest(5)).await.unwrap();
assert_eq!(response, 6); assert_eq!(response, 6);
}) srv_ctx.exit().await;
client_ctx.exit().await;
}
);
}))
}
#[test]
fn exit() {
let logger = TestLogger::new(async |s| eprint!("{s}"));
spin_on(with_logger(logger, async {
let (input1, output1) = pipe(1024);
let (input2, output2) = pipe(1024);
let (reply_client, reply_context, reply_server) =
io_comm(Rc::new(Mutex::new(Box::pin(input1))), Mutex::new(Box::pin(output2)));
let (req_client, req_context, req_server) =
io_comm(Rc::new(Mutex::new(Box::pin(input2))), Mutex::new(Box::pin(output1)));
let reply_context = RefCell::new(Some(reply_context));
let (exit, onexit) = futures::channel::oneshot::channel::<()>();
join!(
async move {
reply_server
.listen(
async |hand| {
let _notif = hand.read::<TestNotif>().await.unwrap();
let context = reply_context.borrow_mut().take().unwrap();
context.exit().await;
Ok(())
},
async |mut hand| {
let req = hand.read_req::<DummyRequest>().await?;
hand.reply(&req, &(req.0 + 1)).await
},
)
.await
.unwrap();
exit.send(()).unwrap();
let _client = reply_client;
},
async move {
req_server
.listen(
async |_| panic!("Only the other server expected notifs"),
async |_| panic!("Only the other server expected requests"),
)
.await
.unwrap();
let _ctx = req_context;
},
async move {
req_client.request(DummyRequest(0)).await.unwrap();
req_client.notify(TestNotif(0)).await.unwrap();
onexit.await.unwrap();
}
)
}));
} }
} }

44
orchid-base/src/stash.rs Normal file
View File

@@ -0,0 +1,44 @@
//! A pattern for running async code from sync destructors and other
//! unfortunately sync callbacks
//!
//! We create a task_local vecdeque which is moved into a thread_local whenever
//! the task is being polled. A call to [stash] pushes the future onto this
//! deque. Before [with_stash] returns, it pops everything from the deque
//! individually and awaits each of them, pushing any additionally stashed
//! futures onto the back of the same deque.
use std::cell::RefCell;
use std::collections::VecDeque;
use std::pin::Pin;
use task_local::task_local;
#[derive(Default)]
struct StashedFutures {
queue: RefCell<VecDeque<Pin<Box<dyn Future<Output = ()>>>>>,
}
task_local! {
static STASHED_FUTURES: StashedFutures;
}
/// Complete the argument future, and any futures spawned from it via [stash].
/// This is useful mostly to guarantee that messaging destructors have run.
pub async fn with_stash<F: Future>(fut: F) -> F::Output {
STASHED_FUTURES
.scope(StashedFutures::default(), async {
let val = fut.await;
while let Some(fut) = STASHED_FUTURES.with(|sf| sf.queue.borrow_mut().pop_front()) {
fut.await;
}
val
})
.await
}
/// Schedule a future to be run before the next [with_stash] guard ends. This is
/// most useful for sending messages from destructors.
pub fn stash<F: Future<Output = ()> + 'static>(fut: F) {
(STASHED_FUTURES.try_with(|sf| sf.queue.borrow_mut().push_back(Box::pin(fut))))
.expect("No stash! Timely completion cannot be guaranteed")
}
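A usage sketch of the pattern above; the Session type and the notify_closed helper are illustrative. A sync Drop impl stashes the async goodbye, and the with_stash wrapper guarantees it completes before the call returns (stash panics if no with_stash scope is active):

use orchid_base::stash::{stash, with_stash};

// Hypothetical handle whose cleanup must send an async message.
struct Session {
  id: u64,
}
impl Drop for Session {
  fn drop(&mut self) {
    let id = self.id;
    // Sync context: enqueue the async work instead of awaiting it.
    stash(async move { notify_closed(id).await });
  }
}

// Stand-in for the actual goodbye message.
async fn notify_closed(_id: u64) {}

async fn run() {
  with_stash(async {
    let _session = Session { id: 1 };
    // dropping `_session` at the end of this block stashes notify_closed
  })
  .await; // the stashed future has been awaited by the time this resolves
}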

View File

@@ -14,7 +14,7 @@ use trait_set::trait_set;
use crate::error::OrcErrv; use crate::error::OrcErrv;
use crate::format::{FmtCtx, FmtUnit, Format, Variants}; use crate::format::{FmtCtx, FmtUnit, Format, Variants};
use crate::interner::{Interner, Tok}; use crate::interner::{IStr, es};
use crate::location::{Pos, SrcRange}; use crate::location::{Pos, SrcRange};
use crate::name::{Sym, VName, VPath}; use crate::name::{Sym, VName, VPath};
use crate::parse::Snippet; use crate::parse::Snippet;
@@ -28,7 +28,6 @@ pub trait TokenVariant<ApiEquiv: Clone + Debug + Coding>: Format + Clone + fmt::
api: &ApiEquiv, api: &ApiEquiv,
ctx: &mut Self::FromApiCtx<'_>, ctx: &mut Self::FromApiCtx<'_>,
pos: SrcRange, pos: SrcRange,
i: &Interner,
) -> impl Future<Output = Self>; ) -> impl Future<Output = Self>;
#[must_use] #[must_use]
fn into_api(self, ctx: &mut Self::ToApiCtx<'_>) -> impl Future<Output = ApiEquiv>; fn into_api(self, ctx: &mut Self::ToApiCtx<'_>) -> impl Future<Output = ApiEquiv>;
@@ -36,7 +35,7 @@ pub trait TokenVariant<ApiEquiv: Clone + Debug + Coding>: Format + Clone + fmt::
impl<T: Clone + Debug + Coding> TokenVariant<T> for Never { impl<T: Clone + Debug + Coding> TokenVariant<T> for Never {
type FromApiCtx<'a> = (); type FromApiCtx<'a> = ();
type ToApiCtx<'a> = (); type ToApiCtx<'a> = ();
async fn from_api(_: &T, _: &mut Self::FromApiCtx<'_>, _: SrcRange, _: &Interner) -> Self { async fn from_api(_: &T, _: &mut Self::FromApiCtx<'_>, _: SrcRange) -> Self {
panic!("Cannot deserialize Never") panic!("Cannot deserialize Never")
} }
async fn into_api(self, _: &mut Self::ToApiCtx<'_>) -> T { match self {} } async fn into_api(self, _: &mut Self::ToApiCtx<'_>) -> T { match self {} }
@@ -108,20 +107,19 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
hctx: &mut H::FromApiCtx<'_>, hctx: &mut H::FromApiCtx<'_>,
xctx: &mut X::FromApiCtx<'_>, xctx: &mut X::FromApiCtx<'_>,
src: &Sym, src: &Sym,
i: &Interner,
) -> Self { ) -> Self {
let pos = SrcRange::new(tt.range.clone(), src); let pos = SrcRange::new(tt.range.clone(), src);
let tok = match_mapping!(&tt.token, api::Token => Token::<H, X> { let tok = match_mapping!(&tt.token, api::Token => Token::<H, X> {
BR, BR,
NS(n => Tok::from_api(*n, i).await, NS(n => es(*n).await,
b => Box::new(Self::from_api(b, hctx, xctx, src, i).boxed_local().await)), b => Box::new(Self::from_api(b, hctx, xctx, src).boxed_local().await)),
Bottom(e => OrcErrv::from_api(e, i).await), Bottom(e => OrcErrv::from_api(e).await),
LambdaHead(arg => Box::new(Self::from_api(arg, hctx, xctx, src, i).boxed_local().await)), LambdaHead(arg => Box::new(Self::from_api(arg, hctx, xctx, src).boxed_local().await)),
Name(n => Tok::from_api(*n, i).await), Name(n => es(*n).await),
S(*par, b => ttv_from_api(b, hctx, xctx, src, i).await), S(*par, b => ttv_from_api(b, hctx, xctx, src).await),
Comment(c.clone()), Comment(c => es(*c).await),
NewExpr(expr => X::from_api(expr, xctx, pos.clone(), i).await), NewExpr(expr => X::from_api(expr, xctx, pos.clone()).await),
Handle(tk => H::from_api(tk, hctx, pos.clone(), i).await) Handle(tk => H::from_api(tk, hctx, pos.clone()).await)
}); });
Self { sr: pos, tok } Self { sr: pos, tok }
} }
@@ -135,7 +133,7 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
BR, BR,
NS(n.to_api(), b => Box::new(b.into_api(hctx, xctx).boxed_local().await)), NS(n.to_api(), b => Box::new(b.into_api(hctx, xctx).boxed_local().await)),
Bottom(e.to_api()), Bottom(e.to_api()),
Comment(c.clone()), Comment(c.to_api()),
LambdaHead(arg => Box::new(arg.into_api(hctx, xctx).boxed_local().await)), LambdaHead(arg => Box::new(arg.into_api(hctx, xctx).boxed_local().await)),
Name(nn.to_api()), Name(nn.to_api()),
S(p, b => ttv_into_api(b, hctx, xctx).boxed_local().await), S(p, b => ttv_into_api(b, hctx, xctx).boxed_local().await),
@@ -145,8 +143,8 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
api::TokenTree { range: self.sr.range.clone(), token } api::TokenTree { range: self.sr.range.clone(), token }
} }
pub fn is_kw(&self, tk: Tok<String>) -> bool { self.tok.is_kw(tk) } pub fn is_kw(&self, tk: IStr) -> bool { self.tok.is_kw(tk) }
pub fn as_name(&self) -> Option<Tok<String>> { pub fn as_name(&self) -> Option<IStr> {
if let Token::Name(n) = &self.tok { Some(n.clone()) } else { None } if let Token::Name(n) = &self.tok { Some(n.clone()) } else { None }
} }
pub fn as_multiname(&self) -> Result<VName, &TokTree<H, X>> { pub fn as_multiname(&self) -> Result<VName, &TokTree<H, X>> {
@@ -193,11 +191,10 @@ pub async fn ttv_from_api<H: ExprRepr, X: ExtraTok>(
hctx: &mut H::FromApiCtx<'_>, hctx: &mut H::FromApiCtx<'_>,
xctx: &mut X::FromApiCtx<'_>, xctx: &mut X::FromApiCtx<'_>,
src: &Sym, src: &Sym,
i: &Interner,
) -> Vec<TokTree<H, X>> { ) -> Vec<TokTree<H, X>> {
stream(async |mut cx| { stream(async |mut cx| {
for tok in tokv { for tok in tokv {
cx.emit(TokTree::<H, X>::from_api(tok.borrow(), hctx, xctx, src, i).boxed_local().await).await cx.emit(TokTree::<H, X>::from_api(tok.borrow(), hctx, xctx, src).boxed_local().await).await
} }
}) })
.collect() .collect()
@@ -240,14 +237,14 @@ pub enum Token<H: ExprRepr, X: ExtraTok> {
/// Information about the code addressed to the human reader or dev tooling /// Information about the code addressed to the human reader or dev tooling
/// It has no effect on the behaviour of the program unless it's explicitly /// It has no effect on the behaviour of the program unless it's explicitly
/// read via reflection /// read via reflection
Comment(Rc<String>), Comment(IStr),
/// The part of a lambda between `\` and `.` enclosing the argument. The body /// The part of a lambda between `\` and `.` enclosing the argument. The body
/// stretches to the end of the enclosing parens or the end of the const line /// stretches to the end of the enclosing parens or the end of the const line
LambdaHead(Box<TokTree<H, X>>), LambdaHead(Box<TokTree<H, X>>),
/// A binding, operator, or a segment of a namespaced::name /// A binding, operator, or a segment of a namespaced::name
Name(Tok<String>), Name(IStr),
/// A namespace prefix, like `my_ns::` followed by a token /// A namespace prefix, like `my_ns::` followed by a token
NS(Tok<String>, Box<TokTree<H, X>>), NS(IStr, Box<TokTree<H, X>>),
/// A line break /// A line break
BR, BR,
/// `()`, `[]`, or `{}` /// `()`, `[]`, or `{}`
@@ -263,7 +260,7 @@ pub enum Token<H: ExprRepr, X: ExtraTok> {
} }
impl<H: ExprRepr, X: ExtraTok> Token<H, X> { impl<H: ExprRepr, X: ExtraTok> Token<H, X> {
pub fn at(self, sr: SrcRange) -> TokTree<H, X> { TokTree { sr, tok: self } } pub fn at(self, sr: SrcRange) -> TokTree<H, X> { TokTree { sr, tok: self } }
pub fn is_kw(&self, tk: Tok<String>) -> bool { matches!(self, Token::Name(n) if *n == tk) } pub fn is_kw(&self, tk: IStr) -> bool { matches!(self, Token::Name(n) if *n == tk) }
pub fn as_s(&self, par: Paren) -> Option<&[TokTree<H, X>]> { pub fn as_s(&self, par: Paren) -> Option<&[TokTree<H, X>]> {
match self { match self {
Self::S(p, b) if *p == par => Some(b), Self::S(p, b) if *p == par => Some(b),

View File

@@ -13,13 +13,13 @@ pub enum Loaded {
Code(Arc<String>), Code(Arc<String>),
/// Conceptually equivalent to the list of *.orc files in a folder, without /// Conceptually equivalent to the list of *.orc files in a folder, without
/// the extension /// the extension
Collection(Arc<Vec<Tok<String>>>), Collection(Arc<Vec<IStr>>),
} }
impl Loaded { impl Loaded {
/// Is the loaded item source code (not a collection)? /// Is the loaded item source code (not a collection)?
pub fn is_code(&self) -> bool { matches!(self, Loaded::Code(_)) } pub fn is_code(&self) -> bool { matches!(self, Loaded::Code(_)) }
/// Collect the elements into a collection /// Collect the elements into a collection
pub fn collection(items: impl IntoIterator<Item = Tok<String>>) -> Self { pub fn collection(items: impl IntoIterator<Item = IStr>) -> Self {
Self::Collection(Arc::new(items.into_iter().collect())) Self::Collection(Arc::new(items.into_iter().collect()))
} }
} }
@@ -55,7 +55,7 @@ impl ErrorSansOrigin for CodeNotFound {
/// formats and other sources for libraries and dependencies. /// formats and other sources for libraries and dependencies.
pub trait VirtFS { pub trait VirtFS {
/// Implementation of [VirtFS::read] /// Implementation of [VirtFS::read]
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult; fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult;
/// Discover information about a path without reading it. /// Discover information about a path without reading it.
/// ///
/// Implement this if your vfs backend can do expensive operations /// Implement this if your vfs backend can do expensive operations
@@ -68,7 +68,7 @@ pub trait VirtFS {
} }
/// Convert a path into a human-readable string that is meaningful in the /// Convert a path into a human-readable string that is meaningful in the
/// target context. /// target context.
fn display(&self, path: &[Tok<String>]) -> Option<String>; fn display(&self, path: &[IStr]) -> Option<String>;
/// Convert the FS handler into a type-erased version of itself for packing in /// Convert the FS handler into a type-erased version of itself for packing in
/// a tree. /// a tree.
fn rc(self) -> Rc<dyn VirtFS> fn rc(self) -> Rc<dyn VirtFS>
@@ -81,15 +81,11 @@ pub trait VirtFS {
} }
impl VirtFS for &dyn VirtFS { impl VirtFS for &dyn VirtFS {
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult { fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult { (*self).get(path, full_path) }
(*self).get(path, full_path) fn display(&self, path: &[IStr]) -> Option<String> { (*self).display(path) }
}
fn display(&self, path: &[Tok<String>]) -> Option<String> { (*self).display(path) }
} }
impl<T: VirtFS + ?Sized> VirtFS for Rc<T> { impl<T: VirtFS + ?Sized> VirtFS for Rc<T> {
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult { fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult { (**self).get(path, full_path) }
(**self).get(path, full_path) fn display(&self, path: &[IStr]) -> Option<String> { (**self).display(path) }
}
fn display(&self, path: &[Tok<String>]) -> Option<String> { (**self).display(path) }
} }

View File

@@ -32,7 +32,7 @@ impl<'a> Combine for &'a dyn VirtFS {
pub type DeclTree = ModEntry<Rc<dyn VirtFS>, (), ()>; pub type DeclTree = ModEntry<Rc<dyn VirtFS>, (), ()>;
impl VirtFS for DeclTree { impl VirtFS for DeclTree {
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult { fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult {
match &self.member { match &self.member {
ModMember::Item(it) => it.get(path, full_path), ModMember::Item(it) => it.get(path, full_path),
ModMember::Sub(module) => match path.split_first() { ModMember::Sub(module) => match path.split_first() {
@@ -44,7 +44,7 @@ impl VirtFS for DeclTree {
} }
} }
fn display(&self, path: &[Tok<String>]) -> Option<String> { fn display(&self, path: &[IStr]) -> Option<String> {
let (head, tail) = path.split_first()?; let (head, tail) = path.split_first()?;
match &self.member { match &self.member {
ModMember::Item(it) => it.display(path), ModMember::Item(it) => it.display(path),
@@ -54,16 +54,16 @@ impl VirtFS for DeclTree {
} }
impl VirtFS for String { impl VirtFS for String {
fn display(&self, _: &[Tok<String>]) -> Option<String> { None } fn display(&self, _: &[IStr]) -> Option<String> { None }
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult { fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult {
(path.is_empty().then(|| Loaded::Code(Arc::new(self.as_str().to_string())))) (path.is_empty().then(|| Loaded::Code(Arc::new(self.as_str().to_string()))))
.ok_or_else(|| CodeNotFound::new(full_path.to_vpath()).pack()) .ok_or_else(|| CodeNotFound::new(full_path.to_vpath()).pack())
} }
} }
impl<'a> VirtFS for &'a str { impl<'a> VirtFS for &'a str {
fn display(&self, _: &[Tok<String>]) -> Option<String> { None } fn display(&self, _: &[IStr]) -> Option<String> { None }
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult { fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult {
(path.is_empty().then(|| Loaded::Code(Arc::new(self.to_string())))) (path.is_empty().then(|| Loaded::Code(Arc::new(self.to_string()))))
.ok_or_else(|| CodeNotFound::new(full_path.to_vpath()).pack()) .ok_or_else(|| CodeNotFound::new(full_path.to_vpath()).pack())
} }

View File

@@ -99,14 +99,14 @@ impl DirNode {
} }
} }
fn mk_pathbuf(&self, path: &[Tok<String>]) -> PathBuf { fn mk_pathbuf(&self, path: &[IStr]) -> PathBuf {
let mut fpath = self.root.clone(); let mut fpath = self.root.clone();
path.iter().for_each(|seg| fpath.push(seg.as_str())); path.iter().for_each(|seg| fpath.push(seg.as_str()));
fpath fpath
} }
} }
impl VirtFS for DirNode { impl VirtFS for DirNode {
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult { fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult {
let fpath = self.mk_pathbuf(path); let fpath = self.mk_pathbuf(path);
let mut binding = self.cached.borrow_mut(); let mut binding = self.cached.borrow_mut();
let (_, res) = (binding.raw_entry_mut().from_key(&fpath)) let (_, res) = (binding.raw_entry_mut().from_key(&fpath))
@@ -114,7 +114,7 @@ impl VirtFS for DirNode {
res.clone() res.clone()
} }
fn display(&self, path: &[Tok<String>]) -> Option<String> { fn display(&self, path: &[IStr]) -> Option<String> {
let pathbuf = self.mk_pathbuf(path).with_extension(self.ext()); let pathbuf = self.mk_pathbuf(path).with_extension(self.ext());
Some(pathbuf.to_string_lossy().to_string()) Some(pathbuf.to_string_lossy().to_string())
} }

View File

@@ -56,7 +56,7 @@ impl EmbeddedFS {
} }
impl VirtFS for EmbeddedFS { impl VirtFS for EmbeddedFS {
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> FSResult { fn get(&self, path: &[IStr], full_path: &PathSlice) -> FSResult {
if path.is_empty() { if path.is_empty() {
return Ok(Loaded::collection(self.tree.keys(|_| true))); return Ok(Loaded::collection(self.tree.keys(|_| true)));
} }
@@ -67,7 +67,7 @@ impl VirtFS for EmbeddedFS {
ModMember::Sub(sub) => Loaded::collection(sub.keys(|_| true)), ModMember::Sub(sub) => Loaded::collection(sub.keys(|_| true)),
}) })
} }
fn display(&self, path: &[Tok<String>]) -> Option<String> { fn display(&self, path: &[IStr]) -> Option<String> {
let Self { gen, suffix, .. } = self; let Self { gen, suffix, .. } = self;
Some(format!("{}{suffix} in {gen}", path.iter().join("/"))) Some(format!("{}{suffix} in {gen}", path.iter().join("/")))
} }

View File

@@ -21,18 +21,18 @@ impl<'a> PrefixFS<'a> {
add: VPath::parse(add.as_ref()), add: VPath::parse(add.as_ref()),
} }
} }
fn proc_path(&self, path: &[Tok<String>]) -> Option<Vec<Tok<String>>> { fn proc_path(&self, path: &[IStr]) -> Option<Vec<IStr>> {
let path = path.strip_prefix(self.remove.as_slice())?; let path = path.strip_prefix(self.remove.as_slice())?;
Some(self.add.0.iter().chain(path).cloned().collect_vec()) Some(self.add.0.iter().chain(path).cloned().collect_vec())
} }
} }
impl<'a> VirtFS for PrefixFS<'a> { impl<'a> VirtFS for PrefixFS<'a> {
fn get(&self, path: &[Tok<String>], full_path: &PathSlice) -> super::FSResult { fn get(&self, path: &[IStr], full_path: &PathSlice) -> super::FSResult {
let path = let path =
self.proc_path(path).ok_or_else(|| CodeNotFound::new(full_path.to_vpath()).pack())?; self.proc_path(path).ok_or_else(|| CodeNotFound::new(full_path.to_vpath()).pack())?;
self.wrapped.get(&path, full_path) self.wrapped.get(&path, full_path)
} }
fn display(&self, path: &[Tok<String>]) -> Option<String> { fn display(&self, path: &[IStr]) -> Option<String> {
self.wrapped.display(&self.proc_path(path)?) self.wrapped.display(&self.proc_path(path)?)
} }
} }

View File

@@ -8,17 +8,18 @@ edition = "2024"
[dependencies] [dependencies]
async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" } async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
async-once-cell = "0.5.4" async-once-cell = "0.5.4"
bound = "0.6.0"
derive_destructure = "1.0.0" derive_destructure = "1.0.0"
dyn-clone = "1.0.20" dyn-clone = "1.0.20"
futures = { version = "0.3.31", features = [ futures = { version = "0.3.31", default-features = false, features = [
"std", "std",
"async-await", "async-await",
], default-features = false } ] }
futures-locks = "0.7.1" futures-locks = "0.7.1"
hashbrown = "0.16.0" hashbrown = "0.16.1"
include_dir = { version = "0.7.4", optional = true } include_dir = { version = "0.7.4", optional = true }
itertools = "0.14.0" itertools = "0.14.0"
konst = "0.4.2" konst = "0.4.3"
lazy_static = "1.5.0" lazy_static = "1.5.0"
memo-map = "0.3.3" memo-map = "0.3.3"
never = "0.1.0" never = "0.1.0"
@@ -27,13 +28,12 @@ orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" } orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-base = { version = "0.1.0", path = "../orchid-base" } orchid-base = { version = "0.1.0", path = "../orchid-base" }
ordered-float = "5.0.0" ordered-float = "5.1.0"
pastey = "0.1.1" pastey = "0.2.1"
some_executor = "0.6.1"
substack = "1.1.1" substack = "1.1.1"
task-local = "0.1.0" task-local = "0.1.0"
tokio = { version = "1.47.1", optional = true, features = [] } tokio = { version = "1.49.0", optional = true, features = [] }
tokio-util = { version = "0.7.16", optional = true, features = ["compat"] } tokio-util = { version = "0.7.17", optional = true, features = ["compat"] }
trait-set = "0.3.0" trait-set = "0.3.0"

View File

@@ -14,20 +14,20 @@ use orchid_api_derive::Coding;
use orchid_api_traits::{Coding, Decode, Encode, Request, enc_vec}; use orchid_api_traits::{Coding, Decode, Encode, Request, enc_vec};
use orchid_base::error::{OrcErrv, OrcRes, mk_errv, mk_errv_floating}; use orchid_base::error::{OrcErrv, OrcRes, mk_errv, mk_errv_floating};
use orchid_base::format::{FmtCtx, FmtUnit, Format, fmt}; use orchid_base::format::{FmtCtx, FmtUnit, Format, fmt};
use orchid_base::interner::is;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::reqnot::Requester;
use trait_set::trait_set; use trait_set::trait_set;
use crate::api; use crate::api;
use crate::context::{ctx, i};
use crate::conv::ToExpr; use crate::conv::ToExpr;
use crate::entrypoint::request;
// use crate::error::{ProjectError, ProjectResult}; // use crate::error::{ProjectError, ProjectResult};
use crate::expr::{Expr, ExprData, ExprHandle, ExprKind}; use crate::expr::{Expr, ExprData, ExprHandle, ExprKind};
use crate::gen_expr::GExpr; use crate::gen_expr::GExpr;
use crate::system::{DynSystemCard, atom_info_for, downcast_atom}; use crate::system::{DynSystemCard, atom_by_idx, atom_info_for, cted, downcast_atom};
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)] #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
pub struct AtomTypeId(pub NonZeroU32); pub struct AtomTypeId(pub NonZeroU32);
pub trait AtomCard: 'static + Sized { pub trait AtomCard: 'static + Sized {
@@ -99,13 +99,13 @@ impl ForeignAtom {
ForeignAtom { atom, expr: handle, pos } ForeignAtom { atom, expr: handle, pos }
} }
pub async fn request<M: AtomMethod>(&self, m: M) -> Option<M::Response> { pub async fn request<M: AtomMethod>(&self, m: M) -> Option<M::Response> {
let rep = (ctx().reqnot().request(api::Fwd( let rep = (request(api::Fwd(
self.atom.clone(), self.atom.clone(),
Sym::parse(M::NAME, &i()).await.unwrap().tok().to_api(), Sym::parse(M::NAME).await.unwrap().tok().to_api(),
enc_vec(&m).await, enc_vec(&m),
))) )))
.await?; .await?;
Some(M::Response::decode(Pin::new(&mut &rep[..])).await) Some(M::Response::decode_slice(&mut &rep[..]))
} }
pub async fn downcast<T: AtomicFeatures>(self) -> Result<TAtom<T>, NotTypAtom> { pub async fn downcast<T: AtomicFeatures>(self) -> Result<TAtom<T>, NotTypAtom> {
TAtom::downcast(self.ex().handle()).await TAtom::downcast(self.ex().handle()).await
@@ -119,7 +119,7 @@ impl fmt::Debug for ForeignAtom {
} }
impl Format for ForeignAtom { impl Format for ForeignAtom {
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
FmtUnit::from_api(&ctx().reqnot().request(api::ExtAtomPrint(self.atom.clone())).await) FmtUnit::from_api(&request(api::ExtAtomPrint(self.atom.clone())).await)
} }
} }
impl ToExpr for ForeignAtom { impl ToExpr for ForeignAtom {
@@ -138,8 +138,8 @@ pub struct NotTypAtom {
impl NotTypAtom { impl NotTypAtom {
pub async fn mk_err(&self) -> OrcErrv { pub async fn mk_err(&self) -> OrcErrv {
mk_errv( mk_errv(
i().i("Not the expected type").await, is("Not the expected type").await,
format!("The expression {} is not a {}", fmt(&self.expr, &i()).await, self.typ.name()), format!("The expression {} is not a {}", fmt(&self.expr).await, self.typ.name()),
[self.pos.clone()], [self.pos.clone()],
) )
} }
@@ -172,7 +172,9 @@ impl<A: AtomCard> MethodSetBuilder<A> {
self.handlers.push(( self.handlers.push((
M::NAME, M::NAME,
Rc::new(move |a: &A, req: Pin<&mut dyn AsyncRead>, rep: Pin<&mut dyn AsyncWrite>| { Rc::new(move |a: &A, req: Pin<&mut dyn AsyncRead>, rep: Pin<&mut dyn AsyncWrite>| {
async { Supports::<M>::handle(a, M::decode(req).await).await.encode(rep).await } async {
Supports::<M>::handle(a, M::decode(req).await.unwrap()).await.encode(rep).await.unwrap()
}
.boxed_local() .boxed_local()
}), }),
)); ));
@@ -182,7 +184,7 @@ impl<A: AtomCard> MethodSetBuilder<A> {
pub async fn pack(&self) -> MethodSet<A> { pub async fn pack(&self) -> MethodSet<A> {
MethodSet { MethodSet {
handlers: stream::iter(self.handlers.iter()) handlers: stream::iter(self.handlers.iter())
.then(async |(k, v)| (Sym::parse(k, &i()).await.unwrap(), v.clone())) .then(async |(k, v)| (Sym::parse(k).await.unwrap(), v.clone()))
.collect() .collect()
.await, .await,
} }
@@ -234,16 +236,15 @@ impl<A: AtomicFeatures> TAtom<A> {
} }
pub async fn request<M: AtomMethod>(&self, req: M) -> M::Response pub async fn request<M: AtomMethod>(&self, req: M) -> M::Response
where A: Supports<M> { where A: Supports<M> {
M::Response::decode(Pin::new( M::Response::decode_slice(
&mut &(ctx().reqnot().request(api::Fwd( &mut &(request(api::Fwd(
self.untyped.atom.clone(), self.untyped.atom.clone(),
Sym::parse(M::NAME, &i()).await.unwrap().tok().to_api(), Sym::parse(M::NAME).await.unwrap().tok().to_api(),
enc_vec(&req).await, enc_vec(&req),
))) )))
.await .await
.unwrap()[..], .unwrap()[..],
)) )
.await
} }
} }
impl<A: AtomicFeatures> Deref for TAtom<A> { impl<A: AtomicFeatures> Deref for TAtom<A> {
@@ -311,9 +312,18 @@ impl Format for AtomFactory {
} }
pub async fn err_not_callable() -> OrcErrv { pub async fn err_not_callable() -> OrcErrv {
mk_errv_floating(i().i("This atom is not callable").await, "Attempted to apply value as function") mk_errv_floating(is("This atom is not callable").await, "Attempted to apply value as function")
} }
pub async fn err_not_command() -> OrcErrv { pub async fn err_not_command() -> OrcErrv {
mk_errv_floating(i().i("This atom is not a command").await, "Settled on an inactionable value") mk_errv_floating(is("This atom is not a command").await, "Settled on an inactionable value")
}
/// Read the type ID prefix from an atom and return its type information along
/// with the rest of the data
pub(crate) fn resolve_atom_type(atom: &api::Atom) -> (Box<dyn AtomDynfo>, AtomTypeId, &[u8]) {
let mut data = &atom.data.0[..];
let tid = AtomTypeId::decode_slice(&mut data);
let atom_record = atom_by_idx(cted().inst().card(), tid).expect("Unrecognized atom type ID");
(atom_record, tid, data)
} }
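
The request path above no longer threads an interner or a messaging handle; Sym::parse and the global request pick them up from the task-local scope installed by the entrypoint. A hedged sketch of a call site, where GetLength is a hypothetical type implementing AtomMethod with a u64 response:

// Sketch only: GetLength is not part of this changeset.
async fn string_length(atom: &ForeignAtom) -> Option<u64> {
  // None means the atom's owner did not recognize the method
  atom.request(GetLength).await
}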

View File

@@ -1,11 +1,12 @@
use std::any::{Any, TypeId, type_name}; use std::any::{Any, TypeId, type_name};
use std::borrow::Cow; use std::borrow::Cow;
use std::cell::RefCell;
use std::future::Future; use std::future::Future;
use std::marker::PhantomData; use std::marker::PhantomData;
use std::num::NonZero; use std::num::NonZero;
use std::ops::Deref; use std::ops::Deref;
use std::pin::Pin; use std::pin::Pin;
use std::sync::atomic::AtomicU64; use std::rc::Rc;
use async_once_cell::OnceCell; use async_once_cell::OnceCell;
use dyn_clone::{DynClone, clone_box}; use dyn_clone::{DynClone, clone_box};
@@ -18,33 +19,35 @@ use never::Never;
use orchid_api_traits::{Decode, Encode, enc_vec}; use orchid_api_traits::{Decode, Encode, enc_vec};
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use orchid_base::format::{FmtCtx, FmtCtxImpl, FmtUnit, take_first}; use orchid_base::format::{FmtCtx, FmtCtxImpl, FmtUnit, take_first};
use orchid_base::logging::log;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use task_local::task_local;
use crate::api; use crate::api;
use crate::atom::{ use crate::atom::{
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet, AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
MethodSetBuilder, TAtom, err_not_callable, err_not_command, get_info, MethodSetBuilder, TAtom, err_not_callable, err_not_command, get_info,
}; };
use crate::context::{SysCtxEntry, ctx, i};
use crate::expr::Expr; use crate::expr::Expr;
use crate::gen_expr::{GExpr, bot}; use crate::gen_expr::{GExpr, bot};
use crate::system_ctor::CtedObj; use crate::system::{cted, sys_id};
pub struct OwnedVariant; pub struct OwnedVariant;
impl AtomicVariant for OwnedVariant {} impl AtomicVariant for OwnedVariant {}
impl<A: OwnedAtom + Atomic<Variant = OwnedVariant>> AtomicFeaturesImpl<OwnedVariant> for A { impl<A: OwnedAtom + Atomic<Variant = OwnedVariant>> AtomicFeaturesImpl<OwnedVariant> for A {
fn _factory(self) -> AtomFactory { fn _factory(self) -> AtomFactory {
AtomFactory::new(async move || { AtomFactory::new(async move || {
let serial = ctx() let obj_store = get_obj_store();
.get_or_default::<ObjStore>() let atom_id = {
.next_id let mut id = obj_store.next_id.borrow_mut();
.fetch_add(1, std::sync::atomic::Ordering::Relaxed); *id += 1;
let atom_id = api::AtomId(NonZero::new(serial + 1).unwrap()); api::AtomId(NonZero::new(*id + 1).unwrap())
let (typ_id, _) = get_info::<A>(ctx().get::<CtedObj>().inst().card()); };
let mut data = enc_vec(&typ_id).await; let (typ_id, _) = get_info::<A>(cted().inst().card());
let mut data = enc_vec(&typ_id);
self.encode(Pin::<&mut Vec<u8>>::new(&mut data)).await; self.encode(Pin::<&mut Vec<u8>>::new(&mut data)).await;
ctx().get_or_default::<ObjStore>().objects.read().await.insert(atom_id, Box::new(self)); obj_store.objects.read().await.insert(atom_id, Box::new(self));
api::Atom { drop: Some(atom_id), data: api::AtomData(data), owner: ctx().sys_id() } api::Atom { drop: Some(atom_id), data: api::AtomData(data), owner: sys_id() }
}) })
} }
fn _info() -> Self::_Info { OwnedAtomDynfo { msbuild: A::reg_reqs(), ms: OnceCell::new() } } fn _info() -> Self::_Info { OwnedAtomDynfo { msbuild: A::reg_reqs(), ms: OnceCell::new() } }
@@ -59,7 +62,7 @@ pub(crate) struct AtomReadGuard<'a> {
} }
impl<'a> AtomReadGuard<'a> { impl<'a> AtomReadGuard<'a> {
async fn new(id: api::AtomId) -> Self { async fn new(id: api::AtomId) -> Self {
let guard = ctx().get_or_default::<ObjStore>().objects.read().await; let guard = get_obj_store().objects.read().await;
if guard.get(&id).is_none() { if guard.get(&id).is_none() {
panic!("Received invalid atom ID: {id:?}"); panic!("Received invalid atom ID: {id:?}");
} }
@@ -73,7 +76,7 @@ impl Deref for AtomReadGuard<'_> {
/// Remove an atom from the store /// Remove an atom from the store
pub(crate) async fn take_atom(id: api::AtomId) -> Box<dyn DynOwnedAtom> { pub(crate) async fn take_atom(id: api::AtomId) -> Box<dyn DynOwnedAtom> {
let mut g = ctx().get_or_default::<ObjStore>().objects.write().await; let mut g = get_obj_store().objects.write().await;
g.remove(&id).unwrap_or_else(|| panic!("Received invalid atom ID: {}", id.0)) g.remove(&id).unwrap_or_else(|| panic!("Received invalid atom ID: {}", id.0))
} }
@@ -86,7 +89,7 @@ impl<T: OwnedAtom> AtomDynfo for OwnedAtomDynfo<T> {
fn name(&self) -> &'static str { type_name::<T>() } fn name(&self) -> &'static str { type_name::<T>() }
fn decode<'a>(&'a self, AtomCtx(data, ..): AtomCtx<'a>) -> LocalBoxFuture<'a, Box<dyn Any>> { fn decode<'a>(&'a self, AtomCtx(data, ..): AtomCtx<'a>) -> LocalBoxFuture<'a, Box<dyn Any>> {
Box::pin(async { Box::pin(async {
Box::new(<T as AtomCard>::Data::decode(Pin::new(&mut &data[..])).await) as Box<dyn Any> Box::new(<T as AtomCard>::Data::decode_slice(&mut &data[..])) as Box<dyn Any>
}) })
} }
fn call(&self, AtomCtx(_, id): AtomCtx, arg: Expr) -> LocalBoxFuture<'_, GExpr> { fn call(&self, AtomCtx(_, id): AtomCtx, arg: Expr) -> LocalBoxFuture<'_, GExpr> {
@@ -127,7 +130,7 @@ impl<T: OwnedAtom> AtomDynfo for OwnedAtomDynfo<T> {
) -> LocalBoxFuture<'a, Option<Vec<Expr>>> { ) -> LocalBoxFuture<'a, Option<Vec<Expr>>> {
Box::pin(async move { Box::pin(async move {
let id = id.unwrap(); let id = id.unwrap();
id.encode(write.as_mut()).await; id.encode(write.as_mut()).await.unwrap();
AtomReadGuard::new(id).await.dyn_serialize(write).await AtomReadGuard::new(id).await.dyn_serialize(write).await
}) })
} }
@@ -155,7 +158,7 @@ pub trait DeserializeCtx: Sized {
struct DeserCtxImpl<'a>(&'a [u8]); struct DeserCtxImpl<'a>(&'a [u8]);
impl DeserializeCtx for DeserCtxImpl<'_> { impl DeserializeCtx for DeserCtxImpl<'_> {
async fn read<T: Decode>(&mut self) -> T { T::decode(Pin::new(&mut self.0)).await } async fn read<T: Decode>(&mut self) -> T { T::decode(Pin::new(&mut self.0)).await.unwrap() }
fn is_empty(&self) -> bool { self.0.is_empty() } fn is_empty(&self) -> bool { self.0.is_empty() }
} }
@@ -266,7 +269,7 @@ impl<T: OwnedAtom> DynOwnedAtom for T {
fn atom_tid(&self) -> TypeId { TypeId::of::<T>() } fn atom_tid(&self) -> TypeId { TypeId::of::<T>() }
fn as_any_ref(&self) -> &dyn Any { self } fn as_any_ref(&self) -> &dyn Any { self }
fn encode<'a>(&'a self, buffer: Pin<&'a mut dyn AsyncWrite>) -> LocalBoxFuture<'a, ()> { fn encode<'a>(&'a self, buffer: Pin<&'a mut dyn AsyncWrite>) -> LocalBoxFuture<'a, ()> {
async { self.val().await.as_ref().encode(buffer).await }.boxed_local() async { self.val().await.as_ref().encode(buffer).await.unwrap() }.boxed_local()
} }
fn dyn_call_ref(&self, arg: Expr) -> LocalBoxFuture<'_, GExpr> { fn dyn_call_ref(&self, arg: Expr) -> LocalBoxFuture<'_, GExpr> {
self.call_ref(arg).boxed_local() self.call_ref(arg).boxed_local()
@@ -279,7 +282,7 @@ impl<T: OwnedAtom> DynOwnedAtom for T {
} }
fn dyn_free(self: Box<Self>) -> LocalBoxFuture<'static, ()> { self.free().boxed_local() } fn dyn_free(self: Box<Self>) -> LocalBoxFuture<'static, ()> { self.free().boxed_local() }
fn dyn_print(&self) -> LocalBoxFuture<'_, FmtUnit> { fn dyn_print(&self) -> LocalBoxFuture<'_, FmtUnit> {
async move { self.print_atom(&FmtCtxImpl { i: &i() }).await }.boxed_local() async move { self.print_atom(&FmtCtxImpl::default()).await }.boxed_local()
} }
fn dyn_serialize<'a>( fn dyn_serialize<'a>(
&'a self, &'a self,
@@ -294,13 +297,24 @@ impl<T: OwnedAtom> DynOwnedAtom for T {
#[derive(Default)] #[derive(Default)]
pub(crate) struct ObjStore { pub(crate) struct ObjStore {
pub(crate) next_id: AtomicU64, pub(crate) next_id: RefCell<u64>,
pub(crate) objects: RwLock<MemoMap<api::AtomId, Box<dyn DynOwnedAtom>>>, pub(crate) objects: RwLock<MemoMap<api::AtomId, Box<dyn DynOwnedAtom>>>,
} }
impl SysCtxEntry for ObjStore {}
task_local! {
static OBJ_STORE: Rc<ObjStore>;
}
pub(crate) fn with_obj_store<'a>(fut: LocalBoxFuture<'a, ()>) -> LocalBoxFuture<'a, ()> {
Box::pin(OBJ_STORE.scope(Rc::new(ObjStore::default()), fut))
}
pub(crate) fn get_obj_store() -> Rc<ObjStore> {
OBJ_STORE.try_with(|store| store.clone()).expect("Owned atom store not initialized")
}
pub async fn own<A: OwnedAtom>(typ: &TAtom<A>) -> A { pub async fn own<A: OwnedAtom>(typ: &TAtom<A>) -> A {
let g = ctx().get_or_default::<ObjStore>().objects.read().await; let g = get_obj_store().objects.read().await;
let atom_id = typ.untyped.atom.drop.expect("Owned atoms always have a drop ID"); let atom_id = typ.untyped.atom.drop.expect("Owned atoms always have a drop ID");
let dyn_atom = let dyn_atom =
g.get(&atom_id).expect("Atom ID invalid; atom type probably not owned by this crate"); g.get(&atom_id).expect("Atom ID invalid; atom type probably not owned by this crate");
@@ -308,8 +322,7 @@ pub async fn own<A: OwnedAtom>(typ: &TAtom<A>) -> A {
} }
pub async fn debug_print_obj_store(show_atoms: bool) { pub async fn debug_print_obj_store(show_atoms: bool) {
let ctx = ctx(); let store = get_obj_store();
let store = ctx.get_or_default::<ObjStore>();
let keys = store.objects.read().await.keys().cloned().collect_vec(); let keys = store.objects.read().await.keys().cloned().collect_vec();
let mut message = "Atoms in store:".to_string(); let mut message = "Atoms in store:".to_string();
if !show_atoms { if !show_atoms {
@@ -326,5 +339,5 @@ pub async fn debug_print_obj_store(show_atoms: bool) {
message += &format!("\n{k:?} -> {}", take_first(&atom.dyn_print().await, true)); message += &format!("\n{k:?} -> {}", take_first(&atom.dyn_print().await, true));
} }
} }
eprintln!("{message}") writeln!(log("debug"), "{message}").await
} }
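
The atom store now lives in a task-local slot installed by with_obj_store instead of a SysCtx entry. The same scoping pattern on a hypothetical counter, using only the scope/with calls that appear above:

use std::cell::RefCell;
use std::rc::Rc;
use futures::future::LocalBoxFuture;
use task_local::task_local;

task_local! {
  static COUNTER: Rc<RefCell<u64>>;
}

// wrap a run future so everything awaited inside it sees the same counter
fn with_counter(fut: LocalBoxFuture<'_, ()>) -> LocalBoxFuture<'_, ()> {
  Box::pin(COUNTER.scope(Rc::new(RefCell::new(0)), fut))
}

// callable anywhere inside the wrapped future, no context parameter needed
fn bump() -> u64 {
  COUNTER.with(|c| {
    let mut n = c.borrow_mut();
    *n += 1;
    *n
  })
}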

View File

@@ -8,6 +8,7 @@ use futures::{AsyncRead, AsyncWrite, FutureExt};
use orchid_api_traits::{Coding, enc_vec}; use orchid_api_traits::{Coding, enc_vec};
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use orchid_base::format::FmtUnit; use orchid_base::format::FmtUnit;
use orchid_base::logging::log;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use crate::api; use crate::api;
@@ -15,20 +16,19 @@ use crate::atom::{
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet, AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
MethodSetBuilder, err_not_callable, err_not_command, get_info, MethodSetBuilder, err_not_callable, err_not_command, get_info,
}; };
use crate::context::ctx;
use crate::expr::Expr; use crate::expr::Expr;
use crate::gen_expr::{GExpr, bot}; use crate::gen_expr::{GExpr, bot};
use crate::system_ctor::CtedObj; use crate::system::{cted, sys_id};
pub struct ThinVariant; pub struct ThinVariant;
impl AtomicVariant for ThinVariant {} impl AtomicVariant for ThinVariant {}
impl<A: ThinAtom + Atomic<Variant = ThinVariant>> AtomicFeaturesImpl<ThinVariant> for A { impl<A: ThinAtom + Atomic<Variant = ThinVariant>> AtomicFeaturesImpl<ThinVariant> for A {
fn _factory(self) -> AtomFactory { fn _factory(self) -> AtomFactory {
AtomFactory::new(async move || { AtomFactory::new(async move || {
let (id, _) = get_info::<A>(ctx().get::<CtedObj>().inst().card()); let (id, _) = get_info::<A>(cted().inst().card());
let mut buf = enc_vec(&id).await; let mut buf = enc_vec(&id);
self.encode(Pin::new(&mut buf)).await; self.encode_vec(&mut buf);
api::Atom { drop: None, data: api::AtomData(buf), owner: ctx().sys_id() } api::Atom { drop: None, data: api::AtomData(buf), owner: sys_id() }
}) })
} }
fn _info() -> Self::_Info { ThinAtomDynfo { msbuild: Self::reg_reqs(), ms: OnceCell::new() } } fn _info() -> Self::_Info { ThinAtomDynfo { msbuild: Self::reg_reqs(), ms: OnceCell::new() } }
@@ -41,18 +41,18 @@ pub struct ThinAtomDynfo<T: ThinAtom> {
} }
impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> { impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
fn print<'a>(&self, AtomCtx(buf, _): AtomCtx<'a>) -> LocalBoxFuture<'a, FmtUnit> { fn print<'a>(&self, AtomCtx(buf, _): AtomCtx<'a>) -> LocalBoxFuture<'a, FmtUnit> {
Box::pin(async move { T::decode(Pin::new(&mut &buf[..])).await.print().await }) Box::pin(async move { T::decode_slice(&mut &buf[..]).print().await })
} }
fn tid(&self) -> TypeId { TypeId::of::<T>() } fn tid(&self) -> TypeId { TypeId::of::<T>() }
fn name(&self) -> &'static str { type_name::<T>() } fn name(&self) -> &'static str { type_name::<T>() }
fn decode<'a>(&'a self, AtomCtx(buf, ..): AtomCtx<'a>) -> LocalBoxFuture<'a, Box<dyn Any>> { fn decode<'a>(&'a self, AtomCtx(buf, ..): AtomCtx<'a>) -> LocalBoxFuture<'a, Box<dyn Any>> {
Box::pin(async { Box::new(T::decode(Pin::new(&mut &buf[..])).await) as Box<dyn Any> }) Box::pin(async { Box::new(T::decode_slice(&mut &buf[..])) as Box<dyn Any> })
} }
fn call<'a>(&'a self, AtomCtx(buf, ..): AtomCtx<'a>, arg: Expr) -> LocalBoxFuture<'a, GExpr> { fn call<'a>(&'a self, AtomCtx(buf, ..): AtomCtx<'a>, arg: Expr) -> LocalBoxFuture<'a, GExpr> {
Box::pin(async move { T::decode(Pin::new(&mut &buf[..])).await.call(arg).await }) Box::pin(async move { T::decode_slice(&mut &buf[..]).call(arg).await })
} }
fn call_ref<'a>(&'a self, AtomCtx(buf, ..): AtomCtx<'a>, arg: Expr) -> LocalBoxFuture<'a, GExpr> { fn call_ref<'a>(&'a self, AtomCtx(buf, ..): AtomCtx<'a>, arg: Expr) -> LocalBoxFuture<'a, GExpr> {
Box::pin(async move { T::decode(Pin::new(&mut &buf[..])).await.call(arg).await }) Box::pin(async move { T::decode_slice(&mut &buf[..]).call(arg).await })
} }
fn handle_req<'a, 'm1: 'a, 'm2: 'a>( fn handle_req<'a, 'm1: 'a, 'm2: 'a>(
&'a self, &'a self,
@@ -63,14 +63,14 @@ impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
) -> LocalBoxFuture<'a, bool> { ) -> LocalBoxFuture<'a, bool> {
Box::pin(async move { Box::pin(async move {
let ms = self.ms.get_or_init(self.msbuild.pack()).await; let ms = self.ms.get_or_init(self.msbuild.pack()).await;
ms.dispatch(&T::decode(Pin::new(&mut &buf[..])).await, key, req, rep).await ms.dispatch(&T::decode_slice(&mut &buf[..]), key, req, rep).await
}) })
} }
fn command<'a>( fn command<'a>(
&'a self, &'a self,
AtomCtx(buf, _): AtomCtx<'a>, AtomCtx(buf, _): AtomCtx<'a>,
) -> LocalBoxFuture<'a, OrcRes<Option<GExpr>>> { ) -> LocalBoxFuture<'a, OrcRes<Option<GExpr>>> {
async move { T::decode(Pin::new(&mut &buf[..])).await.command().await }.boxed_local() async move { T::decode_slice(&mut &buf[..]).command().await }.boxed_local()
} }
fn serialize<'a, 'b: 'a>( fn serialize<'a, 'b: 'a>(
&'a self, &'a self,
@@ -78,18 +78,18 @@ impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
write: Pin<&'b mut dyn AsyncWrite>, write: Pin<&'b mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, Option<Vec<Expr>>> { ) -> LocalBoxFuture<'a, Option<Vec<Expr>>> {
Box::pin(async { Box::pin(async {
T::decode(Pin::new(&mut &ctx.0[..])).await.encode(write).await; T::decode_slice(&mut &ctx.0[..]).encode(write).await.unwrap();
Some(Vec::new()) Some(Vec::new())
}) })
} }
fn deserialize<'a>(&'a self, data: &'a [u8], refs: &'a [Expr]) -> LocalBoxFuture<'a, api::Atom> { fn deserialize<'a>(&'a self, data: &'a [u8], refs: &'a [Expr]) -> LocalBoxFuture<'a, api::Atom> {
assert!(refs.is_empty(), "Refs found when deserializing thin atom"); assert!(refs.is_empty(), "Refs found when deserializing thin atom");
Box::pin(async { T::decode(Pin::new(&mut &data[..])).await._factory().build().await }) Box::pin(async { T::decode_slice(&mut &data[..])._factory().build().await })
} }
fn drop<'a>(&'a self, AtomCtx(buf, _): AtomCtx<'a>) -> LocalBoxFuture<'a, ()> { fn drop<'a>(&'a self, AtomCtx(buf, _): AtomCtx<'a>) -> LocalBoxFuture<'a, ()> {
Box::pin(async move { Box::pin(async move {
let string_self = T::decode(Pin::new(&mut &buf[..])).await.print().await; let string_self = T::decode_slice(&mut &buf[..]).print().await;
writeln!(ctx().logger(), "Received drop signal for non-drop atom {string_self:?}"); writeln!(log("warn"), "Received drop signal for non-drop atom {string_self:?}").await;
}) })
} }
} }
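
Thin atoms carry no host-side identity, so the whole value rides in the byte payload: a type ID prefix followed by the atom's own encoding. A hedged round-trip sketch with a hypothetical Flag atom, using the helpers as they appear in this diff:

fn flag_roundtrip() -> Flag {
  let (type_id, _) = get_info::<Flag>(cted().inst().card());
  let mut buf = enc_vec(&type_id);
  Flag(true).encode_vec(&mut buf);
  // the host stores and replays these bytes; each callback above strips the
  // type ID prefix and rebuilds the value from the remainder
  let mut rest = &buf[..];
  let _type_id = AtomTypeId::decode_slice(&mut rest);
  Flag::decode_slice(&mut rest)
}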

View File

@@ -0,0 +1,30 @@
use std::rc::Rc;
use futures::future::LocalBoxFuture;
use orchid_base::binary::future_to_vt;
use crate::api;
use crate::entrypoint::ExtensionBuilder;
use crate::ext_port::ExtPort;
pub type ExtCx = api::binary::ExtensionContext;
struct Spawner(api::binary::Spawner);
impl Drop for Spawner {
fn drop(&mut self) { (self.0.drop)(self.0.data) }
}
impl Spawner {
pub fn spawn(&self, fut: LocalBoxFuture<'static, ()>) {
(self.0.spawn)(self.0.data, future_to_vt(fut))
}
}
pub fn orchid_extension_main_body(cx: ExtCx, builder: ExtensionBuilder) {
let spawner = Spawner(cx.spawner);
builder.build(ExtPort {
input: Box::pin(cx.input),
output: Box::pin(cx.output),
log: Box::pin(cx.log),
spawn: Rc::new(move |fut| spawner.spawn(fut)),
});
}
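
A hedged sketch of what a dylib extension crate might export to drive the body above; the exported symbol name, the calling convention, and MySystemCtor are all assumptions, since this changeset does not yet pin down the host-side loading ABI.

// Sketch only; symbol name and ABI are assumptions.
#[unsafe(no_mangle)]
pub extern "C" fn orchid_extension_main(cx: ExtCx) {
  orchid_extension_main_body(cx, ExtensionBuilder::new("my-extension").system(MySystemCtor))
}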

View File

@@ -1,90 +0,0 @@
use std::any::{Any, TypeId, type_name};
use std::fmt;
use std::num::NonZero;
use std::rc::Rc;
use memo_map::MemoMap;
use orchid_base::builtin::Spawner;
use orchid_base::interner::Interner;
use orchid_base::logging::Logger;
use orchid_base::reqnot::ReqNot;
use task_local::task_local;
use crate::api;
use crate::system_ctor::CtedObj;
#[derive(Clone)]
pub struct SysCtx(Rc<MemoMap<TypeId, Box<dyn Any>>>);
impl SysCtx {
pub fn new(
id: api::SysId,
i: Interner,
reqnot: ReqNot<api::ExtMsgSet>,
spawner: Spawner,
logger: Logger,
cted: CtedObj,
) -> Self {
let this = Self(Rc::new(MemoMap::new()));
this.add(id).add(i).add(reqnot).add(spawner).add(logger).add(cted);
this
}
pub fn add<T: SysCtxEntry>(&self, t: T) -> &Self {
assert!(self.0.insert(TypeId::of::<T>(), Box::new(t)), "Key already exists");
self
}
pub fn get_or_insert<T: SysCtxEntry>(&self, f: impl FnOnce() -> T) -> &T {
(self.0.get_or_insert_owned(TypeId::of::<T>(), || Box::new(f())).downcast_ref())
.expect("Keyed by TypeId")
}
pub fn get_or_default<T: SysCtxEntry + Default>(&self) -> &T { self.get_or_insert(T::default) }
pub fn try_get<T: SysCtxEntry>(&self) -> Option<&T> {
Some(self.0.get(&TypeId::of::<T>())?.downcast_ref().expect("Keyed by TypeId"))
}
pub fn get<T: SysCtxEntry>(&self) -> &T {
self.try_get().unwrap_or_else(|| panic!("Context {} missing", type_name::<T>()))
}
/// Shorthand to get the messaging link
pub fn reqnot(&self) -> &ReqNot<api::ExtMsgSet> { self.get::<ReqNot<api::ExtMsgSet>>() }
/// Shorthand to get the system ID
pub fn sys_id(&self) -> api::SysId { *self.get::<api::SysId>() }
/// Spawn a task that will eventually be executed asynchronously
pub fn spawn(&self, f: impl Future<Output = ()> + 'static) {
(self.get::<Spawner>())(Box::pin(CTX.scope(self.clone(), f)))
}
/// Shorthand to get the logger
pub fn logger(&self) -> &Logger { self.get::<Logger>() }
/// Shorthand to get the constructed system object
pub fn cted(&self) -> &CtedObj { self.get::<CtedObj>() }
}
impl fmt::Debug for SysCtx {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "SysCtx({:?})", self.sys_id())
}
}
pub trait SysCtxEntry: 'static + Sized {}
impl SysCtxEntry for api::SysId {}
impl SysCtxEntry for ReqNot<api::ExtMsgSet> {}
impl SysCtxEntry for Spawner {}
impl SysCtxEntry for CtedObj {}
impl SysCtxEntry for Logger {}
impl SysCtxEntry for Interner {}
task_local! {
static CTX: SysCtx;
}
pub async fn with_ctx<F: Future>(ctx: SysCtx, f: F) -> F::Output { CTX.scope(ctx, f).await }
pub fn ctx() -> SysCtx { CTX.get() }
/// Shorthand to get the [Interner] instance
pub fn i() -> Interner { ctx().get::<Interner>().clone() }
pub fn mock_ctx() -> SysCtx {
let ctx = SysCtx(Rc::default());
ctx
.add(Logger::new(api::LogStrategy::StdErr))
.add(Interner::new_master())
.add::<Spawner>(Rc::new(|_| panic!("Cannot fork in test environment")))
.add(api::SysId(NonZero::<u16>::MIN));
ctx
}

View File

@@ -4,11 +4,11 @@ use std::pin::Pin;
use dyn_clone::DynClone; use dyn_clone::DynClone;
use never::Never; use never::Never;
use orchid_base::error::{OrcErrv, OrcRes, mk_errv}; use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
use orchid_base::interner::is;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use trait_set::trait_set; use trait_set::trait_set;
use crate::atom::{AtomicFeatures, ForeignAtom, TAtom, ToAtom}; use crate::atom::{AtomicFeatures, ForeignAtom, TAtom, ToAtom};
use crate::context::i;
use crate::expr::Expr; use crate::expr::Expr;
use crate::gen_expr::{GExpr, atom, bot}; use crate::gen_expr::{GExpr, atom, bot};
@@ -27,7 +27,7 @@ impl<T: TryFromExpr, U: TryFromExpr> TryFromExpr for (T, U) {
} }
async fn err_not_atom(pos: Pos) -> OrcErrv { async fn err_not_atom(pos: Pos) -> OrcErrv {
mk_errv(i().i("Expected an atom").await, "This expression is not an atom", [pos]) mk_errv(is("Expected an atom").await, "This expression is not an atom", [pos])
} }
impl TryFromExpr for ForeignAtom { impl TryFromExpr for ForeignAtom {

View File

@@ -1,353 +1,352 @@
use std::cell::RefCell; use std::cell::RefCell;
use std::future::Future; use std::future::Future;
use std::mem;
use std::num::NonZero; use std::num::NonZero;
use std::pin::Pin; use std::pin::Pin;
use std::rc::Rc; use std::rc::Rc;
use std::{io, mem};
use futures::channel::mpsc::{Receiver, Sender, channel};
use futures::future::{LocalBoxFuture, join_all}; use futures::future::{LocalBoxFuture, join_all};
use futures::lock::Mutex; use futures::lock::Mutex;
use futures::{FutureExt, SinkExt, StreamExt, stream, stream_select}; use futures::{AsyncRead, AsyncWrite, AsyncWriteExt, StreamExt, stream};
use futures_locks::RwLock;
use hashbrown::HashMap; use hashbrown::HashMap;
use itertools::Itertools; use itertools::Itertools;
use orchid_api_traits::{Decode, UnderRoot, enc_vec}; use orchid_api::{ExtHostNotif, ExtHostReq};
use orchid_base::builtin::{ExtInit, ExtPort, Spawner}; use orchid_api_traits::{Decode, Encode, Request, UnderRoot, enc_vec};
use orchid_base::char_filter::{char_filter_match, char_filter_union, mk_char_filter}; use orchid_base::char_filter::{char_filter_match, char_filter_union, mk_char_filter};
use orchid_base::clone; use orchid_base::error::try_with_reporter;
use orchid_base::error::Reporter; use orchid_base::interner::{es, is, with_interner};
use orchid_base::interner::{Interner, Tok}; use orchid_base::logging::{log, with_logger};
use orchid_base::logging::Logger;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::parse::{Comment, Snippet}; use orchid_base::parse::{Comment, Snippet};
use orchid_base::reqnot::{ReqNot, RequestHandle, Requester}; use orchid_base::reqnot::{
Client, ClientExt, CommCtx, MsgReader, MsgReaderExt, Receipt, RepWriter, ReqHandle, ReqHandleExt,
ReqReader, ReqReaderExt, Witness, io_comm,
};
use orchid_base::stash::with_stash;
use orchid_base::tree::{TokenVariant, ttv_from_api}; use orchid_base::tree::{TokenVariant, ttv_from_api};
use substack::Substack; use substack::Substack;
use trait_set::trait_set; use task_local::task_local;
use crate::api; use crate::api;
use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId}; use crate::atom::{AtomCtx, AtomTypeId, resolve_atom_type};
use crate::atom_owned::take_atom; use crate::atom_owned::{take_atom, with_obj_store};
use crate::context::{SysCtx, ctx, i, with_ctx};
use crate::expr::{BorrowedExprStore, Expr, ExprHandle}; use crate::expr::{BorrowedExprStore, Expr, ExprHandle};
use crate::ext_port::ExtPort;
use crate::func_atom::with_funs_ctx;
use crate::interner::new_interner;
use crate::lexer::{LexContext, ekey_cascade, ekey_not_applicable}; use crate::lexer::{LexContext, ekey_cascade, ekey_not_applicable};
use crate::parser::{PTokTree, ParsCtx, get_const, linev_into_api}; use crate::logger::LoggerImpl;
use crate::system::atom_by_idx; use crate::parser::{PTokTree, ParsCtx, get_const, linev_into_api, with_parsed_const_ctx};
use crate::system_ctor::{CtedObj, DynSystemCtor}; use crate::reflection::with_refl_roots;
use crate::tree::{LazyMemberFactory, TreeIntoApiCtxImpl}; use crate::system::{SysCtx, atom_by_idx, cted, with_sys};
use crate::system_ctor::{CtedObj, DynSystemCtor, SystemCtor};
use crate::tree::{TreeIntoApiCtxImpl, get_lazy, with_lazy_member_store};
pub type ExtReq<'a> = RequestHandle<'a, api::ExtMsgSet>; task_local::task_local! {
pub type ExtReqNot = ReqNot<api::ExtMsgSet>; static CLIENT: Rc<dyn Client>;
static CTX: Rc<RefCell<Option<CommCtx>>>;
pub struct ExtensionData {
pub name: &'static str,
pub systems: &'static [&'static dyn DynSystemCtor],
} }
impl ExtensionData {
pub fn new(name: &'static str, systems: &'static [&'static dyn DynSystemCtor]) -> Self { fn get_client() -> Rc<dyn Client> { CLIENT.get() }
Self { name, systems } pub async fn exit() {
let cx = CTX.get().borrow_mut().take();
cx.unwrap().exit().await
}
/// Set the client used for global [request] and [notify] functions within the
/// runtime of this future
pub async fn with_comm<F: Future>(c: Rc<dyn Client>, ctx: CommCtx, fut: F) -> F::Output {
CLIENT.scope(c, CTX.scope(Rc::new(RefCell::new(Some(ctx))), fut)).await
}
task_local! {
pub static MUTE_REPLY: ();
}
/// Send a request through the global client's [ClientExt::request]
pub async fn request<T: Request + UnderRoot<Root = ExtHostReq>>(t: T) -> T::Response {
let response = get_client().request(t).await.unwrap();
if MUTE_REPLY.try_with(|b| *b).is_err() {
writeln!(log("msg"), "Got response {response:?}").await;
} }
response
} }
pub enum MemberRecord { /// Send a notification through the global client's [ClientExt::notify]
Gen(Vec<Tok<String>>, LazyMemberFactory), pub async fn notify<T: UnderRoot<Root = ExtHostNotif>>(t: T) {
Res, get_client().notify(t).await.unwrap()
} }
pub struct SystemRecord { pub struct SystemRecord {
lazy_members: Mutex<HashMap<api::TreeId, MemberRecord>>, cted: CtedObj,
ctx: SysCtx,
} }
trait_set! { type SystemTable = RefCell<HashMap<api::SysId, Rc<SystemRecord>>>;
pub trait WithAtomRecordCallback<'a, T> = AsyncFnOnce(
Box<dyn AtomDynfo>, task_local! {
AtomTypeId, static SYSTEM_TABLE: SystemTable;
&'a [u8]
) -> T
} }
pub async fn with_atom_record<'a, F: Future<Output = SysCtx>, T>( async fn with_sys_record<F: Future>(id: api::SysId, fut: F) -> F::Output {
get_sys_ctx: &impl Fn(api::SysId) -> F, let cted = SYSTEM_TABLE.with(|tbl| tbl.borrow().get(&id).expect("Invalid sys ID").cted.clone());
atom: &'a api::Atom, with_sys(SysCtx(id, cted), fut).await
cb: impl WithAtomRecordCallback<'a, T>,
) -> T {
let mut data = &atom.data.0[..];
let ctx = get_sys_ctx(atom.owner).await;
let inst = ctx.get::<CtedObj>().inst();
let id = AtomTypeId::decode(Pin::new(&mut data)).await;
let atom_record = atom_by_idx(inst.card(), id.clone()).expect("Atom ID reserved");
with_ctx(ctx, async move { cb(atom_record, id, data).await }).await
} }
pub struct ExtensionOwner { pub trait ContextModifier: 'static {
_interner_cell: Rc<RefCell<Option<Interner>>>, fn apply<'a>(self: Box<Self>, fut: LocalBoxFuture<'a, ()>) -> LocalBoxFuture<'a, ()>;
_systems_lock: Rc<RwLock<HashMap<api::SysId, SystemRecord>>>,
out_recv: Mutex<Receiver<Vec<u8>>>,
out_send: Sender<Vec<u8>>,
} }
impl ExtPort for ExtensionOwner { impl<F: AsyncFnOnce(LocalBoxFuture<'_, ()>) + 'static> ContextModifier for F {
fn send<'a>(&'a self, msg: &'a [u8]) -> LocalBoxFuture<'a, ()> { fn apply<'a>(self: Box<Self>, fut: LocalBoxFuture<'a, ()>) -> LocalBoxFuture<'a, ()> {
Box::pin(async { self.out_send.clone().send(msg.to_vec()).boxed_local().await.unwrap() }) Box::pin((self)(fut))
}
fn recv(&self) -> LocalBoxFuture<'_, Option<Vec<u8>>> {
Box::pin(async { self.out_recv.lock().await.next().await })
} }
} }
pub fn extension_init( pub struct ExtensionBuilder {
data: ExtensionData, pub name: &'static str,
host_header: api::HostHeader, pub systems: Vec<Box<dyn DynSystemCtor>>,
spawner: Spawner, pub context: Vec<Box<dyn ContextModifier>>,
) -> ExtInit { }
let api::HostHeader { log_strategy, msg_logs } = host_header; impl ExtensionBuilder {
let decls = (data.systems.iter().enumerate()) pub fn new(name: &'static str) -> Self { Self { name, systems: Vec::new(), context: Vec::new() } }
pub fn system(mut self, ctor: impl SystemCtor) -> Self {
self.systems.push(Box::new(ctor) as Box<_>);
self
}
pub fn add_context(&mut self, fun: impl ContextModifier) {
self.context.push(Box::new(fun) as Box<_>);
}
pub fn context(mut self, fun: impl ContextModifier) -> Self {
self.add_context(fun);
self
}
pub fn build(mut self, mut ctx: ExtPort) {
self.add_context(with_funs_ctx);
self.add_context(with_parsed_const_ctx);
self.add_context(with_obj_store);
self.add_context(with_lazy_member_store);
self.add_context(with_refl_roots);
(ctx.spawn)(Box::pin(async move {
let host_header = api::HostHeader::decode(ctx.input.as_mut()).await.unwrap();
let decls = (self.systems.iter().enumerate())
.map(|(id, sys)| (u16::try_from(id).expect("more than u16max system ctors"), sys)) .map(|(id, sys)| (u16::try_from(id).expect("more than u16max system ctors"), sys))
.map(|(id, sys)| sys.decl(api::SysDeclId(NonZero::new(id + 1).unwrap()))) .map(|(id, sys)| sys.decl(api::SysDeclId(NonZero::new(id + 1).unwrap())))
.collect_vec(); .collect_vec();
let systems_lock = Rc::new(RwLock::new(HashMap::<api::SysId, SystemRecord>::new())); api::ExtensionHeader { name: self.name.to_string(), systems: decls.clone() }
let ext_header = api::ExtensionHeader { name: data.name.to_string(), systems: decls.clone() }; .encode(ctx.output.as_mut())
let (out_send, in_recv) = channel::<Vec<u8>>(1); .await
let (in_send, out_recv) = channel::<Vec<u8>>(1); .unwrap();
let (exit_send, exit_recv) = channel(1); ctx.output.as_mut().flush().await.unwrap();
let logger = Logger::new(log_strategy); let logger1 = LoggerImpl::from_api(&host_header.logger);
let msg_logger = Logger::new(msg_logs); let logger2 = logger1.clone();
let interner_cell = Rc::new(RefCell::new(None::<Interner>)); let (client, comm_ctx, extension_srv) =
let interner_weak = Rc::downgrade(&interner_cell); io_comm(Rc::new(Mutex::new(ctx.output)), Mutex::new(ctx.input));
let systems_weak = Rc::downgrade(&systems_lock); let extension_fut = extension_srv.listen(
let get_ctx = clone!(systems_weak; move |id: api::SysId| clone!(systems_weak; async move { async |n: Box<dyn MsgReader<'_>>| {
let systems = let notif = n.read().await.unwrap();
systems_weak.upgrade().expect("System table dropped before request processing done"); match notif {
systems.read().await.get(&id).expect("System not found").ctx.clone() api::HostExtNotif::Exit => exit().await,
}));
let init_ctx = {
clone!(interner_weak, spawner, logger);
move |id: api::SysId, cted: CtedObj, reqnot: ReqNot<api::ExtMsgSet>| {
clone!(interner_weak, spawner, logger; async move {
let interner_rc =
interner_weak.upgrade().expect("System construction order while shutting down");
let i = interner_rc.borrow().clone().expect("mk_ctx called very early, no interner!");
SysCtx::new(id, i, reqnot, spawner, logger, cted)
})
} }
}; Ok(())
let rn = ReqNot::<api::ExtMsgSet>::new(
msg_logger.clone(),
move |a, _| {
clone!(in_send mut);
Box::pin(async move { in_send.send(a.to_vec()).await.unwrap() })
}, },
{ async |mut reader| {
clone!(exit_send); with_stash(async {
move |n, _| { let req = reader.read_req().await.unwrap();
clone!(exit_send mut); let handle = reader.finish().await;
async move { // Atom printing is never reported because it generates too much
match n { // noise
api::HostExtNotif::Exit => exit_send.send(()).await.unwrap(),
}
}
.boxed_local()
}
},
{
clone!(logger, get_ctx, init_ctx, systems_weak, interner_weak, decls, msg_logger);
move |hand, req| {
clone!(logger, get_ctx, init_ctx, systems_weak, interner_weak, decls, msg_logger);
async move {
let interner_cell = interner_weak.upgrade().expect("Interner dropped before request");
let interner =
interner_cell.borrow().clone().expect("Request arrived before interner set");
if !matches!(req, api::HostExtReq::AtomReq(api::AtomReq::AtomPrint(_))) { if !matches!(req, api::HostExtReq::AtomReq(api::AtomReq::AtomPrint(_))) {
writeln!(msg_logger, "{} extension received request {req:?}", data.name); writeln!(log("msg"), "{} extension received request {req:?}", self.name).await;
} }
match req { match req {
api::HostExtReq::SystemDrop(sys_drop) => { api::HostExtReq::SystemDrop(sys_drop) => {
if let Some(rc) = systems_weak.upgrade() { SYSTEM_TABLE.with(|l| l.borrow_mut().remove(&sys_drop.0));
mem::drop(rc.write().await.remove(&sys_drop.0)) handle.reply(&sys_drop, &()).await
}
hand.handle(&sys_drop, &()).await
}, },
api::HostExtReq::AtomDrop(atom_drop @ api::AtomDrop(sys_id, atom)) => api::HostExtReq::AtomDrop(atom_drop @ api::AtomDrop(sys_id, atom)) =>
with_ctx(get_ctx(sys_id).await, async move { with_sys_record(sys_id, async {
take_atom(atom).await.dyn_free().await; take_atom(atom).await.dyn_free().await;
hand.handle(&atom_drop, &()).await handle.reply(&atom_drop, &()).await
}) })
.await, .await,
api::HostExtReq::Ping(ping @ api::Ping) => hand.handle(&ping, &()).await, api::HostExtReq::Ping(ping @ api::Ping) => handle.reply(&ping, &()).await,
api::HostExtReq::Sweep(sweep @ api::Sweep) => api::HostExtReq::Sweep(api::Sweep) => todo!(),
hand.handle(&sweep, &interner.sweep_replica().await).await,
api::HostExtReq::SysReq(api::SysReq::NewSystem(new_sys)) => { api::HostExtReq::SysReq(api::SysReq::NewSystem(new_sys)) => {
let (sys_id, _) = (decls.iter().enumerate().find(|(_, s)| s.id == new_sys.system)) let (ctor_idx, _) =
(decls.iter().enumerate().find(|(_, s)| s.id == new_sys.system))
.expect("NewSystem call received for invalid system"); .expect("NewSystem call received for invalid system");
let cted = data.systems[sys_id].new_system(&new_sys); let cted = self.systems[ctor_idx].new_system(&new_sys);
with_ctx(init_ctx(new_sys.id, cted.clone(), hand.reqnot()).await, async move { let record = Rc::new(SystemRecord { cted: cted.clone() });
SYSTEM_TABLE.with(|tbl| {
let mut g = tbl.borrow_mut();
g.insert(new_sys.id, record);
});
with_sys_record(new_sys.id, async {
let lex_filter = let lex_filter =
cted.inst().dyn_lexers().iter().fold(api::CharFilter(vec![]), |cf, lx| { cted.inst().dyn_lexers().iter().fold(api::CharFilter(vec![]), |cf, lx| {
char_filter_union(&cf, &mk_char_filter(lx.char_filter().iter().cloned())) char_filter_union(&cf, &mk_char_filter(lx.char_filter().iter().cloned()))
}); });
let lazy_members = Mutex::new(HashMap::new());
let const_root = stream::iter(cted.inst().dyn_env().await) let const_root = stream::iter(cted.inst().dyn_env().await)
.then(|mem| { .then(async |mem| {
let lazy_mems = &lazy_members; let name = is(&mem.name).await;
async move {
let name = i().i(&mem.name).await;
let mut tia_ctx = TreeIntoApiCtxImpl { let mut tia_ctx = TreeIntoApiCtxImpl {
lazy_members: &mut *lazy_mems.lock().await,
basepath: &[], basepath: &[],
path: Substack::Bottom.push(name.clone()), path: Substack::Bottom.push(name.clone()),
}; };
(name.to_api(), mem.kind.into_api(&mut tia_ctx).await) (name.to_api(), mem.kind.into_api(&mut tia_ctx).await)
}
}) })
.collect() .collect()
.await; .await;
let prelude = let prelude =
cted.inst().dyn_prelude().await.iter().map(|sym| sym.to_api()).collect(); cted.inst().dyn_prelude().await.iter().map(|sym| sym.to_api()).collect();
let record = SystemRecord { ctx: ctx(), lazy_members };
let systems = systems_weak.upgrade().expect("System constructed during shutdown");
systems.write().await.insert(new_sys.id, record);
let line_types = join_all( let line_types = join_all(
(cted.inst().dyn_parsers().iter()) (cted.inst().dyn_parsers().iter())
.map(|p| async { interner.i(p.line_head()).await.to_api() }), .map(async |p| is(p.line_head()).await.to_api()),
) )
.await; .await;
let response = let response =
api::NewSystemResponse { lex_filter, const_root, line_types, prelude }; api::NewSystemResponse { lex_filter, const_root, line_types, prelude };
hand.handle(&new_sys, &response).await handle.reply(&new_sys, &response).await
}) })
.await .await
}, },
api::HostExtReq::GetMember(get_tree @ api::GetMember(sys_id, tree_id)) => api::HostExtReq::GetMember(get_tree @ api::GetMember(sys_id, tree_id)) =>
with_ctx(get_ctx(sys_id).await, async move { with_sys_record(sys_id, async {
let systems = systems_weak.upgrade().expect("Member queried during shutdown"); let (path, tree) = get_lazy(tree_id).await;
let systems_g = systems.read().await; let mut tia_ctx =
let mut lazy_members = TreeIntoApiCtxImpl { path: Substack::Bottom, basepath: &path[..] };
systems_g.get(&sys_id).expect("System not found").lazy_members.lock().await; handle.reply(&get_tree, &tree.into_api(&mut tia_ctx).await).await
let (path, cb) = match lazy_members.insert(tree_id, MemberRecord::Res) {
None => panic!("Tree for ID not found"),
Some(MemberRecord::Res) => panic!("This tree has already been transmitted"),
Some(MemberRecord::Gen(path, cb)) => (path, cb),
};
let tree = cb.build(Sym::new(path.clone(), &interner).await.unwrap()).await;
let mut tia_ctx = TreeIntoApiCtxImpl {
path: Substack::Bottom,
basepath: &path,
lazy_members: &mut lazy_members,
};
hand.handle(&get_tree, &tree.into_api(&mut tia_ctx).await).await
}) })
.await, .await,
api::HostExtReq::SysReq(api::SysReq::SysFwded(fwd)) => { api::HostExtReq::SysReq(api::SysReq::SysFwded(fwd)) => {
let fwd_tok = hand.will_handle_as(&fwd); let fwd_tok = Witness::of(&fwd);
let api::SysFwded(sys_id, payload) = fwd; let api::SysFwded(sys_id, payload) = fwd;
let ctx = get_ctx(sys_id).await; with_sys_record(sys_id, async {
with_ctx(ctx.clone(), async move { struct TrivialReqCycle<'a> {
let sys = ctx.cted().inst(); req: &'a [u8],
let reply = Rc::new(RefCell::new(None)); rep: &'a mut Vec<u8>,
let reply2 = reply.clone(); }
let sub_hand = ExtReq::new(hand.reqnot(), async move |v| { impl<'a> ReqReader<'a> for TrivialReqCycle<'a> {
reply2.borrow_mut().replace(v); fn reader(&mut self) -> Pin<&mut dyn AsyncRead> {
}); Pin::new(&mut self.req) as Pin<&mut _>
sys.dyn_request(sub_hand, payload).await; }
let reply_buf = fn finish(self: Box<Self>) -> LocalBoxFuture<'a, Box<dyn ReqHandle<'a> + 'a>> {
reply.borrow_mut().take().expect("Request discarded but did not throw"); Box::pin(async { self as Box<_> })
hand.handle_as(fwd_tok, &reply_buf).await }
}
impl<'a> ReqHandle<'a> for TrivialReqCycle<'a> {
fn start_reply(
self: Box<Self>,
) -> LocalBoxFuture<'a, io::Result<Box<dyn RepWriter<'a> + 'a>>> {
Box::pin(async { Ok(self as Box<_>) })
}
}
impl<'a> RepWriter<'a> for TrivialReqCycle<'a> {
fn writer(&mut self) -> Pin<&mut dyn AsyncWrite> {
Pin::new(&mut self.rep) as Pin<&mut _>
}
fn finish(
self: Box<Self>,
) -> LocalBoxFuture<'a, io::Result<orchid_base::reqnot::Receipt<'a>>>
{
Box::pin(async { Ok(Receipt::_new()) })
}
}
let mut reply = Vec::new();
let req = TrivialReqCycle { req: &payload, rep: &mut reply };
let _ = cted().inst().dyn_request(Box::new(req)).await;
handle.reply(fwd_tok, &reply).await
}) })
.await .await
}, },
api::HostExtReq::LexExpr(lex @ api::LexExpr { sys, src, text, pos, id }) => api::HostExtReq::LexExpr(lex @ api::LexExpr { sys, src, text, pos, id }) =>
with_ctx(get_ctx(sys).await, async move { with_sys_record(sys, async {
let text = Tok::from_api(text, &i()).await; let text = es(text).await;
let src = Sym::from_api(src, &i()).await; let src = Sym::from_api(src).await;
let rep = Reporter::new();
let expr_store = BorrowedExprStore::new(); let expr_store = BorrowedExprStore::new();
let trigger_char = text.chars().nth(pos as usize).unwrap(); let trigger_char = text.chars().nth(pos as usize).unwrap();
let ekey_na = ekey_not_applicable().await; let ekey_na = ekey_not_applicable().await;
let ekey_cascade = ekey_cascade().await; let ekey_cascade = ekey_cascade().await;
let lexers = ctx().cted().inst().dyn_lexers(); let lexers = cted().inst().dyn_lexers();
for lx in lexers.iter().filter(|l| char_filter_match(l.char_filter(), trigger_char)) for lx in
lexers.iter().filter(|l| char_filter_match(l.char_filter(), trigger_char))
{ {
let ctx = LexContext::new(&expr_store, &text, id, pos, src.clone(), &rep); let ctx = LexContext::new(&expr_store, &text, id, pos, src.clone());
match lx.lex(&text[pos as usize..], &ctx).await { match try_with_reporter(lx.lex(&text[pos as usize..], &ctx)).await {
Err(e) if e.any(|e| *e == ekey_na) => continue, Err(e) if e.any(|e| *e == ekey_na) => continue,
Err(e) => { Err(e) => {
let eopt = e.keep_only(|e| *e != ekey_cascade).map(|e| Err(e.to_api())); let eopt = e.keep_only(|e| *e != ekey_cascade).map(|e| Err(e.to_api()));
expr_store.dispose().await; expr_store.dispose().await;
return hand.handle(&lex, &eopt).await; return handle.reply(&lex, &eopt).await;
}, },
Ok((s, expr)) => { Ok((s, expr)) => {
let expr = expr.into_api(&mut (), &mut ()).await; let expr = expr.into_api(&mut (), &mut ()).await;
let pos = (text.len() - s.len()) as u32; let pos = (text.len() - s.len()) as u32;
expr_store.dispose().await; expr_store.dispose().await;
return hand.handle(&lex, &Some(Ok(api::LexedExpr { pos, expr }))).await; return handle.reply(&lex, &Some(Ok(api::LexedExpr { pos, expr }))).await;
}, },
} }
} }
writeln!(logger, "Got notified about n/a character '{trigger_char}'"); writeln!(log("warn"), "Got notified about n/a character '{trigger_char}'").await;
expr_store.dispose().await; expr_store.dispose().await;
hand.handle(&lex, &None).await handle.reply(&lex, &None).await
}) })
.await, .await,
api::HostExtReq::ParseLine(pline) => { api::HostExtReq::ParseLine(pline) => {
let api::ParseLine { module, src, exported, comments, sys, line, idx } = &pline; let api::ParseLine { module, src, exported, comments, sys, line, idx } = &pline;
with_ctx(get_ctx(*sys).await, async { with_sys_record(*sys, async {
let parsers = ctx().cted().inst().dyn_parsers(); let parsers = cted().inst().dyn_parsers();
let src = Sym::from_api(*src, &i()).await; let src = Sym::from_api(*src).await;
let comments = let comments =
join_all(comments.iter().map(|c| Comment::from_api(c, src.clone(), &interner))) join_all(comments.iter().map(|c| Comment::from_api(c, src.clone()))).await;
.await;
let expr_store = BorrowedExprStore::new(); let expr_store = BorrowedExprStore::new();
let line: Vec<PTokTree> = let line: Vec<PTokTree> =
ttv_from_api(line, &mut &expr_store, &mut (), &src, &i()).await; ttv_from_api(line, &mut &expr_store, &mut (), &src).await;
let snip = Snippet::new(line.first().expect("Empty line"), &line); let snip = Snippet::new(line.first().expect("Empty line"), &line);
let parser = parsers[*idx as usize]; let parser = parsers[*idx as usize];
let module = Sym::from_api(*module, &i()).await; let module = Sym::from_api(*module).await;
let reporter = Reporter::new(); let pctx = ParsCtx::new(module);
let pctx = ParsCtx::new(module, &reporter); let o_line =
let parse_res = parser.parse(pctx, *exported, comments, snip).await; match try_with_reporter(parser.parse(pctx, *exported, comments, snip)).await {
let o_line = match reporter.merge(parse_res) {
Err(e) => Err(e.to_api()), Err(e) => Err(e.to_api()),
Ok(t) => Ok(linev_into_api(t).await), Ok(t) => Ok(linev_into_api(t).await),
}; };
mem::drop(line); mem::drop(line);
expr_store.dispose().await; expr_store.dispose().await;
hand.handle(&pline, &o_line).await handle.reply(&pline, &o_line).await
}) })
.await .await
}, },
api::HostExtReq::FetchParsedConst(ref fpc @ api::FetchParsedConst(sys, id)) => api::HostExtReq::FetchParsedConst(ref fpc @ api::FetchParsedConst(sys, id)) =>
with_ctx(get_ctx(sys).await, async move { with_sys_record(sys, async {
let cnst = get_const(id).await; let cnst = get_const(id).await;
hand.handle(fpc, &cnst.serialize().await).await handle.reply(fpc, &cnst.serialize().await).await
}) })
.await, .await,
api::HostExtReq::AtomReq(atom_req) => { api::HostExtReq::AtomReq(atom_req) => {
let atom = atom_req.get_atom(); let atom = atom_req.get_atom();
let atom_req = atom_req.clone(); with_sys_record(atom.owner, async {
with_atom_record(&get_ctx, atom, async move |nfo, id, buf| { let (nfo, id, buf) = resolve_atom_type(atom);
let actx = AtomCtx(buf, atom.drop); let actx = AtomCtx(buf, atom.drop);
match &atom_req { match &atom_req {
api::AtomReq::SerializeAtom(ser) => { api::AtomReq::SerializeAtom(ser) => {
let mut buf = enc_vec(&id).await; let mut buf = enc_vec(&id);
match nfo.serialize(actx, Pin::<&mut Vec<_>>::new(&mut buf)).await { match nfo.serialize(actx, Pin::<&mut Vec<_>>::new(&mut buf)).await {
None => hand.handle(ser, &None).await, None => handle.reply(ser, &None).await,
Some(refs) => { Some(refs) => {
let refs = let refs =
join_all(refs.into_iter().map(|ex| async { ex.into_api(&mut ()).await })) join_all(refs.into_iter().map(async |ex| ex.into_api(&mut ()).await))
.await; .await;
hand.handle(ser, &Some((buf, refs))).await handle.reply(ser, &Some((buf, refs))).await
}, },
} }
}, },
api::AtomReq::AtomPrint(print @ api::AtomPrint(_)) => api::AtomReq::AtomPrint(print @ api::AtomPrint(_)) =>
hand.handle(print, &nfo.print(actx).await.to_api()).await, handle.reply(print, &nfo.print(actx).await.to_api()).await,
api::AtomReq::Fwded(fwded) => { api::AtomReq::Fwded(fwded) => {
let api::Fwded(_, key, payload) = &fwded; let api::Fwded(_, key, payload) = &fwded;
let mut reply = Vec::new(); let mut reply = Vec::new();
let key = Sym::from_api(*key, &interner).await; let key = Sym::from_api(*key).await;
let some = nfo let some = nfo
.handle_req( .handle_req(
actx, actx,
@@ -356,7 +355,7 @@ pub fn extension_init(
Pin::<&mut Vec<_>>::new(&mut reply), Pin::<&mut Vec<_>>::new(&mut reply),
) )
.await; .await;
hand.handle(fwded, &some.then_some(reply)).await handle.reply(fwded, &some.then_some(reply)).await
}, },
api::AtomReq::CallRef(call @ api::CallRef(_, arg)) => { api::AtomReq::CallRef(call @ api::CallRef(_, arg)) => {
let expr_store = BorrowedExprStore::new(); let expr_store = BorrowedExprStore::new();
@@ -365,7 +364,7 @@ pub fn extension_init(
let api_expr = ret.serialize().await; let api_expr = ret.serialize().await;
mem::drop(expr_handle); mem::drop(expr_handle);
expr_store.dispose().await; expr_store.dispose().await;
hand.handle(call, &api_expr).await handle.reply(call, &api_expr).await
}, },
api::AtomReq::FinalCall(call @ api::FinalCall(_, arg)) => { api::AtomReq::FinalCall(call @ api::FinalCall(_, arg)) => {
let expr_store = BorrowedExprStore::new(); let expr_store = BorrowedExprStore::new();
@@ -374,15 +373,15 @@ pub fn extension_init(
let api_expr = ret.serialize().await; let api_expr = ret.serialize().await;
mem::drop(expr_handle); mem::drop(expr_handle);
expr_store.dispose().await; expr_store.dispose().await;
hand.handle(call, &api_expr).await handle.reply(call, &api_expr).await
}, },
api::AtomReq::Command(cmd @ api::Command(_)) => match nfo.command(actx).await { api::AtomReq::Command(cmd @ api::Command(_)) => match nfo.command(actx).await {
Err(e) => hand.handle(cmd, &Err(e.to_api())).await, Err(e) => handle.reply(cmd, &Err(e.to_api())).await,
Ok(opt) => match opt { Ok(opt) => match opt {
None => hand.handle(cmd, &Ok(api::NextStep::Halt)).await, None => handle.reply(cmd, &Ok(api::NextStep::Halt)).await,
Some(cont) => { Some(cont) => {
let cont = cont.serialize().await; let cont = cont.serialize().await;
hand.handle(cmd, &Ok(api::NextStep::Continue(cont))).await handle.reply(cmd, &Ok(api::NextStep::Continue(cont))).await
}, },
}, },
}, },
@@ -392,41 +391,45 @@ pub fn extension_init(
}, },
api::HostExtReq::DeserAtom(deser) => { api::HostExtReq::DeserAtom(deser) => {
let api::DeserAtom(sys, buf, refs) = &deser; let api::DeserAtom(sys, buf, refs) = &deser;
let mut read = &mut &buf[..]; let read = &mut &buf[..];
let ctx = get_ctx(*sys).await; with_sys_record(*sys, async {
// SAFETY: deserialization implicitly grants ownership to previously owned exprs // SAFETY: deserialization implicitly grants ownership to previously owned exprs
let refs = (refs.iter()) let refs = (refs.iter())
.map(|tk| Expr::from_handle(ExprHandle::deserialize(*tk))) .map(|tk| Expr::from_handle(ExprHandle::deserialize(*tk)))
.collect_vec(); .collect_vec();
let id = AtomTypeId::decode(Pin::new(&mut read)).await; let id = AtomTypeId::decode_slice(read);
let inst = ctx.cted().inst(); let nfo = atom_by_idx(cted().inst().card(), id)
let nfo = atom_by_idx(inst.card(), id).expect("Deserializing atom with invalid ID"); .expect("Deserializing atom with invalid ID");
hand.handle(&deser, &nfo.deserialize(read, &refs).await).await handle.reply(&deser, &nfo.deserialize(read, &refs).await).await
})
.await
}, },
} }
} })
.boxed_local() .await
}
}, },
); );
*interner_cell.borrow_mut() = // add essential services to the very tail, then fold all context into the run
Some(Interner::new_replica(rn.clone().map(|ir: api::IntReq| ir.into_root()))); // future
spawner(Box::pin(clone!(spawner; async move { SYSTEM_TABLE
let mut streams = stream_select! { in_recv.map(Some), exit_recv.map(|_| None) }; .scope(
while let Some(item) = streams.next().await { RefCell::default(),
match item { with_interner(
Some(rcvd) => spawner(Box::pin(clone!(rn; async move { rn.receive(&rcvd[..]).await }))), new_interner(),
None => break, with_logger(
} logger2,
} with_comm(
}))); Rc::new(client),
ExtInit { comm_ctx,
header: ext_header, (self.context.into_iter()).fold(
port: Box::new(ExtensionOwner { Box::pin(async { extension_fut.await.unwrap() }) as LocalBoxFuture<()>,
out_recv: Mutex::new(out_recv), |fut, cx| cx.apply(fut),
out_send, ),
_interner_cell: interner_cell, ),
_systems_lock: systems_lock, ),
}), ),
)
.await;
}) as Pin<Box<_>>);
} }
} }
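The comment in this hunk ("add essential services to the very tail, then fold all context into the run future") summarizes the new context plumbing: each ambient service is exposed as a function that wraps a LocalBoxFuture in a task-local scope, and a list of such wrappers is folded over the innermost run future. A minimal sketch of that idiom follows; it reuses the task_local scope/with API exactly as it appears in the hunks above, while REQUEST_ID, with_request_id, and run are made-up illustrations, not part of the extension API.

use futures::FutureExt;
use futures::future::LocalBoxFuture;
use task_local::task_local;

task_local! {
  // hypothetical ambient value, standing in for the interner/logger/comm scopes above
  static REQUEST_ID: u64;
}

// each service exposes a wrapper of this shape, mirroring with_funs_ctx / with_refl_roots
fn with_request_id<'a>(id: u64, fut: LocalBoxFuture<'a, ()>) -> LocalBoxFuture<'a, ()> {
  Box::pin(REQUEST_ID.scope(id, fut))
}

async fn run() {
  // nested code reads the ambient value without threading a context object through
  let id = REQUEST_ID.with(|id| *id);
  println!("handling request {id}");
}

fn main() {
  // fold the wrappers over the innermost future; the last wrapper applied is the outermost scope
  let wrappers: Vec<Box<dyn Fn(LocalBoxFuture<'static, ()>) -> LocalBoxFuture<'static, ()>>> =
    vec![Box::new(|fut| with_request_id(7, fut))];
  let composed = wrappers.into_iter().fold(run().boxed_local(), |fut, wrap| wrap(fut));
  futures::executor::block_on(composed);
}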

View File

@@ -10,12 +10,13 @@ use hashbrown::HashSet;
use orchid_base::error::OrcErrv; use orchid_base::error::OrcErrv;
use orchid_base::format::{FmtCtx, FmtUnit, Format}; use orchid_base::format::{FmtCtx, FmtUnit, Format};
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::reqnot::Requester; use orchid_base::stash::stash;
use crate::api; use crate::api;
use crate::atom::ForeignAtom; use crate::atom::ForeignAtom;
use crate::context::{ctx, i}; use crate::entrypoint::{notify, request};
use crate::gen_expr::{GExpr, GExprKind}; use crate::gen_expr::{GExpr, GExprKind};
use crate::system::sys_id;
pub struct BorrowedExprStore(RefCell<Option<HashSet<Rc<ExprHandle>>>>); pub struct BorrowedExprStore(RefCell<Option<HashSet<Rc<ExprHandle>>>>);
impl BorrowedExprStore { impl BorrowedExprStore {
@@ -73,7 +74,7 @@ impl ExprHandle {
/// to lend the expr, and you expect the receiver to use /// to lend the expr, and you expect the receiver to use
/// [ExprHandle::borrowed] or [ExprHandle::from_ticket] /// [ExprHandle::borrowed] or [ExprHandle::from_ticket]
pub fn ticket(&self) -> api::ExprTicket { self.0 } pub fn ticket(&self) -> api::ExprTicket { self.0 }
async fn send_acq(&self) { ctx().reqnot().notify(api::Acquire(ctx().sys_id(), self.0)).await } async fn send_acq(&self) { notify(api::Acquire(sys_id(), self.0)).await }
/// If this is the last reference, do nothing, otherwise send an Acquire /// If this is the last reference, do nothing, otherwise send an Acquire
pub async fn on_borrow_expire(self: Rc<Self>) { self.serialize().await; } pub async fn on_borrow_expire(self: Rc<Self>) { self.serialize().await; }
/// Drop the handle and get the ticket without a release notification. /// Drop the handle and get the ticket without a release notification.
@@ -94,8 +95,8 @@ impl fmt::Debug for ExprHandle {
} }
impl Drop for ExprHandle { impl Drop for ExprHandle {
fn drop(&mut self) { fn drop(&mut self) {
let notif = api::Release(ctx().sys_id(), self.0); let notif = api::Release(sys_id(), self.0);
ctx().spawn(async move { ctx().reqnot().clone().notify(notif).await }) stash(async move { notify(notif).await })
} }
} }
@@ -117,12 +118,12 @@ impl Expr {
} }
pub async fn data(&self) -> &ExprData { pub async fn data(&self) -> &ExprData {
(self.data.get_or_init(async { (self.data.get_or_init(async {
let details = ctx().reqnot().request(api::Inspect { target: self.handle.ticket() }).await; let details = request(api::Inspect { target: self.handle.ticket() }).await;
let pos = Pos::from_api(&details.location, &i()).await; let pos = Pos::from_api(&details.location).await;
let kind = match details.kind { let kind = match details.kind {
api::InspectedKind::Atom(a) => api::InspectedKind::Atom(a) =>
ExprKind::Atom(ForeignAtom::new(self.handle.clone(), a, pos.clone())), ExprKind::Atom(ForeignAtom::new(self.handle.clone(), a, pos.clone())),
api::InspectedKind::Bottom(b) => ExprKind::Bottom(OrcErrv::from_api(&b, &i()).await), api::InspectedKind::Bottom(b) => ExprKind::Bottom(OrcErrv::from_api(&b).await),
api::InspectedKind::Opaque => ExprKind::Opaque, api::InspectedKind::Opaque => ExprKind::Opaque,
}; };
ExprData { pos, kind } ExprData { pos, kind }
@@ -150,8 +151,7 @@ impl Format for Expr {
match &self.data().await.kind { match &self.data().await.kind {
ExprKind::Opaque => "OPAQUE".to_string().into(), ExprKind::Opaque => "OPAQUE".to_string().into(),
ExprKind::Bottom(b) => format!("Bottom({b})").into(), ExprKind::Bottom(b) => format!("Bottom({b})").into(),
ExprKind::Atom(a) => ExprKind::Atom(a) => FmtUnit::from_api(&request(api::ExtAtomPrint(a.atom.clone())).await),
FmtUnit::from_api(&ctx().reqnot().request(api::ExtAtomPrint(a.atom.clone())).await),
} }
} }
} }
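The Drop impl for ExprHandle above cannot await, so it hands the Release notification to `stash` and the queued work is drained later at an explicit point. A minimal sketch of that "queue async work from a sync callback" shape is below; the thread-local queue, Handle, and drain_stash are illustrative stand-ins and say nothing about how orchid_base::stash is actually implemented.

use std::cell::RefCell;
use std::future::Future;
use futures::FutureExt;
use futures::future::LocalBoxFuture;

thread_local! {
  // illustrative queue; the real stash lives in orchid-base and is drained by the entrypoint
  static STASHED: RefCell<Vec<LocalBoxFuture<'static, ()>>> = RefCell::new(Vec::new());
}

// callable from synchronous code such as Drop; only enqueues the future
fn stash(fut: impl Future<Output = ()> + 'static) {
  STASHED.with(|q| q.borrow_mut().push(fut.boxed_local()));
}

struct Handle(u64);

impl Drop for Handle {
  fn drop(&mut self) {
    let id = self.0;
    // no executor is available here, so the notification is deferred
    stash(async move { println!("release {id}") });
  }
}

// the owner of the event loop awaits the queued work at a well-defined point
async fn drain_stash() {
  while let Some(fut) = STASHED.with(|q| q.borrow_mut().pop()) {
    fut.await;
  }
}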

View File

@@ -0,0 +1,12 @@
use std::pin::Pin;
use std::rc::Rc;
use futures::future::LocalBoxFuture;
use futures::{AsyncRead, AsyncWrite};
pub struct ExtPort {
pub input: Pin<Box<dyn AsyncRead>>,
pub output: Pin<Box<dyn AsyncWrite>>,
pub log: Pin<Box<dyn AsyncWrite>>,
pub spawn: Rc<dyn Fn(LocalBoxFuture<'static, ()>)>,
}

View File

@@ -1,12 +1,12 @@
use std::any::TypeId; use std::any::TypeId;
use std::borrow::Cow; use std::borrow::Cow;
use std::cell::RefCell;
use std::collections::HashMap; use std::collections::HashMap;
use std::future::Future; use std::future::Future;
use std::pin::Pin; use std::pin::Pin;
use std::rc::Rc; use std::rc::Rc;
use futures::future::LocalBoxFuture; use futures::future::LocalBoxFuture;
use futures::lock::Mutex;
use futures::{AsyncWrite, FutureExt}; use futures::{AsyncWrite, FutureExt};
use itertools::Itertools; use itertools::Itertools;
use never::Never; use never::Never;
@@ -15,15 +15,17 @@ use orchid_base::clone;
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use orchid_base::format::{FmtCtx, FmtUnit}; use orchid_base::format::{FmtCtx, FmtUnit};
use orchid_base::name::Sym; use orchid_base::name::Sym;
use task_local::task_local;
use trait_set::trait_set; use trait_set::trait_set;
use crate::api;
use crate::atom::Atomic; use crate::atom::Atomic;
use crate::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant}; use crate::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
use crate::context::{SysCtxEntry, ctx, i};
use crate::conv::ToExpr; use crate::conv::ToExpr;
use crate::coroutine_exec::{ExecHandle, exec}; use crate::coroutine_exec::{ExecHandle, exec};
use crate::expr::Expr; use crate::expr::Expr;
use crate::gen_expr::GExpr; use crate::gen_expr::GExpr;
use crate::system::sys_id;
trait_set! { trait_set! {
trait FunCB = Fn(Vec<Expr>) -> LocalBoxFuture<'static, OrcRes<GExpr>> + 'static; trait FunCB = Fn(Vec<Expr>) -> LocalBoxFuture<'static, OrcRes<GExpr>> + 'static;
@@ -34,9 +36,14 @@ pub trait ExprFunc<I, O>: Clone + 'static {
fn apply<'a>(&self, hand: ExecHandle<'a>, v: Vec<Expr>) -> impl Future<Output = OrcRes<GExpr>>; fn apply<'a>(&self, hand: ExecHandle<'a>, v: Vec<Expr>) -> impl Future<Output = OrcRes<GExpr>>;
} }
#[derive(Default)] task_local! {
struct FunsCtx(Mutex<HashMap<Sym, FunRecord>>); static FUNS_CTX: RefCell<HashMap<(api::SysId, Sym), FunRecord>>;
impl SysCtxEntry for FunsCtx {} }
pub(crate) fn with_funs_ctx<'a>(fut: LocalBoxFuture<'a, ()>) -> LocalBoxFuture<'a, ()> {
Box::pin(FUNS_CTX.scope(RefCell::default(), fut))
}
#[derive(Clone)] #[derive(Clone)]
struct FunRecord { struct FunRecord {
argtyps: &'static [TypeId], argtyps: &'static [TypeId],
@@ -77,17 +84,17 @@ pub(crate) struct Fun {
} }
impl Fun { impl Fun {
pub async fn new<I, O, F: ExprFunc<I, O>>(path: Sym, f: F) -> Self { pub async fn new<I, O, F: ExprFunc<I, O>>(path: Sym, f: F) -> Self {
let ctx = ctx(); FUNS_CTX.with(|cx| {
let funs: &FunsCtx = ctx.get_or_default(); let mut fung = cx.borrow_mut();
let mut fung = funs.0.lock().await; let record = if let Some(record) = fung.get(&(sys_id(), path.clone())) {
let record = if let Some(record) = fung.get(&path) {
record.clone() record.clone()
} else { } else {
let record = process_args(f); let record = process_args(f);
fung.insert(path.clone(), record.clone()); fung.insert((sys_id(), path.clone()), record.clone());
record record
}; };
Self { args: vec![], path, record } Self { args: vec![], path, record }
})
} }
pub fn arity(&self) -> u8 { self.record.argtyps.len() as u8 } pub fn arity(&self) -> u8 { self.record.argtyps.len() as u8 }
} }
@@ -108,12 +115,12 @@ impl OwnedAtom for Fun {
} }
async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await } async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await }
async fn serialize(&self, write: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs { async fn serialize(&self, write: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
self.path.to_api().encode(write).await; self.path.to_api().encode(write).await.unwrap();
self.args.clone() self.args.clone()
} }
async fn deserialize(mut ds_cx: impl DeserializeCtx, args: Self::Refs) -> Self { async fn deserialize(mut ds_cx: impl DeserializeCtx, args: Self::Refs) -> Self {
let path = Sym::from_api(ds_cx.decode().await, &i()).await; let path = Sym::from_api(ds_cx.decode().await).await;
let record = (ctx().get::<FunsCtx>().0.lock().await.get(&path)) let record = (FUNS_CTX.with(|funs| funs.borrow().get(&(sys_id(), path.clone())).cloned()))
.expect("Function missing during deserialization") .expect("Function missing during deserialization")
.clone(); .clone();
Self { args, path, record } Self { args, path, record }

View File

@@ -6,12 +6,11 @@ use orchid_base::error::{OrcErr, OrcErrv};
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants}; use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::reqnot::Requester;
use orchid_base::{match_mapping, tl_cache}; use orchid_base::{match_mapping, tl_cache};
use crate::api; use crate::api;
use crate::atom::{AtomFactory, ToAtom}; use crate::atom::{AtomFactory, ToAtom};
use crate::context::ctx; use crate::entrypoint::request;
use crate::expr::Expr; use crate::expr::Expr;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@@ -40,7 +39,7 @@ impl GExpr {
} }
pub fn at(self, pos: Pos) -> Self { GExpr { pos, kind: self.kind } } pub fn at(self, pos: Pos) -> Self { GExpr { pos, kind: self.kind } }
pub async fn create(self) -> Expr { pub async fn create(self) -> Expr {
Expr::deserialize(ctx().reqnot().request(api::Create(self.serialize().await)).await).await Expr::deserialize(request(api::Create(self.serialize().await)).await).await
} }
} }
impl Format for GExpr { impl Format for GExpr {

View File

@@ -0,0 +1,50 @@
use std::rc::Rc;
use futures::future::{LocalBoxFuture, join_all, ready};
use itertools::Itertools;
use orchid_base::interner::local_interner::{Int, StrBranch, StrvBranch};
use orchid_base::interner::{IStr, IStrv, InternerSrv};
use crate::api;
use crate::entrypoint::{MUTE_REPLY, request};
#[derive(Default)]
struct ExtInterner {
str: Int<StrBranch>,
strv: Int<StrvBranch>,
}
impl InternerSrv for ExtInterner {
fn is<'a>(&'a self, v: &'a str) -> LocalBoxFuture<'a, IStr> {
match self.str.i(v) {
Ok(i) => Box::pin(ready(i)),
Err(e) => Box::pin(async {
e.set_if_empty(MUTE_REPLY.scope((), request(api::InternStr(v.to_owned()))).await)
}),
}
}
fn es(&self, t: api::TStr) -> LocalBoxFuture<'_, IStr> {
match self.str.e(t) {
Ok(i) => Box::pin(ready(i)),
Err(e) => Box::pin(async move { e.set_if_empty(Rc::new(request(api::ExternStr(t)).await)) }),
}
}
fn iv<'a>(&'a self, v: &'a [IStr]) -> LocalBoxFuture<'a, IStrv> {
match self.strv.i(v) {
Ok(i) => Box::pin(ready(i)),
Err(e) => Box::pin(async {
e.set_if_empty(request(api::InternStrv(v.iter().map(|is| is.to_api()).collect_vec())).await)
}),
}
}
fn ev(&self, t: orchid_api::TStrv) -> LocalBoxFuture<'_, IStrv> {
match self.strv.e(t) {
Ok(i) => Box::pin(ready(i)),
Err(e) => Box::pin(async move {
let tstr_v = request(api::ExternStrv(t)).await;
e.set_if_empty(Rc::new(join_all(tstr_v.into_iter().map(|t| self.es(t))).await))
}),
}
}
}
pub fn new_interner() -> Rc<dyn InternerSrv> { Rc::<ExtInterner>::default() }

View File

@@ -1,27 +1,24 @@
use std::fmt; use std::fmt;
use std::fmt::Debug;
use std::future::Future; use std::future::Future;
use std::ops::RangeInclusive; use std::ops::RangeInclusive;
use futures::FutureExt; use futures::FutureExt;
use futures::future::LocalBoxFuture; use futures::future::LocalBoxFuture;
use orchid_base::error::{OrcErrv, OrcRes, Reporter, mk_errv}; use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
use orchid_base::interner::{Interner, Tok}; use orchid_base::interner::{IStr, is};
use orchid_base::location::{Pos, SrcRange}; use orchid_base::location::{Pos, SrcRange};
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::parse::ParseCtx;
use orchid_base::reqnot::Requester;
use crate::api; use crate::api;
use crate::context::{ctx, i}; use crate::entrypoint::request;
use crate::expr::BorrowedExprStore; use crate::expr::BorrowedExprStore;
use crate::parser::PTokTree; use crate::parser::PTokTree;
use crate::tree::GenTokTree; use crate::tree::GenTokTree;
pub async fn ekey_cascade() -> Tok<String> { pub async fn ekey_cascade() -> IStr { is("An error cascading from a recursive call").await }
i().i("An error cascading from a recursive call").await pub async fn ekey_not_applicable() -> IStr {
} is("Pseudo-error to communicate that the current branch in a dispatch doesn't apply").await
pub async fn ekey_not_applicable() -> Tok<String> {
i().i("Pseudo-error to communicate that the current branch in a dispatch doesn't apply").await
} }
const MSG_INTERNAL_ERROR: &str = "This error is a sentinel for the extension library. \ const MSG_INTERNAL_ERROR: &str = "This error is a sentinel for the extension library. \
It should not be emitted by the extension."; It should not be emitted by the extension.";
@@ -36,23 +33,20 @@ pub async fn err_not_applicable() -> OrcErrv {
pub struct LexContext<'a> { pub struct LexContext<'a> {
pub(crate) exprs: &'a BorrowedExprStore, pub(crate) exprs: &'a BorrowedExprStore,
pub text: &'a Tok<String>, pub text: &'a IStr,
pub id: api::ParsId, pub id: api::ParsId,
pub pos: u32, pub pos: u32,
i: Interner,
pub(crate) src: Sym, pub(crate) src: Sym,
pub(crate) rep: &'a Reporter,
} }
impl<'a> LexContext<'a> { impl<'a> LexContext<'a> {
pub fn new( pub fn new(
exprs: &'a BorrowedExprStore, exprs: &'a BorrowedExprStore,
text: &'a Tok<String>, text: &'a IStr,
id: api::ParsId, id: api::ParsId,
pos: u32, pos: u32,
src: Sym, src: Sym,
rep: &'a Reporter,
) -> Self { ) -> Self {
Self { exprs, i: i(), id, pos, rep, src, text } Self { exprs, id, pos, src, text }
} }
pub fn src(&self) -> &Sym { &self.src } pub fn src(&self) -> &Sym { &self.src }
/// This function returns [PTokTree] because it can never return /// This function returns [PTokTree] because it can never return
@@ -61,10 +55,10 @@ impl<'a> LexContext<'a> {
/// for embedding in the return value. /// for embedding in the return value.
pub async fn recurse(&self, tail: &'a str) -> OrcRes<(&'a str, PTokTree)> { pub async fn recurse(&self, tail: &'a str) -> OrcRes<(&'a str, PTokTree)> {
let start = self.pos(tail); let start = self.pos(tail);
let Some(lx) = ctx().reqnot().request(api::SubLex { pos: start, id: self.id }).await else { let Some(lx) = request(api::SubLex { pos: start, id: self.id }).await else {
return Err(err_cascade().await); return Err(err_cascade().await);
}; };
let tree = PTokTree::from_api(&lx.tree, &mut { self.exprs }, &mut (), &self.src, &i()).await; let tree = PTokTree::from_api(&lx.tree, &mut { self.exprs }, &mut (), &self.src).await;
Ok((&self.text[lx.pos as usize..], tree)) Ok((&self.text[lx.pos as usize..], tree))
} }
@@ -77,12 +71,8 @@ impl<'a> LexContext<'a> {
SrcRange::new(self.pos(tail) - len.try_into().unwrap()..self.pos(tail), &self.src) SrcRange::new(self.pos(tail) - len.try_into().unwrap()..self.pos(tail), &self.src)
} }
} }
impl ParseCtx for LexContext<'_> {
fn i(&self) -> &Interner { &self.i }
fn rep(&self) -> &Reporter { self.rep }
}
pub trait Lexer: Send + Sync + Sized + Default + 'static { pub trait Lexer: Debug + Send + Sync + Sized + Default + 'static {
const CHAR_FILTER: &'static [RangeInclusive<char>]; const CHAR_FILTER: &'static [RangeInclusive<char>];
fn lex<'a>( fn lex<'a>(
tail: &'a str, tail: &'a str,
@@ -90,7 +80,7 @@ pub trait Lexer: Send + Sync + Sized + Default + 'static {
) -> impl Future<Output = OrcRes<(&'a str, GenTokTree)>>; ) -> impl Future<Output = OrcRes<(&'a str, GenTokTree)>>;
} }
pub trait DynLexer: Send + Sync + 'static { pub trait DynLexer: Debug + Send + Sync + 'static {
fn char_filter(&self) -> &'static [RangeInclusive<char>]; fn char_filter(&self) -> &'static [RangeInclusive<char>];
fn lex<'a>( fn lex<'a>(
&self, &self,

View File

@@ -7,11 +7,12 @@ pub mod conv;
pub mod coroutine_exec; pub mod coroutine_exec;
pub mod entrypoint; pub mod entrypoint;
pub mod expr; pub mod expr;
pub mod ext_port;
pub mod func_atom; pub mod func_atom;
pub mod gen_expr; pub mod gen_expr;
pub mod interner;
pub mod lexer; pub mod lexer;
// pub mod msg; pub mod logger;
pub mod context;
pub mod other_system; pub mod other_system;
pub mod parser; pub mod parser;
pub mod reflection; pub mod reflection;
@@ -19,3 +20,4 @@ pub mod system;
pub mod system_ctor; pub mod system_ctor;
pub mod tokio; pub mod tokio;
pub mod tree; pub mod tree;
pub mod binary;

View File

@@ -0,0 +1,57 @@
use std::fmt::Arguments;
use std::fs::File;
use std::io::Write;
use std::rc::Rc;
use futures::future::LocalBoxFuture;
use hashbrown::HashMap;
use orchid_base::interner::is;
use orchid_base::logging::{LogWriter, Logger};
use crate::api;
use crate::entrypoint::notify;
pub struct LogWriterImpl {
category: String,
strat: api::LogStrategy,
}
impl LogWriter for LogWriterImpl {
fn write_fmt<'a>(&'a self, fmt: Arguments<'a>) -> LocalBoxFuture<'a, ()> {
Box::pin(async move {
match &self.strat {
api::LogStrategy::Discard => (),
api::LogStrategy::Default =>
notify(api::Log { category: is(&self.category).await.to_api(), message: fmt.to_string() })
.await,
api::LogStrategy::File { path, .. } => {
let mut file = (File::options().write(true).create(true).truncate(false).open(path))
.unwrap_or_else(|e| panic!("Could not open {path}: {e}"));
file.write_fmt(fmt).unwrap_or_else(|e| panic!("Could not write to {path}: {e}"));
},
}
})
}
}
#[derive(Clone)]
pub struct LoggerImpl {
default: Option<api::LogStrategy>,
routing: HashMap<String, api::LogStrategy>,
}
impl LoggerImpl {
pub fn from_api(api: &api::Logger) -> Self {
Self {
default: api.default.clone(),
routing: api.routing.iter().map(|(k, v)| (k.clone(), v.clone())).collect(),
}
}
}
impl Logger for LoggerImpl {
fn writer(&self, category: &str) -> Rc<dyn LogWriter> {
Rc::new(LogWriterImpl { category: category.to_string(), strat: self.strat(category) })
}
fn strat(&self, category: &str) -> orchid_api::LogStrategy {
(self.routing.get(category).cloned().or(self.default.clone()))
.expect("Unrecognized log category with no default strategy")
}
}

View File

@@ -1,6 +1,7 @@
use crate::api; use crate::api;
use crate::system::{DynSystemCard, SystemCard}; use crate::system::{DynSystemCard, SystemCard};
#[derive(Debug)]
pub struct SystemHandle<C: SystemCard> { pub struct SystemHandle<C: SystemCard> {
pub(crate) card: C, pub(crate) card: C,
pub(crate) id: api::SysId, pub(crate) id: api::SysId,

View File

@@ -5,21 +5,22 @@ use futures::future::{LocalBoxFuture, join_all};
use futures::{FutureExt, Stream, StreamExt}; use futures::{FutureExt, Stream, StreamExt};
use itertools::Itertools; use itertools::Itertools;
use never::Never; use never::Never;
use orchid_base::error::{OrcErrv, OrcRes, Reporter}; use orchid_base::error::{OrcErrv, OrcRes};
use orchid_base::id_store::IdStore; use orchid_base::id_store::IdStore;
use orchid_base::interner::{Interner, Tok}; use orchid_base::interner::IStr;
use orchid_base::location::SrcRange; use orchid_base::location::SrcRange;
use orchid_base::match_mapping; use orchid_base::match_mapping;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::parse::{Comment, ParseCtx, Snippet}; use orchid_base::parse::{Comment, Snippet};
use orchid_base::reqnot::Requester;
use orchid_base::tree::{TokTree, Token, ttv_into_api}; use orchid_base::tree::{TokTree, Token, ttv_into_api};
use task_local::task_local;
use crate::api; use crate::api;
use crate::context::{SysCtxEntry, ctx, i};
use crate::conv::ToExpr; use crate::conv::ToExpr;
use crate::entrypoint::request;
use crate::expr::Expr; use crate::expr::Expr;
use crate::gen_expr::GExpr; use crate::gen_expr::GExpr;
use crate::system::sys_id;
use crate::tree::{GenTok, GenTokTree}; use crate::tree::{GenTok, GenTokTree};
pub type PTok = Token<Expr, Never>; pub type PTok = Token<Expr, Never>;
@@ -82,27 +83,21 @@ pub type ParserObj = &'static dyn DynParser;
pub struct ParsCtx<'a> { pub struct ParsCtx<'a> {
_parse: PhantomData<&'a mut ()>, _parse: PhantomData<&'a mut ()>,
module: Sym, module: Sym,
reporter: &'a Reporter,
i: Interner,
} }
impl<'a> ParsCtx<'a> { impl<'a> ParsCtx<'a> {
pub(crate) fn new(module: Sym, reporter: &'a Reporter) -> Self { pub(crate) fn new(module: Sym) -> Self { Self { _parse: PhantomData, module } }
Self { _parse: PhantomData, module, reporter, i: i() }
}
pub fn module(&self) -> Sym { self.module.clone() } pub fn module(&self) -> Sym { self.module.clone() }
} }
impl ParseCtx for ParsCtx<'_> {
fn i(&self) -> &Interner { &self.i }
fn rep(&self) -> &Reporter { self.reporter }
}
type BoxConstCallback = Box<dyn FnOnce(ConstCtx) -> LocalBoxFuture<'static, GExpr>>; type BoxConstCallback = Box<dyn FnOnce(ConstCtx) -> LocalBoxFuture<'static, GExpr>>;
#[derive(Default)] task_local! {
pub(crate) struct ParsedConstCtxEntry { static PARSED_CONST_CTX: IdStore<BoxConstCallback>
pub(crate) consts: IdStore<BoxConstCallback>, }
pub(crate) fn with_parsed_const_ctx<'a>(fut: LocalBoxFuture<'a, ()>) -> LocalBoxFuture<'a, ()> {
Box::pin(PARSED_CONST_CTX.scope(IdStore::default(), fut))
} }
impl SysCtxEntry for ParsedConstCtxEntry {}
pub struct ParsedLine { pub struct ParsedLine {
pub sr: SrcRange, pub sr: SrcRange,
@@ -114,7 +109,7 @@ impl ParsedLine {
sr: &SrcRange, sr: &SrcRange,
comments: impl IntoIterator<Item = &'a Comment>, comments: impl IntoIterator<Item = &'a Comment>,
exported: bool, exported: bool,
name: Tok<String>, name: IStr,
f: F, f: F,
) -> Self { ) -> Self {
let cb = Box::new(|ctx| async move { f(ctx).await.to_gen().await }.boxed_local()); let cb = Box::new(|ctx| async move { f(ctx).await.to_gen().await }.boxed_local());
@@ -126,7 +121,7 @@ impl ParsedLine {
sr: &SrcRange, sr: &SrcRange,
comments: impl IntoIterator<Item = &'a Comment>, comments: impl IntoIterator<Item = &'a Comment>,
exported: bool, exported: bool,
name: &Tok<String>, name: &IStr,
use_prelude: bool, use_prelude: bool,
lines: impl IntoIterator<Item = ParsedLine>, lines: impl IntoIterator<Item = ParsedLine>,
) -> Self { ) -> Self {
@@ -145,7 +140,7 @@ impl ParsedLine {
exported: mem.exported, exported: mem.exported,
kind: match mem.kind { kind: match mem.kind {
ParsedMemKind::Const(cb) => api::ParsedMemberKind::Constant(api::ParsedConstId( ParsedMemKind::Const(cb) => api::ParsedMemberKind::Constant(api::ParsedConstId(
ctx().get_or_default::<ParsedConstCtxEntry>().consts.add(cb).id(), PARSED_CONST_CTX.with(|consts| consts.add(cb).id()),
)), )),
ParsedMemKind::Mod { lines, use_prelude } => api::ParsedMemberKind::Module { ParsedMemKind::Mod { lines, use_prelude } => api::ParsedMemberKind::Module {
lines: linev_into_api(lines).boxed_local().await, lines: linev_into_api(lines).boxed_local().await,
@@ -170,7 +165,7 @@ pub enum ParsedLineKind {
} }
pub struct ParsedMem { pub struct ParsedMem {
pub name: Tok<String>, pub name: IStr,
pub exported: bool, pub exported: bool,
pub kind: ParsedMemKind, pub kind: ParsedMemKind,
} }
@@ -191,14 +186,14 @@ impl ConstCtx {
) -> impl Stream<Item = OrcRes<Sym>> + 'b { ) -> impl Stream<Item = OrcRes<Sym>> + 'b {
let resolve_names = api::ResolveNames { let resolve_names = api::ResolveNames {
constid: self.constid, constid: self.constid,
sys: ctx().sys_id(), sys: sys_id(),
names: names.into_iter().map(|n| n.to_api()).collect_vec(), names: names.into_iter().map(|n| n.to_api()).collect_vec(),
}; };
stream(async |mut cx| { stream(async |mut cx| {
for name_opt in ctx().reqnot().request(resolve_names).await { for name_opt in request(resolve_names).await {
cx.emit(match name_opt { cx.emit(match name_opt {
Err(e) => Err(OrcErrv::from_api(&e, &i()).await), Err(e) => Err(OrcErrv::from_api(&e).await),
Ok(name) => Ok(Sym::from_api(name, &i()).await), Ok(name) => Ok(Sym::from_api(name).await),
}) })
.await .await
} }
@@ -210,8 +205,7 @@ impl ConstCtx {
} }
pub(crate) async fn get_const(id: api::ParsedConstId) -> GExpr { pub(crate) async fn get_const(id: api::ParsedConstId) -> GExpr {
let cb = (ctx().get_or_default::<ParsedConstCtxEntry>().consts.get(id.0)) let cb = PARSED_CONST_CTX
.expect("Bad ID or double read of parsed const") .with(|ent| ent.get(id.0).expect("Bad ID or double read of parsed const").remove());
.remove();
cb(ConstCtx { constid: id }).await cb(ConstCtx { constid: id }).await
} }

View File

@@ -1,15 +1,18 @@
use std::cell::OnceCell; use std::cell::{OnceCell, RefCell};
use std::rc::Rc; use std::rc::Rc;
use futures::FutureExt; use futures::FutureExt;
use futures::future::LocalBoxFuture;
use futures::lock::Mutex; use futures::lock::Mutex;
use hashbrown::HashMap;
use memo_map::MemoMap; use memo_map::MemoMap;
use orchid_base::interner::Tok; use orchid_base::interner::{IStr, es, iv};
use orchid_base::name::{NameLike, VPath}; use orchid_base::name::{NameLike, VPath};
use orchid_base::reqnot::Requester; use task_local::task_local;
use crate::api; use crate::api;
use crate::context::{SysCtxEntry, ctx, i}; use crate::entrypoint::request;
use crate::system::sys_id;
#[derive(Debug)] #[derive(Debug)]
pub struct ReflMemData { pub struct ReflMemData {
@@ -33,29 +36,28 @@ pub enum ReflMemKind {
pub struct ReflModData { pub struct ReflModData {
inferred: Mutex<bool>, inferred: Mutex<bool>,
path: VPath, path: VPath,
members: MemoMap<Tok<String>, ReflMem>, members: MemoMap<IStr, ReflMem>,
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct ReflMod(Rc<ReflModData>); pub struct ReflMod(Rc<ReflModData>);
impl ReflMod { impl ReflMod {
pub fn path(&self) -> &[Tok<String>] { &self.0.path[..] } pub fn path(&self) -> &[IStr] { &self.0.path[..] }
pub fn is_root(&self) -> bool { self.0.path.is_empty() } pub fn is_root(&self) -> bool { self.0.path.is_empty() }
async fn try_populate(&self) -> Result<(), api::LsModuleError> { async fn try_populate(&self) -> Result<(), api::LsModuleError> {
let path_tok = i().i(&self.0.path[..]).await; let path_tok = iv(&self.0.path[..]).await;
let reply = match ctx().reqnot().request(api::LsModule(ctx().sys_id(), path_tok.to_api())).await let reply = match request(api::LsModule(sys_id(), path_tok.to_api())).await {
{
Err(api::LsModuleError::TreeUnavailable) => Err(api::LsModuleError::TreeUnavailable) =>
panic!("Reflected tree accessed outside an interpreter call. This extension is faulty."), panic!("Reflected tree accessed outside an interpreter call. This extension is faulty."),
Err(err) => return Err(err), Err(err) => return Err(err),
Ok(details) => details, Ok(details) => details,
}; };
for (k, v) in reply.members { for (k, v) in reply.members {
let k = i().ex(k).await; let k = es(k).await;
let mem = match self.0.members.get(&k) { let mem = match self.0.members.get(&k) {
Some(mem) => mem, Some(mem) => mem,
None => { None => {
let path = self.0.path.clone().name_with_suffix(k.clone()).to_sym(&i()).await; let path = self.0.path.clone().name_with_suffix(k.clone()).to_sym().await;
let kind = match v.kind { let kind = match v.kind {
api::MemberInfoKind::Constant => ReflMemKind::Const, api::MemberInfoKind::Constant => ReflMemKind::Const,
api::MemberInfoKind::Module => api::MemberInfoKind::Module =>
@@ -68,7 +70,7 @@ impl ReflMod {
} }
Ok(()) Ok(())
} }
pub async fn get_child(&self, key: &Tok<String>) -> Option<ReflMem> { pub async fn get_child(&self, key: &IStr) -> Option<ReflMem> {
let inferred_g = self.0.inferred.lock().await; let inferred_g = self.0.inferred.lock().await;
if let Some(mem) = self.0.members.get(key) { if let Some(mem) = self.0.members.get(key) {
return Some(mem.clone()); return Some(mem.clone());
@@ -86,7 +88,7 @@ impl ReflMod {
} }
self.0.members.get(key).cloned() self.0.members.get(key).cloned()
} }
pub async fn get_by_path(&self, path: &[Tok<String>]) -> Result<ReflMem, InvalidPathError> { pub async fn get_by_path(&self, path: &[IStr]) -> Result<ReflMem, InvalidPathError> {
let (next, tail) = path.split_first().expect("Attempted to walk by empty path"); let (next, tail) = path.split_first().expect("Attempted to walk by empty path");
let inferred_g = self.0.inferred.lock().await; let inferred_g = self.0.inferred.lock().await;
if let Some(next) = self.0.members.get(next) { if let Some(next) = self.0.members.get(next) {
@@ -130,9 +132,9 @@ impl ReflMod {
} }
} }
#[derive(Clone)] task_local! {
struct ReflRoot(ReflMod); static REFL_ROOTS: RefCell<HashMap<api::SysId, ReflMod>>
impl SysCtxEntry for ReflRoot {} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct InvalidPathError { pub struct InvalidPathError {
@@ -150,8 +152,12 @@ fn default_member(is_root: bool, kind: ReflMemKind) -> ReflMem {
})) }))
} }
fn get_root() -> ReflRoot { pub fn refl() -> ReflMod {
ctx().get_or_insert(|| ReflRoot(default_module(VPath::new([])))).clone() REFL_ROOTS.with(|tbl| {
tbl.borrow_mut().entry(sys_id()).or_insert_with(|| default_module(VPath::new([]))).clone()
})
} }
pub fn refl() -> ReflMod { get_root().0.clone() } pub fn with_refl_roots<'a>(fut: LocalBoxFuture<'a, ()>) -> LocalBoxFuture<'a, ()> {
Box::pin(REFL_ROOTS.scope(RefCell::default(), fut))
}

View File

@@ -1,4 +1,5 @@
use std::any::{Any, TypeId}; use std::any::{Any, TypeId};
use std::fmt::Debug;
use std::future::Future; use std::future::Future;
use std::num::NonZero; use std::num::NonZero;
use std::pin::Pin; use std::pin::Pin;
@@ -8,13 +9,13 @@ use futures::future::LocalBoxFuture;
use orchid_api_traits::{Coding, Decode, Encode, Request}; use orchid_api_traits::{Coding, Decode, Encode, Request};
use orchid_base::boxed_iter::BoxedIter; use orchid_base::boxed_iter::BoxedIter;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::reqnot::{Receipt, Requester}; use orchid_base::reqnot::{Receipt, ReqHandle, ReqReader, ReqReaderExt};
use task_local::task_local;
use crate::api; use crate::api;
use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId, AtomicFeatures, ForeignAtom, TAtom, get_info}; use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId, AtomicFeatures, ForeignAtom, TAtom, get_info};
use crate::context::ctx;
use crate::coroutine_exec::Replier; use crate::coroutine_exec::Replier;
use crate::entrypoint::ExtReq; use crate::entrypoint::request;
use crate::func_atom::{Fun, Lambda}; use crate::func_atom::{Fun, Lambda};
use crate::lexer::LexerObj; use crate::lexer::LexerObj;
use crate::parser::ParserObj; use crate::parser::ParserObj;
@@ -22,7 +23,7 @@ use crate::system_ctor::{CtedObj, SystemCtor};
use crate::tree::GenMember; use crate::tree::GenMember;
/// System as consumed by foreign code /// System as consumed by foreign code
pub trait SystemCard: Default + Send + Sync + 'static { pub trait SystemCard: Debug + Default + Send + Sync + 'static {
type Ctor: SystemCtor; type Ctor: SystemCtor;
type Req: Coding; type Req: Coding;
fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>>; fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>>;
@@ -67,7 +68,7 @@ pub async fn resolv_atom(
sys: &(impl DynSystemCard + ?Sized), sys: &(impl DynSystemCard + ?Sized),
atom: &api::Atom, atom: &api::Atom,
) -> Box<dyn AtomDynfo> { ) -> Box<dyn AtomDynfo> {
let tid = AtomTypeId::decode(Pin::new(&mut &atom.data.0[..])).await; let tid = AtomTypeId::decode(Pin::new(&mut &atom.data.0[..])).await.unwrap();
atom_by_idx(sys, tid).expect("Value of nonexistent type found") atom_by_idx(sys, tid).expect("Value of nonexistent type found")
} }
@@ -84,7 +85,10 @@ pub trait System: Send + Sync + SystemCard + 'static {
fn env() -> impl Future<Output = Vec<GenMember>>; fn env() -> impl Future<Output = Vec<GenMember>>;
fn lexers() -> Vec<LexerObj>; fn lexers() -> Vec<LexerObj>;
fn parsers() -> Vec<ParserObj>; fn parsers() -> Vec<ParserObj>;
fn request(hand: ExtReq<'_>, req: Self::Req) -> impl Future<Output = Receipt<'_>>; fn request<'a>(
hand: Box<dyn ReqHandle<'a> + 'a>,
req: Self::Req,
) -> impl Future<Output = Receipt<'a>>;
} }
pub trait DynSystem: Send + Sync + DynSystemCard + 'static { pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
@@ -92,7 +96,7 @@ pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
fn dyn_env(&self) -> LocalBoxFuture<'_, Vec<GenMember>>; fn dyn_env(&self) -> LocalBoxFuture<'_, Vec<GenMember>>;
fn dyn_lexers(&self) -> Vec<LexerObj>; fn dyn_lexers(&self) -> Vec<LexerObj>;
fn dyn_parsers(&self) -> Vec<ParserObj>; fn dyn_parsers(&self) -> Vec<ParserObj>;
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>>; fn dyn_request<'a>(&self, hand: Box<dyn ReqReader<'a> + 'a>) -> LocalBoxFuture<'a, Receipt<'a>>;
fn card(&self) -> &dyn DynSystemCard; fn card(&self) -> &dyn DynSystemCard;
} }
@@ -101,26 +105,41 @@ impl<T: System> DynSystem for T {
fn dyn_env(&self) -> LocalBoxFuture<'_, Vec<GenMember>> { Self::env().boxed_local() } fn dyn_env(&self) -> LocalBoxFuture<'_, Vec<GenMember>> { Self::env().boxed_local() }
fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() } fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() }
fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() } fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() }
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>> { fn dyn_request<'a>(
&self,
mut hand: Box<dyn ReqReader<'a> + 'a>,
) -> LocalBoxFuture<'a, Receipt<'a>> {
Box::pin(async move { Box::pin(async move {
Self::request(hand, <Self as SystemCard>::Req::decode(Pin::new(&mut &req[..])).await).await let value = hand.read_req::<<Self as SystemCard>::Req>().await.unwrap();
Self::request(hand.finish().await, value).await
}) })
} }
fn card(&self) -> &dyn DynSystemCard { self } fn card(&self) -> &dyn DynSystemCard { self }
} }
#[derive(Clone)]
pub(crate) struct SysCtx(pub api::SysId, pub CtedObj);
task_local! {
static SYS_CTX: SysCtx;
}
pub(crate) async fn with_sys<F: Future>(sys: SysCtx, fut: F) -> F::Output {
SYS_CTX.scope(sys, fut).await
}
pub fn sys_id() -> api::SysId { SYS_CTX.with(|cx| cx.0) }
pub fn cted() -> CtedObj { SYS_CTX.with(|cx| cx.1.clone()) }
pub async fn downcast_atom<A>(foreign: ForeignAtom) -> Result<TAtom<A>, ForeignAtom> pub async fn downcast_atom<A>(foreign: ForeignAtom) -> Result<TAtom<A>, ForeignAtom>
where A: AtomicFeatures { where A: AtomicFeatures {
let mut data = &foreign.atom.data.0[..]; let mut data = &foreign.atom.data.0[..];
let ctx = ctx(); let value = AtomTypeId::decode_slice(&mut data);
let value = AtomTypeId::decode(Pin::new(&mut data)).await; let cted = cted();
let own_inst = ctx.get::<CtedObj>().inst(); let own_inst = cted.inst();
let owner = if *ctx.get::<api::SysId>() == foreign.atom.owner { let owner = if sys_id() == foreign.atom.owner {
own_inst.card() own_inst.card()
} else { } else {
(ctx.get::<CtedObj>().deps().find(|s| s.id() == foreign.atom.owner)) cted.deps().find(|s| s.id() == foreign.atom.owner).ok_or_else(|| foreign.clone())?.get_card()
.ok_or_else(|| foreign.clone())?
.get_card()
}; };
if owner.atoms().flatten().all(|dynfo| dynfo.tid() != TypeId::of::<A>()) { if owner.atoms().flatten().all(|dynfo| dynfo.tid() != TypeId::of::<A>()) {
return Err(foreign); return Err(foreign);
@@ -130,22 +149,24 @@ where A: AtomicFeatures {
return Err(foreign); return Err(foreign);
} }
let val = dynfo.decode(AtomCtx(data, foreign.atom.drop)).await; let val = dynfo.decode(AtomCtx(data, foreign.atom.drop)).await;
let value = *val.downcast::<A::Data>().expect("atom decode returned wrong type"); let Ok(value) = val.downcast::<A::Data>() else {
Ok(TAtom { value, untyped: foreign }) panic!("decode of {} returned wrong type.", dynfo.name());
};
Ok(TAtom { value: *value, untyped: foreign })
} }
pub async fn dep_req<Sys: SystemCard, Req: Request + Into<Sys::Req>>(req: Req) -> Req::Response { pub async fn dep_req<Sys: SystemCard, Req: Request + Into<Sys::Req>>(req: Req) -> Req::Response {
let ctx = ctx();
let mut msg = Vec::new(); let mut msg = Vec::new();
req.into().encode(std::pin::pin!(&mut msg)).await; req.into().encode_vec(&mut msg);
let own_inst = ctx.get::<CtedObj>().inst(); let cted = cted();
let own_inst = cted.inst();
let owner = if own_inst.card().type_id() == TypeId::of::<Sys>() { let owner = if own_inst.card().type_id() == TypeId::of::<Sys>() {
ctx.sys_id() sys_id()
} else { } else {
(ctx.get::<CtedObj>().deps().find(|s| s.get_card().type_id() == TypeId::of::<Sys>())) (cted.deps().find(|s| s.get_card().type_id() == TypeId::of::<Sys>()))
.expect("System not in dependency array") .expect("System not in dependency array")
.id() .id()
}; };
let reply = ctx.reqnot().request(api::SysFwd(owner, msg)).await; let reply = request(api::SysFwd(owner, msg)).await;
Req::Response::decode(std::pin::pin!(&reply[..])).await Req::Response::decode(std::pin::pin!(&reply[..])).await.unwrap()
} }

View File

@@ -1,4 +1,5 @@
use std::any::Any; use std::any::Any;
use std::fmt::Debug;
use std::sync::Arc; use std::sync::Arc;
use orchid_base::boxed_iter::{BoxedIter, box_empty, box_once}; use orchid_base::boxed_iter::{BoxedIter, box_empty, box_once};
@@ -8,6 +9,7 @@ use crate::api;
use crate::other_system::{DynSystemHandle, SystemHandle}; use crate::other_system::{DynSystemHandle, SystemHandle};
use crate::system::{DynSystem, System, SystemCard}; use crate::system::{DynSystem, System, SystemCard};
#[derive(Debug)]
pub struct Cted<Ctor: SystemCtor + ?Sized> { pub struct Cted<Ctor: SystemCtor + ?Sized> {
pub deps: <Ctor::Deps as DepDef>::Sat, pub deps: <Ctor::Deps as DepDef>::Sat,
pub inst: Arc<Ctor::Instance>, pub inst: Arc<Ctor::Instance>,
@@ -15,7 +17,7 @@ pub struct Cted<Ctor: SystemCtor + ?Sized> {
impl<C: SystemCtor + ?Sized> Clone for Cted<C> { impl<C: SystemCtor + ?Sized> Clone for Cted<C> {
fn clone(&self) -> Self { Self { deps: self.deps.clone(), inst: self.inst.clone() } } fn clone(&self) -> Self { Self { deps: self.deps.clone(), inst: self.inst.clone() } }
} }
pub trait DynCted: Send + Sync + 'static { pub trait DynCted: Debug + Send + Sync + 'static {
fn as_any(&self) -> &dyn Any; fn as_any(&self) -> &dyn Any;
fn deps<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)>; fn deps<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)>;
fn inst(&self) -> Arc<dyn DynSystem>; fn inst(&self) -> Arc<dyn DynSystem>;
@@ -27,11 +29,11 @@ impl<C: SystemCtor + ?Sized> DynCted for Cted<C> {
} }
pub type CtedObj = Arc<dyn DynCted>; pub type CtedObj = Arc<dyn DynCted>;
pub trait DepSat: Clone + Send + Sync + 'static { pub trait DepSat: Debug + Clone + Send + Sync + 'static {
fn iter<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)>; fn iter<'a>(&'a self) -> BoxedIter<'a, &'a (dyn DynSystemHandle + 'a)>;
} }
pub trait DepDef { pub trait DepDef: Debug {
type Sat: DepSat; type Sat: DepSat;
fn report(names: &mut impl FnMut(&'static str)); fn report(names: &mut impl FnMut(&'static str));
fn create(take: &mut impl FnMut() -> api::SysId) -> Self::Sat; fn create(take: &mut impl FnMut() -> api::SysId) -> Self::Sat;
@@ -57,17 +59,16 @@ impl DepDef for () {
fn report(_: &mut impl FnMut(&'static str)) {} fn report(_: &mut impl FnMut(&'static str)) {}
} }
pub trait SystemCtor: Send + Sync + 'static { pub trait SystemCtor: Debug + Send + Sync + 'static {
type Deps: DepDef; type Deps: DepDef;
type Instance: System; type Instance: System;
const NAME: &'static str; const NAME: &'static str;
const VERSION: f64; const VERSION: f64;
/// Create a system instance. When this function is called, a context object /// Create a system instance.
/// isn't yet available fn inst(&self, deps: <Self::Deps as DepDef>::Sat) -> Self::Instance;
fn inst(deps: <Self::Deps as DepDef>::Sat) -> Self::Instance;
} }
pub trait DynSystemCtor: Send + Sync + 'static { pub trait DynSystemCtor: Debug + Send + Sync + 'static {
fn decl(&self, id: api::SysDeclId) -> api::SystemDecl; fn decl(&self, id: api::SysDeclId) -> api::SystemDecl;
fn new_system(&self, new: &api::NewSystem) -> CtedObj; fn new_system(&self, new: &api::NewSystem) -> CtedObj;
} }
@@ -84,7 +85,7 @@ impl<T: SystemCtor> DynSystemCtor for T {
fn new_system(&self, api::NewSystem { system: _, id: _, depends }: &api::NewSystem) -> CtedObj { fn new_system(&self, api::NewSystem { system: _, id: _, depends }: &api::NewSystem) -> CtedObj {
let mut ids = depends.iter().copied(); let mut ids = depends.iter().copied();
let deps = T::Deps::create(&mut || ids.next().unwrap()); let deps = T::Deps::create(&mut || ids.next().unwrap());
let inst = Arc::new(T::inst(deps.clone())); let inst = Arc::new(self.inst(deps.clone()));
Arc::new(Cted::<T> { deps, inst }) Arc::new(Cted::<T> { deps, inst })
} }
} }
@@ -151,8 +152,4 @@ mod dep_set_tuple_impls {
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J); dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K); dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L); // 12 dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L); // 12
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M, N);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O);
dep_set_tuple_impl!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P); // 16
} }

View File

@@ -1,57 +1,31 @@
use crate::entrypoint::ExtensionData; use std::rc::Rc;
use crate::entrypoint::ExtensionBuilder;
use crate::ext_port::ExtPort;
/// Run an extension inside a Tokio localset. Since the extension API does not
/// provide a forking mechanism, it can safely abort once the localset is
/// exhausted. If an extension absolutely needs a long-lived background task,
/// it can import and call [tokio::task::spawn_local], which keeps the localset
/// alive and postpones the aggressive shutdown, and it can listen for the
/// [Drop::drop] of the value returned by [crate::system_ctor::SystemCtor::inst]
/// to initiate shutdown.
#[cfg(feature = "tokio")] #[cfg(feature = "tokio")]
pub async fn tokio_main(data: ExtensionData) { pub async fn tokio_main(builder: ExtensionBuilder) -> ! {
use std::io::{ErrorKind, Write}; use tokio::io::{stderr, stdin, stdout};
use std::mem;
use std::pin::{Pin, pin};
use std::rc::Rc;
use async_once_cell::OnceCell;
use futures::StreamExt;
use futures::future::LocalBoxFuture;
use futures::lock::Mutex;
use futures::stream::FuturesUnordered;
use orchid_api_traits::{Decode, Encode};
use orchid_base::msg::{recv_msg, send_msg};
use tokio::io::{Stdout, stdin, stdout};
use tokio::task::{LocalSet, spawn_local}; use tokio::task::{LocalSet, spawn_local};
use tokio_util::compat::{Compat, TokioAsyncReadCompatExt, TokioAsyncWriteCompatExt}; use tokio_util::compat::{TokioAsyncReadCompatExt, TokioAsyncWriteCompatExt};
use crate::api;
use crate::entrypoint::extension_init;
let local_set = LocalSet::new(); let local_set = LocalSet::new();
local_set.spawn_local(async { local_set.spawn_local(async {
let host_header = api::HostHeader::decode(Pin::new(&mut stdin().compat())).await; builder.build(ExtPort {
let init = input: Box::pin(stdin().compat()),
Rc::new(extension_init(data, host_header, Rc::new(|fut| mem::drop(spawn_local(fut))))); output: Box::pin(stdout().compat_write()),
let mut buf = Vec::new(); log: Box::pin(stderr().compat_write()),
init.header.encode(Pin::new(&mut buf)).await; spawn: Rc::new(|fut| {
std::io::stdout().write_all(&buf).unwrap(); spawn_local(fut);
std::io::stdout().flush().unwrap(); }),
// These are concurrent processes that never exit, so if the FuturesUnordered });
// produces any result the extension should exit
let mut io = FuturesUnordered::<LocalBoxFuture<()>>::new();
io.push(Box::pin(async {
loop {
match recv_msg(pin!(stdin().compat())).await {
Ok(msg) => init.send(&msg[..]).await,
Err(e) if e.kind() == ErrorKind::BrokenPipe => break,
Err(e) if e.kind() == ErrorKind::UnexpectedEof => break,
Err(e) => panic!("{e}"),
}
}
}));
io.push(Box::pin(async {
while let Some(msg) = init.recv().await {
static STDOUT: OnceCell<Mutex<Compat<Stdout>>> = OnceCell::new();
let stdout_lk = STDOUT.get_or_init(async { Mutex::new(stdout().compat_write()) }).await;
let mut stdout_g = stdout_lk.lock().await;
send_msg(pin!(&mut *stdout_g), &msg[..]).await.expect("Parent pipe broken");
}
}));
io.next().await;
}); });
local_set.await; local_set.await;
std::process::exit(0)
} }

View File

@@ -1,4 +1,6 @@
use std::cell::RefCell;
use std::num::NonZero; use std::num::NonZero;
use std::rc::Rc;
use async_fn_stream::stream; use async_fn_stream::stream;
use dyn_clone::{DynClone, clone_box}; use dyn_clone::{DynClone, clone_box};
@@ -6,17 +8,16 @@ use futures::future::{LocalBoxFuture, join_all};
use futures::{FutureExt, StreamExt}; use futures::{FutureExt, StreamExt};
use hashbrown::HashMap; use hashbrown::HashMap;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::interner::{Interner, Tok}; use orchid_base::interner::{IStr, is};
use orchid_base::location::SrcRange; use orchid_base::location::SrcRange;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::tree::{TokTree, Token, TokenVariant}; use orchid_base::tree::{TokTree, Token, TokenVariant};
use substack::Substack; use substack::Substack;
use task_local::task_local;
use trait_set::trait_set; use trait_set::trait_set;
use crate::api; use crate::api;
use crate::context::i;
use crate::conv::ToExpr; use crate::conv::ToExpr;
use crate::entrypoint::MemberRecord;
use crate::expr::{BorrowedExprStore, Expr, ExprHandle}; use crate::expr::{BorrowedExprStore, Expr, ExprHandle};
use crate::func_atom::{ExprFunc, Fun}; use crate::func_atom::{ExprFunc, Fun};
use crate::gen_expr::{GExpr, sym_ref}; use crate::gen_expr::{GExpr, sym_ref};
@@ -27,12 +28,7 @@ pub type GenTok = Token<Expr, GExpr>;
impl TokenVariant<api::Expression> for GExpr { impl TokenVariant<api::Expression> for GExpr {
type FromApiCtx<'a> = (); type FromApiCtx<'a> = ();
type ToApiCtx<'a> = (); type ToApiCtx<'a> = ();
async fn from_api( async fn from_api(_: &api::Expression, _: &mut Self::FromApiCtx<'_>, _: SrcRange) -> Self {
_: &api::Expression,
_: &mut Self::FromApiCtx<'_>,
_: SrcRange,
_: &Interner,
) -> Self {
panic!("Received new expression from host") panic!("Received new expression from host")
} }
async fn into_api(self, _: &mut Self::ToApiCtx<'_>) -> api::Expression { self.serialize().await } async fn into_api(self, _: &mut Self::ToApiCtx<'_>) -> api::Expression { self.serialize().await }
@@ -40,12 +36,7 @@ impl TokenVariant<api::Expression> for GExpr {
impl TokenVariant<api::ExprTicket> for Expr { impl TokenVariant<api::ExprTicket> for Expr {
type FromApiCtx<'a> = &'a BorrowedExprStore; type FromApiCtx<'a> = &'a BorrowedExprStore;
async fn from_api( async fn from_api(api: &api::ExprTicket, exprs: &mut Self::FromApiCtx<'_>, _: SrcRange) -> Self {
api: &api::ExprTicket,
exprs: &mut Self::FromApiCtx<'_>,
_: SrcRange,
_: &Interner,
) -> Self {
// SAFETY: receiving trees from sublexers implies borrowing // SAFETY: receiving trees from sublexers implies borrowing
Expr::from_handle(ExprHandle::borrowed(*api, exprs)) Expr::from_handle(ExprHandle::borrowed(*api, exprs))
} }
@@ -84,9 +75,8 @@ pub fn root_mod(name: &str, mems: impl IntoIterator<Item = Vec<GenMember>>) -> (
(name.to_string(), kind) (name.to_string(), kind)
} }
pub fn fun<I, O>(public: bool, name: &str, xf: impl ExprFunc<I, O>) -> Vec<GenMember> { pub fn fun<I, O>(public: bool, name: &str, xf: impl ExprFunc<I, O>) -> Vec<GenMember> {
let fac = LazyMemberFactory::new(async move |sym| { let fac =
MemKind::Const(Fun::new(sym, xf).await.to_gen().await) LazyMemberFactory::new(async move |sym| MemKind::Const(Fun::new(sym, xf).await.to_gen().await));
});
vec![GenMember { name: name.to_string(), kind: MemKind::Lazy(fac), public, comments: vec![] }] vec![GenMember { name: name.to_string(), kind: MemKind::Lazy(fac), public, comments: vec![] }]
} }
pub fn prefix(path: &str, items: impl IntoIterator<Item = Vec<GenMember>>) -> Vec<GenMember> { pub fn prefix(path: &str, items: impl IntoIterator<Item = Vec<GenMember>>) -> Vec<GenMember> {
@@ -167,10 +157,10 @@ pub struct GenMember {
pub comments: Vec<String>, pub comments: Vec<String>,
} }
impl GenMember { impl GenMember {
pub async fn into_api(self, tia_cx: &mut impl TreeIntoApiCtx) -> api::Member { pub(crate) async fn into_api(self, tia_cx: &mut impl TreeIntoApiCtx) -> api::Member {
let name = i().i::<String>(&self.name).await; let name = is(&self.name).await;
let kind = self.kind.into_api(&mut tia_cx.push_path(name.clone())).await; let kind = self.kind.into_api(&mut tia_cx.push_path(name.clone())).await;
let comments = join_all(self.comments.iter().map(async |cmt| i().i(cmt).await.to_api())).await; let comments = join_all(self.comments.iter().map(async |cmt| is(cmt).await.to_api())).await;
api::Member { kind, name: name.to_api(), comments, exported: self.public } api::Member { kind, name: name.to_api(), comments, exported: self.public }
} }
} }
@@ -181,9 +171,9 @@ pub enum MemKind {
Lazy(LazyMemberFactory), Lazy(LazyMemberFactory),
} }
impl MemKind { impl MemKind {
pub async fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::MemberKind { pub(crate) async fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::MemberKind {
match self { match self {
Self::Lazy(lazy) => api::MemberKind::Lazy(ctx.with_lazy(lazy)), Self::Lazy(lazy) => api::MemberKind::Lazy(add_lazy(ctx, lazy)),
Self::Const(c) => api::MemberKind::Const(c.serialize().await), Self::Const(c) => api::MemberKind::Const(c.serialize().await),
Self::Mod { members } => api::MemberKind::Module(api::Module { Self::Mod { members } => api::MemberKind::Module(api::Module {
members: stream(async |mut cx| { members: stream(async |mut cx| {
@@ -199,29 +189,58 @@ impl MemKind {
} }
} }
pub trait TreeIntoApiCtx { pub enum MemberRecord {
fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId; Gen(Vec<IStr>, LazyMemberFactory),
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx; Res,
} }
pub struct TreeIntoApiCtxImpl<'a, 'b> { #[derive(Clone, Default)]
pub basepath: &'a [Tok<String>], pub(crate) struct LazyMemberStore(Rc<RefCell<HashMap<api::TreeId, MemberRecord>>>);
pub path: Substack<'a, Tok<String>>,
pub lazy_members: &'b mut HashMap<api::TreeId, MemberRecord>, task_local! {
static LAZY_MEMBERS: LazyMemberStore;
} }
impl TreeIntoApiCtx for TreeIntoApiCtxImpl<'_, '_> { pub fn with_lazy_member_store<'a>(fut: LocalBoxFuture<'a, ()>) -> LocalBoxFuture<'a, ()> {
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx { Box::pin(LAZY_MEMBERS.scope(LazyMemberStore::default(), fut))
TreeIntoApiCtxImpl { }
lazy_members: self.lazy_members,
basepath: self.basepath, fn add_lazy(cx: &impl TreeIntoApiCtx, fac: LazyMemberFactory) -> api::TreeId {
path: self.path.push(seg), LAZY_MEMBERS.with(|lazy_members| {
} let mut g = lazy_members.0.borrow_mut();
} let id = api::TreeId(NonZero::new((g.len() + 2) as u64).unwrap());
fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId { let path = cx.path().collect_vec();
let id = api::TreeId(NonZero::new((self.lazy_members.len() + 2) as u64).unwrap()); g.insert(id, MemberRecord::Gen(path, fac));
let path = self.basepath.iter().cloned().chain(self.path.unreverse()).collect_vec();
self.lazy_members.insert(id, MemberRecord::Gen(path, fac));
id id
})
}
pub async fn get_lazy(id: api::TreeId) -> (Sym, MemKind) {
let (path, cb) =
LAZY_MEMBERS.with(|tbl| match tbl.0.borrow_mut().insert(id, MemberRecord::Res) {
None => panic!("Tree for ID not found"),
Some(MemberRecord::Res) => panic!("This tree has already been transmitted"),
Some(MemberRecord::Gen(path, cb)) => (path, cb),
});
let path = Sym::new(path).await.unwrap();
(path.clone(), cb.build(path).await)
}
pub(crate) trait TreeIntoApiCtx {
fn push_path(&mut self, seg: IStr) -> impl TreeIntoApiCtx;
fn path(&self) -> impl Iterator<Item = IStr>;
}
pub struct TreeIntoApiCtxImpl<'a> {
pub basepath: &'a [IStr],
pub path: Substack<'a, IStr>,
}
impl TreeIntoApiCtx for TreeIntoApiCtxImpl<'_> {
fn push_path(&mut self, seg: IStr) -> impl TreeIntoApiCtx {
TreeIntoApiCtxImpl { basepath: self.basepath, path: self.path.push(seg) }
}
fn path(&self) -> impl Iterator<Item = IStr> {
self.basepath.iter().cloned().chain(self.path.unreverse())
} }
} }

View File

@@ -8,22 +8,28 @@ edition = "2024"
[dependencies] [dependencies]
async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" } async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
async-once-cell = "0.5.4" async-once-cell = "0.5.4"
async-process = "2.4.0"
bound = "0.6.0" bound = "0.6.0"
derive_destructure = "1.0.0" derive_destructure = "1.0.0"
futures = { version = "0.3.31", features = ["std"], default-features = false } futures = { version = "0.3.31", features = ["std"], default-features = false }
futures-locks = "0.7.1" futures-locks = "0.7.1"
hashbrown = "0.16.0" hashbrown = "0.16.1"
itertools = "0.14.0" itertools = "0.14.0"
lazy_static = "1.5.0" lazy_static = "1.5.0"
libloading = { version = "0.9.0", optional = true }
memo-map = "0.3.3" memo-map = "0.3.3"
never = "0.1.0" never = "0.1.0"
num-traits = "0.2.19" num-traits = "0.2.19"
orchid-api = { version = "0.1.0", path = "../orchid-api" } orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-base = { version = "0.1.0", path = "../orchid-base" } orchid-base = { version = "0.1.0", path = "../orchid-base" }
ordered-float = "5.0.0" ordered-float = "5.1.0"
pastey = "0.1.1" pastey = "0.2.1"
substack = "1.1.1" substack = "1.1.1"
test_executors = "0.3.5" test_executors = "0.4.1"
tokio = { version = "1.49.0", features = ["process"], optional = true }
tokio-util = { version = "0.7.18", features = ["compat"], optional = true }
trait-set = "0.3.0" trait-set = "0.3.0"
unsync-pipe = { version = "0.2.0", path = "../unsync-pipe" }
[features]
tokio = ["dep:tokio", "dep:tokio-util", "dep:libloading"]

View File

@@ -5,7 +5,7 @@ use async_once_cell::OnceCell;
use derive_destructure::destructure; use derive_destructure::destructure;
use orchid_base::format::{FmtCtx, FmtUnit, Format, take_first_fmt}; use orchid_base::format::{FmtCtx, FmtUnit, Format, take_first_fmt};
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::reqnot::Requester; use orchid_base::reqnot::ClientExt;
use orchid_base::tree::AtomRepr; use orchid_base::tree::AtomRepr;
use crate::api; use crate::api;
@@ -59,11 +59,11 @@ impl AtomHand {
pub async fn call(self, arg: Expr) -> Expr { pub async fn call(self, arg: Expr) -> Expr {
let owner_sys = self.0.owner.clone(); let owner_sys = self.0.owner.clone();
let ctx = owner_sys.ctx(); let ctx = owner_sys.ctx();
let reqnot = owner_sys.reqnot(); let client = owner_sys.client();
ctx.exprs.give_expr(arg.clone()); ctx.exprs.give_expr(arg.clone());
let ret = match Rc::try_unwrap(self.0) { let ret = match Rc::try_unwrap(self.0) {
Ok(data) => reqnot.request(api::FinalCall(data.api(), arg.id())).await, Ok(data) => client.request(api::FinalCall(data.api(), arg.id())).await.unwrap(),
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), arg.id())).await, Err(hand) => client.request(api::CallRef(hand.api_ref(), arg.id())).await.unwrap(),
}; };
let val = Expr::from_api(&ret, PathSetBuilder::new(), ctx.clone()).await; let val = Expr::from_api(&ret, PathSetBuilder::new(), ctx.clone()).await;
ctx.exprs.take_expr(arg.id()); ctx.exprs.take_expr(arg.id());
@@ -74,19 +74,21 @@ impl AtomHand {
#[must_use] #[must_use]
pub fn ext(&self) -> &Extension { self.sys().ext() } pub fn ext(&self) -> &Extension { self.sys().ext() }
pub async fn req(&self, key: api::TStrv, req: Vec<u8>) -> Option<Vec<u8>> { pub async fn req(&self, key: api::TStrv, req: Vec<u8>) -> Option<Vec<u8>> {
self.0.owner.reqnot().request(api::Fwded(self.0.api_ref(), key, req)).await self.0.owner.client().request(api::Fwded(self.0.api_ref(), key, req)).await.unwrap()
} }
#[must_use] #[must_use]
pub fn api_ref(&self) -> api::Atom { self.0.api_ref() } pub fn api_ref(&self) -> api::Atom { self.0.api_ref() }
#[must_use] #[must_use]
pub async fn to_string(&self) -> String { take_first_fmt(self, &self.0.owner.ctx().i).await } pub async fn to_string(&self) -> String { take_first_fmt(self).await }
#[must_use] #[must_use]
pub fn downgrade(&self) -> WeakAtomHand { WeakAtomHand(Rc::downgrade(&self.0)) } pub fn downgrade(&self) -> WeakAtomHand { WeakAtomHand(Rc::downgrade(&self.0)) }
} }
impl Format for AtomHand { impl Format for AtomHand {
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
(self.0.display.get_or_init(async { (self.0.display.get_or_init(async {
FmtUnit::from_api(&self.0.owner.reqnot().request(api::AtomPrint(self.0.api_ref())).await) FmtUnit::from_api(
&self.0.owner.client().request(api::AtomPrint(self.0.api_ref())).await.unwrap(),
)
})) }))
.await .await
.clone() .clone()

View File

@@ -3,23 +3,32 @@ use std::num::{NonZero, NonZeroU16};
use std::rc::{Rc, Weak}; use std::rc::{Rc, Weak};
use std::{fmt, ops}; use std::{fmt, ops};
use futures::future::LocalBoxFuture;
use futures_locks::RwLock; use futures_locks::RwLock;
use hashbrown::HashMap; use hashbrown::HashMap;
use orchid_base::builtin::Spawner;
use orchid_base::interner::Interner;
use crate::api; use crate::api;
use crate::expr_store::ExprStore; use crate::expr_store::ExprStore;
use crate::logger::LoggerImpl;
use crate::system::{System, WeakSystem}; use crate::system::{System, WeakSystem};
use crate::tree::WeakRoot; use crate::tree::WeakRoot;
pub trait JoinHandle {
fn abort(&self);
fn join(self: Box<Self>) -> LocalBoxFuture<'static, ()>;
}
pub trait Spawner {
fn spawn_obj(&self, fut: LocalBoxFuture<'static, ()>) -> Box<dyn JoinHandle>;
}
pub struct CtxData { pub struct CtxData {
pub i: Interner, spawner: Rc<dyn Spawner>,
pub spawn: Spawner,
pub systems: RwLock<HashMap<api::SysId, WeakSystem>>, pub systems: RwLock<HashMap<api::SysId, WeakSystem>>,
pub system_id: RefCell<NonZeroU16>, pub system_id: RefCell<NonZeroU16>,
pub exprs: ExprStore, pub exprs: ExprStore,
pub root: RwLock<WeakRoot>, pub root: RwLock<WeakRoot>,
pub logger: LoggerImpl,
} }
#[derive(Clone)] #[derive(Clone)]
pub struct Ctx(Rc<CtxData>); pub struct Ctx(Rc<CtxData>);
@@ -37,16 +46,25 @@ impl WeakCtx {
} }
impl Ctx { impl Ctx {
#[must_use] #[must_use]
pub fn new(spawn: Spawner) -> Self { pub fn new(spawner: impl Spawner + 'static, logger: LoggerImpl) -> Self {
Self(Rc::new(CtxData { Self(Rc::new(CtxData {
spawn, spawner: Rc::new(spawner),
i: Interner::default(),
systems: RwLock::default(), systems: RwLock::default(),
system_id: RefCell::new(NonZero::new(1).unwrap()), system_id: RefCell::new(NonZero::new(1).unwrap()),
exprs: ExprStore::default(), exprs: ExprStore::default(),
root: RwLock::default(), root: RwLock::default(),
logger,
})) }))
} }
/// Spawn a parallel future that you can join at any later time.
///
/// Don't use this for async Drop, use [orchid_base::stash::stash] instead.
/// If you use this for an actor object, make sure to actually join the
/// handle.
#[must_use]
pub fn spawn(&self, fut: impl Future<Output = ()> + 'static) -> Box<dyn JoinHandle> {
self.spawner.spawn_obj(Box::pin(fut))
}
#[must_use] #[must_use]
pub(crate) async fn system_inst(&self, id: api::SysId) -> Option<System> { pub(crate) async fn system_inst(&self, id: api::SysId) -> Option<System> {
self.systems.read().await.get(&id).and_then(WeakSystem::upgrade) self.systems.read().await.get(&id).and_then(WeakSystem::upgrade)
@@ -62,9 +80,6 @@ impl Ctx {
} }
impl fmt::Debug for Ctx { impl fmt::Debug for Ctx {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Ctx") f.debug_struct("Ctx").field("system_id", &self.system_id).finish_non_exhaustive()
.field("i", &self.i)
.field("system_id", &self.system_id)
.finish_non_exhaustive()
} }
} }
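The Spawner and JoinHandle traits above leave the choice of executor to the host embedder. As a rough illustration only, a tokio-based adapter could look like the sketch below; TokioSpawner and TokioJoin are hypothetical names that do not appear in this changeset, and spawn_local assumes the caller runs inside a tokio LocalSet.

  // Sketch only, assuming the JoinHandle/Spawner traits defined in ctx.rs above.
  use futures::future::LocalBoxFuture;

  use crate::ctx::{JoinHandle, Spawner};

  struct TokioJoin(tokio::task::JoinHandle<()>);

  impl JoinHandle for TokioJoin {
    fn abort(&self) { self.0.abort() }
    fn join(self: Box<Self>) -> LocalBoxFuture<'static, ()> {
      Box::pin(async move {
        let TokioJoin(handle) = *self;
        // Ignore the JoinError: it only signals cancellation or a panic in the task.
        let _ = handle.await;
      })
    }
  }

  struct TokioSpawner;

  impl Spawner for TokioSpawner {
    fn spawn_obj(&self, fut: LocalBoxFuture<'static, ()>) -> Box<dyn JoinHandle> {
      // The futures are !Send, so spawn_local (inside a tokio LocalSet) is required.
      Box::new(TokioJoin(tokio::task::spawn_local(fut)))
    }
  }

Passing such a spawner to Ctx::new would then drive Ctx::spawn through the embedder's executor of choice.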

View File

@@ -1,7 +1,7 @@
use hashbrown::HashSet; use hashbrown::HashSet;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::{OrcErrv, OrcRes, Reporter, mk_errv}; use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
use orchid_base::interner::{Interner, Tok}; use orchid_base::interner::{IStr, is};
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::name::VName; use orchid_base::name::VName;
@@ -16,17 +16,17 @@ pub enum AbsPathError {
RootPath, RootPath,
} }
impl AbsPathError { impl AbsPathError {
pub async fn err_obj(self, i: &Interner, pos: Pos, path: &str) -> OrcErrv { pub async fn err_obj(self, pos: Pos, path: &str) -> OrcErrv {
let (descr, msg) = match self { let (descr, msg) = match self {
AbsPathError::RootPath => ( AbsPathError::RootPath => (
i.i("Path ends on root module").await, is("Path ends on root module").await,
format!( format!(
"{path} is equal to the empty path. You cannot directly reference the root. \ "{path} is equal to the empty path. You cannot directly reference the root. \
Use one fewer 'super::' or add more segments to make it valid." Use one fewer 'super::' or add more segments to make it valid."
), ),
), ),
AbsPathError::TooManySupers => ( AbsPathError::TooManySupers => (
i.i("Too many 'super::' steps in path").await, is("Too many 'super::' steps in path").await,
format!("{path} is leading outside the root."), format!("{path} is leading outside the root."),
), ),
}; };
@@ -41,13 +41,9 @@ impl AbsPathError {
/// ///
/// if the relative path contains as many or more `super` segments than the /// if the relative path contains as many or more `super` segments than the
/// length of the absolute path. /// length of the absolute path.
pub async fn absolute_path( pub async fn absolute_path(mut cwd: &[IStr], mut rel: &[IStr]) -> Result<VName, AbsPathError> {
mut cwd: &[Tok<String>], let i_self = is("self").await;
mut rel: &[Tok<String>], let i_super = is("super").await;
i: &Interner,
) -> Result<VName, AbsPathError> {
let i_self = i.i("self").await;
let i_super = i.i("super").await;
let mut relative = false; let mut relative = false;
if let Some((_, tail)) = rel.split_first().filter(|(h, _)| **h == i_self) { if let Some((_, tail)) = rel.split_first().filter(|(h, _)| **h == i_self) {
rel = tail; rel = tail;
@@ -63,19 +59,13 @@ pub async fn absolute_path(
.map_err(|_| AbsPathError::RootPath) .map_err(|_| AbsPathError::RootPath)
} }
pub struct DealiasCtx<'a> {
pub i: &'a Interner,
pub rep: &'a Reporter,
}
pub async fn resolv_glob<Mod: Tree>( pub async fn resolv_glob<Mod: Tree>(
cwd: &[Tok<String>], cwd: &[IStr],
root: &Mod, root: &Mod,
abs_path: &[Tok<String>], abs_path: &[IStr],
pos: Pos, pos: Pos,
i: &Interner,
ctx: &mut Mod::Ctx<'_>, ctx: &mut Mod::Ctx<'_>,
) -> OrcRes<HashSet<Tok<String>>> { ) -> OrcRes<HashSet<IStr>> {
let coprefix_len = cwd.iter().zip(abs_path).take_while(|(a, b)| a == b).count(); let coprefix_len = cwd.iter().zip(abs_path).take_while(|(a, b)| a == b).count();
let (co_prefix, diff_path) = abs_path.split_at(abs_path.len().min(coprefix_len + 1)); let (co_prefix, diff_path) = abs_path.split_at(abs_path.len().min(coprefix_len + 1));
let fst_diff = let fst_diff =
@@ -89,7 +79,7 @@ pub async fn resolv_glob<Mod: Tree>(
ChildErrorKind::Missing => ("Invalid import path", format!("{path} not found")), ChildErrorKind::Missing => ("Invalid import path", format!("{path} not found")),
ChildErrorKind::Private => ("Import inaccessible", format!("{path} is private")), ChildErrorKind::Private => ("Import inaccessible", format!("{path} is private")),
}; };
return Err(mk_errv(i.i(tk).await, msg, [pos])); return Err(mk_errv(is(tk).await, msg, [pos]));
}, },
}; };
Ok(target_module.children(coprefix_len < abs_path.len())) Ok(target_module.children(coprefix_len < abs_path.len()))
@@ -100,11 +90,11 @@ pub type ChildResult<'a, T> = Result<&'a T, ChildErrorKind>;
pub trait Tree { pub trait Tree {
type Ctx<'a>; type Ctx<'a>;
#[must_use] #[must_use]
fn children(&self, public_only: bool) -> HashSet<Tok<String>>; fn children(&self, public_only: bool) -> HashSet<IStr>;
#[must_use] #[must_use]
fn child( fn child(
&self, &self,
key: Tok<String>, key: IStr,
public_only: bool, public_only: bool,
ctx: &mut Self::Ctx<'_>, ctx: &mut Self::Ctx<'_>,
) -> impl Future<Output = ChildResult<'_, Self>>; ) -> impl Future<Output = ChildResult<'_, Self>>;
@@ -135,7 +125,7 @@ pub struct ChildError {
pub async fn walk<'a, T: Tree>( pub async fn walk<'a, T: Tree>(
root: &'a T, root: &'a T,
public_only: bool, public_only: bool,
path: impl IntoIterator<Item = Tok<String>>, path: impl IntoIterator<Item = IStr>,
ctx: &mut T::Ctx<'_>, ctx: &mut T::Ctx<'_>,
) -> Result<&'a T, ChildError> { ) -> Result<&'a T, ChildError> {
let mut cur = root; let mut cur = root;
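As a quick illustration of the reworked signature, absolute_path now interns through the global `is` helper instead of taking an Interner argument. The module names in this sketch are made up for the example and are not part of the diff.

  // Sketch only: resolving `super::sibling` from inside module pkg::mod_a.
  async fn absolute_path_demo() -> Result<(), AbsPathError> {
    let cwd = [is("pkg").await, is("mod_a").await];
    let rel = [is("super").await, is("sibling").await];
    let _abs = absolute_path(&cwd, &rel).await?; // resolves to pkg::sibling
    // Supplying more `super::` segments than cwd has entries would instead
    // return AbsPathError::TooManySupers.
    Ok(())
  }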

55
orchid-host/src/dylib.rs Normal file
View File

@@ -0,0 +1,55 @@
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use hashbrown::HashMap;
use libloading::Library;
use orchid_base::binary::vt_to_future;
use crate::api;
use crate::ctx::Ctx;
use crate::extension::ExtPort;
static DYNAMIC_LIBRARIES: Mutex<Option<HashMap<PathBuf, Arc<Library>>>> = Mutex::new(None);
fn load_dylib(path: &Path) -> Result<Arc<Library>, libloading::Error> {
let mut g = DYNAMIC_LIBRARIES.lock().unwrap();
let map = g.get_or_insert_default();
if let Some(lib) = map.get(path) {
Ok(lib.clone())
} else {
let lib = Arc::new(unsafe { Library::new(path) }?);
map.insert(path.to_owned(), lib.clone());
Ok(lib)
}
}
#[cfg(feature = "tokio")]
pub async fn ext_dylib(path: &Path, ctx: Ctx) -> Result<ExtPort, libloading::Error> {
use futures::io::BufReader;
use futures::{AsyncBufReadExt, StreamExt};
use libloading::Symbol;
use unsync_pipe::pipe;
let (write_input, input) = pipe(1024);
let (output, read_output) = pipe(1024);
let (log, read_log) = pipe(1024);
let log_path = path.to_string_lossy().to_string();
let _ = ctx.spawn(async move {
use orchid_base::logging::log;
let mut lines = BufReader::new(read_log).lines();
while let Some(line) = lines.next().await {
writeln!(log("stderr"), "{log_path} err> {}", line.expect("Readline implies this")).await;
}
});
let library = load_dylib(path)?;
let entrypoint: Symbol<unsafe extern "C" fn(api::binary::ExtensionContext)> =
unsafe { library.get("orchid_extension_main") }?;
let data = Box::into_raw(Box::new(ctx)) as *const ();
extern "C" fn drop(data: *const ()) { std::mem::drop(unsafe { Box::from_raw(data as *mut Ctx) }) }
extern "C" fn spawn(data: *const (), vt: api::binary::FutureVT) {
let _ = unsafe { (data as *mut Ctx).as_mut().unwrap().spawn(vt_to_future(vt)) };
}
let spawner = api::binary::Spawner { data, drop, spawn };
let cx = api::binary::ExtensionContext { input, output, log, spawner };
unsafe { (entrypoint)(cx) };
Ok(ExtPort { input: Box::pin(write_input), output: Box::pin(read_output) })
}

View File

@@ -4,11 +4,10 @@ use bound::Bound;
use futures::FutureExt; use futures::FutureExt;
use futures_locks::{RwLockWriteGuard, TryLockError}; use futures_locks::{RwLockWriteGuard, TryLockError};
use orchid_base::error::OrcErrv; use orchid_base::error::OrcErrv;
use orchid_base::format::{FmtCtxImpl, Format, take_first}; use orchid_base::format::fmt;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::logging::Logger; use orchid_base::logging::log;
use crate::ctx::Ctx;
use crate::expr::{Expr, ExprKind, PathSet, Step}; use crate::expr::{Expr, ExprKind, PathSet, Step};
use crate::tree::Root; use crate::tree::Root;
@@ -30,21 +29,19 @@ pub enum ExecResult {
} }
pub struct ExecCtx { pub struct ExecCtx {
ctx: Ctx,
gas: Option<u64>, gas: Option<u64>,
stack: Vec<ExprGuard>, stack: Vec<ExprGuard>,
cur: ExprGuard, cur: ExprGuard,
cur_pos: Pos, cur_pos: Pos,
did_pop: bool, did_pop: bool,
logger: Logger,
root: Root, root: Root,
} }
impl ExecCtx { impl ExecCtx {
#[must_use] #[must_use]
pub async fn new(ctx: Ctx, logger: Logger, root: Root, init: Expr) -> Self { pub async fn new(root: Root, init: Expr) -> Self {
let cur_pos = init.pos(); let cur_pos = init.pos();
let cur = Bound::async_new(init, |init| init.kind().write()).await; let cur = Bound::async_new(init, |init| init.kind().write()).await;
Self { ctx, gas: None, stack: vec![], cur, cur_pos, did_pop: false, logger, root } Self { gas: None, stack: vec![], cur, cur_pos, did_pop: false, root }
} }
#[must_use] #[must_use]
pub fn remaining_gas(&self) -> u64 { self.gas.expect("queried remaining_gas but no gas was set") } pub fn remaining_gas(&self) -> u64 { self.gas.expect("queried remaining_gas but no gas was set") }
@@ -89,8 +86,7 @@ impl ExecCtx {
while self.use_gas(1) { while self.use_gas(1) {
let mut kind_swap = ExprKind::Missing; let mut kind_swap = ExprKind::Missing;
mem::swap(&mut kind_swap, &mut self.cur); mem::swap(&mut kind_swap, &mut self.cur);
let unit = kind_swap.print(&FmtCtxImpl { i: &self.ctx.i }).await; writeln!(log("debug"), "Execute lvl{} {}", self.stack.len(), fmt(&kind_swap).await).await;
writeln!(self.logger, "Execute lvl{} {}", self.stack.len(), take_first(&unit, true));

let (kind, op) = match kind_swap { let (kind, op) = match kind_swap {
ExprKind::Identity(target) => { ExprKind::Identity(target) => {
let inner = self.unpack_ident(&target).await; let inner = self.unpack_ident(&target).await;

View File

@@ -9,7 +9,6 @@ use futures_locks::RwLock;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::OrcErrv; use orchid_base::error::OrcErrv;
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants}; use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
use orchid_base::interner::Interner;
use orchid_base::location::{Pos, SrcRange}; use orchid_base::location::{Pos, SrcRange};
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::tl_cache; use orchid_base::tl_cache;
@@ -56,13 +55,13 @@ impl Expr {
} }
#[must_use] #[must_use]
pub async fn from_api(api: &api::Expression, psb: PathSetBuilder<'_, u64>, ctx: Ctx) -> Self { pub async fn from_api(api: &api::Expression, psb: PathSetBuilder<'_, u64>, ctx: Ctx) -> Self {
let pos = Pos::from_api(&api.location, &ctx.i).await; let pos = Pos::from_api(&api.location).await;
let kind = match &api.kind { let kind = match &api.kind {
api::ExpressionKind::Arg(n) => { api::ExpressionKind::Arg(n) => {
assert!(psb.register_arg(n), "Arguments must be enclosed in a matching lambda"); assert!(psb.register_arg(n), "Arguments must be enclosed in a matching lambda");
ExprKind::Arg ExprKind::Arg
}, },
api::ExpressionKind::Bottom(bot) => ExprKind::Bottom(OrcErrv::from_api(bot, &ctx.i).await), api::ExpressionKind::Bottom(bot) => ExprKind::Bottom(OrcErrv::from_api(bot).await),
api::ExpressionKind::Call(f, x) => { api::ExpressionKind::Call(f, x) => {
let (lpsb, rpsb) = psb.split(); let (lpsb, rpsb) = psb.split();
ExprKind::Call( ExprKind::Call(
@@ -70,7 +69,7 @@ impl Expr {
Expr::from_api(x, rpsb, ctx).boxed_local().await, Expr::from_api(x, rpsb, ctx).boxed_local().await,
) )
}, },
api::ExpressionKind::Const(name) => ExprKind::Const(Sym::from_api(*name, &ctx.i).await), api::ExpressionKind::Const(name) => ExprKind::Const(Sym::from_api(*name).await),
api::ExpressionKind::Lambda(x, body) => { api::ExpressionKind::Lambda(x, body) => {
let lbuilder = psb.lambda(x); let lbuilder = psb.lambda(x);
let body = Expr::from_api(body, lbuilder.stack(), ctx).boxed_local().await; let body = Expr::from_api(body, lbuilder.stack(), ctx).boxed_local().await;
@@ -326,12 +325,7 @@ impl WeakExpr {
impl TokenVariant<api::ExprTicket> for Expr { impl TokenVariant<api::ExprTicket> for Expr {
type FromApiCtx<'a> = ExprStore; type FromApiCtx<'a> = ExprStore;
async fn from_api( async fn from_api(api: &api::ExprTicket, ctx: &mut Self::FromApiCtx<'_>, _: SrcRange) -> Self {
api: &api::ExprTicket,
ctx: &mut Self::FromApiCtx<'_>,
_: SrcRange,
_: &Interner,
) -> Self {
ctx.get_expr(*api).expect("Invalid ticket") ctx.get_expr(*api).expect("Invalid ticket")
} }
type ToApiCtx<'a> = ExprStore; type ToApiCtx<'a> = ExprStore;
@@ -348,12 +342,7 @@ pub struct ExprWillPanic;
impl TokenVariant<api::Expression> for Expr { impl TokenVariant<api::Expression> for Expr {
type FromApiCtx<'a> = Ctx; type FromApiCtx<'a> = Ctx;
async fn from_api( async fn from_api(api: &api::Expression, ctx: &mut Self::FromApiCtx<'_>, _: SrcRange) -> Self {
api: &api::Expression,
ctx: &mut Self::FromApiCtx<'_>,
_: SrcRange,
_: &Interner,
) -> Self {
Self::from_api(api, PathSetBuilder::new(), ctx.clone()).await Self::from_api(api, PathSetBuilder::new(), ctx.clone()).await
} }
type ToApiCtx<'a> = ExprWillPanic; type ToApiCtx<'a> = ExprWillPanic;

View File

@@ -2,7 +2,7 @@ use std::cell::RefCell;
use std::future::Future; use std::future::Future;
use std::io; use std::io;
use std::num::NonZeroU64; use std::num::NonZeroU64;
use std::pin::pin; use std::pin::Pin;
use std::rc::{Rc, Weak}; use std::rc::{Rc, Weak};
use async_fn_stream::stream; use async_fn_stream::stream;
@@ -10,28 +10,32 @@ use derive_destructure::destructure;
use futures::channel::mpsc::{Sender, channel}; use futures::channel::mpsc::{Sender, channel};
use futures::future::{join, join_all}; use futures::future::{join, join_all};
use futures::lock::Mutex; use futures::lock::Mutex;
use futures::{SinkExt, StreamExt, stream}; use futures::{AsyncRead, AsyncWrite, AsyncWriteExt, SinkExt, StreamExt};
use hashbrown::HashMap; use hashbrown::{HashMap, HashSet};
use itertools::Itertools; use itertools::Itertools;
use orchid_api_traits::Request; use orchid_api_traits::{Decode, Encode, Request};
use orchid_base::builtin::ExtInit;
use orchid_base::clone;
use orchid_base::format::{FmtCtxImpl, Format}; use orchid_base::format::{FmtCtxImpl, Format};
use orchid_base::interner::Tok; use orchid_base::interner::{IStr, IStrv, es, ev, is, iv};
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::logging::Logger; use orchid_base::logging::log;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::reqnot::{DynRequester, ReqNot, Requester as _}; use orchid_base::reqnot::{Client, ClientExt, MsgReaderExt, ReqHandleExt, ReqReaderExt, io_comm};
use orchid_base::stash::{stash, with_stash};
use orchid_base::tree::AtomRepr; use orchid_base::tree::AtomRepr;
use crate::api; use crate::api;
use crate::atom::AtomHand; use crate::atom::AtomHand;
use crate::ctx::Ctx; use crate::ctx::{Ctx, JoinHandle};
use crate::dealias::{ChildError, ChildErrorKind, walk}; use crate::dealias::{ChildError, ChildErrorKind, walk};
use crate::expr::{Expr, PathSetBuilder}; use crate::expr::{Expr, PathSetBuilder};
use crate::system::SystemCtor; use crate::system::SystemCtor;
use crate::tree::MemberKind; use crate::tree::MemberKind;
pub struct ExtPort {
pub input: Pin<Box<dyn AsyncWrite>>,
pub output: Pin<Box<dyn AsyncRead>>,
}
pub struct ReqPair<R: Request>(R, Sender<R::Response>); pub struct ReqPair<R: Request>(R, Sender<R::Response>);
/// Data held about an Extension. This is refcounted within [Extension]. It's /// Data held about an Extension. This is refcounted within [Extension]. It's
@@ -42,120 +46,129 @@ pub struct ReqPair<R: Request>(R, Sender<R::Response>);
pub struct ExtensionData { pub struct ExtensionData {
name: String, name: String,
ctx: Ctx, ctx: Ctx,
reqnot: ReqNot<api::HostMsgSet>, join_ext: Option<Box<dyn JoinHandle>>,
client: Rc<dyn Client>,
systems: Vec<SystemCtor>, systems: Vec<SystemCtor>,
logger: Logger,
next_pars: RefCell<NonZeroU64>, next_pars: RefCell<NonZeroU64>,
exiting_snd: Sender<()>,
lex_recur: Mutex<HashMap<api::ParsId, Sender<ReqPair<api::SubLex>>>>, lex_recur: Mutex<HashMap<api::ParsId, Sender<ReqPair<api::SubLex>>>>,
strings: RefCell<HashSet<IStr>>,
string_vecs: RefCell<HashSet<IStrv>>,
} }
impl Drop for ExtensionData { impl Drop for ExtensionData {
fn drop(&mut self) { fn drop(&mut self) {
let reqnot = self.reqnot.clone(); let client = self.client.clone();
let mut exiting_snd = self.exiting_snd.clone(); let join_ext = self.join_ext.take().expect("Only called once in Drop");
(self.ctx.spawn)(Box::pin(async move { stash(async move {
reqnot.notify(api::HostExtNotif::Exit).await; client.notify(api::HostExtNotif::Exit).await.unwrap();
join_ext.join().await;
exiting_snd.send(()).await.unwrap() })
}))
} }
} }
#[derive(Clone)] #[derive(Clone)]
pub struct Extension(Rc<ExtensionData>); pub struct Extension(Rc<ExtensionData>);
impl Extension { impl Extension {
pub fn new(init: ExtInit, logger: Logger, msg_logger: Logger, ctx: Ctx) -> io::Result<Self> { pub async fn new(mut init: ExtPort, ctx: Ctx) -> io::Result<Self> {
api::HostHeader { logger: ctx.logger.to_api() }.encode(init.input.as_mut()).await.unwrap();
init.input.flush().await.unwrap();
let header = api::ExtensionHeader::decode(init.output.as_mut()).await.unwrap();
let header2 = header.clone();
Ok(Self(Rc::new_cyclic(|weak: &Weak<ExtensionData>| { Ok(Self(Rc::new_cyclic(|weak: &Weak<ExtensionData>| {
let init = Rc::new(init); // context not needed because exit is extension-initiated
let (exiting_snd, exiting_rcv) = channel::<()>(0); let (client, _, comm) = io_comm(Rc::new(Mutex::new(init.input)), Mutex::new(init.output));
(ctx.spawn)({ let weak2 = weak;
clone!(init, weak, ctx); let weak = weak.clone();
Box::pin(async move { let ctx2 = ctx.clone();
let rcv_stream = stream(async |mut cx| { let join_ext = ctx.clone().spawn(async move {
loop { comm
cx.emit(init.recv().await).await .listen(
} async |reader| {
}); with_stash(async {
let mut event_stream = pin!(stream::select(exiting_rcv.map(|()| None), rcv_stream));
while let Some(Some(msg)) = event_stream.next().await {
if let Some(reqnot) = weak.upgrade().map(|rc| rc.reqnot.clone()) {
let reqnot = reqnot.clone();
(ctx.spawn)(Box::pin(async move {
reqnot.receive(&msg).await;
}))
}
}
})
});
ExtensionData {
name: init.name.clone(),
exiting_snd,
ctx: ctx.clone(),
systems: (init.systems.iter().cloned())
.map(|decl| SystemCtor { decl, ext: WeakExtension(weak.clone()) })
.collect(),
logger: logger.clone(),
next_pars: RefCell::new(NonZeroU64::new(1).unwrap()),
lex_recur: Mutex::default(),
reqnot: ReqNot::new(
msg_logger,
move |sfn, _| clone!(init; Box::pin(async move { init.send(sfn).await })),
clone!(weak; move |notif, _| {
clone!(weak; Box::pin(async move {
let this = Extension(weak.upgrade().unwrap()); let this = Extension(weak.upgrade().unwrap());
let notif = reader.read::<api::ExtHostNotif>().await.unwrap();
// Log notifications themselves are never reported here, since their payload is written to the log anyway
if !matches!(notif, api::ExtHostNotif::Log(_)) { if !matches!(notif, api::ExtHostNotif::Log(_)) {
writeln!(this.reqnot().logger(), "Host received notif {notif:?}"); writeln!(log("msg"), "Host received notif {notif:?}").await;
} }
match notif { match notif {
api::ExtHostNotif::ExprNotif(api::ExprNotif::Acquire(acq)) => { api::ExtHostNotif::ExprNotif(api::ExprNotif::Acquire(acq)) => {
let target = this.0.ctx.exprs.get_expr(acq.1).expect("Invalid ticket"); let target = this.0.ctx.exprs.get_expr(acq.1).expect("Invalid ticket");
this.0.ctx.exprs.give_expr(target) this.0.ctx.exprs.give_expr(target)
} },
api::ExtHostNotif::ExprNotif(api::ExprNotif::Release(rel)) => { api::ExtHostNotif::ExprNotif(api::ExprNotif::Release(rel)) => {
if this.is_own_sys(rel.0).await { if this.is_own_sys(rel.0).await {
this.0.ctx.exprs.take_expr(rel.1); this.0.ctx.exprs.take_expr(rel.1);
} else { } else {
writeln!(this.reqnot().logger(), "Not our system {:?}", rel.0) writeln!(log("warn"), "Not our system {:?}", rel.0).await
} }
}, },
api::ExtHostNotif::Log(api::Log(str)) => this.logger().log(str), api::ExtHostNotif::Log(api::Log { category, message }) =>
write!(log(&es(category).await), "{message}").await,
api::ExtHostNotif::Sweeped(data) => {
for i in join_all(data.strings.into_iter().map(es)).await {
this.0.strings.borrow_mut().remove(&i);
} }
}))}), for i in join_all(data.vecs.into_iter().map(ev)).await {
this.0.string_vecs.borrow_mut().remove(&i);
}
},
}
Ok(())
})
.await
},
async |mut reader| {
with_stash(async {
let req = reader.read_req::<api::ExtHostReq>().await.unwrap();
let handle = reader.finish().await;
// Atom printing and interning is never reported because it generates too much
// noise
if !matches!(req, api::ExtHostReq::ExtAtomPrint(_))
&& !matches!(req, api::ExtHostReq::IntReq(_))
{ {
clone!(weak, ctx); writeln!(log("msg"), "Host received request {req:?}").await;
move |hand, req| {
clone!(weak, ctx);
Box::pin(async move {
let this = Self(weak.upgrade().unwrap());
if !matches!(req, api::ExtHostReq::ExtAtomPrint(_)) {
writeln!(this.reqnot().logger(), "Host received request {req:?}");
} }
let i = this.ctx().i.clone(); let this = Self(weak.upgrade().unwrap());
match req { match req {
api::ExtHostReq::Ping(ping) => hand.handle(&ping, &()).await, api::ExtHostReq::Ping(ping) => handle.reply(&ping, &()).await,
api::ExtHostReq::IntReq(intreq) => match intreq { api::ExtHostReq::IntReq(intreq) => match intreq {
api::IntReq::InternStr(s) => hand.handle(&s, &i.i(&*s.0).await.to_api()).await, api::IntReq::InternStr(s) => {
api::IntReq::InternStrv(v) => { let i = is(&s.0).await;
let tokens = join_all(v.0.iter().map(|m| i.ex(*m))).await; this.0.strings.borrow_mut().insert(i.clone());
hand.handle(&v, &i.i(&tokens).await.to_api()).await handle.reply(&s, &i.to_api()).await
},
api::IntReq::InternStrv(v) => {
let tokens = join_all(v.0.iter().map(|m| es(*m))).await;
this.0.strings.borrow_mut().extend(tokens.iter().cloned());
let i = iv(&tokens).await;
this.0.string_vecs.borrow_mut().insert(i.clone());
handle.reply(&v, &i.to_api()).await
},
api::IntReq::ExternStr(si) => {
let i = es(si.0).await;
this.0.strings.borrow_mut().insert(i.clone());
handle.reply(&si, &i.to_string()).await
}, },
api::IntReq::ExternStr(si) =>
hand.handle(&si, &Tok::<String>::from_api(si.0, &i).await.rc()).await,
api::IntReq::ExternStrv(vi) => { api::IntReq::ExternStrv(vi) => {
let markerv = (i.ex(vi.0).await.iter()).map(|t| t.to_api()).collect_vec(); let i = ev(vi.0).await;
hand.handle(&vi, &markerv).await this.0.strings.borrow_mut().extend(i.iter().cloned());
this.0.string_vecs.borrow_mut().insert(i.clone());
let markerv = i.iter().map(|t| t.to_api()).collect_vec();
handle.reply(&vi, &markerv).await
}, },
}, },
api::ExtHostReq::Fwd(ref fw @ api::Fwd(ref atom, ref key, ref body)) => { api::ExtHostReq::Fwd(ref fw @ api::Fwd(ref atom, ref key, ref body)) => {
let sys = let sys =
ctx.system_inst(atom.owner).await.expect("owner of live atom dropped"); ctx.system_inst(atom.owner).await.expect("owner of live atom dropped");
let client = sys.client();
let reply = let reply =
sys.reqnot().request(api::Fwded(fw.0.clone(), *key, body.clone())).await; client.request(api::Fwded(fw.0.clone(), *key, body.clone())).await.unwrap();
hand.handle(fw, &reply).await handle.reply(fw, &reply).await
}, },
api::ExtHostReq::SysFwd(ref fw @ api::SysFwd(id, ref body)) => { api::ExtHostReq::SysFwd(ref fw @ api::SysFwd(id, ref body)) => {
let sys = ctx.system_inst(id).await.unwrap(); let sys = ctx.system_inst(id).await.unwrap();
hand.handle(fw, &sys.request(body.clone()).await).await handle.reply(fw, &sys.request(body.clone()).await).await
}, },
api::ExtHostReq::SubLex(sl) => { api::ExtHostReq::SubLex(sl) => {
let (rep_in, mut rep_out) = channel(0); let (rep_in, mut rep_out) = channel(0);
@@ -165,13 +178,13 @@ impl Extension {
lex_g.get(&sl.id).cloned().expect("Sublex for nonexistent lexid"); lex_g.get(&sl.id).cloned().expect("Sublex for nonexistent lexid");
req_in.send(ReqPair(sl.clone(), rep_in)).await.unwrap(); req_in.send(ReqPair(sl.clone(), rep_in)).await.unwrap();
} }
hand.handle(&sl, &rep_out.next().await.unwrap()).await handle.reply(&sl, &rep_out.next().await.unwrap()).await
}, },
api::ExtHostReq::ExprReq(expr_req) => match expr_req { api::ExtHostReq::ExprReq(expr_req) => match expr_req {
api::ExprReq::Inspect(ins @ api::Inspect { target }) => { api::ExprReq::Inspect(ins @ api::Inspect { target }) => {
let expr = ctx.exprs.get_expr(target).expect("Invalid ticket"); let expr = ctx.exprs.get_expr(target).expect("Invalid ticket");
hand handle
.handle(&ins, &api::Inspected { .reply(&ins, &api::Inspected {
refcount: expr.strong_count() as u32, refcount: expr.strong_count() as u32,
location: expr.pos().to_api(), location: expr.pos().to_api(),
kind: expr.to_api().await, kind: expr.to_api().await,
@@ -182,12 +195,12 @@ impl Extension {
let expr = Expr::from_api(expr, PathSetBuilder::new(), ctx.clone()).await; let expr = Expr::from_api(expr, PathSetBuilder::new(), ctx.clone()).await;
let expr_id = expr.id(); let expr_id = expr.id();
ctx.exprs.give_expr(expr); ctx.exprs.give_expr(expr);
hand.handle(cre, &expr_id).await handle.reply(cre, &expr_id).await
}, },
}, },
api::ExtHostReq::LsModule(ref ls @ api::LsModule(_sys, path)) => { api::ExtHostReq::LsModule(ref ls @ api::LsModule(_sys, path)) => {
let reply: <api::LsModule as Request>::Response = 'reply: { let reply: <api::LsModule as Request>::Response = 'reply: {
let path = i.ex(path).await; let path = ev(path).await;
let root = (ctx.root.read().await.upgrade()) let root = (ctx.root.read().await.upgrade())
.expect("LSModule called when root isn't in context"); .expect("LSModule called when root isn't in context");
let root_data = &*root.0.read().await; let root_data = &*root.0.read().await;
@@ -214,7 +227,7 @@ impl Extension {
} }
Ok(api::ModuleInfo { members }) Ok(api::ModuleInfo { members })
}; };
hand.handle(ls, &reply).await handle.reply(ls, &reply).await
}, },
api::ExtHostReq::ResolveNames(ref rn) => { api::ExtHostReq::ResolveNames(ref rn) => {
let api::ResolveNames { constid, names, sys } = rn; let api::ResolveNames { constid, names, sys } = rn;
@@ -226,42 +239,63 @@ impl Extension {
}; };
let responses = stream(async |mut cx| { let responses = stream(async |mut cx| {
for name in names { for name in names {
cx.emit(match resolver(&ctx.i.ex(*name).await[..]).await { cx.emit(match resolver(&ev(*name).await[..]).await {
Ok(abs) => Ok(abs.to_sym(&ctx.i).await.to_api()), Ok(abs) => {
Err(e) => Err(e.to_api()), let sym = abs.to_sym().await;
this.0.string_vecs.borrow_mut().insert(sym.tok());
Ok(sym.to_api())
},
Err(e) => {
(this.0.strings.borrow_mut())
.extend(e.iter().map(|e| e.description.clone()));
Err(e.to_api())
},
}) })
.await .await
} }
}) })
.collect() .collect()
.await; .await;
hand.handle(rn, &responses).await handle.reply(rn, &responses).await
}, },
api::ExtHostReq::ExtAtomPrint(ref eap @ api::ExtAtomPrint(ref atom)) => { api::ExtHostReq::ExtAtomPrint(ref eap @ api::ExtAtomPrint(ref atom)) => {
let atom = AtomHand::from_api(atom, Pos::None, &mut ctx.clone()).await; let atom = AtomHand::from_api(atom, Pos::None, &mut ctx.clone()).await;
let unit = atom.print(&FmtCtxImpl { i: &this.ctx().i }).await; let unit = atom.print(&FmtCtxImpl::default()).await;
hand.handle(eap, &unit.to_api()).await handle.reply(eap, &unit.to_api()).await
}, },
} }
}) })
} .await
}, },
), )
.await
.unwrap();
});
ExtensionData {
name: header2.name.clone(),
ctx: ctx2,
systems: (header.systems.iter().cloned())
.map(|decl| SystemCtor { decl, ext: WeakExtension(weak2.clone()) })
.collect(),
join_ext: Some(join_ext),
next_pars: RefCell::new(NonZeroU64::new(1).unwrap()),
lex_recur: Mutex::default(),
client: Rc::new(client),
strings: RefCell::default(),
string_vecs: RefCell::default(),
} }
}))) })))
} }
pub fn name(&self) -> &String { &self.0.name } pub fn name(&self) -> &String { &self.0.name }
#[must_use] #[must_use]
pub fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { &self.0.reqnot } pub fn client(&self) -> &dyn Client { &*self.0.client }
#[must_use] #[must_use]
pub fn ctx(&self) -> &Ctx { &self.0.ctx } pub fn ctx(&self) -> &Ctx { &self.0.ctx }
#[must_use]
pub fn logger(&self) -> &Logger { &self.0.logger }
pub fn system_ctors(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() } pub fn system_ctors(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() }
#[must_use] #[must_use]
pub async fn is_own_sys(&self, id: api::SysId) -> bool { pub async fn is_own_sys(&self, id: api::SysId) -> bool {
let Some(sys) = self.ctx().system_inst(id).await else { let Some(sys) = self.ctx().system_inst(id).await else {
writeln!(self.logger(), "Invalid system ID {id:?}"); writeln!(log("warn"), "Invalid system ID {id:?}").await;
return false; return false;
}; };
Rc::ptr_eq(&self.0, &sys.ext().0) Rc::ptr_eq(&self.0, &sys.ext().0)
@@ -274,7 +308,7 @@ impl Extension {
} }
pub(crate) async fn lex_req<F: Future<Output = Option<api::SubLexed>>>( pub(crate) async fn lex_req<F: Future<Output = Option<api::SubLexed>>>(
&self, &self,
source: Tok<String>, source: IStr,
src: Sym, src: Sym,
pos: u32, pos: u32,
sys: api::SysId, sys: api::SysId,
@@ -287,9 +321,10 @@ impl Extension {
self.0.lex_recur.lock().await.insert(id, req_in); // lex_recur released self.0.lex_recur.lock().await.insert(id, req_in); // lex_recur released
let (ret, ()) = join( let (ret, ()) = join(
async { async {
let res = (self.reqnot()) let res = (self.client())
.request(api::LexExpr { id, pos, sys, src: src.to_api(), text: source.to_api() }) .request(api::LexExpr { id, pos, sys, src: src.to_api(), text: source.to_api() })
.await; .await
.unwrap();
// collect sender to unblock recursion handler branch before returning // collect sender to unblock recursion handler branch before returning
self.0.lex_recur.lock().await.remove(&id); self.0.lex_recur.lock().await.remove(&id);
res res
@@ -306,10 +341,10 @@ impl Extension {
} }
pub fn system_drop(&self, id: api::SysId) { pub fn system_drop(&self, id: api::SysId) {
let rc = self.clone(); let rc = self.clone();
(self.ctx().spawn)(Box::pin(async move { let _ = self.ctx().spawn(with_stash(async move {
rc.reqnot().request(api::SystemDrop(id)).await; rc.client().request(api::SystemDrop(id)).await.unwrap();
rc.ctx().systems.write().await.remove(&id); rc.ctx().systems.write().await.remove(&id);
})) }));
} }
#[must_use] #[must_use]
pub fn downgrade(&self) -> WeakExtension { WeakExtension(Rc::downgrade(&self.0)) } pub fn downgrade(&self) -> WeakExtension { WeakExtension(Rc::downgrade(&self.0)) }

View File

@@ -1,10 +1,8 @@
use std::rc::Rc;
use futures::FutureExt; use futures::FutureExt;
use futures::lock::Mutex; use futures::lock::Mutex;
use orchid_base::clone; use orchid_base::clone;
use orchid_base::error::{OrcErrv, OrcRes, mk_errv}; use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
use orchid_base::interner::Tok; use orchid_base::interner::{IStr, is};
use orchid_base::location::SrcRange; use orchid_base::location::SrcRange;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::parse::{name_char, name_start, op_char, unrep_space}; use orchid_base::parse::{name_char, name_start, op_char, unrep_space};
@@ -20,7 +18,7 @@ use crate::system::System;
pub struct LexCtx<'a> { pub struct LexCtx<'a> {
pub systems: &'a [System], pub systems: &'a [System],
pub source: &'a Tok<String>, pub source: &'a IStr,
pub path: &'a Sym, pub path: &'a Sym,
pub tail: &'a str, pub tail: &'a str,
pub sub_trees: &'a mut Vec<Expr>, pub sub_trees: &'a mut Vec<Expr>,
@@ -60,7 +58,7 @@ impl<'a> LexCtx<'a> {
} }
#[must_use] #[must_use]
pub async fn des_subtree(&mut self, tree: &api::TokenTree, exprs: ExprStore) -> ParsTokTree { pub async fn des_subtree(&mut self, tree: &api::TokenTree, exprs: ExprStore) -> ParsTokTree {
ParsTokTree::from_api(tree, &mut { exprs }, &mut self.ctx.clone(), self.path, &self.ctx.i).await ParsTokTree::from_api(tree, &mut { exprs }, &mut self.ctx.clone(), self.path).await
} }
#[must_use] #[must_use]
pub fn strip_char(&mut self, tgt: char) -> bool { pub fn strip_char(&mut self, tgt: char) -> bool {
@@ -98,21 +96,21 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
let name = &ctx.tail[..ctx.tail.len() - tail.len() - "::".len()]; let name = &ctx.tail[..ctx.tail.len() - tail.len() - "::".len()];
ctx.set_tail(tail); ctx.set_tail(tail);
let body = lex_once(ctx).boxed_local().await?; let body = lex_once(ctx).boxed_local().await?;
ParsTok::NS(ctx.ctx.i.i(name).await, Box::new(body)) ParsTok::NS(is(name).await, Box::new(body))
} else if ctx.strip_prefix("--[") { } else if ctx.strip_prefix("--[") {
let Some((cmt, tail)) = ctx.tail.split_once("]--") else { let Some((cmt, tail)) = ctx.tail.split_once("]--") else {
return Err(mk_errv( return Err(mk_errv(
ctx.ctx.i.i("Unterminated block comment").await, is("Unterminated block comment").await,
"This block comment has no ending ]--", "This block comment has no ending ]--",
[SrcRange::new(start..start + 3, ctx.path)], [SrcRange::new(start..start + 3, ctx.path)],
)); ));
}; };
ctx.set_tail(tail); ctx.set_tail(tail);
ParsTok::Comment(Rc::new(cmt.to_string())) ParsTok::Comment(is(cmt).await)
} else if let Some(tail) = ctx.tail.strip_prefix("--").filter(|t| !t.starts_with(op_char)) { } else if let Some(tail) = ctx.tail.strip_prefix("--").filter(|t| !t.starts_with(op_char)) {
let end = tail.find(['\n', '\r']).map_or(tail.len(), |n| n - 1); let end = tail.find(['\n', '\r']).map_or(tail.len(), |n| n - 1);
ctx.push_pos(end as u32); ctx.push_pos(end as u32);
ParsTok::Comment(Rc::new(tail[2..end].to_string())) ParsTok::Comment(is(&tail[2..end]).await)
} else if let Some(tail) = ctx.tail.strip_prefix('\\').filter(|t| t.starts_with(name_start)) { } else if let Some(tail) = ctx.tail.strip_prefix('\\').filter(|t| t.starts_with(name_start)) {
// fanciness like \$placeh in templates is resolved in the macro engine. // fanciness like \$placeh in templates is resolved in the macro engine.
ctx.set_tail(tail); ctx.set_tail(tail);
@@ -125,7 +123,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
while !ctx.strip_char(*rp) { while !ctx.strip_char(*rp) {
if ctx.tail.is_empty() { if ctx.tail.is_empty() {
return Err(mk_errv( return Err(mk_errv(
ctx.ctx.i.i("unclosed paren").await, is("unclosed paren").await,
format!("this {lp} has no matching {rp}"), format!("this {lp} has no matching {rp}"),
[SrcRange::new(start..start + 1, ctx.path)], [SrcRange::new(start..start + 1, ctx.path)],
)); ));
@@ -162,10 +160,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
}) })
.await; .await;
match lx { match lx {
Err(e) => Err(e) => return Err(errors.into_iter().fold(OrcErrv::from_api(&e).await, |a, b| a + b)),
return Err(
errors.into_iter().fold(OrcErrv::from_api(&e, &ctx.ctx.i).await, |a, b| a + b),
),
Ok(Some(lexed)) => { Ok(Some(lexed)) => {
ctx.set_pos(lexed.pos); ctx.set_pos(lexed.pos);
let lexed_tree = ctx.des_subtree(&lexed.expr, temp_store).await; let lexed_tree = ctx.des_subtree(&lexed.expr, temp_store).await;
@@ -185,12 +180,12 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
} }
} }
if ctx.tail.starts_with(name_start) { if ctx.tail.starts_with(name_start) {
ParsTok::Name(ctx.ctx.i.i(ctx.get_start_matches(name_char)).await) ParsTok::Name(is(ctx.get_start_matches(name_char)).await)
} else if ctx.tail.starts_with(op_char) { } else if ctx.tail.starts_with(op_char) {
ParsTok::Name(ctx.ctx.i.i(ctx.get_start_matches(op_char)).await) ParsTok::Name(is(ctx.get_start_matches(op_char)).await)
} else { } else {
return Err(mk_errv( return Err(mk_errv(
ctx.ctx.i.i("Unrecognized character").await, is("Unrecognized character").await,
"The following syntax is meaningless.", "The following syntax is meaningless.",
[SrcRange::new(start..start + 1, ctx.path)], [SrcRange::new(start..start + 1, ctx.path)],
)); ));
@@ -199,12 +194,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
Ok(ParsTokTree { tok, sr: SrcRange::new(start..ctx.get_pos(), ctx.path) }) Ok(ParsTokTree { tok, sr: SrcRange::new(start..ctx.get_pos(), ctx.path) })
} }
pub async fn lex( pub async fn lex(text: IStr, path: Sym, systems: &[System], ctx: &Ctx) -> OrcRes<Vec<ParsTokTree>> {
text: Tok<String>,
path: Sym,
systems: &[System],
ctx: &Ctx,
) -> OrcRes<Vec<ParsTokTree>> {
let mut sub_trees = Vec::new(); let mut sub_trees = Vec::new();
let mut ctx = let mut ctx =
LexCtx { source: &text, sub_trees: &mut sub_trees, tail: &text[..], systems, path: &path, ctx }; LexCtx { source: &text, sub_trees: &mut sub_trees, tail: &text[..], systems, path: &path, ctx };

View File

@@ -3,11 +3,13 @@ use orchid_api as api;
pub mod atom; pub mod atom;
pub mod ctx; pub mod ctx;
pub mod dealias; pub mod dealias;
pub mod dylib;
pub mod execute; pub mod execute;
pub mod expr; pub mod expr;
pub mod expr_store; pub mod expr_store;
pub mod extension; pub mod extension;
pub mod lex; pub mod lex;
pub mod logger;
pub mod parse; pub mod parse;
pub mod parsed; pub mod parsed;
pub mod subprocess; pub mod subprocess;

84
orchid-host/src/logger.rs Normal file
View File

@@ -0,0 +1,84 @@
use std::fmt::Arguments;
use std::fs::File;
use std::io::{Write, stderr};
use std::rc::Rc;
use futures::future::LocalBoxFuture;
use hashbrown::HashMap;
use itertools::Itertools;
use orchid_base::logging::{LogWriter, Logger};
use crate::api;
pub struct LogWriterImpl(api::LogStrategy);
impl LogWriter for LogWriterImpl {
fn write_fmt<'a>(&'a self, fmt: Arguments<'a>) -> LocalBoxFuture<'a, ()> {
Box::pin(async move {
match &self.0 {
api::LogStrategy::Discard => (),
api::LogStrategy::Default => {
stderr().write_fmt(fmt).expect("Could not write to stderr!");
stderr().flush().expect("Could not flush stderr")
},
api::LogStrategy::File { path, .. } => {
let mut file = (File::options().write(true).create(true).truncate(false).open(path))
.unwrap_or_else(|e| panic!("Could not open {path}: {e}"));
file.write_fmt(fmt).unwrap_or_else(|e| panic!("Could not write to {path}: {e}"));
},
}
})
}
}
#[derive(Clone, Default)]
pub struct LoggerImpl {
routing: HashMap<String, api::LogStrategy>,
default: Option<api::LogStrategy>,
}
impl LoggerImpl {
pub fn to_api(&self) -> api::Logger {
api::Logger {
default: self.default.clone(),
routing: self.routing.iter().map(|(k, v)| (k.clone(), v.clone())).collect(),
}
}
pub fn new(
default: Option<api::LogStrategy>,
strats: impl IntoIterator<Item = (String, api::LogStrategy)>,
) -> Self {
Self { routing: strats.into_iter().collect(), default }
}
pub fn set_default(&mut self, strat: api::LogStrategy) { self.default = Some(strat) }
pub fn clear_default(&mut self) { self.default = None }
pub fn set_category(&mut self, category: &str, strat: api::LogStrategy) {
self.routing.insert(category.to_string(), strat);
}
pub fn with_default(mut self, strat: api::LogStrategy) -> Self {
self.set_default(strat);
self
}
pub fn with_category(mut self, category: &str, strat: api::LogStrategy) -> Self {
self.set_category(category, strat);
self
}
pub async fn log(&self, category: &str, msg: impl AsRef<str>) {
writeln!(self.writer(category), "{}", msg.as_ref()).await
}
pub fn has_category(&self, category: &str) -> bool { self.routing.contains_key(category) }
pub async fn log_buf(&self, category: &str, event: impl AsRef<str>, buf: &[u8]) {
if std::env::var("ORCHID_LOG_BUFFERS").is_ok_and(|v| !v.is_empty()) {
let data = buf.iter().map(|b| format!("{b:02x}")).join(" ");
writeln!(self.writer(category), "{}: [{data}]", event.as_ref()).await
}
}
}
impl Logger for LoggerImpl {
fn writer(&self, category: &str) -> Rc<dyn LogWriter> {
Rc::new(LogWriterImpl(self.strat(category).clone()))
}
fn strat(&self, category: &str) -> api::LogStrategy {
(self.routing.get(category).cloned().or(self.default.clone()))
.expect("Invalid category and catchall logger not set")
}
}
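Taken together with Ctx::new above, a host could wire the new LoggerImpl up roughly as follows. The category routing shown here and the TokioSpawner type are illustrative assumptions, not part of the diff; the strategy variants are the ones matched in LogWriterImpl above.

  // Sketch only: drop the "msg" protocol chatter, send everything else to stderr.
  async fn logging_demo() {
    let logger = LoggerImpl::default()
      .with_default(api::LogStrategy::Default)
      .with_category("msg", api::LogStrategy::Discard);
    let ctx = Ctx::new(TokioSpawner, logger.clone());
    logger.log("debug", "host context ready").await;
    drop(ctx);
  }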

View File

@@ -1,12 +1,11 @@
use futures::FutureExt; use futures::FutureExt;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::{OrcRes, Reporter, mk_errv}; use orchid_base::error::{OrcRes, mk_errv, report};
use orchid_base::format::fmt; use orchid_base::format::fmt;
use orchid_base::interner::{Interner, Tok}; use orchid_base::interner::{IStr, is};
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::parse::{ use orchid_base::parse::{
Comment, Import, ParseCtx, Parsed, Snippet, expect_end, line_items, parse_multiname, Comment, Import, Parsed, Snippet, expect_end, line_items, parse_multiname, try_pop_no_fluff,
try_pop_no_fluff,
}; };
use orchid_base::tree::{Paren, TokTree, Token}; use orchid_base::tree::{Paren, TokTree, Token};
use substack::Substack; use substack::Substack;
@@ -22,12 +21,6 @@ pub struct HostParseCtxImpl<'a> {
pub ctx: Ctx, pub ctx: Ctx,
pub src: Sym, pub src: Sym,
pub systems: &'a [System], pub systems: &'a [System],
pub rep: &'a Reporter,
}
impl ParseCtx for HostParseCtxImpl<'_> {
fn rep(&self) -> &Reporter { self.rep }
fn i(&self) -> &Interner { &self.ctx.i }
} }
impl HostParseCtx for HostParseCtxImpl<'_> { impl HostParseCtx for HostParseCtxImpl<'_> {
@@ -36,7 +29,7 @@ impl HostParseCtx for HostParseCtxImpl<'_> {
fn src_path(&self) -> Sym { self.src.clone() } fn src_path(&self) -> Sym { self.src.clone() }
} }
pub trait HostParseCtx: ParseCtx { pub trait HostParseCtx {
#[must_use] #[must_use]
fn ctx(&self) -> &Ctx; fn ctx(&self) -> &Ctx;
#[must_use] #[must_use]
@@ -47,14 +40,14 @@ pub trait HostParseCtx: ParseCtx {
pub async fn parse_items( pub async fn parse_items(
ctx: &impl HostParseCtx, ctx: &impl HostParseCtx,
path: Substack<'_, Tok<String>>, path: Substack<'_, IStr>,
items: ParsSnippet<'_>, items: ParsSnippet<'_>,
) -> OrcRes<Vec<Item>> { ) -> OrcRes<Vec<Item>> {
let lines = line_items(ctx, items).await; let lines = line_items(items).await;
let mut line_ok = Vec::new(); let mut line_ok = Vec::new();
for Parsed { output: comments, tail } in lines { for Parsed { output: comments, tail } in lines {
match parse_item(ctx, path.clone(), comments, tail).boxed_local().await { match parse_item(ctx, path.clone(), comments, tail).boxed_local().await {
Err(e) => ctx.rep().report(e), Err(e) => report(e),
Ok(l) => line_ok.extend(l), Ok(l) => line_ok.extend(l),
} }
} }
@@ -63,23 +56,23 @@ pub async fn parse_items(
pub async fn parse_item( pub async fn parse_item(
ctx: &impl HostParseCtx, ctx: &impl HostParseCtx,
path: Substack<'_, Tok<String>>, path: Substack<'_, IStr>,
comments: Vec<Comment>, comments: Vec<Comment>,
item: ParsSnippet<'_>, item: ParsSnippet<'_>,
) -> OrcRes<Vec<Item>> { ) -> OrcRes<Vec<Item>> {
match item.pop_front() { match item.pop_front() {
Some((TokTree { tok: Token::Name(n), .. }, postdisc)) => match n { Some((TokTree { tok: Token::Name(n), .. }, postdisc)) => match n {
n if *n == ctx.i().i("export").await => match try_pop_no_fluff(ctx, postdisc).await? { n if *n == is("export").await => match try_pop_no_fluff(postdisc).await? {
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } => Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } =>
parse_exportable_item(ctx, path, comments, true, n.clone(), tail).await, parse_exportable_item(ctx, path, comments, true, n.clone(), tail).await,
Parsed { output, tail: _ } => Err(mk_errv( Parsed { output, tail: _ } => Err(mk_errv(
ctx.i().i("Malformed export").await, is("Malformed export").await,
"`export` can either prefix other lines or list names inside ( )", "`export` can either prefix other lines or list names inside ( )",
[output.sr()], [output.sr()],
)), )),
}, },
n if *n == ctx.i().i("import").await => { n if *n == is("import").await => {
let imports = parse_import(ctx, postdisc).await?; let imports = parse_import(postdisc).await?;
Ok(Vec::from_iter(imports.into_iter().map(|t| Item { Ok(Vec::from_iter(imports.into_iter().map(|t| Item {
comments: comments.clone(), comments: comments.clone(),
sr: t.sr.clone(), sr: t.sr.clone(),
@@ -88,33 +81,29 @@ pub async fn parse_item(
}, },
n => parse_exportable_item(ctx, path, comments, false, n.clone(), postdisc).await, n => parse_exportable_item(ctx, path, comments, false, n.clone(), postdisc).await,
}, },
Some(_) => Err(mk_errv( Some(_) =>
ctx.i().i("Expected a line type").await, Err(mk_errv(is("Expected a line type").await, "All lines must begin with a keyword", [
"All lines must begin with a keyword", item.sr()
[item.sr()], ])),
)),
None => unreachable!("These lines are filtered and aggregated in earlier stages"), None => unreachable!("These lines are filtered and aggregated in earlier stages"),
} }
} }
pub async fn parse_import<'a>( pub async fn parse_import<'a>(tail: ParsSnippet<'a>) -> OrcRes<Vec<Import>> {
ctx: &impl HostParseCtx, let Parsed { output: imports, tail } = parse_multiname(tail).await?;
tail: ParsSnippet<'a>, expect_end(tail).await?;
) -> OrcRes<Vec<Import>> {
let Parsed { output: imports, tail } = parse_multiname(ctx, tail).await?;
expect_end(ctx, tail).await?;
Ok(imports) Ok(imports)
} }
pub async fn parse_exportable_item<'a>( pub async fn parse_exportable_item<'a>(
ctx: &impl HostParseCtx, ctx: &impl HostParseCtx,
path: Substack<'_, Tok<String>>, path: Substack<'_, IStr>,
comments: Vec<Comment>, comments: Vec<Comment>,
exported: bool, exported: bool,
discr: Tok<String>, discr: IStr,
tail: ParsSnippet<'a>, tail: ParsSnippet<'a>,
) -> OrcRes<Vec<Item>> { ) -> OrcRes<Vec<Item>> {
let kind = if discr == ctx.i().i("mod").await { let kind = if discr == is("mod").await {
let (name, body) = parse_module(ctx, path, tail).await?; let (name, body) = parse_module(ctx, path, tail).await?;
ItemKind::Member(ParsedMember { name, exported, kind: ParsedMemberKind::Mod(body) }) ItemKind::Member(ParsedMember { name, exported, kind: ParsedMemberKind::Mod(body) })
} else if let Some(parser) = ctx.systems().find_map(|s| s.get_parser(discr.clone())) { } else if let Some(parser) = ctx.systems().find_map(|s| s.get_parser(discr.clone())) {
@@ -127,7 +116,7 @@ pub async fn parse_exportable_item<'a>(
} else { } else {
let ext_lines = ctx.systems().flat_map(System::line_types).join(", "); let ext_lines = ctx.systems().flat_map(System::line_types).join(", ");
return Err(mk_errv( return Err(mk_errv(
ctx.i().i("Unrecognized line type").await, is("Unrecognized line type").await,
format!("Line types are: mod, {ext_lines}"), format!("Line types are: mod, {ext_lines}"),
[tail.prev().sr()], [tail.prev().sr()],
)); ));
@@ -137,25 +126,25 @@ pub async fn parse_exportable_item<'a>(
pub async fn parse_module<'a>( pub async fn parse_module<'a>(
ctx: &impl HostParseCtx, ctx: &impl HostParseCtx,
path: Substack<'_, Tok<String>>, path: Substack<'_, IStr>,
tail: ParsSnippet<'a>, tail: ParsSnippet<'a>,
) -> OrcRes<(Tok<String>, ParsedModule)> { ) -> OrcRes<(IStr, ParsedModule)> {
let (name, tail) = match try_pop_no_fluff(ctx, tail).await? { let (name, tail) = match try_pop_no_fluff(tail).await? {
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } => (n.clone(), tail), Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } => (n.clone(), tail),
Parsed { output, .. } => { Parsed { output, .. } => {
return Err(mk_errv( return Err(mk_errv(
ctx.i().i("Missing module name").await, is("Missing module name").await,
format!("A name was expected, {} was found", fmt(output, ctx.i()).await), format!("A name was expected, {} was found", fmt(output).await),
[output.sr()], [output.sr()],
)); ));
}, },
}; };
let Parsed { output, tail: surplus } = try_pop_no_fluff(ctx, tail).await?; let Parsed { output, tail: surplus } = try_pop_no_fluff(tail).await?;
expect_end(ctx, surplus).await?; expect_end(surplus).await?;
let Some(body) = output.as_s(Paren::Round) else { let Some(body) = output.as_s(Paren::Round) else {
return Err(mk_errv( return Err(mk_errv(
ctx.i().i("Expected module body").await, is("Expected module body").await,
format!("A ( block ) was expected, {} was found", fmt(output, ctx.i()).await), format!("A ( block ) was expected, {} was found", fmt(output).await),
[output.sr()], [output.sr()],
)); ));
}; };

View File

@@ -6,7 +6,7 @@ use futures::future::{LocalBoxFuture, join_all};
use hashbrown::HashSet; use hashbrown::HashSet;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants}; use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
use orchid_base::interner::Tok; use orchid_base::interner::{IStr, IStrv};
use orchid_base::location::SrcRange; use orchid_base::location::SrcRange;
use orchid_base::parse::{Comment, Import}; use orchid_base::parse::{Comment, Import};
use orchid_base::tl_cache; use orchid_base::tl_cache;
@@ -57,10 +57,10 @@ impl Format for Item {
ItemKind::Member(mem) => match &mem.kind { ItemKind::Member(mem) => match &mem.kind {
ParsedMemberKind::Const(_, sys) => ParsedMemberKind::Const(_, sys) =>
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("const {0} via {1}"))) tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("const {0} via {1}")))
.units([mem.name.rc().into(), sys.print(c).await]), .units([mem.name.to_string().into(), sys.print(c).await]),
ParsedMemberKind::Mod(module) => ParsedMemberKind::Mod(module) =>
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("module {0} {{\n\t{1}\n}}"))) tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("module {0} {{\n\t{1}\n}}")))
.units([mem.name.rc().into(), module.print(c).boxed_local().await]), .units([mem.name.to_string().into(), module.print(c).boxed_local().await]),
}, },
}; };
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{0}\n{1}"))) tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{0}\n{1}")))
@@ -69,14 +69,14 @@ impl Format for Item {
} }
pub struct ParsedMember { pub struct ParsedMember {
pub name: Tok<String>, pub name: IStr,
pub exported: bool, pub exported: bool,
pub kind: ParsedMemberKind, pub kind: ParsedMemberKind,
} }
impl ParsedMember { impl ParsedMember {
#[must_use] #[must_use]
pub fn name(&self) -> Tok<String> { self.name.clone() } pub fn name(&self) -> IStr { self.name.clone() }
pub fn new(exported: bool, name: Tok<String>, kind: impl Into<ParsedMemberKind>) -> Self { pub fn new(exported: bool, name: IStr, kind: impl Into<ParsedMemberKind>) -> Self {
Self { exported, name, kind: kind.into() } Self { exported, name, kind: kind.into() }
} }
} }
@@ -89,17 +89,14 @@ impl Debug for ParsedMember {
} }
} }
pub(crate) type ParsedExprCallback = pub(crate) type ParsedExprCallback = Rc<dyn for<'a> Fn(&'a [IStr]) -> LocalBoxFuture<'a, Expr>>;
Rc<dyn for<'a> Fn(&'a [Tok<String>]) -> LocalBoxFuture<'a, Expr>>;
pub struct ParsedExpr { pub struct ParsedExpr {
pub(crate) debug: String, pub(crate) debug: String,
pub(crate) callback: ParsedExprCallback, pub(crate) callback: ParsedExprCallback,
} }
impl ParsedExpr { impl ParsedExpr {
pub async fn run(self, imported_names: &[Tok<String>]) -> Expr { pub async fn run(self, imported_names: &[IStr]) -> Expr { (self.callback)(imported_names).await }
(self.callback)(imported_names).await
}
} }
impl fmt::Debug for ParsedExpr { impl fmt::Debug for ParsedExpr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.debug) } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.debug) }
@@ -115,7 +112,7 @@ impl From<ParsedModule> for ParsedMemberKind {
} }
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub struct ParsedModule { pub struct ParsedModule {
pub exports: Vec<Tok<String>>, pub exports: Vec<IStr>,
pub items: Vec<Item>, pub items: Vec<Item>,
pub use_prelude: bool, pub use_prelude: bool,
} }
@@ -141,7 +138,7 @@ impl ParsedModule {
(self.items.iter()) (self.items.iter())
.filter_map(|it| if let ItemKind::Import(i) = &it.kind { Some(i) } else { None }) .filter_map(|it| if let ItemKind::Import(i) = &it.kind { Some(i) } else { None })
} }
pub fn default_item(self, name: Tok<String>, sr: SrcRange) -> Item { pub fn default_item(self, name: IStr, sr: SrcRange) -> Item {
let mem = ParsedMember { exported: true, name, kind: ParsedMemberKind::Mod(self) }; let mem = ParsedMember { exported: true, name, kind: ParsedMemberKind::Mod(self) };
Item { comments: vec![], sr, kind: ItemKind::Member(mem) } Item { comments: vec![], sr, kind: ItemKind::Member(mem) }
} }
@@ -150,7 +147,7 @@ impl Tree for ParsedModule {
type Ctx<'a> = (); type Ctx<'a> = ();
async fn child( async fn child(
&self, &self,
key: Tok<String>, key: IStr,
public_only: bool, public_only: bool,
(): &mut Self::Ctx<'_>, (): &mut Self::Ctx<'_>,
) -> ChildResult<'_, Self> { ) -> ChildResult<'_, Self> {
@@ -168,7 +165,7 @@ impl Tree for ParsedModule {
} }
ChildResult::Err(ChildErrorKind::Missing) ChildResult::Err(ChildErrorKind::Missing)
} }
fn children(&self, public_only: bool) -> HashSet<Tok<String>> { fn children(&self, public_only: bool) -> HashSet<IStr> {
let mut public: HashSet<_> = self.exports.iter().cloned().collect(); let mut public: HashSet<_> = self.exports.iter().cloned().collect();
if !public_only { if !public_only {
public.extend( public.extend(
@@ -197,11 +194,11 @@ impl Format for ParsedModule {
/// point to a module and rule_loc selects a macro rule within that module /// point to a module and rule_loc selects a macro rule within that module
#[derive(Clone, Debug, Hash, PartialEq, Eq)] #[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct ConstPath { pub struct ConstPath {
steps: Tok<Vec<Tok<String>>>, steps: IStrv,
} }
impl ConstPath { impl ConstPath {
#[must_use] #[must_use]
pub fn to_const(steps: Tok<Vec<Tok<String>>>) -> Self { Self { steps } } pub fn to_const(steps: IStrv) -> Self { Self { steps } }
} }
pub async fn tt_to_api(exprs: &mut ExprStore, subtree: ParsTokTree) -> api::TokenTree { pub async fn tt_to_api(exprs: &mut ExprStore, subtree: ParsTokTree) -> api::TokenTree {


@@ -1,102 +1,34 @@
use std::cell::RefCell; use std::{io, process};
use std::io::{self, Write};
use std::pin::Pin;
use async_process::{self, Child, ChildStdin, ChildStdout};
use futures::future::LocalBoxFuture;
use futures::io::BufReader; use futures::io::BufReader;
use futures::lock::Mutex; use futures::{self, AsyncBufReadExt, StreamExt};
use futures::{self, AsyncBufReadExt, AsyncWriteExt}; use orchid_base::logging::log;
use orchid_api_traits::{Decode, Encode}; #[cfg(feature = "tokio")]
use orchid_base::builtin::{ExtInit, ExtPort}; use tokio_util::compat::{TokioAsyncReadCompatExt, TokioAsyncWriteCompatExt};
use orchid_base::logging::Logger;
use orchid_base::msg::{recv_msg, send_msg};
use crate::api;
use crate::ctx::Ctx; use crate::ctx::Ctx;
use crate::extension::ExtPort;
pub async fn ext_command( #[cfg(feature = "tokio")]
cmd: std::process::Command, pub async fn ext_command(cmd: std::process::Command, ctx: Ctx) -> io::Result<ExtPort> {
logger: Logger, let name = cmd.get_program().to_string_lossy().to_string();
msg_logs: Logger, let mut child = tokio::process::Command::from(cmd)
ctx: Ctx, .stdin(process::Stdio::piped())
) -> io::Result<ExtInit> { .stdout(process::Stdio::piped())
let mut child = async_process::Command::from(cmd) .stderr(process::Stdio::piped())
.stdin(async_process::Stdio::piped())
.stdout(async_process::Stdio::piped())
.stderr(async_process::Stdio::piped())
.spawn()?; .spawn()?;
let mut stdin = child.stdin.take().unwrap(); std::thread::spawn(|| {});
api::HostHeader { log_strategy: logger.strat(), msg_logs: msg_logs.strat() } let stdin = child.stdin.take().unwrap();
.encode(Pin::new(&mut stdin)) let stdout = child.stdout.take().unwrap();
.await; let child_stderr = child.stderr.take().unwrap();
let mut stdout = child.stdout.take().unwrap(); let _ = ctx.spawn(Box::pin(async move {
let header = api::ExtensionHeader::decode(Pin::new(&mut stdout)).await; let _ = child;
let mut child_stderr = child.stderr.take().unwrap(); let mut lines = BufReader::new(child_stderr.compat()).lines();
(ctx.spawn)(Box::pin(async move { while let Some(line) = lines.next().await {
let mut reader = BufReader::new(&mut child_stderr); // route stderr with an empty category string. This is not the intended logging
loop { // method
let mut buf = String::new(); writeln!(log("stderr"), "{} err> {}", name, line.expect("Readline implies this")).await;
if 0 == reader.read_line(&mut buf).await.unwrap() {
break;
}
logger.log(buf.strip_suffix('\n').expect("Readline implies this"));
} }
})); }));
Ok(ExtInit { Ok(ExtPort { input: Box::pin(stdin.compat_write()), output: Box::pin(stdout.compat()) })
port: Box::new(Subprocess {
name: header.name.clone(),
child: RefCell::new(Some(child)),
stdin: Some(Mutex::new(Box::pin(stdin))),
stdout: Mutex::new(Box::pin(stdout)),
ctx,
}),
header,
})
}
pub struct Subprocess {
name: String,
child: RefCell<Option<Child>>,
stdin: Option<Mutex<Pin<Box<ChildStdin>>>>,
stdout: Mutex<Pin<Box<ChildStdout>>>,
ctx: Ctx,
}
impl Drop for Subprocess {
fn drop(&mut self) {
let mut child = self.child.borrow_mut().take().unwrap();
let name = self.name.clone();
if std::thread::panicking() {
eprintln!("Killing extension {name}");
// we don't really care to handle errors here
let _: Result<_, _> = std::io::stderr().flush();
let _: Result<_, _> = child.kill();
return;
}
let stdin = self.stdin.take().unwrap();
(self.ctx.spawn)(Box::pin(async move {
stdin.lock().await.close().await.unwrap();
let status = (child.status().await)
.unwrap_or_else(|e| panic!("{e}, extension {name} exited with error"));
assert!(status.success(), "Extension {name} exited with error {status}");
}))
}
}
impl ExtPort for Subprocess {
fn send<'a>(&'a self, msg: &'a [u8]) -> LocalBoxFuture<'a, ()> {
Box::pin(async {
send_msg(Pin::new(&mut *self.stdin.as_ref().unwrap().lock().await), msg).await.unwrap()
})
}
fn recv(&self) -> LocalBoxFuture<'_, Option<Vec<u8>>> {
Box::pin(async {
std::io::Write::flush(&mut std::io::stderr()).unwrap();
match recv_msg(self.stdout.lock().await.as_mut()).await {
Ok(msg) => Some(msg),
Err(e) if e.kind() == io::ErrorKind::BrokenPipe => None,
Err(e) if e.kind() == io::ErrorKind::UnexpectedEof => None,
Err(e) => panic!("Failed to read from stdout: {}, {e}", e.kind()),
}
})
}
} }
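
The new ext_command above drives the child's stderr through futures' buffered line stream instead of a manual read_line loop. A minimal sketch of that loop in isolation, assuming tokio-util's "compat" feature is enabled and using plain eprintln! in place of the project's log writer (forward_stderr is a hypothetical helper name):

use futures::StreamExt;
use futures::io::{AsyncBufReadExt, BufReader};
use tokio_util::compat::TokioAsyncReadCompatExt;

// Forward every stderr line of a spawned child to the host's own stderr.
async fn forward_stderr(stderr: tokio::process::ChildStderr, name: &str) {
    // compat() bridges tokio's AsyncRead to the futures-io traits BufReader expects.
    let mut lines = BufReader::new(stderr.compat()).lines();
    while let Some(line) = lines.next().await {
        match line {
            Ok(line) => eprintln!("{name} err> {line}"),
            Err(_) => break, // pipe closed mid-line; stop forwarding
        }
    }
}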


@@ -2,11 +2,11 @@ use futures::FutureExt;
use futures::future::join_all; use futures::future::join_all;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::{OrcErrv, OrcRes}; use orchid_base::error::{OrcErrv, OrcRes};
use orchid_base::interner::{Interner, Tok}; use orchid_base::interner::{IStr, es};
use orchid_base::location::SrcRange; use orchid_base::location::SrcRange;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::parse::Comment; use orchid_base::parse::Comment;
use orchid_base::reqnot::Requester; use orchid_base::reqnot::ClientExt;
use orchid_base::tree::ttv_from_api; use orchid_base::tree::ttv_from_api;
use substack::Substack; use substack::Substack;
@@ -22,7 +22,7 @@ pub struct Parser {
pub(crate) system: System, pub(crate) system: System,
pub(crate) idx: u16, pub(crate) idx: u16,
} }
type ModPath<'a> = Substack<'a, Tok<String>>; type ModPath<'a> = Substack<'a, IStr>;
impl Parser { impl Parser {
pub async fn parse( pub async fn parse(
@@ -39,7 +39,7 @@ impl Parser {
let line = let line =
join_all((line.into_iter()).map(|t| async { tt_to_api(&mut temp_store.clone(), t).await })) join_all((line.into_iter()).map(|t| async { tt_to_api(&mut temp_store.clone(), t).await }))
.await; .await;
let mod_path = ctx.src_path().suffix(path.unreverse(), self.system.i()).await; let mod_path = ctx.src_path().suffix(path.unreverse()).await;
let comments = comments.iter().map(Comment::to_api).collect_vec(); let comments = comments.iter().map(Comment::to_api).collect_vec();
let req = api::ParseLine { let req = api::ParseLine {
idx: self.idx, idx: self.idx,
@@ -50,17 +50,16 @@ impl Parser {
comments, comments,
line, line,
}; };
match self.system.reqnot().request(req).await { match self.system.client().request(req).await.unwrap() {
Ok(parsed_v) => Ok(parsed_v) =>
conv(parsed_v, path, callback, &mut ConvCtx { conv(parsed_v, path, callback, &mut ConvCtx {
i: self.system.i(),
mod_path: &mod_path, mod_path: &mod_path,
ext_exprs: &mut temp_store, ext_exprs: &mut temp_store,
src_path: &src_path, src_path: &src_path,
sys: &self.system, sys: &self.system,
}) })
.await, .await,
Err(e) => Err(OrcErrv::from_api(&e, &self.system.ctx().i).await), Err(e) => Err(OrcErrv::from_api(&e).await),
} }
} }
} }
@@ -69,13 +68,12 @@ struct ConvCtx<'a> {
sys: &'a System, sys: &'a System,
mod_path: &'a Sym, mod_path: &'a Sym,
src_path: &'a Sym, src_path: &'a Sym,
i: &'a Interner,
ext_exprs: &'a mut ExprStore, ext_exprs: &'a mut ExprStore,
} }
async fn conv( async fn conv(
parsed_v: Vec<api::ParsedLine>, parsed_v: Vec<api::ParsedLine>,
module: Substack<'_, Tok<String>>, module: Substack<'_, IStr>,
callback: &'_ mut impl AsyncFnMut(Substack<'_, Tok<String>>, Vec<ParsTokTree>) -> OrcRes<Vec<Item>>, callback: &'_ mut impl AsyncFnMut(Substack<'_, IStr>, Vec<ParsTokTree>) -> OrcRes<Vec<Item>>,
ctx: &mut ConvCtx<'_>, ctx: &mut ConvCtx<'_>,
) -> OrcRes<Vec<Item>> { ) -> OrcRes<Vec<Item>> {
let mut items = Vec::new(); let mut items = Vec::new();
@@ -85,12 +83,12 @@ async fn conv(
(name, exported, kind), (name, exported, kind),
api::ParsedLineKind::Recursive(rec) => { api::ParsedLineKind::Recursive(rec) => {
let tokens = let tokens =
ttv_from_api(rec, ctx.ext_exprs, &mut ctx.sys.ctx().clone(), ctx.src_path, ctx.i).await; ttv_from_api(rec, ctx.ext_exprs, &mut ctx.sys.ctx().clone(), ctx.src_path).await;
items.extend(callback(module.clone(), tokens).await?); items.extend(callback(module.clone(), tokens).await?);
continue; continue;
}, },
}; };
let name = ctx.i.ex(name).await; let name = es(name).await;
let mem_path = module.push(name.clone()); let mem_path = module.push(name.clone());
let mkind = match kind { let mkind = match kind {
api::ParsedMemberKind::Module { lines, use_prelude } => { api::ParsedMemberKind::Module { lines, use_prelude } => {
@@ -98,16 +96,16 @@ async fn conv(
ParsedMemberKind::Mod(ParsedModule::new(use_prelude, items)) ParsedMemberKind::Mod(ParsedModule::new(use_prelude, items))
}, },
api::ParsedMemberKind::Constant(cid) => { api::ParsedMemberKind::Constant(cid) => {
ctx.sys.0.const_paths.insert(cid, ctx.mod_path.suffix(mem_path.unreverse(), ctx.i).await); ctx.sys.0.const_paths.insert(cid, ctx.mod_path.suffix(mem_path.unreverse()).await);
ParsedMemberKind::Const(cid, ctx.sys.clone()) ParsedMemberKind::Const(cid, ctx.sys.clone())
}, },
}; };
items.push(Item { items.push(Item {
comments: join_all( comments: join_all(
parsed.comments.iter().map(|c| Comment::from_api(c, ctx.src_path.clone(), ctx.i)), parsed.comments.iter().map(|c| Comment::from_api(c, ctx.src_path.clone())),
) )
.await, .await,
sr: SrcRange::from_api(&parsed.source_range, ctx.i).await, sr: SrcRange::from_api(&parsed.source_range).await,
kind: ItemKind::Member(ParsedMember { name, exported, kind: mkind }), kind: ItemKind::Member(ParsedMember { name, exported, kind: mkind }),
}) })
} }
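
Most of the churn in this hunk is the move from an explicitly threaded &Interner to task-local interner helpers: es resolves an api token here, and the sibling helpers is and iv (used in the files below) intern a &str and a slice of IStr. Their signatures are only inferred from the call sites in this diff; under that assumption, building an interned path looks roughly like this:

use orchid_base::interner::{IStr, IStrv, is, iv};

// Hypothetical helper: intern each segment, then intern the whole path,
// without passing an Interner handle down the call chain.
async fn intern_path(segments: &[&str]) -> IStrv {
    let mut toks: Vec<IStr> = Vec::new();
    for seg in segments {
        toks.push(is(seg).await);
    }
    iv(&toks).await
}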


@@ -12,10 +12,12 @@ use memo_map::MemoMap;
use orchid_base::char_filter::char_filter_match; use orchid_base::char_filter::char_filter_match;
use orchid_base::error::{OrcRes, mk_errv_floating}; use orchid_base::error::{OrcRes, mk_errv_floating};
use orchid_base::format::{FmtCtx, FmtUnit, Format}; use orchid_base::format::{FmtCtx, FmtUnit, Format};
use orchid_base::interner::{Interner, Tok}; use orchid_base::interner::{IStr, es, is};
use orchid_base::iter_utils::IteratorPrint; use orchid_base::iter_utils::IteratorPrint;
use orchid_base::logging::log;
use orchid_base::name::{NameLike, Sym, VName, VPath}; use orchid_base::name::{NameLike, Sym, VName, VPath};
use orchid_base::reqnot::{ReqNot, Requester}; use orchid_base::reqnot::{Client, ClientExt};
use orchid_base::stash::stash;
use ordered_float::NotNan; use ordered_float::NotNan;
use substack::{Stackframe, Substack}; use substack::{Stackframe, Substack};
@@ -35,7 +37,7 @@ pub(crate) struct SystemInstData {
decl_id: api::SysDeclId, decl_id: api::SysDeclId,
lex_filter: api::CharFilter, lex_filter: api::CharFilter,
id: api::SysId, id: api::SysId,
line_types: Vec<Tok<String>>, line_types: Vec<IStr>,
prelude: Vec<Sym>, prelude: Vec<Sym>,
owned_atoms: RwLock<HashMap<api::AtomId, WeakAtomHand>>, owned_atoms: RwLock<HashMap<api::AtomId, WeakAtomHand>>,
pub(crate) const_paths: MemoMap<api::ParsedConstId, Sym>, pub(crate) const_paths: MemoMap<api::ParsedConstId, Sym>,
@@ -68,8 +70,6 @@ impl System {
#[must_use] #[must_use]
pub fn ctx(&self) -> &Ctx { &self.0.ctx } pub fn ctx(&self) -> &Ctx { &self.0.ctx }
#[must_use] #[must_use]
pub fn i(&self) -> &Interner { &self.0.ctx.i }
#[must_use]
pub fn deps(&self) -> &[System] { &self.0.deps } pub fn deps(&self) -> &[System] { &self.0.deps }
#[must_use] #[must_use]
pub fn ctor(&self) -> SystemCtor { pub fn ctor(&self) -> SystemCtor {
@@ -77,22 +77,27 @@ impl System {
.expect("Ctor was used to create ext") .expect("Ctor was used to create ext")
} }
#[must_use] #[must_use]
pub(crate) fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { self.0.ext.reqnot() } pub(crate) fn client(&self) -> &dyn Client { self.0.ext.client() }
#[must_use] #[must_use]
pub async fn get_tree(&self, id: api::TreeId) -> api::MemberKind { pub async fn get_tree(&self, id: api::TreeId) -> api::MemberKind {
self.reqnot().request(api::GetMember(self.0.id, id)).await self.client().request(api::GetMember(self.0.id, id)).await.unwrap()
} }
#[must_use] #[must_use]
pub fn has_lexer(&self) -> bool { !self.0.lex_filter.0.is_empty() } pub fn has_lexer(&self) -> bool { !self.0.lex_filter.0.is_empty() }
#[must_use] #[must_use]
pub fn can_lex(&self, c: char) -> bool { char_filter_match(&self.0.lex_filter, c) } pub fn can_lex(&self, c: char) -> bool {
let ret = char_filter_match(&self.0.lex_filter, c);
let ctor = self.ctor();
stash(async move { writeln!(log("debug"), "{} can lex {c}: {}", ctor.name(), ret).await });
ret
}
#[must_use] #[must_use]
pub fn prelude(&self) -> Vec<Sym> { self.0.prelude.clone() } pub fn prelude(&self) -> Vec<Sym> { self.0.prelude.clone() }
/// Have this system lex a part of the source. It is assumed that /// Have this system lex a part of the source. It is assumed that
/// [Self::can_lex] was called and returned true. /// [Self::can_lex] was called and returned true.
pub async fn lex<F: Future<Output = Option<api::SubLexed>>>( pub async fn lex<F: Future<Output = Option<api::SubLexed>>>(
&self, &self,
source: Tok<String>, source: IStr,
src: Sym, src: Sym,
pos: u32, pos: u32,
r: impl FnMut(u32) -> F, r: impl FnMut(u32) -> F,
@@ -100,16 +105,16 @@ impl System {
self.0.ext.lex_req(source, src, pos, self.id(), r).await self.0.ext.lex_req(source, src, pos, self.id(), r).await
} }
#[must_use] #[must_use]
pub fn get_parser(&self, ltyp: Tok<String>) -> Option<Parser> { pub fn get_parser(&self, ltyp: IStr) -> Option<Parser> {
(self.0.line_types.iter().enumerate()) (self.0.line_types.iter().enumerate())
.find(|(_, txt)| *txt == &ltyp) .find(|(_, txt)| *txt == &ltyp)
.map(|(idx, _)| Parser { idx: idx as u16, system: self.clone() }) .map(|(idx, _)| Parser { idx: idx as u16, system: self.clone() })
} }
pub fn line_types(&self) -> impl Iterator<Item = &Tok<String>> + '_ { self.0.line_types.iter() } pub fn line_types(&self) -> impl Iterator<Item = &IStr> + '_ { self.0.line_types.iter() }
#[must_use] #[must_use]
pub async fn request(&self, req: Vec<u8>) -> Vec<u8> { pub async fn request(&self, req: Vec<u8>) -> Vec<u8> {
self.reqnot().request(api::SysFwded(self.id(), req)).await self.client().request(api::SysFwded(self.id(), req)).await.unwrap()
} }
pub(crate) async fn new_atom(&self, data: Vec<u8>, id: api::AtomId) -> AtomHand { pub(crate) async fn new_atom(&self, data: Vec<u8>, id: api::AtomId) -> AtomHand {
let mut owned_g = self.0.owned_atoms.write().await; let mut owned_g = self.0.owned_atoms.write().await;
@@ -124,10 +129,10 @@ impl System {
} }
pub(crate) fn drop_atom(&self, dropped_atom_id: api::AtomId) { pub(crate) fn drop_atom(&self, dropped_atom_id: api::AtomId) {
let this = self.0.clone(); let this = self.0.clone();
(self.0.ctx.spawn)(Box::pin(async move { let _ = self.0.ctx.spawn(Box::pin(async move {
this.ext.reqnot().request(api::AtomDrop(this.id, dropped_atom_id)).await; this.ext.client().request(api::AtomDrop(this.id, dropped_atom_id)).await.unwrap();
this.owned_atoms.write().await.remove(&dropped_atom_id); this.owned_atoms.write().await.remove(&dropped_atom_id);
})) }));
} }
#[must_use] #[must_use]
pub fn downgrade(&self) -> WeakSystem { pub fn downgrade(&self) -> WeakSystem {
@@ -137,7 +142,7 @@ impl System {
pub(crate) async fn name_resolver( pub(crate) async fn name_resolver(
&self, &self,
orig: api::ParsedConstId, orig: api::ParsedConstId,
) -> impl AsyncFnMut(&[Tok<String>]) -> OrcRes<VName> + use<> { ) -> impl AsyncFnMut(&[IStr]) -> OrcRes<VName> + use<> {
let root = self.0.ctx.root.read().await.upgrade().expect("find_names when root not in context"); let root = self.0.ctx.root.read().await.upgrade().expect("find_names when root not in context");
let orig = self.0.const_paths.get(&orig).expect("origin for find_names invalid").clone(); let orig = self.0.const_paths.get(&orig).expect("origin for find_names invalid").clone();
let ctx = self.0.ctx.clone(); let ctx = self.0.ctx.clone();
@@ -155,7 +160,7 @@ impl System {
Some(Ok(dest)) => return Ok(dest.target.to_vname().suffix(tail.iter().cloned())), Some(Ok(dest)) => return Ok(dest.target.to_vname().suffix(tail.iter().cloned())),
Some(Err(dests)) => Some(Err(dests)) =>
return Err(mk_errv_floating( return Err(mk_errv_floating(
ctx.i.i("Ambiguous name").await, is("Ambiguous name").await,
format!( format!(
"{selector} could refer to {}", "{selector} could refer to {}",
dests.iter().map(|ri| &ri.target).display("or") dests.iter().map(|ri| &ri.target).display("or")
@@ -170,7 +175,7 @@ impl System {
return Ok(VPath::new(cwd.iter().cloned()).name_with_suffix(selector.clone())); return Ok(VPath::new(cwd.iter().cloned()).name_with_suffix(selector.clone()));
} }
Err(mk_errv_floating( Err(mk_errv_floating(
ctx.i.i("Invalid name").await, is("Invalid name").await,
format!("{selector} doesn't refer to a module"), format!("{selector} doesn't refer to a module"),
)) ))
} }
@@ -203,8 +208,7 @@ impl SystemCtor {
#[must_use] #[must_use]
pub fn name(&self) -> &str { &self.decl.name } pub fn name(&self) -> &str { &self.decl.name }
pub async fn name_tok(&self) -> Sym { pub async fn name_tok(&self) -> Sym {
(Sym::parse(&self.decl.name, &self.ext.upgrade().expect("ext dropped early").ctx().i).await) (Sym::parse(&self.decl.name).await).expect("System cannot have empty name")
.expect("System cannot have empty name")
} }
#[must_use] #[must_use]
pub fn priority(&self) -> NotNan<f64> { self.decl.priority } pub fn priority(&self) -> NotNan<f64> { self.decl.priority }
@@ -220,17 +224,17 @@ impl SystemCtor {
debug_assert_eq!(depends.len(), self.decl.depends.len(), "Wrong number of deps provided"); debug_assert_eq!(depends.len(), self.decl.depends.len(), "Wrong number of deps provided");
let ext = self.ext.upgrade().expect("SystemCtor should be freed before Extension"); let ext = self.ext.upgrade().expect("SystemCtor should be freed before Extension");
let id = ext.ctx().next_sys_id(); let id = ext.ctx().next_sys_id();
let sys_inst = ext.reqnot().request(api::NewSystem { depends, id, system: self.decl.id }).await; let sys_inst =
ext.client().request(api::NewSystem { depends, id, system: self.decl.id }).await.unwrap();
let data = System(Rc::new(SystemInstData { let data = System(Rc::new(SystemInstData {
deps, deps,
decl_id: self.decl.id, decl_id: self.decl.id,
ext: ext.clone(), ext: ext.clone(),
ctx: ext.ctx().clone(), ctx: ext.ctx().clone(),
lex_filter: sys_inst.lex_filter, lex_filter: sys_inst.lex_filter,
line_types: join_all(sys_inst.line_types.iter().map(|m| Tok::from_api(*m, &ext.ctx().i))) line_types: join_all(sys_inst.line_types.iter().map(|m| es(*m))).await,
.await,
id, id,
prelude: join_all(sys_inst.prelude.iter().map(|tok| Sym::from_api(*tok, &ext.ctx().i))).await, prelude: join_all(sys_inst.prelude.iter().map(|tok| Sym::from_api(*tok))).await,
owned_atoms: RwLock::new(HashMap::new()), owned_atoms: RwLock::new(HashMap::new()),
const_paths: MemoMap::new(), const_paths: MemoMap::new(),
})); }));
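
can_lex and drop_atom above both queue async work from synchronous code (stash, ctx.spawn) rather than blocking. The real helpers live in orchid-base and presumably use task-local context; the sketch below is only a hypothetical illustration of the enqueue-now, drain-later shape, not the actual implementation:

use std::cell::RefCell;
use std::future::Future;
use std::pin::Pin;

type Stashed = Pin<Box<dyn Future<Output = ()>>>;

thread_local! {
    static STASH: RefCell<Vec<Stashed>> = RefCell::new(Vec::new());
}

// Enqueue async work from a synchronous callback such as can_lex.
fn stash_demo(fut: impl Future<Output = ()> + 'static) {
    STASH.with(|q| q.borrow_mut().push(Box::pin(fut)));
}

// Later, on the async side, pop and await the stashed futures in insertion order.
async fn drain_stash_demo() {
    loop {
        let next = STASH.with(|q| {
            let mut q = q.borrow_mut();
            if q.is_empty() { None } else { Some(q.remove(0)) }
        });
        match next {
            Some(fut) => fut.await,
            None => break,
        }
    }
}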


@@ -13,11 +13,11 @@ use hashbrown::hash_map::Entry;
use itertools::Itertools; use itertools::Itertools;
use memo_map::MemoMap; use memo_map::MemoMap;
use orchid_base::clone; use orchid_base::clone;
use orchid_base::error::{OrcRes, Reporter, mk_errv}; use orchid_base::error::{OrcRes, mk_errv, report};
use orchid_base::interner::Tok; use orchid_base::interner::{IStr, IStrv, es, is, iv};
use orchid_base::location::{CodeGenInfo, Pos}; use orchid_base::location::{CodeGenInfo, Pos};
use orchid_base::name::{NameLike, Sym, VPath}; use orchid_base::name::{NameLike, Sym, VPath};
use orchid_base::reqnot::Requester; use orchid_base::reqnot::ClientExt;
use crate::api; use crate::api;
use crate::ctx::Ctx; use crate::ctx::Ctx;
@@ -45,7 +45,7 @@ impl Root {
#[must_use] #[must_use]
pub async fn from_api(api: api::Module, sys: &System) -> Self { pub async fn from_api(api: api::Module, sys: &System) -> Self {
let consts = MemoMap::new(); let consts = MemoMap::new();
let mut tfac = TreeFromApiCtx { consts: &consts, path: sys.i().i(&[][..]).await, sys }; let mut tfac = TreeFromApiCtx { consts: &consts, path: iv(&[][..]).await, sys };
let root = Module::from_api(api, &mut tfac).await; let root = Module::from_api(api, &mut tfac).await;
Root(Rc::new(RwLock::new(RootData { root, consts, ctx: sys.ctx().clone() }))) Root(Rc::new(RwLock::new(RootData { root, consts, ctx: sys.ctx().clone() })))
} }
@@ -60,7 +60,7 @@ impl Root {
Ok(Self(Rc::new(RwLock::new(RootData { root, consts, ctx: this.ctx.clone() })))) Ok(Self(Rc::new(RwLock::new(RootData { root, consts, ctx: this.ctx.clone() }))))
} }
#[must_use] #[must_use]
pub async fn add_parsed(&self, parsed: &ParsedModule, pars_prefix: Sym, rep: &Reporter) -> Self { pub async fn add_parsed(&self, parsed: &ParsedModule, pars_prefix: Sym) -> Self {
let mut ref_this = self.0.write().await; let mut ref_this = self.0.write().await;
let this = &mut *ref_this; let this = &mut *ref_this;
let mut deferred_consts = HashMap::new(); let mut deferred_consts = HashMap::new();
@@ -72,7 +72,6 @@ impl Root {
pars_prefix: pars_prefix.clone(), pars_prefix: pars_prefix.clone(),
root: &this.root, root: &this.root,
ctx: &this.ctx, ctx: &this.ctx,
rep,
}; };
let mut module = Module::from_parsed(parsed, pars_prefix.clone(), &mut tfpctx).await; let mut module = Module::from_parsed(parsed, pars_prefix.clone(), &mut tfpctx).await;
for step in pars_prefix.iter().rev() { for step in pars_prefix.iter().rev() {
@@ -89,7 +88,7 @@ impl Root {
*this.ctx.root.write().await = new.downgrade(); *this.ctx.root.write().await = new.downgrade();
for (path, (sys_id, pc_id)) in deferred_consts { for (path, (sys_id, pc_id)) in deferred_consts {
let sys = this.ctx.system_inst(sys_id).await.expect("System dropped since parsing"); let sys = this.ctx.system_inst(sys_id).await.expect("System dropped since parsing");
let api_expr = sys.reqnot().request(api::FetchParsedConst(sys.id(), pc_id)).await; let api_expr = sys.client().request(api::FetchParsedConst(sys.id(), pc_id)).await.unwrap();
let expr = Expr::from_api(&api_expr, PathSetBuilder::new(), this.ctx.clone()).await; let expr = Expr::from_api(&api_expr, PathSetBuilder::new(), this.ctx.clone()).await;
new.0.write().await.consts.insert(path, expr); new.0.write().await.consts.insert(path, expr);
} }
@@ -110,7 +109,7 @@ impl Root {
} }
match module { match module {
Ok(_) => Err(mk_errv( Ok(_) => Err(mk_errv(
ctx.i.i("module used as constant").await, is("module used as constant").await,
format!("{name} is a module, not a constant"), format!("{name} is a module, not a constant"),
[pos], [pos],
)), )),
@@ -118,7 +117,7 @@ impl Root {
ChildErrorKind::Private => panic!("public_only is false"), ChildErrorKind::Private => panic!("public_only is false"),
ChildErrorKind::Constant => panic!("Tree refers to constant not in table"), ChildErrorKind::Constant => panic!("Tree refers to constant not in table"),
ChildErrorKind::Missing => Err(mk_errv( ChildErrorKind::Missing => Err(mk_errv(
ctx.i.i("Constant does not exist").await, is("Constant does not exist").await,
format!("{name} does not refer to a constant"), format!("{name} does not refer to a constant"),
[pos], [pos],
)), )),
@@ -144,12 +143,12 @@ impl Default for WeakRoot {
pub struct TreeFromApiCtx<'a> { pub struct TreeFromApiCtx<'a> {
pub sys: &'a System, pub sys: &'a System,
pub consts: &'a MemoMap<Sym, Expr>, pub consts: &'a MemoMap<Sym, Expr>,
pub path: Tok<Vec<Tok<String>>>, pub path: IStrv,
} }
impl<'a> TreeFromApiCtx<'a> { impl<'a> TreeFromApiCtx<'a> {
#[must_use] #[must_use]
pub async fn push<'c>(&'c self, name: Tok<String>) -> TreeFromApiCtx<'c> { pub async fn push<'c>(&'c self, name: IStr) -> TreeFromApiCtx<'c> {
let path = self.sys.ctx().i.i(&self.path.iter().cloned().chain([name]).collect_vec()).await; let path = iv(&self.path.iter().cloned().chain([name]).collect_vec()).await;
TreeFromApiCtx { path, consts: self.consts, sys: self.sys } TreeFromApiCtx { path, consts: self.consts, sys: self.sys }
} }
} }
@@ -162,17 +161,17 @@ pub struct ResolvedImport {
#[derive(Clone, Default)] #[derive(Clone, Default)]
pub struct Module { pub struct Module {
pub imports: HashMap<Tok<String>, Result<ResolvedImport, Vec<ResolvedImport>>>, pub imports: HashMap<IStr, Result<ResolvedImport, Vec<ResolvedImport>>>,
pub members: HashMap<Tok<String>, Rc<Member>>, pub members: HashMap<IStr, Rc<Member>>,
} }
impl Module { impl Module {
#[must_use] #[must_use]
pub async fn from_api(api: api::Module, ctx: &mut TreeFromApiCtx<'_>) -> Self { pub async fn from_api(api: api::Module, ctx: &mut TreeFromApiCtx<'_>) -> Self {
let mut members = HashMap::new(); let mut members = HashMap::new();
for mem in api.members { for mem in api.members {
let mem_name = ctx.sys.i().ex(mem.name).await; let mem_name = es(mem.name).await;
let vname = VPath::new(ctx.path.iter().cloned()).name_with_suffix(mem_name.clone()); let vname = VPath::new(ctx.path.iter().cloned()).name_with_suffix(mem_name.clone());
let name = vname.to_sym(ctx.sys.i()).await; let name = vname.to_sym().await;
let (lazy, kind) = match mem.kind { let (lazy, kind) = match mem.kind {
api::MemberKind::Lazy(id) => api::MemberKind::Lazy(id) =>
(Some(LazyMemberHandle { id, sys: ctx.sys.id(), path: name.clone() }), None), (Some(LazyMemberHandle { id, sys: ctx.sys.id(), path: name.clone() }), None),
@@ -205,23 +204,23 @@ impl Module {
let mut glob_imports_by_name = HashMap::<_, Vec<_>>::new(); let mut glob_imports_by_name = HashMap::<_, Vec<_>>::new();
for import in parsed.get_imports().into_iter().filter(|i| i.name.is_none()) { for import in parsed.get_imports().into_iter().filter(|i| i.name.is_none()) {
let pos = import.sr.pos(); let pos = import.sr.pos();
match absolute_path(&path, &import.path, &ctx.ctx.i).await { match absolute_path(&path, &import.path).await {
Err(e) => ctx.rep.report(e.err_obj(&ctx.ctx.i, pos, &import.path.to_string()).await), Err(e) => report(e.err_obj(pos, &import.path.to_string()).await),
Ok(abs_path) => { Ok(abs_path) => {
let names_res = match abs_path.strip_prefix(&ctx.pars_prefix[..]) { let names_res = match abs_path.strip_prefix(&ctx.pars_prefix[..]) {
None => { None => {
let mut tree_ctx = (ctx.ctx.clone(), ctx.consts); let mut tree_ctx = (ctx.ctx.clone(), ctx.consts);
resolv_glob(&path, ctx.root, &abs_path, pos, &ctx.ctx.i, &mut tree_ctx).await resolv_glob(&path, ctx.root, &abs_path, pos, &mut tree_ctx).await
}, },
Some(sub_tgt) => { Some(sub_tgt) => {
let sub_path = (path.strip_prefix(&ctx.pars_prefix[..])) let sub_path = (path.strip_prefix(&ctx.pars_prefix[..]))
.expect("from_parsed called with path outside pars_prefix"); .expect("from_parsed called with path outside pars_prefix");
resolv_glob(sub_path, ctx.pars_root, sub_tgt, pos, &ctx.ctx.i, &mut ()).await resolv_glob(sub_path, ctx.pars_root, sub_tgt, pos, &mut ()).await
}, },
}; };
let abs_path = abs_path.to_sym(&ctx.ctx.i).await; let abs_path = abs_path.to_sym().await;
match names_res { match names_res {
Err(e) => ctx.rep.report(e), Err(e) => report(e),
Ok(names) => Ok(names) =>
for name in names { for name in names {
match glob_imports_by_name.entry(name) { match glob_imports_by_name.entry(name) {
@@ -244,30 +243,28 @@ impl Module {
prelude_item.last_seg(), prelude_item.last_seg(),
Ok(ResolvedImport { Ok(ResolvedImport {
target: prelude_item, target: prelude_item,
pos: CodeGenInfo::new_details(sys.ctor().name_tok().await, "In prelude", &ctx.ctx.i) pos: CodeGenInfo::new_details(sys.ctor().name_tok().await, "In prelude").await.pos(),
.await
.pos(),
}), }),
); );
} }
} }
} }
let conflicting_imports_msg = ctx.ctx.i.i("Conflicting imports").await; let conflicting_imports_msg = is("Conflicting imports").await;
for (key, values) in imports_by_name { for (key, values) in imports_by_name {
if values.len() == 1 { if values.len() == 1 {
let import = values.into_iter().next().unwrap(); let import = values.into_iter().next().unwrap();
let sr = import.sr.clone(); let sr = import.sr.clone();
let abs_path_res = absolute_path(&path, &import.clone().mspath(), &ctx.ctx.i).await; let abs_path_res = absolute_path(&path, &import.clone().mspath()).await;
match abs_path_res { match abs_path_res {
Err(e) => ctx.rep.report(e.err_obj(&ctx.ctx.i, sr.pos(), &import.to_string()).await), Err(e) => report(e.err_obj(sr.pos(), &import.to_string()).await),
Ok(abs_path) => { Ok(abs_path) => {
let target = abs_path.to_sym(&ctx.ctx.i).await; let target = abs_path.to_sym().await;
imports.insert(key, Ok(ResolvedImport { target, pos: sr.pos() })); imports.insert(key, Ok(ResolvedImport { target, pos: sr.pos() }));
}, },
} }
} else { } else {
for item in values { for item in values {
ctx.rep.report(mk_errv( report(mk_errv(
conflicting_imports_msg.clone(), conflicting_imports_msg.clone(),
format!("{key} is imported multiple times from different modules"), format!("{key} is imported multiple times from different modules"),
[item.sr.pos()], [item.sr.pos()],
@@ -277,12 +274,11 @@ impl Module {
} }
for (key, values) in glob_imports_by_name { for (key, values) in glob_imports_by_name {
if !imports.contains_key(&key) { if !imports.contains_key(&key) {
let i = &ctx.ctx.i;
let values = stream::iter(values) let values = stream::iter(values)
.then(|(n, sr)| { .then(|(n, sr)| {
clone!(key; async move { clone!(key; async move {
ResolvedImport { ResolvedImport {
target: n.to_vname().suffix([key.clone()]).to_sym(i).await, target: n.to_vname().suffix([key.clone()]).to_sym().await,
pos: sr.pos(), pos: sr.pos(),
} }
}) })
@@ -292,12 +288,12 @@ impl Module {
imports.insert(key, if values.len() == 1 { Ok(values[0].clone()) } else { Err(values) }); imports.insert(key, if values.len() == 1 { Ok(values[0].clone()) } else { Err(values) });
} }
} }
let self_referential_msg = ctx.ctx.i.i("Self-referential import").await; let self_referential_msg = is("Self-referential import").await;
for (key, value) in imports.iter() { for (key, value) in imports.iter() {
let Ok(import) = value else { continue }; let Ok(import) = value else { continue };
if import.target.strip_prefix(&path[..]).is_some_and(|t| t.starts_with(slice::from_ref(key))) if import.target.strip_prefix(&path[..]).is_some_and(|t| t.starts_with(slice::from_ref(key)))
{ {
ctx.rep.report(mk_errv( report(mk_errv(
self_referential_msg.clone(), self_referential_msg.clone(),
format!("import {} points to itself or a path within itself", &import.target), format!("import {} points to itself or a path within itself", &import.target),
[import.pos.clone()], [import.pos.clone()],
@@ -308,7 +304,7 @@ impl Module {
for item in &parsed.items { for item in &parsed.items {
match &item.kind { match &item.kind {
ItemKind::Member(mem) => { ItemKind::Member(mem) => {
let path = path.to_vname().suffix([mem.name.clone()]).to_sym(&ctx.ctx.i).await; let path = path.to_vname().suffix([mem.name.clone()]).to_sym().await;
let kind = OnceCell::from(MemberKind::from_parsed(&mem.kind, path.clone(), ctx).await); let kind = OnceCell::from(MemberKind::from_parsed(&mem.kind, path.clone(), ctx).await);
members.insert( members.insert(
mem.name.clone(), mem.name.clone(),
@@ -385,7 +381,6 @@ pub struct FromParsedCtx<'a> {
pars_prefix: Sym, pars_prefix: Sym,
pars_root: &'a ParsedModule, pars_root: &'a ParsedModule,
root: &'a Module, root: &'a Module,
rep: &'a Reporter,
ctx: &'a Ctx, ctx: &'a Ctx,
consts: &'a MemoMap<Sym, Expr>, consts: &'a MemoMap<Sym, Expr>,
deferred_consts: &'a mut HashMap<Sym, (api::SysId, api::ParsedConstId)>, deferred_consts: &'a mut HashMap<Sym, (api::SysId, api::ParsedConstId)>,
@@ -395,7 +390,7 @@ impl Tree for Module {
type Ctx<'a> = (Ctx, &'a MemoMap<Sym, Expr>); type Ctx<'a> = (Ctx, &'a MemoMap<Sym, Expr>);
async fn child( async fn child(
&self, &self,
key: Tok<String>, key: IStr,
public_only: bool, public_only: bool,
(ctx, consts): &mut Self::Ctx<'_>, (ctx, consts): &mut Self::Ctx<'_>,
) -> crate::dealias::ChildResult<'_, Self> { ) -> crate::dealias::ChildResult<'_, Self> {
@@ -410,7 +405,7 @@ impl Tree for Module {
MemberKind::Const => Err(ChildErrorKind::Constant), MemberKind::Const => Err(ChildErrorKind::Constant),
} }
} }
fn children(&self, public_only: bool) -> hashbrown::HashSet<Tok<String>> { fn children(&self, public_only: bool) -> hashbrown::HashSet<IStr> {
self.members.iter().filter(|(_, v)| !public_only || v.public).map(|(k, _)| k.clone()).collect() self.members.iter().filter(|(_, v)| !public_only || v.public).map(|(k, _)| k.clone()).collect()
} }
} }
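
The Reporter parameter threaded through from_parsed is gone; non-fatal errors now go through the free function report (and, in the parser files below, with_reporter). The stand-in below only sketches that calling convention: the real versions presumably accumulate into an OrcErrv through task-local context and handle nesting, neither of which this bare thread_local does.

use std::cell::RefCell;
use std::future::Future;

// Simplified stand-ins; the real types are OrcErr/OrcErrv in orchid_base::error.
#[derive(Clone, Debug)]
struct DemoErr(String);
type DemoRes<T> = Result<T, Vec<DemoErr>>;

thread_local! {
    static REPORTED: RefCell<Vec<DemoErr>> = RefCell::new(Vec::new());
}

// Record a non-fatal error without threading a &Reporter through every call.
fn report_demo(e: DemoErr) { REPORTED.with(|v| v.borrow_mut().push(e)); }

// Run a computation and fail it if anything was reported along the way.
async fn with_reporter_demo<T>(fut: impl Future<Output = T>) -> DemoRes<T> {
    REPORTED.with(|v| v.borrow_mut().clear()); // no nesting handling in this demo
    let value = fut.await;
    let errs = REPORTED.with(|v| std::mem::take(&mut *v.borrow_mut()));
    if errs.is_empty() { Ok(value) } else { Err(errs) }
}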


@@ -3,11 +3,19 @@ name = "orchid-std"
version = "0.1.0" version = "0.1.0"
edition = "2024" edition = "2024"
[[bin]]
name = "orchid-std"
path = "src/main.rs"
[lib]
crate-type = ["cdylib", "lib"]
path = "src/lib.rs"
[dependencies] [dependencies]
async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" } async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
async-once-cell = "0.5.4" async-once-cell = "0.5.4"
futures = { version = "0.3.31", features = ["std"], default-features = false } futures = { version = "0.3.31", features = ["std"], default-features = false }
hashbrown = "0.16.0" hashbrown = "0.16.1"
itertools = "0.14.0" itertools = "0.14.0"
never = "0.1.0" never = "0.1.0"
once_cell = "1.21.3" once_cell = "1.21.3"
@@ -18,12 +26,12 @@ orchid-base = { version = "0.1.0", path = "../orchid-base" }
orchid-extension = { version = "0.1.0", path = "../orchid-extension", features = [ orchid-extension = { version = "0.1.0", path = "../orchid-extension", features = [
"tokio", "tokio",
] } ] }
ordered-float = "5.0.0" ordered-float = "5.1.0"
pastey = "0.1.1" pastey = "0.2.1"
rust_decimal = "1.38.0" rust_decimal = "1.39.0"
subslice-offset = "0.1.1" subslice-offset = "0.1.1"
substack = "1.1.1" substack = "1.1.1"
tokio = { version = "1.47.1", features = ["full"] } tokio = { version = "1.49.0", features = ["full"] }
[dev-dependencies] [dev-dependencies]
test_executors = "0.3.5" test_executors = "0.4.1"


@@ -11,3 +11,12 @@ pub use std::tuple::{HomoTpl, Tpl, Tuple, UntypedTuple};
pub use macros::macro_system::MacroSystem; pub use macros::macro_system::MacroSystem;
pub use macros::mactree::{MacTok, MacTree}; pub use macros::mactree::{MacTok, MacTree};
use orchid_api as api; use orchid_api as api;
use orchid_extension::binary::orchid_extension_main_body;
use orchid_extension::entrypoint::ExtensionBuilder;
pub extern "C" fn orchid_extension_main(cx: api::binary::ExtensionContext) {
orchid_extension_main_body(
cx,
ExtensionBuilder::new("orchid-std::main").system(StdSystem).system(MacroSystem),
);
}
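
This exported entry point is what a dylib-capable host would look up after loading the cdylib produced by the manifest change above. A host-side sketch using the libloading crate (not a dependency shown anywhere in this diff); it assumes the symbol is actually exported unmangled (e.g. via #[no_mangle]) and that ExtensionContext is FFI-safe:

use libloading::{Library, Symbol};
use orchid_api::binary::ExtensionContext;

fn load_extension(path: &str, cx: ExtensionContext) -> Result<(), libloading::Error> {
    // SAFETY: loading and calling into an arbitrary library is only sound if the
    // library really is an Orchid extension built against the same api version.
    unsafe {
        let lib = Library::new(path)?;
        let entry: Symbol<unsafe extern "C" fn(ExtensionContext)> =
            lib.get(b"orchid_extension_main")?;
        entry(cx);
        // Keep the extension's code mapped for the rest of the host's lifetime.
        std::mem::forget(lib);
    }
    Ok(())
}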


@@ -4,7 +4,6 @@ use never::Never;
use orchid_base::format::fmt; use orchid_base::format::fmt;
use orchid_extension::atom::{Atomic, TAtom}; use orchid_extension::atom::{Atomic, TAtom};
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own}; use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
use orchid_extension::context::i;
use orchid_extension::conv::ToExpr; use orchid_extension::conv::ToExpr;
use orchid_extension::coroutine_exec::exec; use orchid_extension::coroutine_exec::exec;
use orchid_extension::expr::Expr; use orchid_extension::expr::Expr;
@@ -38,7 +37,7 @@ impl OwnedAtom for InstantiateTplCall {
async fn call(mut self, arg: Expr) -> GExpr { async fn call(mut self, arg: Expr) -> GExpr {
exec(async move |mut h| { exec(async move |mut h| {
match h.exec::<TAtom<MacTree>>(arg.clone()).await { match h.exec::<TAtom<MacTree>>(arg.clone()).await {
Err(_) => panic!("Expected a macro param, found {}", fmt(&arg, &i()).await), Err(_) => panic!("Expected a macro param, found {}", fmt(&arg).await),
Ok(t) => self.argv.push(own(&t).await), Ok(t) => self.argv.push(own(&t).await),
}; };
if self.argv.len() < self.argc { if self.argv.len() < self.argc {


@@ -3,15 +3,13 @@ use std::pin::pin;
use futures::{FutureExt, StreamExt, stream}; use futures::{FutureExt, StreamExt, stream};
use hashbrown::HashMap; use hashbrown::HashMap;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::{OrcRes, Reporter}; use orchid_base::error::{OrcRes, report, with_reporter};
use orchid_base::interner::is;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::parse::{ use orchid_base::parse::{Comment, Parsed, Snippet, expect_tok, token_errv, try_pop_no_fluff};
Comment, ParseCtx, Parsed, Snippet, expect_tok, token_errv, try_pop_no_fluff,
};
use orchid_base::sym; use orchid_base::sym;
use orchid_base::tree::Paren; use orchid_base::tree::Paren;
use orchid_extension::atom::TAtom; use orchid_extension::atom::TAtom;
use orchid_extension::context::i;
use orchid_extension::conv::TryFromExpr; use orchid_extension::conv::TryFromExpr;
use orchid_extension::gen_expr::{atom, call, sym_ref}; use orchid_extension::gen_expr::{atom, call, sym_ref};
use orchid_extension::parser::{ConstCtx, PSnippet, PTok, PTokTree, ParsCtx, ParsedLine, Parser}; use orchid_extension::parser::{ConstCtx, PSnippet, PTok, PTokTree, ParsCtx, ParsedLine, Parser};
@@ -24,40 +22,36 @@ pub struct LetLine;
impl Parser for LetLine { impl Parser for LetLine {
const LINE_HEAD: &'static str = "let"; const LINE_HEAD: &'static str = "let";
async fn parse<'a>( async fn parse<'a>(
ctx: ParsCtx<'a>, _: ParsCtx<'a>,
exported: bool, exported: bool,
comments: Vec<Comment>, comments: Vec<Comment>,
line: PSnippet<'a>, line: PSnippet<'a>,
) -> OrcRes<Vec<ParsedLine>> { ) -> OrcRes<Vec<ParsedLine>> {
let sr = line.sr(); let sr = line.sr();
let Parsed { output: name_tok, tail } = try_pop_no_fluff(&ctx, line).await?; let Parsed { output: name_tok, tail } = try_pop_no_fluff(line).await?;
let Some(name) = name_tok.as_name() else { let Some(name) = name_tok.as_name() else {
let err = token_errv(&ctx, name_tok, "Constant must have a name", |t| { let err = token_errv(name_tok, "Constant must have a name", |t| {
format!("Expected a name but found {t}") format!("Expected a name but found {t}")
}); });
return Err(err.await); return Err(err.await);
}; };
let Parsed { tail, .. } = expect_tok(&ctx, tail, ctx.i().i("=").await).await?; let Parsed { tail, .. } = expect_tok(tail, is("=").await).await?;
let aliased = parse_tokv(tail, &ctx).await; let aliased = parse_tokv(tail).await;
Ok(vec![ParsedLine::cnst(&line.sr(), &comments, exported, name, async move |ctx| { Ok(vec![ParsedLine::cnst(&line.sr(), &comments, exported, name, async move |ctx| {
let rep = Reporter::new();
let macro_input = let macro_input =
MacTok::S(Paren::Round, dealias_mac_v(&aliased, &ctx, &rep).await).at(sr.pos()); MacTok::S(Paren::Round, with_reporter(dealias_mac_v(&aliased, &ctx)).await?).at(sr.pos());
if let Some(e) = rep.errv() { Ok(call(sym_ref(sym!(macros::resolve)), [atom(macro_input)]))
return Err(e);
}
Ok(call(sym_ref(sym!(macros::resolve; i())), [atom(macro_input)]))
})]) })])
} }
} }
pub async fn dealias_mac_v(aliased: &MacTreeSeq, ctx: &ConstCtx, rep: &Reporter) -> MacTreeSeq { pub async fn dealias_mac_v(aliased: &MacTreeSeq, ctx: &ConstCtx) -> MacTreeSeq {
let keys = aliased.glossary().iter().cloned().collect_vec(); let keys = aliased.glossary().iter().cloned().collect_vec();
let mut names: HashMap<_, _> = HashMap::new(); let mut names: HashMap<_, _> = HashMap::new();
let mut stream = pin!(ctx.names(&keys).zip(stream::iter(&keys))); let mut stream = pin!(ctx.names(&keys).zip(stream::iter(&keys)));
while let Some((canonical, local)) = stream.next().await { while let Some((canonical, local)) = stream.next().await {
match canonical { match canonical {
Err(e) => rep.report(e), Err(e) => report(e),
Ok(name) => { Ok(name) => {
names.insert(local.clone(), name); names.insert(local.clone(), name);
}, },
@@ -69,16 +63,16 @@ pub async fn dealias_mac_v(aliased: &MacTreeSeq, ctx: &ConstCtx, rep: &Reporter)
}) })
} }
pub async fn parse_tokv(line: PSnippet<'_>, ctx: &impl ParseCtx) -> MacTreeSeq { pub async fn parse_tokv(line: PSnippet<'_>) -> MacTreeSeq {
if let Some((idx, arg)) = line.iter().enumerate().find_map(|(i, x)| Some((i, x.as_lambda()?))) { if let Some((idx, arg)) = line.iter().enumerate().find_map(|(i, x)| Some((i, x.as_lambda()?))) {
let (head, lambda) = line.split_at(idx as u32); let (head, lambda) = line.split_at(idx as u32);
let (_, body) = lambda.pop_front().unwrap(); let (_, body) = lambda.pop_front().unwrap();
let body = parse_tokv(body, ctx).boxed_local().await; let body = parse_tokv(body).boxed_local().await;
let mut all = parse_tokv_no_lambdas(&head, ctx).await; let mut all = parse_tokv_no_lambdas(&head).await;
match parse_tok(arg, ctx).await { match parse_tok(arg).await {
Some(arg) => all.push(MacTok::Lambda(arg, body).at(lambda.sr().pos())), Some(arg) => all.push(MacTok::Lambda(arg, body).at(lambda.sr().pos())),
None => ctx.rep().report( None => report(
token_errv(ctx, arg, "Lambda argument fluff", |arg| { token_errv(arg, "Lambda argument fluff", |arg| {
format!("Lambda arguments must be a valid token, found meaningless fragment {arg}") format!("Lambda arguments must be a valid token, found meaningless fragment {arg}")
}) })
.await, .await,
@@ -86,29 +80,29 @@ pub async fn parse_tokv(line: PSnippet<'_>, ctx: &impl ParseCtx) -> MacTreeSeq {
}; };
MacTreeSeq::new(all) MacTreeSeq::new(all)
} else { } else {
MacTreeSeq::new(parse_tokv_no_lambdas(&line, ctx).await) MacTreeSeq::new(parse_tokv_no_lambdas(&line).await)
} }
} }
async fn parse_tokv_no_lambdas(line: &[PTokTree], ctx: &impl ParseCtx) -> Vec<MacTree> { async fn parse_tokv_no_lambdas(line: &[PTokTree]) -> Vec<MacTree> {
stream::iter(line).filter_map(|tt| parse_tok(tt, ctx)).collect::<Vec<_>>().await stream::iter(line).filter_map(parse_tok).collect::<Vec<_>>().await
} }
pub async fn parse_tok(tree: &PTokTree, ctx: &impl ParseCtx) -> Option<MacTree> { pub async fn parse_tok(tree: &PTokTree) -> Option<MacTree> {
let tok = match &tree.tok { let tok = match &tree.tok {
PTok::Bottom(errv) => MacTok::Bottom(errv.clone()), PTok::Bottom(errv) => MacTok::Bottom(errv.clone()),
PTok::BR | PTok::Comment(_) => return None, PTok::BR | PTok::Comment(_) => return None,
PTok::Name(n) => MacTok::Name(Sym::new([n.clone()], ctx.i()).await.unwrap()), PTok::Name(n) => MacTok::Name(Sym::new([n.clone()]).await.unwrap()),
PTok::NS(..) => match tree.as_multiname() { PTok::NS(..) => match tree.as_multiname() {
Ok(mn) => MacTok::Name(mn.to_sym(ctx.i()).await), Ok(mn) => MacTok::Name(mn.to_sym().await),
Err(nested) => { Err(nested) => {
ctx.rep().report( report(
token_errv(ctx, tree, ":: can only be followed by a name in an expression", |tok| { token_errv(tree, ":: can only be followed by a name in an expression", |tok| {
format!("Expected name, found {tok}") format!("Expected name, found {tok}")
}) })
.await, .await,
); );
return parse_tok(nested, ctx).boxed_local().await; return parse_tok(nested).boxed_local().await;
}, },
}, },
PTok::Handle(expr) => match TAtom::<PhAtom>::try_from_expr(expr.clone()).await { PTok::Handle(expr) => match TAtom::<PhAtom>::try_from_expr(expr.clone()).await {
@@ -117,8 +111,7 @@ pub async fn parse_tok(tree: &PTokTree, ctx: &impl ParseCtx) -> Option<MacTree>
}, },
PTok::NewExpr(never) => match *never {}, PTok::NewExpr(never) => match *never {},
PTok::LambdaHead(_) => panic!("Lambda-head handled in the sequence parser"), PTok::LambdaHead(_) => panic!("Lambda-head handled in the sequence parser"),
PTok::S(p, body) => PTok::S(p, body) => MacTok::S(*p, parse_tokv(Snippet::new(tree, body)).boxed_local().await),
MacTok::S(*p, parse_tokv(Snippet::new(tree, body), ctx).boxed_local().await),
}; };
Some(tok.at(tree.sr().pos())) Some(tok.at(tree.sr().pos()))
} }


@@ -1,14 +1,15 @@
use orchid_base::sym; use orchid_base::sym;
use orchid_extension::atom::TAtom; use orchid_extension::atom::TAtom;
use orchid_extension::atom_owned::own; use orchid_extension::atom_owned::own;
use orchid_extension::context::i;
use orchid_extension::conv::ToExpr; use orchid_extension::conv::ToExpr;
use orchid_extension::coroutine_exec::exec;
use orchid_extension::gen_expr::{call, sym_ref}; use orchid_extension::gen_expr::{call, sym_ref};
use orchid_extension::tree::{GenMember, fun, prefix}; use orchid_extension::tree::{GenMember, fun, prefix};
use crate::macros::mactree::MacTree; use crate::macros::mactree::MacTree;
use crate::macros::resolve::resolve; use crate::macros::resolve::resolve;
use crate::macros::utils::{build_macro, mactree, mactreev}; use crate::macros::utils::{build_macro, mactree, mactreev};
use crate::{HomoTpl, UntypedTuple};
pub async fn gen_macro_lib() -> Vec<GenMember> { pub async fn gen_macro_lib() -> Vec<GenMember> {
prefix("macros", [ prefix("macros", [
@@ -18,47 +19,49 @@ pub async fn gen_macro_lib() -> Vec<GenMember> {
build_macro(None, ["..", "_"]).finish(), build_macro(None, ["..", "_"]).finish(),
build_macro(Some(1), ["+"]) build_macro(Some(1), ["+"])
.rule(mactreev!("...$" lhs 0 macros::common::+ "...$" rhs 1), [async |[lhs, rhs]| { .rule(mactreev!("...$" lhs 0 macros::common::+ "...$" rhs 1), [async |[lhs, rhs]| {
call(sym_ref(sym!(std::number::add; i())), [resolve(lhs).await, resolve(rhs).await]) call(sym_ref(sym!(std::number::add)), [resolve(lhs).await, resolve(rhs).await])
}]) }])
.finish(), .finish(),
build_macro(Some(2), ["*"]) build_macro(Some(2), ["*"])
.rule(mactreev!("...$" lhs 0 macros::common::* "...$" rhs 1), [async |[lhs, rhs]| { .rule(mactreev!("...$" lhs 0 macros::common::* "...$" rhs 1), [async |[lhs, rhs]| {
call(sym_ref(sym!(std::number::mul; i())), [resolve(lhs).await, resolve(rhs).await]) call(sym_ref(sym!(std::number::mul)), [resolve(lhs).await, resolve(rhs).await])
}]) }])
.finish(), .finish(),
build_macro(None, ["comma_list", ","]) build_macro(None, ["comma_list", ","])
.rule( .rule(
mactreev!(macros::common::comma_list ( "...$" head 0 macros::common::, "...$" tail 1)), mactreev!(macros::common::comma_list ( "...$" head 0 macros::common::, "...$" tail 1)),
[async |[head, tail]| { [async |[head, tail]| {
call(sym_ref(sym!(std::tuple::cat; i())), [ exec(async |mut h| {
call(sym_ref(sym!(std::tuple::one; i())), [head.to_gen().await]), let recur = resolve(mactree!(macros::common::comma_list "push" tail ;)).await;
resolve(mactree!(macros::common::comma_list "push" tail ;)).await, let mut tail = h.exec::<HomoTpl<TAtom<MacTree>>>(recur).await?;
]) tail.0.insert(0, h.exec(head).await?);
Ok(tail)
})
.await
}], }],
) )
.rule(mactreev!(macros::common::comma_list ( "...$" final_tail 0 )), [async |[tail]| { .rule(mactreev!(macros::common::comma_list ( "...$" final_tail 0 )), [async |[tail]| {
call(sym_ref(sym!(std::tuple::one; i())), [tail.to_gen().await]) HomoTpl(vec![tail.to_gen().await])
}])
.rule(mactreev!(macros::common::comma_list()), [async |[]| {
sym_ref(sym!(std::tuple::empty; i()))
}]) }])
.rule(mactreev!(macros::common::comma_list()), [async |[]| UntypedTuple(Vec::new())])
.finish(), .finish(),
build_macro(None, ["semi_list", ";"]) build_macro(None, ["semi_list", ";"])
.rule( .rule(
mactreev!(macros::common::semi_list ( "...$" head 0 macros::common::; "...$" tail 1)), mactreev!(macros::common::semi_list ( "...$" head 0 macros::common::; "...$" tail 1)),
[async |[head, tail]| { [async |[head, tail]| {
call(sym_ref(sym!(std::tuple::cat; i())), [ exec(async |mut h| {
call(sym_ref(sym!(std::tuple::one; i())), [head.to_gen().await]), let recur = resolve(mactree!(macros::common::semi_list "push" tail ;)).await;
resolve(mactree!(macros::common::semi_list "push" tail ;)).await, let mut tail = h.exec::<HomoTpl<TAtom<MacTree>>>(recur).await?;
]) tail.0.insert(0, h.exec(head).await?);
Ok(tail)
})
.await
}], }],
) )
.rule(mactreev!(macros::common::semi_list ( "...$" final_tail 0 )), [async |[tail]| { .rule(mactreev!(macros::common::semi_list ( "...$" final_tail 0 )), [async |[tail]| {
call(sym_ref(sym!(std::tuple::one; i())), [tail.to_gen().await]) HomoTpl(vec![tail.to_gen().await])
}])
.rule(mactreev!(macros::common::semi_list()), [async |[]| {
sym_ref(sym!(std::tuple::empty; i()))
}]) }])
.rule(mactreev!(macros::common::semi_list()), [async |[]| UntypedTuple(Vec::new())])
.finish(), .finish(),
]), ]),
]) ])
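
The comma_list and semi_list rules no longer emit chains of std::tuple::cat/one/empty; the coroutine handle resolves the tail first and prepends the head to the finished HomoTpl. The same shape in a plain, self-contained analogue, with a Vec standing in for the tuple atom:

// Build the list by recursing on the tail and prepending, instead of
// generating cat(one(head), <recurse tail>) calls to be evaluated later.
fn comma_list_demo(items: &[i32]) -> Vec<i32> {
    match items.split_first() {
        None => Vec::new(), // the empty rule
        Some((head, tail)) => {
            let mut rest = comma_list_demo(tail); // resolve the tail recursively
            rest.insert(0, *head);                // prepend the head
            rest
        },
    }
}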


@@ -1,20 +1,19 @@
use std::cell::RefCell;
use std::rc::Rc; use std::rc::Rc;
use async_fn_stream::stream;
use async_once_cell::OnceCell; use async_once_cell::OnceCell;
use futures::{StreamExt, stream}; use futures::StreamExt;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::{OrcRes, Reporter, mk_errv}; use orchid_base::error::{OrcRes, mk_errv, report, with_reporter};
use orchid_base::interner::is;
use orchid_base::parse::{ use orchid_base::parse::{
Comment, ParseCtx, Parsed, Snippet, expect_end, expect_tok, line_items, token_errv, Comment, Parsed, Snippet, expect_end, expect_tok, line_items, token_errv, try_pop_no_fluff,
try_pop_no_fluff,
}; };
use orchid_base::tree::{Paren, Token}; use orchid_base::tree::{Paren, Token};
use orchid_base::{clone, sym}; use orchid_base::{clone, sym};
use orchid_extension::atom::TAtom; use orchid_extension::atom::TAtom;
use orchid_extension::context::i;
use orchid_extension::conv::{ToExpr, TryFromExpr}; use orchid_extension::conv::{ToExpr, TryFromExpr};
use orchid_extension::gen_expr::{atom, call, sym_ref}; use orchid_extension::gen_expr::{call, sym_ref};
use orchid_extension::parser::{PSnippet, ParsCtx, ParsedLine, Parser}; use orchid_extension::parser::{PSnippet, ParsCtx, ParsedLine, Parser};
use crate::macros::let_line::{dealias_mac_v, parse_tokv}; use crate::macros::let_line::{dealias_mac_v, parse_tokv};
@@ -35,22 +34,22 @@ impl Parser for MacroLine {
) -> OrcRes<Vec<ParsedLine>> { ) -> OrcRes<Vec<ParsedLine>> {
if exported { if exported {
return Err(mk_errv( return Err(mk_errv(
ctx.i().i("macros are always exported").await, is("macros are always exported").await,
"The export keyword is forbidden here to avoid confusion\n\ "The export keyword is forbidden here to avoid confusion\n\
because macros are exported by default", because macros are exported by default",
[line.sr()], [line.sr()],
)); ));
} }
let module = ctx.module(); let module = ctx.module();
let Parsed { output: prio_or_body, tail } = try_pop_no_fluff(&ctx, line).await?; let Parsed { output: prio_or_body, tail } = try_pop_no_fluff(line).await?;
let bad_first_item_err = || { let bad_first_item_err = || {
token_errv(&ctx, prio_or_body, "Expected priority or block", |s| { token_errv(prio_or_body, "Expected priority or block", |s| {
format!("Expected a priority number or a () block, found {s}") format!("Expected a priority number or a () block, found {s}")
}) })
}; };
let (prio, body) = match &prio_or_body.tok { let (prio, body) = match &prio_or_body.tok {
Token::S(Paren::Round, body) => { Token::S(Paren::Round, body) => {
expect_end(&ctx, tail).await?; expect_end(tail).await?;
(None, body) (None, body)
}, },
Token::Handle(expr) => match TAtom::<Int>::try_from_expr(expr.clone()).await { Token::Handle(expr) => match TAtom::<Int>::try_from_expr(expr.clone()).await {
@@ -58,33 +57,32 @@ impl Parser for MacroLine {
return Err(e + bad_first_item_err().await); return Err(e + bad_first_item_err().await);
}, },
Ok(prio) => { Ok(prio) => {
let Parsed { output: body, tail } = try_pop_no_fluff(&ctx, tail).await?; let Parsed { output: body, tail } = try_pop_no_fluff(tail).await?;
let Token::S(Paren::Round, block) = &body.tok else { let Token::S(Paren::Round, block) = &body.tok else {
return Err( return Err(
token_errv(&ctx, prio_or_body, "Expected () block", |s| { token_errv(prio_or_body, "Expected () block", |s| {
format!("Expected a () block, found {s}") format!("Expected a () block, found {s}")
}) })
.await, .await,
); );
}; };
expect_end(&ctx, tail).await?; expect_end(tail).await?;
(Some(prio), block) (Some(prio), block)
}, },
}, },
_ => return Err(bad_first_item_err().await), _ => return Err(bad_first_item_err().await),
}; };
let lines = line_items(&ctx, Snippet::new(prio_or_body, body)).await; let lines = line_items(Snippet::new(prio_or_body, body)).await;
let Some((kw_line, rule_lines)) = lines.split_first() else { return Ok(Vec::new()) }; let Some((kw_line, rule_lines)) = lines.split_first() else { return Ok(Vec::new()) };
let mut keywords = Vec::new(); let mut keywords = Vec::new();
let Parsed { tail: kw_tail, .. } = let Parsed { tail: kw_tail, .. } = expect_tok(kw_line.tail, is("keywords").await).await?;
expect_tok(&ctx, kw_line.tail, ctx.i().i("keywords").await).await?;
for kw_tok in kw_tail.iter().filter(|kw| !kw.is_fluff()) { for kw_tok in kw_tail.iter().filter(|kw| !kw.is_fluff()) {
match kw_tok.as_name() { match kw_tok.as_name() {
Some(kw) => { Some(kw) => {
keywords.push((kw, kw_tok.sr())); keywords.push((kw, kw_tok.sr()));
}, },
None => ctx.rep().report( None => report(
token_errv(&ctx, kw_tok, "invalid macro keywords list", |tok| { token_errv(kw_tok, "invalid macro keywords list", |tok| {
format!("The keywords list must be a sequence of names; received {tok}") format!("The keywords list must be a sequence of names; received {tok}")
}) })
.await, .await,
@@ -93,7 +91,7 @@ impl Parser for MacroLine {
} }
let Some((macro_name, _)) = keywords.first().cloned() else { let Some((macro_name, _)) = keywords.first().cloned() else {
return Err(mk_errv( return Err(mk_errv(
ctx.i().i("macro with no keywords").await, is("macro with no keywords").await,
"Macros must define at least one macro of their own.", "Macros must define at least one macro of their own.",
[kw_line.tail.sr()], [kw_line.tail.sr()],
)); ));
@@ -102,18 +100,18 @@ impl Parser for MacroLine {
let mut lines = Vec::new(); let mut lines = Vec::new();
for (idx, line) in rule_lines.iter().enumerate().map(|(n, v)| (n as u32, v)) { for (idx, line) in rule_lines.iter().enumerate().map(|(n, v)| (n as u32, v)) {
let sr = line.tail.sr(); let sr = line.tail.sr();
let name = ctx.i().i(&format!("rule::{}::{}", macro_name, idx)).await; let name = is(&format!("rule::{}::{}", macro_name, idx)).await;
let Parsed { tail, .. } = expect_tok(&ctx, line.tail, ctx.i().i("rule").await).await?; let Parsed { tail, .. } = expect_tok(line.tail, is("rule").await).await?;
let arrow_token = ctx.i().i("=>").await; let arrow_token = is("=>").await;
let Some((pattern, body)) = tail.split_once(|tok| tok.is_kw(arrow_token.clone())) else { let Some((pattern, body)) = tail.split_once(|tok| tok.is_kw(arrow_token.clone())) else {
ctx.rep().report(mk_errv( report(mk_errv(
ctx.i().i("Missing => in rule").await, is("Missing => in rule").await,
"Rule lines are of the form `rule ...pattern => ...body`", "Rule lines are of the form `rule ...pattern => ...body`",
[line.tail.sr()], [line.tail.sr()],
)); ));
continue; continue;
}; };
let pattern = parse_tokv(pattern, &ctx).await; let pattern = parse_tokv(pattern).await;
let mut placeholders = Vec::new(); let mut placeholders = Vec::new();
pattern.map(&mut false, &mut |tok| { pattern.map(&mut false, &mut |tok| {
if let MacTok::Ph(ph) = tok.tok() { if let MacTok::Ph(ph) = tok.tok() {
@@ -121,9 +119,9 @@ impl Parser for MacroLine {
} }
None None
}); });
let mut body_mactree = parse_tokv(body, &ctx).await; let mut body_mactree = parse_tokv(body).await;
for (ph, ph_pos) in placeholders.iter().rev() { for (ph, ph_pos) in placeholders.iter().rev() {
let name = ctx.module().suffix([ph.name.clone()], ctx.i()).await; let name = ctx.module().suffix([ph.name.clone()]).await;
body_mactree = body_mactree =
MacTreeSeq::new([ MacTreeSeq::new([
MacTok::Lambda(MacTok::Name(name).at(ph_pos.clone()), body_mactree).at(ph_pos.clone()) MacTok::Lambda(MacTok::Name(name).at(ph_pos.clone()), body_mactree).at(ph_pos.clone())
@@ -132,53 +130,42 @@ impl Parser for MacroLine {
let body_sr = body.sr(); let body_sr = body.sr();
rules.push((name.clone(), placeholders, pattern)); rules.push((name.clone(), placeholders, pattern));
lines.push(ParsedLine::cnst(&sr, &line.output, true, name, async move |ctx| { lines.push(ParsedLine::cnst(&sr, &line.output, true, name, async move |ctx| {
let rep = Reporter::new(); let macro_input =
let body = dealias_mac_v(&body_mactree, &ctx, &rep).await; MacTok::S(Paren::Round, with_reporter(dealias_mac_v(&body_mactree, &ctx)).await?)
let macro_input = MacTok::S(Paren::Round, body).at(body_sr.pos()); .at(body_sr.pos());
if let Some(e) = rep.errv() { Ok(call(sym_ref(sym!(macros::resolve)), [macro_input.to_gen().await]))
return Err(e);
}
Ok(call(sym_ref(sym!(macros::resolve; i())), [macro_input.to_gen().await]))
})) }))
} }
let mac_cell = Rc::new(OnceCell::new()); let mac_cell = Rc::new(OnceCell::new());
let rules = Rc::new(RefCell::new(Some(rules))); let rules = Rc::new(rules);
for (kw, sr) in &*keywords { for (kw, sr) in &*keywords {
clone!(mac_cell, rules, module, macro_name, prio); clone!(mac_cell, rules, module, macro_name, prio);
lines.push(ParsedLine::cnst(&sr.clone(), &comments, true, kw.clone(), async move |cctx| { let kw_key = is(&format!("__macro__{kw}")).await;
let mac = mac_cell lines.push(ParsedLine::cnst(&sr.clone(), &comments, true, kw_key, async move |cctx| {
.get_or_init(async { let mac_future = async {
let rep = Reporter::new(); let rules = with_reporter(
let rules = rules.borrow_mut().take().expect("once cell initializer runs"); stream(async |mut h| {
let rules = stream::iter(rules) for (body, ph_names, pattern_rel) in rules.iter() {
.then(|(body_name, placeholders, pattern_rel)| { let pattern = dealias_mac_v(pattern_rel, &cctx).await;
let cctx = &cctx; let ph_names = ph_names.iter().map(|(ph, _)| ph.name.clone()).collect_vec();
let rep = &rep; match Matcher::new(pattern.clone()).await {
async move { Ok(matcher) =>
let pattern = dealias_mac_v(&pattern_rel, cctx, rep).await; h.emit(Rule { body: body.clone(), matcher, pattern, ph_names }).await,
let pattern_res = Matcher::new(pattern.clone()).await; Err(e) => report(e),
let placeholders = placeholders.into_iter().map(|(ph, _)| ph.name).collect_vec();
match pattern_res {
Ok(matcher) => Some(Rule { body_name, matcher, pattern, placeholders }),
Err(e) => {
rep.report(e);
None
},
} }
} }
}) })
.flat_map(stream::iter) .collect::<Vec<_>>(),
.collect::<Vec<_>>() )
.await; .await?;
Macro(Rc::new(MacroData { Ok(Macro(Rc::new(MacroData {
canonical_name: module.suffix([macro_name], &i()).await, canonical_name: module.suffix([macro_name]).await,
module, module,
prio: prio.map(|i| i.0 as u64), prio: prio.map(|i| i.0 as u64),
rules, rules,
})) })))
}) };
.await; mac_cell.get_or_init(mac_future).await.clone().to_gen().await
atom(mac.clone())
})) }))
} }
Ok(lines) Ok(lines)
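The rewritten registration above builds the `Macro` atom lazily and shares it between every keyword constant through an `Rc<OnceCell<_>>`, so rule dealiasing and matcher compilation run at most once. A rough sketch of that share-once pattern, using `tokio::sync::OnceCell` purely for illustration (the project's own single-threaded async cell and the real `Macro`/`Rule` types are stand-ins here):

```rust
use std::sync::Arc;

use tokio::sync::OnceCell;

// Stand-in for the Macro atom built from the parsed rules.
#[derive(Clone, Debug)]
struct Macro(Arc<Vec<String>>);

// Stand-in for dealiasing the patterns and compiling the matchers.
async fn build_macro() -> Macro {
    Macro(Arc::new(vec!["rule::demo::0".into()]))
}

#[tokio::main(flavor = "current_thread")]
async fn main() {
    let cell = Arc::new(OnceCell::new());
    // Every keyword constant shares the same cell, so the macro is built once
    // and each constant just clones the finished value.
    for kw in ["some", "none"] {
        let mac = cell.get_or_init(build_macro).await.clone();
        println!("__macro__{kw} -> {mac:?}");
    }
}
```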

@@ -1,10 +1,8 @@
use never::Never; use never::Never;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::reqnot::Receipt; use orchid_base::reqnot::{Receipt, ReqHandle};
use orchid_base::sym; use orchid_base::sym;
use orchid_extension::atom::{AtomDynfo, AtomicFeatures}; use orchid_extension::atom::{AtomDynfo, AtomicFeatures};
use orchid_extension::context::i;
use orchid_extension::entrypoint::ExtReq;
use orchid_extension::lexer::LexerObj; use orchid_extension::lexer::LexerObj;
use orchid_extension::other_system::SystemHandle; use orchid_extension::other_system::SystemHandle;
use orchid_extension::parser::ParserObj; use orchid_extension::parser::ParserObj;
@@ -24,14 +22,14 @@ use crate::macros::std_macros::gen_std_macro_lib;
use crate::macros::utils::MacroBodyArgCollector; use crate::macros::utils::MacroBodyArgCollector;
use crate::{MacTree, StdSystem}; use crate::{MacTree, StdSystem};
#[derive(Default)] #[derive(Debug, Default)]
pub struct MacroSystem; pub struct MacroSystem;
impl SystemCtor for MacroSystem { impl SystemCtor for MacroSystem {
type Deps = StdSystem; type Deps = StdSystem;
type Instance = Self; type Instance = Self;
const NAME: &'static str = "orchid::macros"; const NAME: &'static str = "orchid::macros";
const VERSION: f64 = 0.00_01; const VERSION: f64 = 0.00_01;
fn inst(_: SystemHandle<StdSystem>) -> Self::Instance { Self } fn inst(&self, _: SystemHandle<StdSystem>) -> Self::Instance { Self }
} }
impl SystemCard for MacroSystem { impl SystemCard for MacroSystem {
type Ctor = Self; type Ctor = Self;
@@ -48,19 +46,19 @@ impl SystemCard for MacroSystem {
} }
} }
impl System for MacroSystem { impl System for MacroSystem {
async fn request(_: ExtReq<'_>, req: Never) -> Receipt<'_> { match req {} } async fn request<'a>(_: Box<dyn ReqHandle<'a> + 'a>, req: Never) -> Receipt<'a> { match req {} }
async fn prelude() -> Vec<Sym> { async fn prelude() -> Vec<Sym> {
vec![ vec![
sym!(macros::common::+; i()), sym!(macros::common::+),
sym!(macros::common::*; i()), sym!(macros::common::*),
sym!(macros::common::,; i()), sym!(macros::common::,),
sym!(macros::common::;; i()), sym!(macros::common::;),
sym!(macros::common::..; i()), sym!(macros::common::..),
sym!(macros::common::_; i()), sym!(macros::common::_),
sym!(std::tuple::t; i()), sym!(std::tuple::t),
sym!(pattern::match; i()), sym!(pattern::match),
sym!(pattern::ref; i()), sym!(pattern::ref),
sym!(pattern::=>; i()), sym!(pattern::=>),
] ]
} }
fn lexers() -> Vec<LexerObj> { vec![&MacTreeLexer, &PhLexer] } fn lexers() -> Vec<LexerObj> { vec![&MacTreeLexer, &PhLexer] }

@@ -2,7 +2,7 @@ use std::borrow::Cow;
use std::rc::Rc; use std::rc::Rc;
use never::Never; use never::Never;
use orchid_base::interner::Tok; use orchid_base::interner::IStr;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_extension::atom::Atomic; use orchid_extension::atom::Atomic;
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant}; use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
@@ -25,8 +25,8 @@ pub struct Macro(pub Rc<MacroData>);
pub struct Rule { pub struct Rule {
pub pattern: MacTreeSeq, pub pattern: MacTreeSeq,
pub matcher: Matcher, pub matcher: Matcher,
pub placeholders: Vec<Tok<String>>, pub ph_names: Vec<IStr>,
pub body_name: Tok<String>, pub body: IStr,
} }
impl Atomic for Macro { impl Atomic for Macro {
type Data = (); type Data = ();

@@ -8,7 +8,7 @@ use hashbrown::HashSet;
use orchid_api_derive::Coding; use orchid_api_derive::Coding;
use orchid_base::error::OrcErrv; use orchid_base::error::OrcErrv;
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants}; use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
use orchid_base::interner::Tok; use orchid_base::interner::IStr;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::tl_cache; use orchid_base::tl_cache;
@@ -205,7 +205,7 @@ pub async fn mtreev_fmt<'b>(
#[derive(Clone, Debug, Hash, PartialEq, Eq)] #[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct Ph { pub struct Ph {
pub name: Tok<String>, pub name: IStr,
pub kind: PhKind, pub kind: PhKind,
} }
impl Display for Ph { impl Display for Ph {

@@ -3,7 +3,7 @@ use std::ops::RangeInclusive;
use futures::FutureExt; use futures::FutureExt;
use itertools::chain; use itertools::chain;
use orchid_base::error::{OrcRes, mk_errv}; use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::parse::ParseCtx; use orchid_base::interner::is;
use orchid_base::tokens::PARENS; use orchid_base::tokens::PARENS;
use orchid_base::tree::Paren; use orchid_base::tree::Paren;
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable}; use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
@@ -14,7 +14,7 @@ use crate::macros::instantiate_tpl::InstantiateTplCall;
use crate::macros::let_line::parse_tok; use crate::macros::let_line::parse_tok;
use crate::macros::mactree::{MacTok, MacTree, MacTreeSeq}; use crate::macros::mactree::{MacTok, MacTree, MacTreeSeq};
#[derive(Default)] #[derive(Debug, Default)]
pub struct MacTreeLexer; pub struct MacTreeLexer;
impl Lexer for MacTreeLexer { impl Lexer for MacTreeLexer {
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['\''..='\'']; const CHAR_FILTER: &'static [RangeInclusive<char>] = &['\''..='\''];
@@ -54,11 +54,9 @@ impl Lexer for MacTreeLexer {
let tok = MacTok::S(*paren, MacTreeSeq::new(items)); let tok = MacTok::S(*paren, MacTreeSeq::new(items));
break Ok((tail3, tok.at(ctx.pos_tt(tail, tail3).pos()))); break Ok((tail3, tok.at(ctx.pos_tt(tail, tail3).pos())));
} else if tail2.is_empty() { } else if tail2.is_empty() {
return Err(mk_errv( return Err(mk_errv(is("Unclosed block").await, format!("Expected closing {rp}"), [
ctx.i().i("Unclosed block").await, ctx.pos_lt(1, tail),
format!("Expected closing {rp}"), ]));
[ctx.pos_lt(1, tail)],
));
} }
let (new_tail, new_item) = mac_tree(tail2, args, ctx).boxed_local().await?; let (new_tail, new_item) = mac_tree(tail2, args, ctx).boxed_local().await?;
body_tail = new_tail; body_tail = new_tail;
@@ -87,7 +85,7 @@ impl Lexer for MacTreeLexer {
Ok((tail3, MacTok::Lambda(param, MacTreeSeq::new(body)).at(ctx.pos_tt(tail, tail3).pos()))) Ok((tail3, MacTok::Lambda(param, MacTreeSeq::new(body)).at(ctx.pos_tt(tail, tail3).pos())))
} else { } else {
let (tail2, sub) = ctx.recurse(tail).await?; let (tail2, sub) = ctx.recurse(tail).await?;
let parsed = parse_tok(&sub, ctx).await.expect("Unexpected invalid token"); let parsed = parse_tok(&sub).await.expect("Unexpected invalid token");
Ok((tail2, parsed)) Ok((tail2, parsed))
} }
} }

@@ -8,11 +8,11 @@ use orchid_api::ExprTicket;
use orchid_api_derive::Coding; use orchid_api_derive::Coding;
use orchid_base::error::{OrcRes, mk_errv}; use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::format::fmt; use orchid_base::format::fmt;
use orchid_base::interner::is;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::sym; use orchid_base::sym;
use orchid_extension::atom::{Atomic, TAtom}; use orchid_extension::atom::{Atomic, TAtom};
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own}; use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
use orchid_extension::context::i;
use orchid_extension::conv::ToExpr; use orchid_extension::conv::ToExpr;
use orchid_extension::coroutine_exec::{ExecHandle, exec}; use orchid_extension::coroutine_exec::{ExecHandle, exec};
use orchid_extension::expr::{Expr, ExprHandle}; use orchid_extension::expr::{Expr, ExprHandle};
@@ -43,7 +43,7 @@ impl MatcherData {
pub fn keys(&self) -> impl Stream<Item = Sym> { pub fn keys(&self) -> impl Stream<Item = Sym> {
stream(async |mut h| { stream(async |mut h| {
for tk in &self.keys { for tk in &self.keys {
h.emit(Sym::from_api(*tk, &i()).await).await h.emit(Sym::from_api(*tk).await).await
} }
}) })
} }
@@ -85,7 +85,7 @@ pub async fn gen_match_macro_lib() -> Vec<GenMember> {
}, },
), ),
fun(true, "matcher", async |names: HomoTpl<TAtom<SymAtom>>, matcher: Expr| MatcherAtom { fun(true, "matcher", async |names: HomoTpl<TAtom<SymAtom>>, matcher: Expr| MatcherAtom {
keys: join_all(names.0.iter().map(async |atm| Sym::from_api(atm.0, &i()).await)).await, keys: join_all(names.0.iter().map(async |atm| Sym::from_api(atm.0).await)).await,
matcher, matcher,
}), }),
build_macro(None, ["match", "match_rule", "_row", "=>"]) build_macro(None, ["match", "match_rule", "_row", "=>"])
@@ -93,7 +93,7 @@ pub async fn gen_match_macro_lib() -> Vec<GenMember> {
async |[value, rules]| { async |[value, rules]| {
exec(async move |mut h| { exec(async move |mut h| {
let rule_lines = h let rule_lines = h
.exec::<TAtom<Tuple>>(call(sym_ref(sym!(macros::resolve; i())), [ .exec::<TAtom<Tuple>>(call(sym_ref(sym!(macros::resolve)), [
mactree!(macros::common::semi_list "push" rules.clone();).to_gen().await, mactree!(macros::common::semi_list "push" rules.clone();).to_gen().await,
])) ]))
.await?; .await?;
@@ -105,20 +105,20 @@ pub async fn gen_match_macro_lib() -> Vec<GenMember> {
)) ))
.await?; .await?;
let Tpl((matcher, body)) = h let Tpl((matcher, body)) = h
.exec(call(sym_ref(sym!(macros::resolve; i())), [ .exec(call(sym_ref(sym!(macros::resolve)), [
mactree!(pattern::_row "push" own(&line_mac).await ;).to_gen().await, mactree!(pattern::_row "push" own(&line_mac).await ;).to_gen().await,
])) ]))
.await?; .await?;
rule_atoms.push((matcher, body)); rule_atoms.push((matcher, body));
} }
let base_case = lambda(0, [bot(mk_errv( let base_case = lambda(0, [bot(mk_errv(
i().i("No branches match").await, is("No branches match").await,
"None of the patterns matches this value", "None of the patterns matches this value",
[rules.pos()], [rules.pos()],
))]); ))]);
let match_expr = stream::iter(rule_atoms.into_iter().rev()) let match_expr = stream::iter(rule_atoms.into_iter().rev())
.fold(base_case, async |tail, (mat, body)| { .fold(base_case, async |tail, (mat, body)| {
lambda(0, [call(sym_ref(sym!(pattern::match_one; i())), [ lambda(0, [call(sym_ref(sym!(pattern::match_one)), [
mat.to_gen().await, mat.to_gen().await,
arg(0), arg(0),
body.to_gen().await, body.to_gen().await,
@@ -144,14 +144,14 @@ pub async fn gen_match_macro_lib() -> Vec<GenMember> {
async |[pattern, mut value]| { async |[pattern, mut value]| {
exec(async move |mut h| -> OrcRes<Tpl<(TAtom<MatcherAtom>, GExpr)>> { exec(async move |mut h| -> OrcRes<Tpl<(TAtom<MatcherAtom>, GExpr)>> {
let Ok(pat) = h let Ok(pat) = h
.exec::<TAtom<MatcherAtom>>(call(sym_ref(sym!(macros::resolve; i())), [ .exec::<TAtom<MatcherAtom>>(call(sym_ref(sym!(macros::resolve)), [
mactree!(pattern::match_rule "push" pattern.clone();).to_gen().await, mactree!(pattern::match_rule "push" pattern.clone();).to_gen().await,
])) ]))
.await .await
else { else {
return Err(mk_errv( return Err(mk_errv(
i().i("Invalid pattern").await, is("Invalid pattern").await,
format!("Could not parse {} as a match pattern", fmt(&pattern, &i()).await), format!("Could not parse {} as a match pattern", fmt(&pattern).await),
[pattern.pos()], [pattern.pos()],
)); ));
}; };
@@ -169,18 +169,18 @@ pub async fn gen_match_macro_lib() -> Vec<GenMember> {
.rule(mactreev!(pattern::match_rule(pattern::ref "$" name)), [async |[name]| { .rule(mactreev!(pattern::match_rule(pattern::ref "$" name)), [async |[name]| {
let MacTok::Name(name) = name.tok() else { let MacTok::Name(name) = name.tok() else {
return Err(mk_errv( return Err(mk_errv(
i().i("pattern 'ref' requires a name to bind to").await, is("pattern 'ref' requires a name to bind to").await,
format!( format!(
"'ref' was interpreted as a binding matcher, \ "'ref' was interpreted as a binding matcher, \
but it was followed by {} instead of a name", but it was followed by {} instead of a name",
fmt(&name, &i()).await fmt(&name).await
), ),
[name.pos()], [name.pos()],
)); ));
}; };
Ok(MatcherAtom { Ok(MatcherAtom {
keys: vec![name.clone()], keys: vec![name.clone()],
matcher: sym_ref(sym!(pattern::ref_body; i())).to_expr().await, matcher: sym_ref(sym!(pattern::ref_body)).to_expr().await,
}) })
}]) }])
.finish(), .finish(),

@@ -1,10 +1,10 @@
use orchid_api_derive::Coding; use orchid_api_derive::Coding;
use orchid_base::error::{OrcRes, mk_errv}; use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::format::FmtUnit; use orchid_base::format::FmtUnit;
use orchid_base::interner::{es, is};
use orchid_base::parse::{name_char, name_start}; use orchid_base::parse::{name_char, name_start};
use orchid_extension::atom::Atomic; use orchid_extension::atom::Atomic;
use orchid_extension::atom_thin::{ThinAtom, ThinVariant}; use orchid_extension::atom_thin::{ThinAtom, ThinVariant};
use orchid_extension::context::i;
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable}; use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
use orchid_extension::tree::{GenTokTree, x_tok}; use orchid_extension::tree::{GenTokTree, x_tok};
@@ -13,7 +13,7 @@ use crate::macros::mactree::{Ph, PhKind};
#[derive(Clone, Coding)] #[derive(Clone, Coding)]
pub struct PhAtom(orchid_api::TStr, PhKind); pub struct PhAtom(orchid_api::TStr, PhKind);
impl PhAtom { impl PhAtom {
pub async fn to_full(&self) -> Ph { Ph { kind: self.1, name: i().ex(self.0).await } } pub async fn to_full(&self) -> Ph { Ph { kind: self.1, name: es(self.0).await } }
} }
impl Atomic for PhAtom { impl Atomic for PhAtom {
type Data = Self; type Data = Self;
@@ -21,11 +21,11 @@ impl Atomic for PhAtom {
} }
impl ThinAtom for PhAtom { impl ThinAtom for PhAtom {
async fn print(&self) -> FmtUnit { async fn print(&self) -> FmtUnit {
Ph { name: i().ex(self.0).await, kind: self.1 }.to_string().into() Ph { name: es(self.0).await, kind: self.1 }.to_string().into()
} }
} }
#[derive(Default)] #[derive(Debug, Default)]
pub struct PhLexer; pub struct PhLexer;
impl Lexer for PhLexer { impl Lexer for PhLexer {
const CHAR_FILTER: &'static [std::ops::RangeInclusive<char>] = &['$'..='$', '.'..='.']; const CHAR_FILTER: &'static [std::ops::RangeInclusive<char>] = &['$'..='$', '.'..='.'];
@@ -52,7 +52,7 @@ impl Lexer for PhLexer {
(prio_num, tail) (prio_num, tail)
} else { } else {
return Err(mk_errv( return Err(mk_errv(
i().i("Invalid priority, must be 0-255").await, is("Invalid priority, must be 0-255").await,
format!("{prio} is not a valid placeholder priority"), format!("{prio} is not a valid placeholder priority"),
[ctx.pos_lt(prio.len(), tail)], [ctx.pos_lt(prio.len(), tail)],
)); ));
@@ -71,7 +71,7 @@ impl Lexer for PhLexer {
return Err(err_not_applicable().await); return Err(err_not_applicable().await);
} }
}; };
let ph_atom = PhAtom(i().i::<String>(name).await.to_api(), phkind); let ph_atom = PhAtom(is(name).await.to_api(), phkind);
Ok((tail, x_tok(ph_atom).await.at(ctx.pos_tt(line, tail)))) Ok((tail, x_tok(ph_atom).await.at(ctx.pos_tt(line, tail))))
} }
} }

@@ -6,12 +6,13 @@ use hashbrown::{HashMap, HashSet};
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::mk_errv; use orchid_base::error::mk_errv;
use orchid_base::format::fmt; use orchid_base::format::fmt;
use orchid_base::interner::is;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::name::Sym; use orchid_base::logging::log;
use orchid_base::name::{NameLike, Sym, VPath};
use orchid_base::tree::Paren; use orchid_base::tree::Paren;
use orchid_extension::atom::TAtom; use orchid_extension::atom::TAtom;
use orchid_extension::atom_owned::own; use orchid_extension::atom_owned::own;
use orchid_extension::context::{ctx, i};
use orchid_extension::conv::ToExpr; use orchid_extension::conv::ToExpr;
use orchid_extension::coroutine_exec::{ExecHandle, exec}; use orchid_extension::coroutine_exec::{ExecHandle, exec};
use orchid_extension::gen_expr::{GExpr, arg, bot, call, lambda, sym_ref}; use orchid_extension::gen_expr::{GExpr, arg, bot, call, lambda, sym_ref};
@@ -24,17 +25,19 @@ use crate::macros::mactree::MacTreeSeq;
use crate::macros::rule::state::{MatchState, StateEntry}; use crate::macros::rule::state::{MatchState, StateEntry};
use crate::{MacTok, MacTree}; use crate::{MacTok, MacTree};
pub async fn resolve(tpl: MacTree) -> GExpr { pub async fn resolve(val: MacTree) -> GExpr {
exec(async move |mut h| { exec(async move |mut h| {
let ctx = ctx(); writeln!(log("debug"), "Macro-resolving {}", fmt(&val).await).await;
// if ctx.logger().is_active() {
writeln!(ctx.logger(), "Macro-resolving {}", fmt(&tpl, &i()).await);
// }
let root = refl(); let root = refl();
let mut macros = HashMap::new(); let mut macros = HashMap::new();
for n in tpl.glossary() { for n in val.glossary() {
if let Ok(ReflMemKind::Const) = root.get_by_path(n).await.map(|m| m.kind()) { let (foot, body) = n.split_last_seg();
let Ok(mac) = h.exec::<TAtom<Macro>>(sym_ref(n.clone())).await else { continue }; let new_name = VPath::new(body.iter().cloned())
.name_with_suffix(is(&format!("__macro__{foot}")).await)
.to_sym()
.await;
if let Ok(ReflMemKind::Const) = root.get_by_path(&new_name).await.map(|m| m.kind()) {
let Ok(mac) = h.exec::<TAtom<Macro>>(sym_ref(new_name)).await else { continue };
let mac = own(&mac).await; let mac = own(&mac).await;
macros.entry(mac.0.canonical_name.clone()).or_insert(mac); macros.entry(mac.0.canonical_name.clone()).or_insert(mac);
} }
@@ -45,7 +48,7 @@ pub async fn resolve(tpl: MacTree) -> GExpr {
for (_, mac) in macros.iter() { for (_, mac) in macros.iter() {
let mut record = FilteredMacroRecord { mac, rules: Vec::new() }; let mut record = FilteredMacroRecord { mac, rules: Vec::new() };
for (rule_i, rule) in mac.0.rules.iter().enumerate() { for (rule_i, rule) in mac.0.rules.iter().enumerate() {
if rule.pattern.glossary.is_subset(tpl.glossary()) { if rule.pattern.glossary.is_subset(val.glossary()) {
record.rules.push(rule_i); record.rules.push(rule_i);
} }
} }
@@ -61,7 +64,15 @@ pub async fn resolve(tpl: MacTree) -> GExpr {
} }
} }
let mut rctx = ResolveCtx { h, exclusive, priod }; let mut rctx = ResolveCtx { h, exclusive, priod };
resolve_one(&mut rctx, Substack::Bottom, &tpl).await let gex = resolve_one(&mut rctx, Substack::Bottom, &val).await;
writeln!(
log("debug"),
"Macro-resolution over {}\nreturned {}",
fmt(&val).await,
fmt(&gex).await
)
.await;
gex
}) })
.await .await
} }
@@ -98,7 +109,7 @@ async fn resolve_one(
MacTok::Lambda(arg, body) => { MacTok::Lambda(arg, body) => {
let MacTok::Name(name) = &*arg.tok else { let MacTok::Name(name) = &*arg.tok else {
return bot(mk_errv( return bot(mk_errv(
i().i("Syntax error after macros").await, is("Syntax error after macros").await,
"This token ends up as a binding, consider replacing it with a name", "This token ends up as a binding, consider replacing it with a name",
[arg.pos()], [arg.pos()],
)); ));
@@ -109,8 +120,8 @@ async fn resolve_one(
}, },
MacTok::S(Paren::Round, body) => resolve_seq(ctx, arg_stk, body.clone(), value.pos()).await, MacTok::S(Paren::Round, body) => resolve_seq(ctx, arg_stk, body.clone(), value.pos()).await,
MacTok::S(..) => bot(mk_errv( MacTok::S(..) => bot(mk_errv(
i().i("Leftover [] or {} not matched by macro").await, is("Leftover [] or {} not matched by macro").await,
format!("{} was not matched by any macro", fmt(value, &i()).await), format!("{} was not matched by any macro", fmt(value).await),
[value.pos()], [value.pos()],
)), )),
} }
@@ -138,7 +149,7 @@ async fn resolve_seq(
) -> GExpr { ) -> GExpr {
if val.items.is_empty() { if val.items.is_empty() {
return bot(mk_errv( return bot(mk_errv(
i().i("Empty sequence").await, is("Empty sequence").await,
"() or (\\arg ) left after macro execution. \ "() or (\\arg ) left after macro execution. \
This is usually caused by an incomplete call to a macro with bad error detection", This is usually caused by an incomplete call to a macro with bad error detection",
[fallback_pos], [fallback_pos],
@@ -212,7 +223,7 @@ async fn resolve_seq(
Err((lran, rran)) Err((lran, rran))
} }
}); });
let mac_conflict_tk = i().i("Macro conflict").await; let mac_conflict_tk = is("Macro conflict").await;
let error = conflict_sets let error = conflict_sets
.filter(|r| 1 < r.len()) .filter(|r| 1 < r.len())
.map(|set| { .map(|set| {
@@ -270,13 +281,12 @@ async fn resolve_seq(
async fn mk_body_call(mac: &Macro, rule: &Rule, state: &MatchState<'_>, pos: Pos) -> GExpr { async fn mk_body_call(mac: &Macro, rule: &Rule, state: &MatchState<'_>, pos: Pos) -> GExpr {
let mut call_args = vec![]; let mut call_args = vec![];
for name in rule.placeholders.iter() { for name in rule.ph_names.iter() {
call_args.push(match state.get(name).expect("Missing state entry for placeholder") { call_args.push(match state.get(name).expect("Missing state entry for placeholder") {
StateEntry::Scalar(scal) => (**scal).clone().to_gen().await, StateEntry::Scalar(scal) => (**scal).clone().to_gen().await,
StateEntry::Vec(vec) => StateEntry::Vec(vec) =>
MacTok::S(Paren::Round, MacTreeSeq::new(vec.iter().cloned())).at(Pos::None).to_gen().await, MacTok::S(Paren::Round, MacTreeSeq::new(vec.iter().cloned())).at(Pos::None).to_gen().await,
}); });
} }
call(sym_ref(mac.0.module.suffix([rule.body_name.clone()], &i()).await), call_args) call(sym_ref(mac.0.module.suffix([rule.body.clone()]).await), call_args).at(pos.clone())
.at(pos.clone())
} }
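For reference, the `__macro__` key scheme used above (registered by the parser as `__macro__{kw}` and rebuilt here from the last path segment of each glossary name) amounts to renaming the final segment. A plain-string sketch, with the project's `Sym`/`VPath` types replaced by `Vec<String>` for illustration:

```rust
/// A macro keyword `kw` defined in module `a::b` is stored as the constant
/// `a::b::__macro__kw`; the resolver derives that key from any name that
/// occurs in the input's glossary.
fn macro_key(name: &[String]) -> Vec<String> {
    let (foot, body) = name.split_last().expect("names have at least one segment");
    body.iter().cloned().chain([format!("__macro__{foot}")]).collect()
}

fn main() {
    let name: Vec<String> = ["std", "option", "some"].map(String::from).to_vec();
    assert_eq!(macro_key(&name).join("::"), "std::option::__macro__some");
}
```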

@@ -2,17 +2,16 @@ use futures::FutureExt;
use futures::future::join_all; use futures::future::join_all;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::{OrcRes, mk_errv}; use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::interner::Tok; use orchid_base::interner::{IStr, is};
use orchid_base::join_ok; use orchid_base::join_ok;
use orchid_base::side::Side; use orchid_base::side::Side;
use orchid_extension::context::i;
use super::shared::{AnyMatcher, ScalMatcher, VecMatcher}; use super::shared::{AnyMatcher, ScalMatcher, VecMatcher};
use super::vec_attrs::vec_attrs; use super::vec_attrs::vec_attrs;
use crate::macros::mactree::{Ph, PhKind}; use crate::macros::mactree::{Ph, PhKind};
use crate::macros::{MacTok, MacTree}; use crate::macros::{MacTok, MacTree};
pub type MaxVecSplit<'a> = (&'a [MacTree], (Tok<String>, u8, bool), &'a [MacTree]); pub type MaxVecSplit<'a> = (&'a [MacTree], (IStr, u8, bool), &'a [MacTree]);
/// Derive the details of the central vectorial and the two sides from a /// Derive the details of the central vectorial and the two sides from a
/// slice of Expr's /// slice of Expr's
@@ -126,7 +125,7 @@ async fn mk_scalar(pattern: &MacTree) -> OrcRes<ScalMatcher> {
MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(&body.items).boxed_local().await?)), MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(&body.items).boxed_local().await?)),
MacTok::Lambda(..) => MacTok::Lambda(..) =>
return Err(mk_errv( return Err(mk_errv(
i().i("Lambda in matcher").await, is("Lambda in matcher").await,
"Lambdas can't be matched for, only generated in templates", "Lambdas can't be matched for, only generated in templates",
[pattern.pos()], [pattern.pos()],
)), )),
@@ -137,10 +136,11 @@ async fn mk_scalar(pattern: &MacTree) -> OrcRes<ScalMatcher> {
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use orchid_base::interner::local_interner::local_interner;
use orchid_base::interner::{is, with_interner};
use orchid_base::location::SrcRange; use orchid_base::location::SrcRange;
use orchid_base::sym; use orchid_base::sym;
use orchid_base::tokens::Paren; use orchid_base::tokens::Paren;
use orchid_extension::context::{i, mock_ctx, with_ctx};
use test_executors::spin_on; use test_executors::spin_on;
use super::mk_any; use super::mk_any;
@@ -149,27 +149,27 @@ mod test {
#[test] #[test]
fn test_scan() { fn test_scan() {
spin_on(with_ctx(mock_ctx(), async { spin_on(with_interner(local_interner(), async {
let ex = |tok: MacTok| async { tok.at(SrcRange::mock(&i()).await.pos()) }; let ex = |tok: MacTok| async { tok.at(SrcRange::mock().await.pos()) };
let pattern = vec![ let pattern = vec![
ex(MacTok::Ph(Ph { ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 0, at_least_one: false }, kind: PhKind::Vector { priority: 0, at_least_one: false },
name: i().i("::prefix").await, name: is("::prefix").await,
})) }))
.await, .await,
ex(MacTok::Name(sym!(prelude::do; i()))).await, ex(MacTok::Name(sym!(prelude::do))).await,
ex(MacTok::S( ex(MacTok::S(
Paren::Round, Paren::Round,
MacTreeSeq::new([ MacTreeSeq::new([
ex(MacTok::Ph(Ph { ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 0, at_least_one: false }, kind: PhKind::Vector { priority: 0, at_least_one: false },
name: i().i("expr").await, name: is("expr").await,
})) }))
.await, .await,
ex(MacTok::Name(sym!(prelude::; ; i()))).await, ex(MacTok::Name(sym!(prelude::;))).await,
ex(MacTok::Ph(Ph { ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 1, at_least_one: false }, kind: PhKind::Vector { priority: 1, at_least_one: false },
name: i().i("rest").await, name: is("rest").await,
})) }))
.await, .await,
]), ]),
@@ -177,7 +177,7 @@ mod test {
.await, .await,
ex(MacTok::Ph(Ph { ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 0, at_least_one: false }, kind: PhKind::Vector { priority: 0, at_least_one: false },
name: i().i("::suffix").await, name: is("::suffix").await,
})) }))
.await, .await,
]; ];
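The test above now runs inside `with_interner(local_interner(), ...)` instead of threading an interner handle through every call. A rough sketch of that task-local idea, written with `tokio::task_local!` purely for illustration; the crate's own `with_interner`/`is` and its `IStr` token type are only assumed to behave roughly like this:

```rust
use std::cell::RefCell;
use std::collections::HashSet;
use std::future::Future;
use std::rc::Rc;

tokio::task_local! {
    // The interner lives in task-local storage instead of a ctx argument.
    static INTERNER: Rc<RefCell<HashSet<Rc<str>>>>;
}

// Run a future with a fresh interner installed for its whole lifetime.
async fn with_interner<T>(fut: impl Future<Output = T>) -> T {
    INTERNER.scope(Rc::new(RefCell::new(HashSet::new())), fut).await
}

// Intern a string, returning the shared token for it.
async fn is(s: &str) -> Rc<str> {
    INTERNER.with(|cell| {
        let mut set = cell.borrow_mut();
        if let Some(tok) = set.get(s) {
            return tok.clone();
        }
        let tok: Rc<str> = Rc::from(s);
        set.insert(tok.clone());
        tok
    })
}

#[tokio::main(flavor = "current_thread")]
async fn main() {
    with_interner(async {
        let a = is("::prefix").await;
        let b = is("::prefix").await;
        assert!(Rc::ptr_eq(&a, &b)); // both calls see the same token
    })
    .await;
}
```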

@@ -2,8 +2,8 @@ use std::fmt;
use std::rc::Rc; use std::rc::Rc;
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use orchid_base::interner::is;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_extension::context::i;
use super::any_match::any_match; use super::any_match::any_match;
use super::build::mk_any; use super::build::mk_any;
@@ -24,12 +24,12 @@ impl Matcher {
let first = pattern.first().expect("Empty pattern is not allowed"); let first = pattern.first().expect("Empty pattern is not allowed");
if vec_attrs(first).is_none() { if vec_attrs(first).is_none() {
let pos = first.pos(); let pos = first.pos();
pattern.insert(0, MacTok::Ph(Ph { name: i().i("::before").await, kind }).at(pos)); pattern.insert(0, MacTok::Ph(Ph { name: is("::before").await, kind }).at(pos));
} }
let last = pattern.last().expect("first returned Some above"); let last = pattern.last().expect("first returned Some above");
if vec_attrs(last).is_none() { if vec_attrs(last).is_none() {
let pos = last.pos(); let pos = last.pos();
pattern.insert(0, MacTok::Ph(Ph { name: i().i("::after").await, kind }).at(pos)); pattern.insert(0, MacTok::Ph(Ph { name: is("::after").await, kind }).at(pos));
} }
Ok(Matcher { inner: mk_any(&pattern).await? }) Ok(Matcher { inner: mk_any(&pattern).await? })
} }
@@ -42,7 +42,7 @@ impl Matcher {
) -> Option<(&'a [MacTree], MatchState<'a>, &'a [MacTree])> { ) -> Option<(&'a [MacTree], MatchState<'a>, &'a [MacTree])> {
let mut result = any_match(&self.inner, seq, &save_loc)?; let mut result = any_match(&self.inner, seq, &save_loc)?;
async fn remove_frame<'a>(result: &mut MatchState<'a>, key: &str) -> &'a [MacTree] { async fn remove_frame<'a>(result: &mut MatchState<'a>, key: &str) -> &'a [MacTree] {
match result.remove(i().i(key).await) { match result.remove(is(key).await) {
Some(StateEntry::Scalar(_)) => panic!("{key} is defined in the constructor as a Vec"), Some(StateEntry::Scalar(_)) => panic!("{key} is defined in the constructor as a Vec"),
Some(StateEntry::Vec(v)) => v, Some(StateEntry::Vec(v)) => v,
None => &[], None => &[],

@@ -3,7 +3,7 @@
use std::fmt; use std::fmt;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::interner::Tok; use orchid_base::interner::IStr;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::side::Side; use orchid_base::side::Side;
use orchid_base::tokens::{PARENS, Paren}; use orchid_base::tokens::{PARENS, Paren};
@@ -11,12 +11,12 @@ use orchid_base::tokens::{PARENS, Paren};
pub enum ScalMatcher { pub enum ScalMatcher {
Name(Sym), Name(Sym),
S(Paren, Box<AnyMatcher>), S(Paren, Box<AnyMatcher>),
Placeh { key: Tok<String> }, Placeh { key: IStr },
} }
pub enum VecMatcher { pub enum VecMatcher {
Placeh { Placeh {
key: Tok<String>, key: IStr,
nonzero: bool, nonzero: bool,
}, },
Scan { Scan {
@@ -41,7 +41,7 @@ pub enum VecMatcher {
/// the length of matches on either side. /// the length of matches on either side.
/// ///
/// Vectorial keys that appear on either side, in priority order /// Vectorial keys that appear on either side, in priority order
key_order: Vec<Tok<String>>, key_order: Vec<IStr>,
}, },
} }

@@ -2,7 +2,7 @@
use std::any::Any; use std::any::Any;
use hashbrown::HashMap; use hashbrown::HashMap;
use orchid_base::interner::Tok; use orchid_base::interner::IStr;
use orchid_base::join::join_maps; use orchid_base::join::join_maps;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::match_mapping; use orchid_base::match_mapping;
@@ -30,11 +30,11 @@ pub enum StateEntry<'a> {
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct MatchState<'a> { pub struct MatchState<'a> {
placeholders: HashMap<Tok<String>, StateEntry<'a>>, placeholders: HashMap<IStr, StateEntry<'a>>,
name_posv: HashMap<Sym, Vec<Pos>>, name_posv: HashMap<Sym, Vec<Pos>>,
} }
impl<'a> MatchState<'a> { impl<'a> MatchState<'a> {
pub fn from_ph(key: Tok<String>, entry: StateEntry<'a>) -> Self { pub fn from_ph(key: IStr, entry: StateEntry<'a>) -> Self {
Self { placeholders: HashMap::from([(key, entry)]), name_posv: HashMap::new() } Self { placeholders: HashMap::from([(key, entry)]), name_posv: HashMap::new() }
} }
pub fn combine(self, s: Self) -> Self { pub fn combine(self, s: Self) -> Self {
@@ -45,7 +45,7 @@ impl<'a> MatchState<'a> {
}), }),
} }
} }
pub fn ph_len(&self, key: &Tok<String>) -> Option<usize> { pub fn ph_len(&self, key: &IStr) -> Option<usize> {
match self.placeholders.get(key)? { match self.placeholders.get(key)? {
StateEntry::Vec(slc) => Some(slc.len()), StateEntry::Vec(slc) => Some(slc.len()),
_ => None, _ => None,
@@ -57,10 +57,8 @@ impl<'a> MatchState<'a> {
pub fn names(&self) -> impl Iterator<Item = (Sym, &[Pos])> { pub fn names(&self) -> impl Iterator<Item = (Sym, &[Pos])> {
self.name_posv.iter().map(|(sym, vec)| (sym.clone(), &vec[..])) self.name_posv.iter().map(|(sym, vec)| (sym.clone(), &vec[..]))
} }
pub fn get(&self, key: &Tok<String>) -> Option<&StateEntry<'a>> { self.placeholders.get(key) } pub fn get(&self, key: &IStr) -> Option<&StateEntry<'a>> { self.placeholders.get(key) }
pub fn remove(&mut self, name: Tok<String>) -> Option<StateEntry<'a>> { pub fn remove(&mut self, name: IStr) -> Option<StateEntry<'a>> { self.placeholders.remove(&name) }
self.placeholders.remove(&name)
}
pub fn mk_owned(self) -> OwnedState { pub fn mk_owned(self) -> OwnedState {
OwnedState { OwnedState {
placeholders: (self.placeholders.into_iter()) placeholders: (self.placeholders.into_iter())
@@ -88,10 +86,10 @@ pub enum OwnedEntry {
Scalar(MacTree), Scalar(MacTree),
} }
pub struct OwnedState { pub struct OwnedState {
placeholders: HashMap<Tok<String>, OwnedEntry>, placeholders: HashMap<IStr, OwnedEntry>,
name_posv: HashMap<Sym, Vec<Pos>>, name_posv: HashMap<Sym, Vec<Pos>>,
} }
impl OwnedState { impl OwnedState {
pub fn get(&self, key: &Tok<String>) -> Option<&OwnedEntry> { self.placeholders.get(key) } pub fn get(&self, key: &IStr) -> Option<&OwnedEntry> { self.placeholders.get(key) }
pub fn positions(&self, name: &Sym) -> &[Pos] { self.name_posv.get(name).map_or(&[], |v| &v[..]) } pub fn positions(&self, name: &Sym) -> &[Pos] { self.name_posv.get(name).map_or(&[], |v| &v[..]) }
} }

@@ -1,4 +1,4 @@
use orchid_base::interner::Tok; use orchid_base::interner::IStr;
use crate::macros::mactree::{Ph, PhKind}; use crate::macros::mactree::{Ph, PhKind};
use crate::macros::{MacTok, MacTree}; use crate::macros::{MacTok, MacTree};
@@ -6,7 +6,7 @@ use crate::macros::{MacTok, MacTree};
/// Returns the name, priority and at_least_one of the expression if it is /// Returns the name, priority and at_least_one of the expression if it is
/// a vectorial placeholder /// a vectorial placeholder
#[must_use] #[must_use]
pub fn vec_attrs(expr: &MacTree) -> Option<(Tok<String>, u8, bool)> { pub fn vec_attrs(expr: &MacTree) -> Option<(IStr, u8, bool)> {
match (*expr.tok).clone() { match (*expr.tok).clone() {
MacTok::Ph(Ph { kind: PhKind::Vector { priority, at_least_one }, name }) => MacTok::Ph(Ph { kind: PhKind::Vector { priority, at_least_one }, name }) =>
Some((name, priority, at_least_one)), Some((name, priority, at_least_one)),

@@ -3,7 +3,6 @@ use orchid_base::error::OrcRes;
use orchid_base::sym; use orchid_base::sym;
use orchid_extension::atom::TAtom; use orchid_extension::atom::TAtom;
use orchid_extension::atom_owned::own; use orchid_extension::atom_owned::own;
use orchid_extension::context::i;
use orchid_extension::conv::ToExpr; use orchid_extension::conv::ToExpr;
use orchid_extension::coroutine_exec::exec; use orchid_extension::coroutine_exec::exec;
use orchid_extension::expr::Expr; use orchid_extension::expr::Expr;
@@ -27,8 +26,8 @@ pub async fn gen_std_macro_lib() -> Vec<GenMember> {
fun(false, "is_none_body", async |val: OrcOpt<Expr>| { fun(false, "is_none_body", async |val: OrcOpt<Expr>| {
if val.0.is_none() { OrcOpt(Some(Tpl(()))) } else { OrcOpt(None) } if val.0.is_none() { OrcOpt(Some(Tpl(()))) } else { OrcOpt(None) }
}), }),
build_macro(None, ["of", "empty"]) build_macro(None, ["some", "none"])
.rule(mactreev!(pattern::match_rule ( std::option::of "...$" sub_pattern 0)), [ .rule(mactreev!(pattern::match_rule ( std::option::some "...$" sub_pattern 0)), [
|[sub]: [_; _]| { |[sub]: [_; _]| {
exec(async move |mut h| { exec(async move |mut h| {
let sub = h let sub = h
@@ -39,7 +38,7 @@ pub async fn gen_std_macro_lib() -> Vec<GenMember> {
Ok(MatcherAtom { Ok(MatcherAtom {
keys: sub.keys().collect().await, keys: sub.keys().collect().await,
matcher: h matcher: h
.register(call(sym_ref(sym!(std::option::is_some_body; i())), [sub .register(call(sym_ref(sym!(std::option::is_some_body)), [sub
.to_gen() .to_gen()
.await])) .await]))
.await, .await,
@@ -47,11 +46,11 @@ pub async fn gen_std_macro_lib() -> Vec<GenMember> {
}) })
}, },
]) ])
.rule(mactreev!(pattern::match_rule(std::option::empty)), [|[]: [_; _]| { .rule(mactreev!(pattern::match_rule(std::option::none)), [|[]: [_; _]| {
exec(async |mut h| { exec(async |mut h| {
Ok(MatcherAtom { Ok(MatcherAtom {
keys: vec![], keys: vec![],
matcher: h.register(sym_ref(sym!(std::option::is_none_body; i()))).await, matcher: h.register(sym_ref(sym!(std::option::is_none_body))).await,
}) })
}) })
}]) }])
@@ -62,16 +61,16 @@ pub async fn gen_std_macro_lib() -> Vec<GenMember> {
.rule(mactreev!(std::tuple::t [ "...$" elements 0 ]), [|[elements]: [_; _]| { .rule(mactreev!(std::tuple::t [ "...$" elements 0 ]), [|[elements]: [_; _]| {
exec(async move |mut h| { exec(async move |mut h| {
let tup = h let tup = h
.exec::<HomoTpl<TAtom<MacTree>>>(call(sym_ref(sym!(macros::resolve; i())), [ .exec::<HomoTpl<TAtom<MacTree>>>(call(sym_ref(sym!(macros::resolve)), [
mactree!((macros::common::comma_list "push" elements ;)).to_gen().await, mactree!((macros::common::comma_list "push" elements ;)).to_gen().await,
])) ]))
.await?; .await?;
let val = stream::iter(&tup.0[..]) let val = stream::iter(&tup.0[..])
.fold(sym_ref(sym!(std::tuple::empty; i())), async |head, new| { .fold(sym_ref(sym!(std::tuple::empty)), async |head, new| {
call(sym_ref(sym!(std::tuple::cat; i())), [ call(sym_ref(sym!(std::tuple::cat)), [
head, head,
call(sym_ref(sym!(std::tuple::one; i())), [call( call(sym_ref(sym!(std::tuple::one)), [call(
sym_ref(sym!(macros::resolve; i())), sym_ref(sym!(macros::resolve)),
[new.clone().to_gen().await], [new.clone().to_gen().await],
)]), )]),
]) ])
@@ -102,7 +101,7 @@ pub async fn gen_std_macro_lib() -> Vec<GenMember> {
fn parse_tpl(elements: MacTree, tail_matcher: Option<MacTree>) -> impl Future<Output = GExpr> { fn parse_tpl(elements: MacTree, tail_matcher: Option<MacTree>) -> impl Future<Output = GExpr> {
exec(async move |mut h| -> OrcRes<MatcherAtom> { exec(async move |mut h| -> OrcRes<MatcherAtom> {
let tup = h let tup = h
.exec::<HomoTpl<TAtom<MacTree>>>(call(sym_ref(sym!(macros::resolve; i())), [ .exec::<HomoTpl<TAtom<MacTree>>>(call(sym_ref(sym!(macros::resolve)), [
mactree!((macros::common::comma_list "push" elements ;)).to_gen().await, mactree!((macros::common::comma_list "push" elements ;)).to_gen().await,
])) ]))
.await?; .await?;
@@ -110,7 +109,7 @@ fn parse_tpl(elements: MacTree, tail_matcher: Option<MacTree>) -> impl Future<Ou
for mac_a in &tup.0[..] { for mac_a in &tup.0[..] {
let mac = own(mac_a).await; let mac = own(mac_a).await;
let sub = h let sub = h
.exec::<TAtom<MatcherAtom>>(call(sym_ref(sym!(macros::resolve; i())), [ .exec::<TAtom<MatcherAtom>>(call(sym_ref(sym!(macros::resolve)), [
mactree!(pattern::match_rule ("push" mac ;)).to_gen().await, mactree!(pattern::match_rule ("push" mac ;)).to_gen().await,
])) ]))
.await?; .await?;
@@ -118,7 +117,7 @@ fn parse_tpl(elements: MacTree, tail_matcher: Option<MacTree>) -> impl Future<Ou
} }
let tail_matcher = match tail_matcher { let tail_matcher = match tail_matcher {
Some(mac) => Some( Some(mac) => Some(
h.exec::<TAtom<MatcherAtom>>(call(sym_ref(sym!(macros::resolve; i())), [ h.exec::<TAtom<MatcherAtom>>(call(sym_ref(sym!(macros::resolve)), [
mactree!(pattern::match_rule "push" mac ;).to_gen().await, mactree!(pattern::match_rule "push" mac ;).to_gen().await,
])) ]))
.await?, .await?,
@@ -131,7 +130,7 @@ fn parse_tpl(elements: MacTree, tail_matcher: Option<MacTree>) -> impl Future<Ou
.chain(stream::iter(&tail_matcher).flat_map(|mat| mat.keys())) .chain(stream::iter(&tail_matcher).flat_map(|mat| mat.keys()))
.collect() .collect()
.await, .await,
matcher: call(sym_ref(sym!(std::tuple::matcher_body; i())), [ matcher: call(sym_ref(sym!(std::tuple::matcher_body)), [
HomoTpl(subs).to_gen().await, HomoTpl(subs).to_gen().await,
OrcOpt(tail_matcher).to_gen().await, OrcOpt(tail_matcher).to_gen().await,
]) ])
@@ -162,8 +161,8 @@ fn tuple_matcher_body(
None => (), None => (),
Some(tail_mat) => { Some(tail_mat) => {
let tail_tpl = stream::iter(&value.0[children.0.len()..]) let tail_tpl = stream::iter(&value.0[children.0.len()..])
.fold(sym_ref(sym!(std::tuple::empty; i())), async |prefix, new| { .fold(sym_ref(sym!(std::tuple::empty)), async |prefix, new| {
call(sym_ref(sym!(std::tuple::cat; i())), [prefix, new.clone().to_gen().await]) call(sym_ref(sym!(std::tuple::cat)), [prefix, new.clone().to_gen().await])
}) })
.await; .await;
match tail_mat.run_matcher(&mut h, tail_tpl).await? { match tail_mat.run_matcher(&mut h, tail_tpl).await? {

Some files were not shown because too many files have changed in this diff.