Compare commits

9 Commits

Author SHA1 Message Date
7971a2b4eb Correctly halts 2025-09-16 22:54:22 +02:00
ee45dbd28e Hide todos in the legacy folder 2025-09-09 16:37:37 +02:00
ce08021e79 exec working up to halt
clean shutdown doesn't work for some reason
2025-09-09 16:30:49 +02:00
e339350505 Phased out async-stream in pursuit of compile performance 2025-09-04 15:01:53 +02:00
088cb6a247 updated all deps
migrated away from paste and async-std
2025-09-03 18:42:54 +02:00
7031f3a7d8 Macro system done in theory
too afraid to begin debugging, resting for a moment
2025-09-03 16:05:26 +02:00
051b5e666f First steps for the macro system 2025-08-01 18:32:55 +02:00
f87185ef88 RA leaks memory in code-server, switching back to desktop 2025-07-31 14:31:26 +00:00
769c6cfc9f Various progress, doesn't compile
Added prelude, made lambdas a single-token prefix like NS, made progress on implementations, removed const line type
2025-07-31 00:30:41 +02:00
104 changed files with 3583 additions and 1808 deletions

View File

@@ -6,6 +6,9 @@ orcxdb = "xtask orcxdb"
[env] [env]
CARGO_WORKSPACE_DIR = { value = "", relative = true } CARGO_WORKSPACE_DIR = { value = "", relative = true }
ORCHID_EXTENSIONS = "target/debug/orchid-std" ORCHID_EXTENSIONS = "target/debug/orchid-std"
ORCHID_DEFAULT_SYSTEMS = "orchid::std" ORCHID_DEFAULT_SYSTEMS = "orchid::std;orchid::macros"
ORCHID_LOG_BUFFERS = "true" ORCHID_LOG_BUFFERS = "true"
RUSTBACKTRACE = "1" RUST_BACKTRACE = "1"
[build]
# rustflags = ["-Znext-solver"]

641
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -10,5 +10,6 @@ members = [
"orchid-api", "orchid-api",
"orchid-api-derive", "orchid-api-derive",
"orchid-api-traits", "orchid-api-traits",
"stdio-perftest", "xtask", "orchid-macros", "stdio-perftest",
"xtask", "async-fn-stream",
] ]

View File

@@ -1,6 +1,8 @@
Since the macro AST is built as a custom tokenizer inside the system, it needs access to the import set. On the other hand, import sets aren't available until after parsing. Need a way to place this order in a lexer without restricting the expression value of the lexer. Decide whether we need patterns at runtime. Maybe macros aren't obligated to return MacTree so destructuring can be done in a safer and easier way?
The daft option of accepting import resolution queries at runtime is available but consider better options. Double-check type and templating logic in the note, it's a bit fishy.
Consider whether all macros need to be loaded or the const references could be used to pre-filter for a given let line.
## alternate extension mechanism ## alternate extension mechanism

View File

@@ -0,0 +1,10 @@
# Minimal in-repo replacement for the async-stream crate, added to cut
# compile time (proc-macro-free stream construction).
[package]
name = "async-fn-stream"
version = "0.1.0"
edition = "2024"
[dependencies]
# Only the Stream/Future traits and combinators are needed; default
# features are disabled to keep the dependency tree small.
futures = { version = "0.3.31", features = ["std"], default-features = false }
[dev-dependencies]
test_executors = "0.3.5"

207
async-fn-stream/src/lib.rs Normal file
View File

@@ -0,0 +1,207 @@
use std::cell::Cell;
use std::future::poll_fn;
use std::marker::PhantomData;
use std::pin::Pin;
use std::ptr;
use std::task::{Context, Poll};
use futures::future::LocalBoxFuture;
use futures::{FutureExt, Stream};
/// Shared slot through which the generator parks a value for the stream.
type YieldSlot<'a, T> = &'a Cell<Option<T>>;

/// Handle that allows you to emit values on a stream. If you drop
/// this, the stream will end and you will not be polled again.
pub struct StreamCtx<'a, T>(&'a Cell<Option<T>>, PhantomData<&'a ()>);

impl<T> StreamCtx<'_, T> {
  /// Park `value` in the shared slot and hand control back to the
  /// enclosing stream. The returned future resolves Pending exactly once
  /// so the stream's `poll_next` can pick the value up, then completes on
  /// the following poll. Panics if the previous value was never consumed.
  pub fn emit(&mut self, value: T) -> impl Future<Output = ()> {
    assert!(self.0.replace(Some(value)).is_none(), "Leftover value in stream");
    let mut yielded = false;
    poll_fn(move |_| {
      if yielded {
        Poll::Ready(())
      } else {
        // No waker registration needed: the owning stream re-polls the
        // generator on its own next poll.
        yielded = true;
        Poll::Pending
      }
    })
  }
}
/// Driver state of a stream: either the not-yet-invoked generator closure,
/// or the future it produced on the first poll.
enum FnOrFut<'a, T, O> {
  // Wrapped in `Option` so the closure can be moved out (`take`) from
  // behind a mutable reference when the stream is first polled.
  Fn(Option<Box<dyn FnOnce(YieldSlot<'a, T>) -> LocalBoxFuture<'a, O> + 'a>>),
  // The running generator future.
  Fut(LocalBoxFuture<'a, O>),
}
/// Stream backed by an async generator function; built by [stream].
/// NOTE(review): once started this is self-referential — the future in
/// `driver` holds a pointer into `output` — so it must stay pinned.
/// `driver` is declared before `output` so the borrowing future drops first.
struct AsyncFnStream<'a, T> {
  driver: FnOrFut<'a, T, ()>,
  // Slot the generator writes each yielded value into via StreamCtx::emit.
  output: Cell<Option<T>>,
}
impl<'a, T> Stream for AsyncFnStream<'a, T> {
  type Item = T;
  /// Poll the generator; if it parked a value in `output`, yield it,
  /// otherwise propagate Pending or end-of-stream.
  fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
    unsafe {
      // SAFETY: we never move out of `self_mut`; the pointer into `output`
      // created below stays valid precisely because `self` is pinned.
      let self_mut = self.get_unchecked_mut();
      let fut = match &mut self_mut.driver {
        FnOrFut::Fut(fut) => fut,
        // First poll: start the generator, then recurse once to poll it.
        FnOrFut::Fn(f) => {
          // safety: the cell is held inline in self, which is pinned.
          // (`as_ref` extends the borrow of `output`, making the struct
          // self-referential from here on.)
          let cell = ptr::from_ref(&self_mut.output).as_ref().unwrap();
          let fut = f.take().unwrap()(cell);
          self_mut.driver = FnOrFut::Fut(fut);
          return Pin::new_unchecked(self_mut).poll_next(cx);
        },
      };
      match fut.as_mut().poll(cx) {
        // Generator finished; end of stream.
        // NOTE(review): the completed future stays in `driver`, so a poll
        // after None re-polls a finished future — confirm callers stop
        // polling once None is returned (Stream permits but discourages it).
        Poll::Ready(()) => Poll::Ready(None),
        // Pending either means `emit` parked a value for us, or the
        // generator is genuinely blocked on some inner future.
        Poll::Pending => match self_mut.output.replace(None) {
          None => Poll::Pending,
          Some(t) => Poll::Ready(Some(t)),
        },
      }
    }
  }
}
/// Fallible counterpart of [AsyncFnStream]; built by [try_stream].
/// The driver future resolves to `Err` to abort the stream, or returns the
/// [StreamCtx] it was given to signal a clean end of stream.
struct AsyncFnTryStream<'a, T, E> {
  driver: FnOrFut<'a, T, Result<StreamCtx<'a, T>, E>>,
  // Slot for values parked by StreamCtx::emit.
  output: Cell<Option<T>>,
}
impl<'a, T, E> Stream for AsyncFnTryStream<'a, T, E> {
  type Item = Result<T, E>;
  /// Same driving logic as [AsyncFnStream::poll_next], but the generator's
  /// own `Err` result is surfaced as a final `Some(Err(_))` item.
  fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
    unsafe {
      // SAFETY: we never move out of `self_mut`; the pointer into `output`
      // created below stays valid precisely because `self` is pinned.
      let self_mut = self.get_unchecked_mut();
      let fut = match &mut self_mut.driver {
        FnOrFut::Fut(fut) => fut,
        // First poll: start the generator, then recurse once to poll it.
        FnOrFut::Fn(f) => {
          // safety: the cell is held inline in self, which is pinned.
          let cell = ptr::from_ref(&self_mut.output).as_ref().unwrap();
          let fut = f.take().unwrap()(cell);
          self_mut.driver = FnOrFut::Fut(fut);
          return Pin::new_unchecked(self_mut).poll_next(cx);
        },
      };
      match fut.as_mut().poll(cx) {
        // Ok(ctx): the generator handed its context back — clean end.
        // The ctx itself is discarded; presumably returning it just proves
        // the generator kept it alive to the end — TODO confirm intent.
        Poll::Ready(Ok(_)) => Poll::Ready(None),
        // Generator failed: emit the error as the final item.
        Poll::Ready(Err(ex)) => Poll::Ready(Some(Err(ex))),
        Poll::Pending => match self_mut.output.replace(None) {
          None => Poll::Pending,
          Some(t) => Poll::Ready(Some(Ok(t))),
        },
      }
    }
  }
}
/// Build a [Stream] that yields every value the async generator `f`
/// passes to [StreamCtx::emit], ending when `f` returns.
pub fn stream<'a, T: 'a>(
  f: impl for<'b> AsyncFnOnce(StreamCtx<'b, T>) + 'a,
) -> impl Stream<Item = T> + 'a {
  AsyncFnStream {
    // The closure is only invoked on the first poll, once the stream is
    // pinned and the yield slot's address is stable.
    driver: FnOrFut::Fn(Some(Box::new(move |slot| {
      async move { f(StreamCtx(slot, PhantomData)).await }.boxed_local()
    }))),
    output: Cell::new(None),
  }
}
/// Build a [Stream] of [Result]s from a fallible async generator.
/// `f` must either return `Err` (emitted as the final item) or hand back
/// the [StreamCtx] it received to signal a clean end of stream.
pub fn try_stream<'a, T: 'a, E: 'a>(
  f: impl for<'b> AsyncFnOnce(StreamCtx<'b, T>) -> Result<StreamCtx<'b, T>, E> + 'a,
) -> impl Stream<Item = Result<T, E>> + 'a {
  AsyncFnTryStream {
    // Deferred start, same as in `stream`: the generator only runs once
    // the stream is pinned and first polled.
    driver: FnOrFut::Fn(Some(Box::new(move |slot| {
      async move { f(StreamCtx(slot, PhantomData)).await }.boxed_local()
    }))),
    output: Cell::new(None),
  }
}
#[cfg(test)]
mod test {
  use std::task::Poll;
  use std::{future, pin};
  use futures::channel::mpsc::channel;
  use futures::{Stream, StreamExt, TryStreamExt};
  use test_executors::spin_on;
  use crate::{stream, try_stream};
  /// A fully synchronous generator yields all values in order.
  #[test]
  fn sync() {
    spin_on(async {
      let v = stream(async |mut cx| {
        for i in 0..5 {
          cx.emit(i).await
        }
      })
      .collect::<Vec<_>>()
      .await;
      assert_eq!(v, [0, 1, 2, 3, 4])
    })
  }
  #[test]
  /// The exact behaviour of the poll function under blocked use
  fn with_delay() {
    spin_on(async {
      // Zero-capacity channel: the generator parks on `recv.next()` until
      // the driver loop below sends a unit value.
      let (mut send, mut recv) = channel(0);
      let mut s = pin::pin!(stream(async |mut cx| {
        for i in 0..2 {
          cx.emit(i).await
        }
        recv.next().await;
        for i in 2..5 {
          cx.emit(i).await
        }
      }));
      let mut log = String::new();
      // Drive the stream by hand so every poll outcome is recorded; the
      // shadowing `let log` binds the finished transcript.
      let log = future::poll_fn(|cx| {
        match s.as_mut().poll_next(cx) {
          Poll::Ready(Some(r)) => log += &format!("Found {r}\n"),
          Poll::Ready(None) => return Poll::Ready(format!("{log}Ended")),
          // Pending means the generator is genuinely blocked on the
          // channel (emit never surfaces as Pending here): unblock it, or
          // report the send error and bail.
          Poll::Pending => match send.try_send(()) {
            Ok(()) => log += "Unblocked\n",
            Err(err) => return Poll::Ready(format!("{log}Unblock err: {err}")),
          },
        }
        Poll::Pending
      })
      .await;
      // Exactly one Pending (hence one "Unblocked") is expected, between
      // the second and third yielded values.
      const EXPECTED: &str = "\
        Found 0\n\
        Found 1\n\
        Unblocked\n\
        Found 2\n\
        Found 3\n\
        Found 4\n\
        Ended";
      assert_eq!(log, EXPECTED)
    })
  }
  /// A try_stream that ends with Ok behaves exactly like a plain stream.
  #[test]
  fn sync_try_all_ok() {
    spin_on(async {
      let v = try_stream::<_, ()>(async |mut cx| {
        for i in 0..5 {
          cx.emit(i).await
        }
        Ok(cx)
      })
      .try_collect::<Vec<_>>()
      .await;
      assert_eq!(v, Ok(vec![0, 1, 2, 3, 4]))
    })
  }
  /// An Err return surfaces through try_collect after the yielded values.
  #[test]
  fn sync_try_err() {
    spin_on(async {
      let v = try_stream::<_, ()>(async |mut cx| {
        for i in 0..5 {
          cx.emit(i).await
        }
        Err(())
      })
      .try_collect::<Vec<_>>()
      .await;
      assert_eq!(v, Err(()))
    })
  }
}

View File

@@ -1,2 +1,2 @@
const user = "dave" let user = "dave"
const main = println "Hello $user!" exit_status::success let main = println "Hello $user!" exit_status::success

View File

@@ -9,9 +9,8 @@ proc-macro = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
quote = "1.0.38" quote = "1.0.40"
syn = { version = "2.0.95" } syn = { version = "2.0.106" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
proc-macro2 = "1.0.92" proc-macro2 = "1.0.101"
darling = "0.20.10"
itertools = "0.14.0" itertools = "0.14.0"

View File

@@ -12,7 +12,7 @@ pub fn derive(input: TokenStream) -> TokenStream {
let decode = decode_body(&input.data); let decode = decode_body(&input.data);
let expanded = quote! { let expanded = quote! {
impl #impl_generics orchid_api_traits::Decode for #name #ty_generics #where_clause { impl #impl_generics orchid_api_traits::Decode for #name #ty_generics #where_clause {
async fn decode<R: orchid_api_traits::async_std::io::Read + ?Sized>( async fn decode<R: orchid_api_traits::AsyncRead + ?Sized>(
mut read: std::pin::Pin<&mut R> mut read: std::pin::Pin<&mut R>
) -> Self { ) -> Self {
#decode #decode

View File

@@ -14,7 +14,7 @@ pub fn derive(input: TokenStream) -> TokenStream {
let encode = encode_body(&input.data); let encode = encode_body(&input.data);
let expanded = quote! { let expanded = quote! {
impl #e_impl_generics orchid_api_traits::Encode for #name #e_ty_generics #e_where_clause { impl #e_impl_generics orchid_api_traits::Encode for #name #e_ty_generics #e_where_clause {
async fn encode<W: orchid_api_traits::async_std::io::Write + ?Sized>( async fn encode<W: orchid_api_traits::AsyncWrite + ?Sized>(
&self, &self,
mut write: std::pin::Pin<&mut W> mut write: std::pin::Pin<&mut W>
) { ) {

View File

@@ -6,9 +6,8 @@ edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
async-std = "1.13.0" async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
async-stream = "0.3.6" futures = { version = "0.3.31", features = ["std"], default-features = false }
futures = "0.3.31"
itertools = "0.14.0" itertools = "0.14.0"
never = "0.1.0" never = "0.1.0"
ordered-float = "5.0.0" ordered-float = "5.0.0"

View File

@@ -7,9 +7,8 @@ use std::pin::Pin;
use std::rc::Rc; use std::rc::Rc;
use std::sync::Arc; use std::sync::Arc;
use async_std::io::{Read, ReadExt, Write, WriteExt}; use async_fn_stream::stream;
use async_stream::stream; use futures::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt, StreamExt};
use futures::StreamExt;
use never::Never; use never::Never;
use ordered_float::NotNan; use ordered_float::NotNan;
@@ -18,16 +17,16 @@ use crate::encode_enum;
pub trait Decode: 'static { pub trait Decode: 'static {
/// Decode an instance from the beginning of the buffer. Return the decoded /// Decode an instance from the beginning of the buffer. Return the decoded
/// data and the remaining buffer. /// data and the remaining buffer.
fn decode<R: Read + ?Sized>(read: Pin<&mut R>) -> impl Future<Output = Self> + '_; fn decode<R: AsyncRead + ?Sized>(read: Pin<&mut R>) -> impl Future<Output = Self> + '_;
} }
pub trait Encode { pub trait Encode {
/// Append an instance of the struct to the buffer /// Append an instance of the struct to the buffer
fn encode<W: Write + ?Sized>(&self, write: Pin<&mut W>) -> impl Future<Output = ()>; fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) -> impl Future<Output = ()>;
} }
pub trait Coding: Encode + Decode + Clone { pub trait Coding: Encode + Decode + Clone {
fn get_decoder<T: 'static, F: Future<Output = T> + 'static>( fn get_decoder<T: 'static, F: Future<Output = T> + 'static>(
map: impl Fn(Self) -> F + Clone + 'static, map: impl Fn(Self) -> F + Clone + 'static,
) -> impl AsyncFn(Pin<&mut dyn Read>) -> T { ) -> impl AsyncFn(Pin<&mut dyn AsyncRead>) -> T {
async move |r| map(Self::decode(r).await).await async move |r| map(Self::decode(r).await).await
} }
} }
@@ -36,14 +35,14 @@ impl<T: Encode + Decode + Clone> Coding for T {}
macro_rules! num_impl { macro_rules! num_impl {
($number:ty) => { ($number:ty) => {
impl Decode for $number { impl Decode for $number {
async fn decode<R: Read + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self {
let mut bytes = [0u8; (<$number>::BITS / 8) as usize]; let mut bytes = [0u8; (<$number>::BITS / 8) as usize];
read.read_exact(&mut bytes).await.unwrap(); read.read_exact(&mut bytes).await.unwrap();
<$number>::from_be_bytes(bytes) <$number>::from_be_bytes(bytes)
} }
} }
impl Encode for $number { impl Encode for $number {
async fn encode<W: Write + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) {
write.write_all(&self.to_be_bytes()).await.expect("Could not write number") write.write_all(&self.to_be_bytes()).await.expect("Could not write number")
} }
} }
@@ -63,12 +62,12 @@ num_impl!(i8);
macro_rules! nonzero_impl { macro_rules! nonzero_impl {
($name:ty) => { ($name:ty) => {
impl Decode for NonZero<$name> { impl Decode for NonZero<$name> {
async fn decode<R: Read + ?Sized>(read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(read: Pin<&mut R>) -> Self {
Self::new(<$name as Decode>::decode(read).await).unwrap() Self::new(<$name as Decode>::decode(read).await).unwrap()
} }
} }
impl Encode for NonZero<$name> { impl Encode for NonZero<$name> {
async fn encode<W: Write + ?Sized>(&self, write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) {
self.get().encode(write).await self.get().encode(write).await
} }
} }
@@ -87,19 +86,21 @@ nonzero_impl!(i64);
nonzero_impl!(i128); nonzero_impl!(i128);
impl<T: Encode + ?Sized> Encode for &T { impl<T: Encode + ?Sized> Encode for &T {
async fn encode<W: Write + ?Sized>(&self, write: Pin<&mut W>) { (**self).encode(write).await } async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) {
(**self).encode(write).await
}
} }
macro_rules! float_impl { macro_rules! float_impl {
($t:ty, $size:expr) => { ($t:ty, $size:expr) => {
impl Decode for NotNan<$t> { impl Decode for NotNan<$t> {
async fn decode<R: Read + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self {
let mut bytes = [0u8; $size]; let mut bytes = [0u8; $size];
read.read_exact(&mut bytes).await.unwrap(); read.read_exact(&mut bytes).await.unwrap();
NotNan::new(<$t>::from_be_bytes(bytes)).expect("Float was NaN") NotNan::new(<$t>::from_be_bytes(bytes)).expect("Float was NaN")
} }
} }
impl Encode for NotNan<$t> { impl Encode for NotNan<$t> {
async fn encode<W: Write + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) {
write.write_all(&self.as_ref().to_be_bytes()).await.expect("Could not write number") write.write_all(&self.as_ref().to_be_bytes()).await.expect("Could not write number")
} }
} }
@@ -110,7 +111,7 @@ float_impl!(f64, 8);
float_impl!(f32, 4); float_impl!(f32, 4);
impl Decode for String { impl Decode for String {
async fn decode<R: Read + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self {
let len = u64::decode(read.as_mut()).await.try_into().unwrap(); let len = u64::decode(read.as_mut()).await.try_into().unwrap();
let mut data = vec![0u8; len]; let mut data = vec![0u8; len];
read.read_exact(&mut data).await.unwrap(); read.read_exact(&mut data).await.unwrap();
@@ -118,30 +119,36 @@ impl Decode for String {
} }
} }
impl Encode for String { impl Encode for String {
async fn encode<W: Write + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) {
u64::try_from(self.len()).unwrap().encode(write.as_mut()).await; u64::try_from(self.len()).unwrap().encode(write.as_mut()).await;
write.write_all(self.as_bytes()).await.unwrap() write.write_all(self.as_bytes()).await.unwrap()
} }
} }
impl Encode for str { impl Encode for str {
async fn encode<W: Write + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) {
u64::try_from(self.len()).unwrap().encode(write.as_mut()).await; u64::try_from(self.len()).unwrap().encode(write.as_mut()).await;
write.write_all(self.as_bytes()).await.unwrap() write.write_all(self.as_bytes()).await.unwrap()
} }
} }
impl<T: Decode> Decode for Vec<T> { impl<T: Decode> Decode for Vec<T> {
async fn decode<R: Read + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self {
let len = u64::decode(read.as_mut()).await.try_into().unwrap(); let len = u64::decode(read.as_mut()).await.try_into().unwrap();
stream! { loop { yield T::decode(read.as_mut()).await } }.take(len).collect().await stream(async |mut cx| {
for _ in 0..len {
cx.emit(T::decode(read.as_mut()).await).await
}
})
.collect()
.await
} }
} }
impl<T: Encode> Encode for Vec<T> { impl<T: Encode> Encode for Vec<T> {
async fn encode<W: Write + ?Sized>(&self, write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) {
self.as_slice().encode(write).await self.as_slice().encode(write).await
} }
} }
impl<T: Encode> Encode for [T] { impl<T: Encode> Encode for [T] {
async fn encode<W: Write + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) {
u64::try_from(self.len()).unwrap().encode(write.as_mut()).await; u64::try_from(self.len()).unwrap().encode(write.as_mut()).await;
for t in self.iter() { for t in self.iter() {
t.encode(write.as_mut()).await t.encode(write.as_mut()).await
@@ -149,7 +156,7 @@ impl<T: Encode> Encode for [T] {
} }
} }
impl<T: Decode> Decode for Option<T> { impl<T: Decode> Decode for Option<T> {
async fn decode<R: Read + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self {
match u8::decode(read.as_mut()).await { match u8::decode(read.as_mut()).await {
0 => None, 0 => None,
1 => Some(T::decode(read).await), 1 => Some(T::decode(read).await),
@@ -158,14 +165,14 @@ impl<T: Decode> Decode for Option<T> {
} }
} }
impl<T: Encode> Encode for Option<T> { impl<T: Encode> Encode for Option<T> {
async fn encode<W: Write + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) {
let t = if let Some(t) = self { t } else { return 0u8.encode(write.as_mut()).await }; let t = if let Some(t) = self { t } else { return 0u8.encode(write.as_mut()).await };
1u8.encode(write.as_mut()).await; 1u8.encode(write.as_mut()).await;
t.encode(write).await; t.encode(write).await;
} }
} }
impl<T: Decode, E: Decode> Decode for Result<T, E> { impl<T: Decode, E: Decode> Decode for Result<T, E> {
async fn decode<R: Read + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self {
match u8::decode(read.as_mut()).await { match u8::decode(read.as_mut()).await {
0 => Self::Ok(T::decode(read).await), 0 => Self::Ok(T::decode(read).await),
1 => Self::Err(E::decode(read).await), 1 => Self::Err(E::decode(read).await),
@@ -175,7 +182,7 @@ impl<T: Decode, E: Decode> Decode for Result<T, E> {
} }
impl<T: Encode, E: Encode> Encode for Result<T, E> { impl<T: Encode, E: Encode> Encode for Result<T, E> {
async fn encode<W: Write + ?Sized>(&self, write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) {
match self { match self {
Ok(t) => encode_enum(write, 0, |w| t.encode(w)).await, Ok(t) => encode_enum(write, 0, |w| t.encode(w)).await,
Err(e) => encode_enum(write, 1, |w| e.encode(w)).await, Err(e) => encode_enum(write, 1, |w| e.encode(w)).await,
@@ -183,13 +190,19 @@ impl<T: Encode, E: Encode> Encode for Result<T, E> {
} }
} }
impl<K: Decode + Eq + Hash, V: Decode> Decode for HashMap<K, V> { impl<K: Decode + Eq + Hash, V: Decode> Decode for HashMap<K, V> {
async fn decode<R: Read + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self {
let len = u64::decode(read.as_mut()).await.try_into().unwrap(); let len = u64::decode(read.as_mut()).await.try_into().unwrap();
stream! { loop { yield <(K, V)>::decode(read.as_mut()).await } }.take(len).collect().await stream(async |mut cx| {
for _ in 0..len {
cx.emit(<(K, V)>::decode(read.as_mut()).await).await
}
})
.collect()
.await
} }
} }
impl<K: Encode + Eq + Hash, V: Encode> Encode for HashMap<K, V> { impl<K: Encode + Eq + Hash, V: Encode> Encode for HashMap<K, V> {
async fn encode<W: Write + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) {
u64::try_from(self.len()).unwrap().encode(write.as_mut()).await; u64::try_from(self.len()).unwrap().encode(write.as_mut()).await;
for pair in self.iter() { for pair in self.iter() {
pair.encode(write.as_mut()).await pair.encode(write.as_mut()).await
@@ -199,12 +212,12 @@ impl<K: Encode + Eq + Hash, V: Encode> Encode for HashMap<K, V> {
macro_rules! tuple { macro_rules! tuple {
(($($t:ident)*) ($($T:ident)*)) => { (($($t:ident)*) ($($T:ident)*)) => {
impl<$($T: Decode),*> Decode for ($($T,)*) { impl<$($T: Decode),*> Decode for ($($T,)*) {
async fn decode<R: Read + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self {
($($T::decode(read.as_mut()).await,)*) ($($T::decode(read.as_mut()).await,)*)
} }
} }
impl<$($T: Encode),*> Encode for ($($T,)*) { impl<$($T: Encode),*> Encode for ($($T,)*) {
async fn encode<W: Write + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) {
let ($($t,)*) = self; let ($($t,)*) = self;
$( $t.encode(write.as_mut()).await; )* $( $t.encode(write.as_mut()).await; )*
} }
@@ -230,41 +243,45 @@ tuple!((t u v x y z a b c d e f g h i) (T U V X Y Z A B C D E F G H I));
tuple!((t u v x y z a b c d e f g h i j) (T U V X Y Z A B C D E F G H I J)); // 16 tuple!((t u v x y z a b c d e f g h i j) (T U V X Y Z A B C D E F G H I J)); // 16
impl Decode for () { impl Decode for () {
async fn decode<R: Read + ?Sized>(_: Pin<&mut R>) -> Self {} async fn decode<R: AsyncRead + ?Sized>(_: Pin<&mut R>) -> Self {}
} }
impl Encode for () { impl Encode for () {
async fn encode<W: Write + ?Sized>(&self, _: Pin<&mut W>) {} async fn encode<W: AsyncWrite + ?Sized>(&self, _: Pin<&mut W>) {}
} }
impl Decode for Never { impl Decode for Never {
async fn decode<R: Read + ?Sized>(_: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(_: Pin<&mut R>) -> Self {
unreachable!("A value of Never cannot exist so it can't have been serialized"); unreachable!("A value of Never cannot exist so it can't have been serialized");
} }
} }
impl Encode for Never { impl Encode for Never {
async fn encode<W: Write + ?Sized>(&self, _: Pin<&mut W>) { match *self {} } async fn encode<W: AsyncWrite + ?Sized>(&self, _: Pin<&mut W>) { match *self {} }
} }
impl Decode for bool { impl Decode for bool {
async fn decode<R: Read + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self {
let mut buf = [0]; let mut buf = [0];
read.read_exact(&mut buf).await.unwrap(); read.read_exact(&mut buf).await.unwrap();
buf[0] != 0 buf[0] != 0
} }
} }
impl Encode for bool { impl Encode for bool {
async fn encode<W: Write + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) {
write.write_all(&[if *self { 0xffu8 } else { 0u8 }]).await.unwrap() write.write_all(&[if *self { 0xffu8 } else { 0u8 }]).await.unwrap()
} }
} }
impl<T: Decode, const N: usize> Decode for [T; N] { impl<T: Decode, const N: usize> Decode for [T; N] {
async fn decode<R: Read + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self {
// TODO: figure out how to do this in safe rust on the stack let v = stream(async |mut cx| {
let v = for _ in 0..N {
stream! { loop { yield T::decode(read.as_mut()).await } }.take(N).collect::<Vec<_>>().await; cx.emit(T::decode(read.as_mut()).await).await
}
})
.collect::<Vec<_>>()
.await;
v.try_into().unwrap_or_else(|_| unreachable!("The length of this stream is statically known")) v.try_into().unwrap_or_else(|_| unreachable!("The length of this stream is statically known"))
} }
} }
impl<T: Encode, const N: usize> Encode for [T; N] { impl<T: Encode, const N: usize> Encode for [T; N] {
async fn encode<W: Write + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) {
for t in self.iter() { for t in self.iter() {
t.encode(write.as_mut()).await t.encode(write.as_mut()).await
} }
@@ -274,12 +291,12 @@ impl<T: Encode, const N: usize> Encode for [T; N] {
macro_rules! two_end_range { macro_rules! two_end_range {
($this:ident, $name:tt, $op:tt, $start:expr, $end:expr) => { ($this:ident, $name:tt, $op:tt, $start:expr, $end:expr) => {
impl<T: Decode> Decode for $name<T> { impl<T: Decode> Decode for $name<T> {
async fn decode<R: Read + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self {
T::decode(read.as_mut()).await $op T::decode(read).await T::decode(read.as_mut()).await $op T::decode(read).await
} }
} }
impl<T: Encode> Encode for $name<T> { impl<T: Encode> Encode for $name<T> {
async fn encode<W: Write + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) {
let $this = self; let $this = self;
($start).encode(write.as_mut()).await; ($start).encode(write.as_mut()).await;
($end).encode(write).await; ($end).encode(write).await;
@@ -294,12 +311,14 @@ two_end_range!(x, RangeInclusive, ..=, x.start(), x.end());
macro_rules! smart_ptr { macro_rules! smart_ptr {
($name:tt) => { ($name:tt) => {
impl<T: Decode> Decode for $name<T> { impl<T: Decode> Decode for $name<T> {
async fn decode<R: Read + ?Sized>(read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(read: Pin<&mut R>) -> Self {
$name::new(T::decode(read).await) $name::new(T::decode(read).await)
} }
} }
impl<T: Encode> Encode for $name<T> { impl<T: Encode> Encode for $name<T> {
async fn encode<W: Write + ?Sized>(&self, write: Pin<&mut W>) { (**self).encode(write).await } async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) {
(**self).encode(write).await
}
} }
}; };
} }
@@ -309,12 +328,12 @@ smart_ptr!(Rc);
smart_ptr!(Box); smart_ptr!(Box);
impl Decode for char { impl Decode for char {
async fn decode<R: Read + ?Sized>(read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(read: Pin<&mut R>) -> Self {
char::from_u32(u32::decode(read).await).unwrap() char::from_u32(u32::decode(read).await).unwrap()
} }
} }
impl Encode for char { impl Encode for char {
async fn encode<W: Write + ?Sized>(&self, write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, write: Pin<&mut W>) {
(*self as u32).encode(write).await (*self as u32).encode(write).await
} }
} }

View File

@@ -1,12 +1,12 @@
use std::future::Future; use std::future::Future;
use std::pin::Pin; use std::pin::Pin;
use async_std::io::{Read, ReadExt, Write, WriteExt}; use futures::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};
use itertools::{Chunk, Itertools}; use itertools::{Chunk, Itertools};
use crate::Encode; use crate::Encode;
pub async fn encode_enum<'a, W: Write + ?Sized, F: Future<Output = ()>>( pub async fn encode_enum<'a, W: AsyncWrite + ?Sized, F: Future<Output = ()>>(
mut write: Pin<&'a mut W>, mut write: Pin<&'a mut W>,
id: u8, id: u8,
f: impl FnOnce(Pin<&'a mut W>) -> F, f: impl FnOnce(Pin<&'a mut W>) -> F,
@@ -15,7 +15,7 @@ pub async fn encode_enum<'a, W: Write + ?Sized, F: Future<Output = ()>>(
f(write).await f(write).await
} }
pub async fn write_exact<W: Write + ?Sized>(mut write: Pin<&mut W>, bytes: &'static [u8]) { pub async fn write_exact<W: AsyncWrite + ?Sized>(mut write: Pin<&mut W>, bytes: &'static [u8]) {
write.write_all(bytes).await.expect("Failed to write exact bytes") write.write_all(bytes).await.expect("Failed to write exact bytes")
} }
@@ -27,7 +27,7 @@ pub fn print_bytes(b: &[u8]) -> String {
.join(" ") .join(" ")
} }
pub async fn read_exact<R: Read + ?Sized>(mut read: Pin<&mut R>, bytes: &'static [u8]) { pub async fn read_exact<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>, bytes: &'static [u8]) {
let mut data = vec![0u8; bytes.len()]; let mut data = vec![0u8; bytes.len()];
read.read_exact(&mut data).await.expect("Failed to read bytes"); read.read_exact(&mut data).await.expect("Failed to read bytes");
if data != bytes { if data != bytes {

View File

@@ -3,8 +3,8 @@ mod helpers;
mod hierarchy; mod hierarchy;
mod relations; mod relations;
pub use async_std;
pub use coding::*; pub use coding::*;
pub use futures::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};
pub use helpers::*; pub use helpers::*;
pub use hierarchy::*; pub use hierarchy::*;
pub use relations::*; pub use relations::*;

View File

@@ -9,7 +9,8 @@ edition = "2024"
ordered-float = "5.0.0" ordered-float = "5.0.0"
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" } orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
async-std = "1.13.0" futures = { version = "0.3.31", features = ["std"], default-features = false }
itertools = "0.14.0"
[dev-dependencies] [dev-dependencies]
test_executors = "0.3.2" test_executors = "0.3.2"

View File

@@ -1,14 +1,28 @@
use std::fmt;
use std::num::NonZeroU64; use std::num::NonZeroU64;
use itertools::Itertools;
use orchid_api_derive::{Coding, Hierarchy}; use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request; use orchid_api_traits::Request;
use crate::{ use crate::{
ExprTicket, Expression, ExtHostReq, FormattingUnit, HostExtNotif, HostExtReq, OrcResult, SysId, ExprTicket, Expression, ExtHostReq, FormattingUnit, HostExtReq, OrcResult, SysId, TStrv,
TStrv,
}; };
pub type AtomData = Vec<u8>; #[derive(Clone, Coding)]
pub struct AtomData(pub Vec<u8>);
impl fmt::Debug for AtomData {
  /// Hex-dump the payload: a single line for fewer than 32 bytes,
  /// otherwise a multi-line table of 32-byte rows.
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    let mut byte_strings = self.0.iter().map(|b| format!("{b:02x}"));
    if self.0.len() < 32 {
      write!(f, "AtomData({})", byte_strings.join(" "))
    } else {
      // itertools::chunks groups the hex strings into 32-byte rows.
      let data_table =
        byte_strings.chunks(32).into_iter().map(|mut chunk| chunk.join(" ")).join("\n");
      write!(f, "AtomData(\n{}\n)", data_table)
    }
  }
}
/// Unique ID associated with atoms that have an identity /// Unique ID associated with atoms that have an identity
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)] #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
@@ -16,7 +30,7 @@ pub struct AtomId(pub NonZeroU64);
/// An atom owned by an implied system. Usually used in responses from a system. /// An atom owned by an implied system. Usually used in responses from a system.
/// This has the same semantics as [Atom] except in that the owner is implied. /// This has the same semantics as [Atom] except in that the owner is implied.
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding)] #[derive(Clone, Debug, Coding)]
pub struct LocalAtom { pub struct LocalAtom {
pub drop: Option<AtomId>, pub drop: Option<AtomId>,
pub data: AtomData, pub data: AtomData,
@@ -27,7 +41,7 @@ impl LocalAtom {
/// An atom representation that can be serialized and sent around. Atoms /// An atom representation that can be serialized and sent around. Atoms
/// represent the smallest increment of work. /// represent the smallest increment of work.
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding)] #[derive(Clone, Debug, Coding)]
pub struct Atom { pub struct Atom {
/// Instance ID of the system that created the atom /// Instance ID of the system that created the atom
pub owner: SysId, pub owner: SysId,
@@ -49,7 +63,7 @@ pub struct Atom {
} }
/// Attempt to apply an atom as a function to an expression /// Attempt to apply an atom as a function to an expression
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(AtomReq, HostExtReq)] #[extends(AtomReq, HostExtReq)]
pub struct CallRef(pub Atom, pub ExprTicket); pub struct CallRef(pub Atom, pub ExprTicket);
impl Request for CallRef { impl Request for CallRef {
@@ -59,14 +73,14 @@ impl Request for CallRef {
/// Attempt to apply an atom as a function, consuming the atom and enabling the /// Attempt to apply an atom as a function, consuming the atom and enabling the
/// library to reuse its datastructures rather than duplicating them. This is an /// library to reuse its datastructures rather than duplicating them. This is an
/// optimization over [CallRef] followed by [AtomDrop]. /// optimization over [CallRef] followed by [AtomDrop].
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(AtomReq, HostExtReq)] #[extends(AtomReq, HostExtReq)]
pub struct FinalCall(pub Atom, pub ExprTicket); pub struct FinalCall(pub Atom, pub ExprTicket);
impl Request for FinalCall { impl Request for FinalCall {
type Response = Expression; type Response = Expression;
} }
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(AtomReq, HostExtReq)] #[extends(AtomReq, HostExtReq)]
pub struct SerializeAtom(pub Atom); pub struct SerializeAtom(pub Atom);
impl Request for SerializeAtom { impl Request for SerializeAtom {
@@ -81,14 +95,14 @@ impl Request for DeserAtom {
} }
/// A request blindly routed to the system that provides an atom. /// A request blindly routed to the system that provides an atom.
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(AtomReq, HostExtReq)] #[extends(AtomReq, HostExtReq)]
pub struct Fwded(pub Atom, pub TStrv, pub Vec<u8>); pub struct Fwded(pub Atom, pub TStrv, pub Vec<u8>);
impl Request for Fwded { impl Request for Fwded {
type Response = Option<Vec<u8>>; type Response = Option<Vec<u8>>;
} }
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(ExtHostReq)] #[extends(ExtHostReq)]
pub struct Fwd(pub Atom, pub TStrv, pub Vec<u8>); pub struct Fwd(pub Atom, pub TStrv, pub Vec<u8>);
impl Request for Fwd { impl Request for Fwd {
@@ -100,7 +114,7 @@ pub enum NextStep {
Continue(Expression), Continue(Expression),
Halt, Halt,
} }
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(AtomReq, HostExtReq)] #[extends(AtomReq, HostExtReq)]
pub struct Command(pub Atom); pub struct Command(pub Atom);
impl Request for Command { impl Request for Command {
@@ -111,17 +125,20 @@ impl Request for Command {
/// isn't referenced anywhere. This should have no effect if the atom's `drop` /// isn't referenced anywhere. This should have no effect if the atom's `drop`
/// flag is false. /// flag is false.
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)]
#[extends(HostExtNotif)] #[extends(HostExtReq)]
pub struct AtomDrop(pub SysId, pub AtomId); pub struct AtomDrop(pub SysId, pub AtomId);
impl Request for AtomDrop {
type Response = ();
}
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(AtomReq, HostExtReq)] #[extends(AtomReq, HostExtReq)]
pub struct AtomPrint(pub Atom); pub struct AtomPrint(pub Atom);
impl Request for AtomPrint { impl Request for AtomPrint {
type Response = FormattingUnit; type Response = FormattingUnit;
} }
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(ExtHostReq)] #[extends(ExtHostReq)]
pub struct ExtAtomPrint(pub Atom); pub struct ExtAtomPrint(pub Atom);
impl Request for ExtAtomPrint { impl Request for ExtAtomPrint {
@@ -129,7 +146,7 @@ impl Request for ExtAtomPrint {
} }
/// Requests that apply to an existing atom instance /// Requests that apply to an existing atom instance
#[derive(Clone, Debug, Hash, PartialEq, Eq, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)] #[extends(HostExtReq)]
#[extendable] #[extendable]
pub enum AtomReq { pub enum AtomReq {

View File

@@ -1,3 +1,4 @@
use std::fmt;
use std::num::NonZeroU64; use std::num::NonZeroU64;
use orchid_api_derive::{Coding, Hierarchy}; use orchid_api_derive::{Coding, Hierarchy};
@@ -10,8 +11,13 @@ use crate::{Atom, ExtHostNotif, ExtHostReq, Location, OrcError, SysId, TStrv};
/// [Acquire]. /// [Acquire].
/// ///
/// The ID is globally unique within its lifetime, but may be reused. /// The ID is globally unique within its lifetime, but may be reused.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)] #[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
pub struct ExprTicket(pub NonZeroU64); pub struct ExprTicket(pub NonZeroU64);
impl fmt::Debug for ExprTicket {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "ExprTicket({:x})", self.0.get())
}
}
/// Acquire a strong reference to an expression. This keeps it alive until a /// Acquire a strong reference to an expression. This keeps it alive until a
/// corresponding [Release] is emitted. The number of times a system has /// corresponding [Release] is emitted. The number of times a system has
@@ -62,7 +68,7 @@ pub enum ExpressionKind {
Arg(u64), Arg(u64),
/// Insert the specified host-expression in the template here. When the clause /// Insert the specified host-expression in the template here. When the clause
/// is used in the const tree, this variant is forbidden. /// is used in the const tree, this variant is forbidden.
Slot(ExprTicket), Slot { tk: ExprTicket, by_value: bool },
/// The lhs must be fully processed before the rhs can be processed. /// The lhs must be fully processed before the rhs can be processed.
/// Equivalent to Haskell's function of the same name /// Equivalent to Haskell's function of the same name
Seq(Box<Expression>, Box<Expression>), Seq(Box<Expression>, Box<Expression>),

View File

@@ -30,6 +30,7 @@ pub struct ParseLine {
pub src: TStrv, pub src: TStrv,
pub comments: Vec<Comment>, pub comments: Vec<Comment>,
pub exported: bool, pub exported: bool,
pub idx: u16,
pub line: Vec<TokenTree>, pub line: Vec<TokenTree>,
} }
impl Request for ParseLine { impl Request for ParseLine {
@@ -59,7 +60,7 @@ pub struct ParsedMember {
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub enum ParsedMemberKind { pub enum ParsedMemberKind {
Constant(ParsedConstId), Constant(ParsedConstId),
Module(Vec<ParsedLine>), Module { lines: Vec<ParsedLine>, use_prelude: bool },
} }
/// Obtain the value of a parsed constant. This is guaranteed to be called after /// Obtain the value of a parsed constant. This is guaranteed to be called after
@@ -67,10 +68,7 @@ pub enum ParsedMemberKind {
/// the macro engine could run here. /// the macro engine could run here.
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)] #[extends(HostExtReq)]
pub struct FetchParsedConst { pub struct FetchParsedConst(pub SysId, pub ParsedConstId);
pub sys: SysId,
pub id: ParsedConstId,
}
impl Request for FetchParsedConst { impl Request for FetchParsedConst {
type Response = Expression; type Response = Expression;
} }
@@ -85,8 +83,8 @@ pub struct Comment {
/// called during a [FetchParsedConst] call, but it can be called for a /// called during a [FetchParsedConst] call, but it can be called for a
/// different [ParsedConstId] from the one in [FetchParsedConst]. /// different [ParsedConstId] from the one in [FetchParsedConst].
/// ///
/// Each name is either resolved to an alias or existing constant `Some(TStrv)` /// Each name is either resolved to a valid name or a potential error error.
/// or not resolved `None`. An error is never raised, as names may have a /// The error is not raised by the interpreter itself, as names may have a
/// primary meaning such as a local binding which can be overridden by specific /// primary meaning such as a local binding which can be overridden by specific
/// true names such as those triggering macro keywords. It is not recommended to /// true names such as those triggering macro keywords. It is not recommended to
/// define syntax that can break by defining arbitrary constants, as line /// define syntax that can break by defining arbitrary constants, as line
@@ -100,5 +98,5 @@ pub struct ResolveNames {
} }
impl Request for ResolveNames { impl Request for ResolveNames {
type Response = Vec<Option<TStrv>>; type Response = Vec<OrcResult<TStrv>>;
} }

View File

@@ -24,7 +24,7 @@
use std::pin::Pin; use std::pin::Pin;
use async_std::io::{Read, Write}; use futures::{AsyncRead, AsyncWrite};
use orchid_api_derive::{Coding, Hierarchy}; use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::{Channel, Decode, Encode, MsgSet, Request, read_exact, write_exact}; use orchid_api_traits::{Channel, Decode, Encode, MsgSet, Request, read_exact, write_exact};
@@ -36,7 +36,7 @@ pub struct HostHeader {
pub msg_logs: logging::LogStrategy, pub msg_logs: logging::LogStrategy,
} }
impl Decode for HostHeader { impl Decode for HostHeader {
async fn decode<R: Read + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self {
read_exact(read.as_mut(), HOST_INTRO).await; read_exact(read.as_mut(), HOST_INTRO).await;
Self { Self {
log_strategy: logging::LogStrategy::decode(read.as_mut()).await, log_strategy: logging::LogStrategy::decode(read.as_mut()).await,
@@ -45,7 +45,7 @@ impl Decode for HostHeader {
} }
} }
impl Encode for HostHeader { impl Encode for HostHeader {
async fn encode<W: Write + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) {
write_exact(write.as_mut(), HOST_INTRO).await; write_exact(write.as_mut(), HOST_INTRO).await;
self.log_strategy.encode(write.as_mut()).await; self.log_strategy.encode(write.as_mut()).await;
self.msg_logs.encode(write.as_mut()).await self.msg_logs.encode(write.as_mut()).await
@@ -58,13 +58,13 @@ pub struct ExtensionHeader {
pub systems: Vec<system::SystemDecl>, pub systems: Vec<system::SystemDecl>,
} }
impl Decode for ExtensionHeader { impl Decode for ExtensionHeader {
async fn decode<R: Read + ?Sized>(mut read: Pin<&mut R>) -> Self { async fn decode<R: AsyncRead + ?Sized>(mut read: Pin<&mut R>) -> Self {
read_exact(read.as_mut(), EXT_INTRO).await; read_exact(read.as_mut(), EXT_INTRO).await;
Self { name: String::decode(read.as_mut()).await, systems: Vec::decode(read).await } Self { name: String::decode(read.as_mut()).await, systems: Vec::decode(read).await }
} }
} }
impl Encode for ExtensionHeader { impl Encode for ExtensionHeader {
async fn encode<W: Write + ?Sized>(&self, mut write: Pin<&mut W>) { async fn encode<W: AsyncWrite + ?Sized>(&self, mut write: Pin<&mut W>) {
write_exact(write.as_mut(), EXT_INTRO).await; write_exact(write.as_mut(), EXT_INTRO).await;
self.name.encode(write.as_mut()).await; self.name.encode(write.as_mut()).await;
self.systems.encode(write).await self.systems.encode(write).await
@@ -120,14 +120,14 @@ pub enum HostExtReq {
ParseLine(parser::ParseLine), ParseLine(parser::ParseLine),
FetchParsedConst(parser::FetchParsedConst), FetchParsedConst(parser::FetchParsedConst),
GetMember(tree::GetMember), GetMember(tree::GetMember),
SystemDrop(system::SystemDrop),
AtomDrop(atom::AtomDrop),
} }
/// Notifications sent from the host to the extension /// Notifications sent from the host to the extension
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extendable] #[extendable]
pub enum HostExtNotif { pub enum HostExtNotif {
SystemDrop(system::SystemDrop),
AtomDrop(atom::AtomDrop),
/// The host can assume that after this notif is sent, a correctly written /// The host can assume that after this notif is sent, a correctly written
/// extension will eventually exit. /// extension will eventually exit.
Exit, Exit,

View File

@@ -5,7 +5,7 @@ use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request; use orchid_api_traits::Request;
use ordered_float::NotNan; use ordered_float::NotNan;
use crate::{CharFilter, ExtHostReq, HostExtNotif, HostExtReq, MemberKind, TStr}; use crate::{CharFilter, ExtHostReq, HostExtReq, MemberKind, TStr, TStrv};
/// ID of a system type /// ID of a system type
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)] #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
@@ -63,11 +63,15 @@ pub struct NewSystemResponse {
pub lex_filter: CharFilter, pub lex_filter: CharFilter,
pub line_types: Vec<TStr>, pub line_types: Vec<TStr>,
pub const_root: HashMap<TStr, MemberKind>, pub const_root: HashMap<TStr, MemberKind>,
pub prelude: Vec<TStrv>,
} }
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtNotif)] #[extends(HostExtReq)]
pub struct SystemDrop(pub SysId); pub struct SystemDrop(pub SysId);
impl Request for SystemDrop {
type Response = ();
}
#[derive(Clone, Debug, Coding, Hierarchy)] #[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(SysReq, HostExtReq)] #[extends(SysReq, HostExtReq)]

View File

@@ -27,7 +27,7 @@ pub struct TokenTree {
#[derive(Clone, Debug, Coding)] #[derive(Clone, Debug, Coding)]
pub enum Token { pub enum Token {
/// Lambda function head, from the opening \ until the beginning of the body. /// Lambda function head, from the opening \ until the beginning of the body.
LambdaHead(Vec<TokenTree>), LambdaHead(Box<TokenTree>),
/// A name segment or an operator. /// A name segment or an operator.
Name(TStr), Name(TStr),
/// A newly generated expression. The last place this is supposed to happen is /// A newly generated expression. The last place this is supposed to happen is

View File

@@ -6,13 +6,12 @@ edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
async-once-cell = "0.5.4" async-once-cell = "0.5.4"
async-std = "1.13.0"
async-stream = "0.3.6"
derive_destructure = "1.0.0" derive_destructure = "1.0.0"
dyn-clone = "1.0.17" dyn-clone = "1.0.20"
futures = "0.3.31" futures = { version = "0.3.31", features = ["std"], default-features = false }
hashbrown = "0.15.2" hashbrown = "0.16.0"
itertools = "0.14.0" itertools = "0.14.0"
lazy_static = "1.5.0" lazy_static = "1.5.0"
never = "0.1.0" never = "0.1.0"
@@ -21,9 +20,9 @@ orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" } orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
ordered-float = "5.0.0" ordered-float = "5.0.0"
regex = "1.11.1" regex = "1.11.2"
rust-embed = "8.5.0" rust-embed = "8.7.2"
some_executor = "0.4.0" some_executor = "0.6.1"
substack = "1.1.1" substack = "1.1.1"
test_executors = "0.3.2" test_executors = "0.3.5"
trait-set = "0.3.0" trait-set = "0.3.0"

View File

@@ -1,12 +1,12 @@
#[macro_export] #[macro_export]
macro_rules! clone { macro_rules! clone {
($($n:ident),+; $body:expr) => ( ($($n:ident $($mut:ident)?),+; $body:expr) => (
{ {
$( let $n = $n.clone(); )+ $( let $($mut)? $n = $n.clone(); )+
$body $body
} }
); );
($($n:ident),+) => { ($($n:ident $($mut:ident)?),+) => {
$( let $n = $n.clone(); )+ $( let $($mut)? $n = $n.clone(); )+
} }
} }

View File

@@ -71,9 +71,8 @@ impl OrcErr {
} }
} }
} }
impl Eq for OrcErr {} impl PartialEq<Tok<String>> for OrcErr {
impl PartialEq for OrcErr { fn eq(&self, other: &Tok<String>) -> bool { self.description == *other }
fn eq(&self, other: &Self) -> bool { self.description == other.description }
} }
impl From<OrcErr> for Vec<OrcErr> { impl From<OrcErr> for Vec<OrcErr> {
fn from(value: OrcErr) -> Self { vec![value] } fn from(value: OrcErr) -> Self { vec![value] }
@@ -158,28 +157,59 @@ impl fmt::Display for OrcErrv {
pub type OrcRes<T> = Result<T, OrcErrv>; pub type OrcRes<T> = Result<T, OrcErrv>;
pub fn mk_err( pub fn join_ok<T, U>(left: OrcRes<T>, right: OrcRes<U>) -> OrcRes<(T, U)> {
description: Tok<String>, match (left, right) {
message: impl AsRef<str>, (Ok(t), Ok(u)) => Ok((t, u)),
posv: impl IntoIterator<Item = ErrPos>, (Err(e), Ok(_)) | (Ok(_), Err(e)) => Err(e),
) -> OrcErr { (Err(e1), Err(e2)) => Err(e1 + e2),
OrcErr {
description,
message: Arc::new(message.as_ref().to_string()),
positions: posv.into_iter().collect(),
} }
} }
#[macro_export]
macro_rules! join_ok {
($($names:ident $(: $tys:ty)? = $vals:expr;)*) => {
let $crate::join_ok!(@NAMES $($names $(: $tys)? = $vals;)*)
:
$crate::join_ok!(@TYPES $($names $(: $tys)? = $vals;)*)
=
$crate::join_ok!(@VALUES $($names $(: $tys)? = $vals;)*)?;
};
(@NAMES $name:ident $(: $ty:ty)? = $val:expr ; $($names:ident $(: $tys:ty)? = $vals:expr;)*) => {
($name, $crate::join_ok!(@NAMES $($names $(: $tys)? = $vals;)*))
};
(@NAMES) => { _ };
(@TYPES $name:ident : $ty:ty = $val:expr ; $($names:ident $(: $tys:ty)? = $vals:expr;)*) => {
($ty, $crate::join_ok!(@TYPES $($names $(: $tys)? = $vals;)*))
};
(@TYPES $name:ident = $val:expr ; $($names:ident $(: $tys:ty)? = $vals:expr;)*) => {
(_, $crate::join_ok!(@TYPES $($names $(: $tys)? = $vals;)*))
};
(@TYPES) => { () };
(@VALUES $name:ident $(: $ty:ty)? = $val:expr ; $($names:ident $(: $tys:ty)? = $vals:expr;)*) => {
$crate::error::join_ok($val, $crate::join_ok!(@VALUES $($names $(: $tys)? = $vals;)*))
};
(@VALUES) => { Ok(()) };
}
pub fn mk_errv_floating(description: Tok<String>, message: impl AsRef<str>) -> OrcErrv {
mk_errv::<Pos>(description, message, [])
}
pub fn mk_errv<I: Into<ErrPos>>( pub fn mk_errv<I: Into<ErrPos>>(
description: Tok<String>, description: Tok<String>,
message: impl AsRef<str>, message: impl AsRef<str>,
posv: impl IntoIterator<Item = I>, posv: impl IntoIterator<Item = I>,
) -> OrcErrv { ) -> OrcErrv {
mk_err(description, message, posv.into_iter().map_into()).into() OrcErr {
description,
message: Arc::new(message.as_ref().to_string()),
positions: posv.into_iter().map_into().collect(),
}
.into()
} }
pub async fn async_io_err<I: Into<ErrPos>>( pub async fn async_io_err<I: Into<ErrPos>>(
err: async_std::io::Error, err: std::io::Error,
i: &Interner, i: &Interner,
posv: impl IntoIterator<Item = I>, posv: impl IntoIterator<Item = I>,
) -> OrcErrv { ) -> OrcErrv {

View File

@@ -7,7 +7,7 @@ use std::rc::Rc;
use std::sync::atomic; use std::sync::atomic;
use std::{fmt, hash}; use std::{fmt, hash};
use async_std::sync::Mutex; use futures::lock::Mutex;
use hashbrown::{HashMap, HashSet}; use hashbrown::{HashMap, HashSet};
use itertools::Itertools as _; use itertools::Itertools as _;
use orchid_api_traits::Request; use orchid_api_traits::Request;

View File

@@ -0,0 +1,24 @@
use std::fmt;
use itertools::{Itertools, Position};
pub struct PrintList<'a, I: Iterator<Item = E> + Clone, E: fmt::Display>(pub I, pub &'a str);
impl<'a, I: Iterator<Item = E> + Clone, E: fmt::Display> fmt::Display for PrintList<'a, I, E> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for (pos, item) in self.0.clone().with_position() {
match pos {
Position::First | Position::Only => write!(f, "{item}")?,
Position::Middle => write!(f, ", {item}")?,
Position::Last => write!(f, ", {} {item}", self.1)?,
}
}
Ok(())
}
}
pub trait IteratorPrint: Iterator<Item: fmt::Display> + Clone {
fn display<'a>(self, operator: &'a str) -> PrintList<'a, Self, Self::Item> {
PrintList(self, operator)
}
}
impl<T: Iterator<Item: fmt::Display> + Clone> IteratorPrint for T {}

View File

@@ -12,6 +12,7 @@ pub mod event;
pub mod format; pub mod format;
pub mod id_store; pub mod id_store;
pub mod interner; pub mod interner;
pub mod iter_utils;
pub mod join; pub mod join;
pub mod location; pub mod location;
pub mod logging; pub mod logging;

View File

@@ -15,7 +15,7 @@ trait_set! {
pub trait GetSrc = FnMut(&Sym) -> Tok<String>; pub trait GetSrc = FnMut(&Sym) -> Tok<String>;
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum Pos { pub enum Pos {
None, None,
SlotTarget, SlotTarget,
@@ -108,6 +108,10 @@ impl SrcRange {
pub fn to_api(&self) -> api::SourceRange { pub fn to_api(&self) -> api::SourceRange {
api::SourceRange { path: self.path.to_api(), range: self.range.clone() } api::SourceRange { path: self.path.to_api(), range: self.range.clone() }
} }
pub fn to(&self, rhs: &Self) -> Self {
assert_eq!(self.path, rhs.path, "Range continues across files");
Self { path: self.path(), range: self.start().min(rhs.start())..self.end().max(rhs.end()) }
}
} }
impl From<SrcRange> for ErrPos { impl From<SrcRange> for ErrPos {
fn from(val: SrcRange) -> Self { val.pos().into() } fn from(val: SrcRange) -> Self { val.pos().into() }

View File

@@ -1,4 +1,4 @@
/// A shorthand for mapping over enums with identical structure. Used for /// A shorthand for mapping over enums with similar structure. Used for
/// converting between owned enums and the corresponding API enums that only /// converting between owned enums and the corresponding API enums that only
/// differ in the type of their fields. /// differ in the type of their fields.
/// ///
@@ -7,7 +7,11 @@
/// match_mapping!(self, ThisType => OtherType { /// match_mapping!(self, ThisType => OtherType {
/// EmptyVariant, /// EmptyVariant,
/// TupleVariant(foo => intern(foo), bar.clone()), /// TupleVariant(foo => intern(foo), bar.clone()),
/// StructVariant{ a.to_api(), b } /// StructVariant{ a.to_api(), b },
/// DedicatedConverter(value () convert)
/// } {
/// ThisType::DimorphicVariant(c) => OtherType::CorrespondingVariant(c.left(), c.right()),
/// ThisType::UnexpectedVariant => panic!(),
/// }) /// })
/// ``` /// ```
#[macro_export] #[macro_export]

View File

@@ -1,10 +1,10 @@
use std::io; use std::io;
use std::pin::Pin; use std::pin::Pin;
use async_std::io::{Read, ReadExt, Write, WriteExt}; use futures::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};
use orchid_api_traits::{Decode, Encode}; use orchid_api_traits::{Decode, Encode};
pub async fn send_msg(mut write: Pin<&mut impl Write>, msg: &[u8]) -> io::Result<()> { pub async fn send_msg(mut write: Pin<&mut impl AsyncWrite>, msg: &[u8]) -> io::Result<()> {
let mut len_buf = vec![]; let mut len_buf = vec![];
u32::try_from(msg.len()).unwrap().encode(Pin::new(&mut len_buf)).await; u32::try_from(msg.len()).unwrap().encode(Pin::new(&mut len_buf)).await;
write.write_all(&len_buf).await?; write.write_all(&len_buf).await?;
@@ -12,7 +12,7 @@ pub async fn send_msg(mut write: Pin<&mut impl Write>, msg: &[u8]) -> io::Result
write.flush().await write.flush().await
} }
pub async fn recv_msg(mut read: Pin<&mut impl Read>) -> io::Result<Vec<u8>> { pub async fn recv_msg(mut read: Pin<&mut impl AsyncRead>) -> io::Result<Vec<u8>> {
let mut len_buf = [0u8; (u32::BITS / 8) as usize]; let mut len_buf = [0u8; (u32::BITS / 8) as usize];
read.read_exact(&mut len_buf).await?; read.read_exact(&mut len_buf).await?;
let len = u32::decode(Pin::new(&mut &len_buf[..])).await; let len = u32::decode(Pin::new(&mut &len_buf[..])).await;

View File

@@ -19,10 +19,9 @@ trait_set! {
pub trait NameIter = Iterator<Item = Tok<String>> + DoubleEndedIterator + ExactSizeIterator; pub trait NameIter = Iterator<Item = Tok<String>> + DoubleEndedIterator + ExactSizeIterator;
} }
/// A token path which may be empty. [VName] is the non-empty, /// A token path which may be empty. [VName] is the non-empty version
/// [PathSlice] is the borrowed version
#[derive(Clone, Default, Hash, PartialEq, Eq)] #[derive(Clone, Default, Hash, PartialEq, Eq)]
pub struct VPath(pub Vec<Tok<String>>); pub struct VPath(Vec<Tok<String>>);
impl VPath { impl VPath {
/// Collect segments into a vector /// Collect segments into a vector
pub fn new(items: impl IntoIterator<Item = Tok<String>>) -> Self { pub fn new(items: impl IntoIterator<Item = Tok<String>>) -> Self {
@@ -236,8 +235,8 @@ impl Sym {
Self::from_tok(Tok::from_api(marker, i).await).expect("Empty sequence found for serialized Sym") Self::from_tok(Tok::from_api(marker, i).await).expect("Empty sequence found for serialized Sym")
} }
pub fn to_api(&self) -> api::TStrv { self.tok().to_api() } pub fn to_api(&self) -> api::TStrv { self.tok().to_api() }
pub async fn push(&self, tok: Tok<String>, i: &Interner) -> Sym { pub async fn suffix(&self, tokv: impl IntoIterator<Item = Tok<String>>, i: &Interner) -> Sym {
Self::new(self.0.iter().cloned().chain([tok]), i).await.unwrap() Self::new(self.0.iter().cloned().chain(tokv), i).await.unwrap()
} }
} }
impl fmt::Debug for Sym { impl fmt::Debug for Sym {
@@ -272,34 +271,34 @@ pub trait NameLike:
/// Convert into held slice /// Convert into held slice
fn as_slice(&self) -> &[Tok<String>] { Borrow::<[Tok<String>]>::borrow(self) } fn as_slice(&self) -> &[Tok<String>] { Borrow::<[Tok<String>]>::borrow(self) }
/// Get iterator over tokens /// Get iterator over tokens
fn iter(&self) -> impl NameIter + '_ { self.as_slice().iter().cloned() } fn segs(&self) -> impl NameIter + '_ { self.as_slice().iter().cloned() }
/// Get iterator over string segments /// Get iterator over string segments
fn str_iter(&self) -> impl Iterator<Item = &'_ str> + '_ { fn str_iter(&self) -> impl Iterator<Item = &'_ str> + '_ {
self.as_slice().iter().map(|t| t.as_str()) self.as_slice().iter().map(|t| t.as_str())
} }
/// Fully resolve the name for printing /// Fully resolve the name for printing
#[must_use] #[must_use]
fn to_strv(&self) -> Vec<String> { self.iter().map(|s| s.to_string()).collect() } fn to_strv(&self) -> Vec<String> { self.segs().map(|s| s.to_string()).collect() }
/// Format the name as an approximate filename /// Format the name as an approximate filename
fn as_src_path(&self) -> String { format!("{}.orc", self.iter().join("/")) } fn as_src_path(&self) -> String { format!("{}.orc", self.segs().join("/")) }
/// Return the number of segments in the name /// Return the number of segments in the name
fn len(&self) -> NonZeroUsize { fn len_nz(&self) -> NonZeroUsize {
NonZeroUsize::try_from(self.iter().count()).expect("NameLike never empty") NonZeroUsize::try_from(self.segs().count()).expect("NameLike never empty")
} }
/// Like slice's `split_first` except we know that it always returns Some /// Like slice's `split_first` except we know that it always returns Some
fn split_first(&self) -> (Tok<String>, &[Tok<String>]) { fn split_first_seg(&self) -> (Tok<String>, &[Tok<String>]) {
let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty"); let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty");
(foot.clone(), torso) (foot.clone(), torso)
} }
/// Like slice's `split_last` except we know that it always returns Some /// Like slice's `split_last` except we know that it always returns Some
fn split_last(&self) -> (Tok<String>, &[Tok<String>]) { fn split_last_seg(&self) -> (Tok<String>, &[Tok<String>]) {
let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty"); let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty");
(foot.clone(), torso) (foot.clone(), torso)
} }
/// Get the first element /// Get the first element
fn first(&self) -> Tok<String> { self.split_first().0 } fn first_seg(&self) -> Tok<String> { self.split_first_seg().0 }
/// Get the last element /// Get the last element
fn last(&self) -> Tok<String> { self.split_last().0 } fn last_seg(&self) -> Tok<String> { self.split_last_seg().0 }
} }
impl NameLike for Sym {} impl NameLike for Sym {}

View File

@@ -3,7 +3,7 @@ use std::ops::Range;
use ordered_float::NotNan; use ordered_float::NotNan;
use crate::error::{OrcErr, mk_err}; use crate::error::{OrcErrv, mk_errv};
use crate::interner::Interner; use crate::interner::Interner;
use crate::location::SrcRange; use crate::location::SrcRange;
use crate::name::Sym; use crate::name::Sym;
@@ -55,20 +55,20 @@ pub struct NumError {
pub kind: NumErrorKind, pub kind: NumErrorKind,
} }
pub async fn num_to_err( pub async fn num_to_errv(
NumError { kind, range }: NumError, NumError { kind, range }: NumError,
offset: u32, offset: u32,
source: &Sym, source: &Sym,
i: &Interner, i: &Interner,
) -> OrcErr { ) -> OrcErrv {
mk_err( mk_errv(
i.i("Failed to parse number").await, i.i("Failed to parse number").await,
match kind { match kind {
NumErrorKind::NaN => "NaN emerged during parsing", NumErrorKind::NaN => "NaN emerged during parsing",
NumErrorKind::InvalidDigit => "non-digit character encountered", NumErrorKind::InvalidDigit => "non-digit character encountered",
NumErrorKind::Overflow => "The number being described is too large or too accurate", NumErrorKind::Overflow => "The number being described is too large or too accurate",
}, },
[SrcRange::new(offset + range.start as u32..offset + range.end as u32, source).pos().into()], [SrcRange::new(offset + range.start as u32..offset + range.end as u32, source)],
) )
} }
@@ -79,7 +79,6 @@ pub fn parse_num(string: &str) -> Result<Numeric, NumError> {
.or_else(|| string.strip_prefix("0b").map(|s| (2u8, s, 2))) .or_else(|| string.strip_prefix("0b").map(|s| (2u8, s, 2)))
.or_else(|| string.strip_prefix("0o").map(|s| (8u8, s, 2))) .or_else(|| string.strip_prefix("0o").map(|s| (8u8, s, 2)))
.unwrap_or((10u8, string, 0)); .unwrap_or((10u8, string, 0));
eprintln!("({radix}, {noprefix}, {pos})");
// identity // identity
let (base_s, exponent) = match noprefix.split_once('p') { let (base_s, exponent) = match noprefix.split_once('p') {
Some((b, e)) => { Some((b, e)) => {
@@ -88,16 +87,15 @@ pub fn parse_num(string: &str) -> Result<Numeric, NumError> {
}, },
None => (noprefix, 0), None => (noprefix, 0),
}; };
eprintln!("({base_s},{exponent})");
match base_s.split_once('.') { match base_s.split_once('.') {
None => { None => {
let base = int_parse(base_s, radix, pos)?; let base = int_parse(base_s, radix, pos)?;
if let Ok(pos_exp) = u32::try_from(exponent) { if let Ok(pos_exp) = u32::try_from(exponent)
if let Some(radical) = u64::from(radix).checked_pow(pos_exp) { && let Some(radical) = u64::from(radix).checked_pow(pos_exp)
{
let num = base.checked_mul(radical).and_then(|m| m.try_into().ok()).ok_or(overflow_e)?; let num = base.checked_mul(radical).and_then(|m| m.try_into().ok()).ok_or(overflow_e)?;
return Ok(Numeric::Int(num)); return Ok(Numeric::Int(num));
} }
}
let f = (base as f64) * (radix as f64).powi(exponent); let f = (base as f64) * (radix as f64).powi(exponent);
let err = NumError { range: 0..string.len(), kind: NumErrorKind::NaN }; let err = NumError { range: 0..string.len(), kind: NumErrorKind::NaN };
Ok(Numeric::Float(NotNan::new(f).map_err(|_| err)?)) Ok(Numeric::Float(NotNan::new(f).map_err(|_| err)?))

View File

@@ -7,18 +7,18 @@ use futures::future::join_all;
use itertools::Itertools; use itertools::Itertools;
use crate::api; use crate::api;
use crate::error::{OrcRes, Reporter, mk_err, mk_errv}; use crate::error::{OrcErrv, OrcRes, Reporter, mk_errv};
use crate::format::fmt; use crate::format::{FmtCtx, FmtUnit, Format, fmt};
use crate::interner::{Interner, Tok}; use crate::interner::{Interner, Tok};
use crate::location::SrcRange; use crate::location::SrcRange;
use crate::name::{Sym, VName, VPath}; use crate::name::{Sym, VName, VPath};
use crate::tree::{ExprRepr, ExtraTok, Paren, TokTree, Token, ttv_range}; use crate::tree::{ExprRepr, ExtraTok, Paren, TokTree, Token, ttv_fmt, ttv_range};
pub trait ParseCtx { pub trait ParseCtx {
#[must_use] #[must_use]
fn i(&self) -> &Interner; fn i(&self) -> &Interner;
#[must_use] #[must_use]
fn reporter(&self) -> &Reporter; fn rep(&self) -> &Reporter;
} }
pub struct ParseCtxImpl<'a> { pub struct ParseCtxImpl<'a> {
pub i: &'a Interner, pub i: &'a Interner,
@@ -26,7 +26,7 @@ pub struct ParseCtxImpl<'a> {
} }
impl ParseCtx for ParseCtxImpl<'_> { impl ParseCtx for ParseCtxImpl<'_> {
fn i(&self) -> &Interner { self.i } fn i(&self) -> &Interner { self.i }
fn reporter(&self) -> &Reporter { self.r } fn rep(&self) -> &Reporter { self.r }
} }
pub fn name_start(c: char) -> bool { c.is_alphabetic() || c == '_' } pub fn name_start(c: char) -> bool { c.is_alphabetic() || c == '_' }
@@ -95,18 +95,10 @@ impl<A: ExprRepr, X: ExtraTok> Deref for Snippet<'_, A, X> {
type Target = [TokTree<A, X>]; type Target = [TokTree<A, X>];
fn deref(&self) -> &Self::Target { self.cur } fn deref(&self) -> &Self::Target { self.cur }
} }
impl<A: ExprRepr, X: ExtraTok> Format for Snippet<'_, A, X> {
/// Remove tokens that aren't meaningful in expression context, such as comments async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
/// or line breaks ttv_fmt(&**self, c).await
pub fn strip_fluff<A: ExprRepr, X: ExtraTok>(tt: &TokTree<A, X>) -> Option<TokTree<A, X>> { }
let tok = match &tt.tok {
Token::BR => return None,
Token::Comment(_) => return None,
Token::LambdaHead(arg) => Token::LambdaHead(arg.iter().filter_map(strip_fluff).collect()),
Token::S(p, b) => Token::S(*p, b.iter().filter_map(strip_fluff).collect()),
t => t.clone(),
};
Some(TokTree { tok, sr: tt.sr.clone() })
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@@ -208,6 +200,15 @@ pub async fn expect_tok<'a, A: ExprRepr, X: ExtraTok>(
} }
} }
pub async fn token_errv<A: ExprRepr, X: ExtraTok>(
ctx: &impl ParseCtx,
tok: &TokTree<A, X>,
description: &'static str,
message: impl FnOnce(&str) -> String,
) -> OrcErrv {
mk_errv(ctx.i().i(description).await, message(&fmt(tok, ctx.i()).await), [tok.sr.pos()])
}
pub struct Parsed<'a, T, H: ExprRepr, X: ExtraTok> { pub struct Parsed<'a, T, H: ExprRepr, X: ExtraTok> {
pub output: T, pub output: T,
pub tail: Snippet<'a, H, X>, pub tail: Snippet<'a, H, X>,
@@ -236,10 +237,10 @@ pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
match &tt.tok { match &tt.tok {
Token::NS(ns, body) => { Token::NS(ns, body) => {
if !ns.starts_with(name_start) { if !ns.starts_with(name_start) {
ctx.reporter().report(mk_err( ctx.rep().report(mk_errv(
ctx.i().i("Unexpected name prefix").await, ctx.i().i("Unexpected name prefix").await,
"Only names can precede ::", "Only names can precede ::",
[ttpos.into()], [ttpos],
)) ))
}; };
let out = Box::pin(rec(body, ctx)).await?; let out = Box::pin(rec(body, ctx)).await?;
@@ -256,7 +257,7 @@ pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
while let Some((output, tail)) = body.pop_front() { while let Some((output, tail)) = body.pop_front() {
match rec(output, ctx).boxed_local().await { match rec(output, ctx).boxed_local().await {
Ok(names) => o.extend(names), Ok(names) => o.extend(names),
Err(e) => ctx.reporter().report(e), Err(e) => ctx.rep().report(e),
} }
body = tail; body = tail;
} }
@@ -295,6 +296,10 @@ impl Import {
None => self.path.into_name().expect("Import cannot be empty"), None => self.path.into_name().expect("Import cannot be empty"),
} }
} }
pub fn new(sr: SrcRange, path: VPath, name: Tok<String>) -> Self {
Import { path, name: Some(name), sr }
}
pub fn new_glob(sr: SrcRange, path: VPath) -> Self { Import { path, name: None, sr } }
} }
impl Display for Import { impl Display for Import {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {

View File

@@ -1,4 +1,3 @@
use std::any::Any;
use std::cell::RefCell; use std::cell::RefCell;
use std::future::Future; use std::future::Future;
use std::marker::PhantomData; use std::marker::PhantomData;
@@ -8,11 +7,12 @@ use std::pin::Pin;
use std::sync::Arc; use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use async_std::channel;
use async_std::sync::Mutex;
use derive_destructure::destructure; use derive_destructure::destructure;
use dyn_clone::{DynClone, clone_box}; use dyn_clone::{DynClone, clone_box};
use futures::channel::mpsc;
use futures::future::LocalBoxFuture; use futures::future::LocalBoxFuture;
use futures::lock::Mutex;
use futures::{SinkExt, StreamExt};
use hashbrown::HashMap; use hashbrown::HashMap;
use orchid_api_traits::{Channel, Coding, Decode, Encode, MsgSet, Request}; use orchid_api_traits::{Channel, Coding, Decode, Encode, MsgSet, Request};
use trait_set::trait_set; use trait_set::trait_set;
@@ -40,19 +40,19 @@ fn get_id(message: &[u8]) -> (u64, &[u8]) {
} }
pub trait ReqHandlish { pub trait ReqHandlish {
fn defer_drop(&self, val: impl Any + 'static) fn defer(&self, cb: impl Future<Output = ()> + 'static)
where Self: Sized { where Self: Sized {
self.defer_drop_objsafe(Box::new(val)); self.defer_objsafe(Box::pin(cb));
} }
fn defer_drop_objsafe(&self, val: Box<dyn Any>); fn defer_objsafe(&self, val: Pin<Box<dyn Future<Output = ()>>>);
} }
impl ReqHandlish for &'_ dyn ReqHandlish { impl ReqHandlish for &'_ dyn ReqHandlish {
fn defer_drop_objsafe(&self, val: Box<dyn Any>) { (**self).defer_drop_objsafe(val) } fn defer_objsafe(&self, val: Pin<Box<dyn Future<Output = ()>>>) { (**self).defer_objsafe(val) }
} }
#[derive(destructure)] #[derive(destructure)]
pub struct RequestHandle<'a, MS: MsgSet> { pub struct RequestHandle<'a, MS: MsgSet> {
defer_drop: RefCell<Vec<Box<dyn Any>>>, defer: RefCell<Vec<Pin<Box<dyn Future<Output = ()>>>>>,
fulfilled: AtomicBool, fulfilled: AtomicBool,
id: u64, id: u64,
_reqlt: PhantomData<&'a mut ()>, _reqlt: PhantomData<&'a mut ()>,
@@ -60,13 +60,7 @@ pub struct RequestHandle<'a, MS: MsgSet> {
} }
impl<'a, MS: MsgSet + 'static> RequestHandle<'a, MS> { impl<'a, MS: MsgSet + 'static> RequestHandle<'a, MS> {
fn new(parent: ReqNot<MS>, id: u64) -> Self { fn new(parent: ReqNot<MS>, id: u64) -> Self {
Self { Self { defer: RefCell::default(), fulfilled: false.into(), _reqlt: PhantomData, parent, id }
defer_drop: RefCell::default(),
fulfilled: false.into(),
_reqlt: PhantomData,
parent,
id,
}
} }
pub fn reqnot(&self) -> ReqNot<MS> { self.parent.clone() } pub fn reqnot(&self) -> ReqNot<MS> { self.parent.clone() }
pub async fn handle<U: Request>(&self, _: &U, rep: &U::Response) -> Receipt<'a> { pub async fn handle<U: Request>(&self, _: &U, rep: &U::Response) -> Receipt<'a> {
@@ -82,11 +76,17 @@ impl<'a, MS: MsgSet + 'static> RequestHandle<'a, MS> {
response.encode(Pin::new(&mut buf)).await; response.encode(Pin::new(&mut buf)).await;
let mut send = clone_box(&*self.reqnot().0.lock().await.send); let mut send = clone_box(&*self.reqnot().0.lock().await.send);
(send)(&buf, self.parent.clone()).await; (send)(&buf, self.parent.clone()).await;
let deferred = mem::take(&mut *self.defer.borrow_mut());
for item in deferred {
item.await
}
Receipt(PhantomData) Receipt(PhantomData)
} }
} }
impl<MS: MsgSet> ReqHandlish for RequestHandle<'_, MS> { impl<MS: MsgSet> ReqHandlish for RequestHandle<'_, MS> {
fn defer_drop_objsafe(&self, val: Box<dyn Any>) { self.defer_drop.borrow_mut().push(val); } fn defer_objsafe(&self, val: Pin<Box<dyn Future<Output = ()>>>) {
self.defer.borrow_mut().push(val)
}
} }
impl<MS: MsgSet> Drop for RequestHandle<'_, MS> { impl<MS: MsgSet> Drop for RequestHandle<'_, MS> {
fn drop(&mut self) { fn drop(&mut self) {
@@ -102,7 +102,7 @@ pub struct ReqNotData<T: MsgSet> {
send: Box<dyn SendFn<T>>, send: Box<dyn SendFn<T>>,
notif: Box<dyn NotifFn<T>>, notif: Box<dyn NotifFn<T>>,
req: Box<dyn ReqFn<T>>, req: Box<dyn ReqFn<T>>,
responses: HashMap<u64, channel::Sender<Vec<u8>>>, responses: HashMap<u64, mpsc::Sender<Vec<u8>>>,
} }
/// Wraps a raw message buffer to save on copying. /// Wraps a raw message buffer to save on copying.
@@ -144,7 +144,7 @@ impl<T: MsgSet> ReqNot<T> {
let notif_val = <T::In as Channel>::Notif::decode(Pin::new(&mut &payload[..])).await; let notif_val = <T::In as Channel>::Notif::decode(Pin::new(&mut &payload[..])).await;
notif_cb(notif_val, self.clone()).await notif_cb(notif_val, self.clone()).await
} else if 0 < id.bitand(1 << 63) { } else if 0 < id.bitand(1 << 63) {
let sender = g.responses.remove(&!id).expect("Received response for invalid message"); let mut sender = g.responses.remove(&!id).expect("Received response for invalid message");
sender.send(message.to_vec()).await.unwrap() sender.send(message.to_vec()).await.unwrap()
} else { } else {
let message = <T::In as Channel>::Req::decode(Pin::new(&mut &payload[..])).await; let message = <T::In as Channel>::Req::decode(Pin::new(&mut &payload[..])).await;
@@ -205,13 +205,13 @@ impl<T: MsgSet> DynRequester for ReqNot<T> {
g.id += 1; g.id += 1;
let mut buf = id.to_be_bytes().to_vec(); let mut buf = id.to_be_bytes().to_vec();
req.encode(Pin::new(&mut buf)).await; req.encode(Pin::new(&mut buf)).await;
let (send, recv) = channel::bounded(1); let (send, mut recv) = mpsc::channel(1);
g.responses.insert(id, send); g.responses.insert(id, send);
let mut send = clone_box(&*g.send); let mut send = clone_box(&*g.send);
mem::drop(g); mem::drop(g);
let rn = self.clone(); let rn = self.clone();
send(&buf, rn).await; send(&buf, rn).await;
let items = recv.recv().await; let items = recv.next().await;
RawReply(items.unwrap()) RawReply(items.unwrap())
}) })
} }
@@ -235,7 +235,10 @@ impl<This: DynRequester + ?Sized> Requester for This {
async fn request<R: Request + Into<Self::Transfer>>(&self, data: R) -> R::Response { async fn request<R: Request + Into<Self::Transfer>>(&self, data: R) -> R::Response {
let req = format!("{data:?}"); let req = format!("{data:?}");
let rep = R::Response::decode(Pin::new(&mut &self.raw_request(data.into()).await[..])).await; let rep = R::Response::decode(Pin::new(&mut &self.raw_request(data.into()).await[..])).await;
let req_str = req.to_string();
if !req_str.starts_with("AtomPrint") && !req_str.starts_with("ExtAtomPrint") {
writeln!(self.logger(), "Request {req} got response {rep:?}"); writeln!(self.logger(), "Request {req} got response {rep:?}");
}
rep rep
} }
} }
@@ -249,17 +252,16 @@ mod test {
use std::rc::Rc; use std::rc::Rc;
use std::sync::Arc; use std::sync::Arc;
use async_std::sync::Mutex;
use futures::FutureExt; use futures::FutureExt;
use orchid_api::LogStrategy; use futures::lock::Mutex;
use orchid_api_derive::Coding; use orchid_api_derive::Coding;
use orchid_api_traits::{Channel, Request}; use orchid_api_traits::{Channel, Request};
use test_executors::spin_on; use test_executors::spin_on;
use super::{MsgSet, ReqNot}; use super::{MsgSet, ReqNot};
use crate::clone;
use crate::logging::Logger; use crate::logging::Logger;
use crate::reqnot::Requester as _; use crate::reqnot::Requester as _;
use crate::{api, clone};
#[derive(Clone, Debug, Coding, PartialEq)] #[derive(Clone, Debug, Coding, PartialEq)]
pub struct TestReq(u8); pub struct TestReq(u8);
@@ -282,7 +284,7 @@ mod test {
#[test] #[test]
fn notification() { fn notification() {
spin_on(async { spin_on(async {
let logger = Logger::new(LogStrategy::StdErr); let logger = Logger::new(api::LogStrategy::StdErr);
let received = Arc::new(Mutex::new(None)); let received = Arc::new(Mutex::new(None));
let receiver = ReqNot::<TestMsgSet>::new( let receiver = ReqNot::<TestMsgSet>::new(
logger.clone(), logger.clone(),
@@ -310,7 +312,7 @@ mod test {
#[test] #[test]
fn request() { fn request() {
spin_on(async { spin_on(async {
let logger = Logger::new(LogStrategy::StdErr); let logger = Logger::new(api::LogStrategy::StdErr);
let receiver = Rc::new(Mutex::<Option<ReqNot<TestMsgSet>>>::new(None)); let receiver = Rc::new(Mutex::<Option<ReqNot<TestMsgSet>>>::new(None));
let sender = Rc::new(ReqNot::<TestMsgSet>::new( let sender = Rc::new(ReqNot::<TestMsgSet>::new(
logger.clone(), logger.clone(),

View File

@@ -4,7 +4,7 @@ use std::future::Future;
use std::marker::PhantomData; use std::marker::PhantomData;
use std::rc::Rc; use std::rc::Rc;
use async_stream::stream; use async_fn_stream::stream;
use futures::future::join_all; use futures::future::join_all;
use futures::{FutureExt, StreamExt}; use futures::{FutureExt, StreamExt};
use itertools::Itertools; use itertools::Itertools;
@@ -16,7 +16,7 @@ use crate::error::OrcErrv;
use crate::format::{FmtCtx, FmtUnit, Format, Variants}; use crate::format::{FmtCtx, FmtUnit, Format, Variants};
use crate::interner::{Interner, Tok}; use crate::interner::{Interner, Tok};
use crate::location::{Pos, SrcRange}; use crate::location::{Pos, SrcRange};
use crate::name::Sym; use crate::name::{Sym, VName, VPath};
use crate::parse::Snippet; use crate::parse::Snippet;
use crate::{api, match_mapping, tl_cache}; use crate::{api, match_mapping, tl_cache};
@@ -62,8 +62,7 @@ pub fn recur<H: ExprRepr, X: ExtraTok>(
tok @ (Token::BR | Token::Bottom(_) | Token::Comment(_) | Token::Name(_)) => tok, tok @ (Token::BR | Token::Bottom(_) | Token::Comment(_) | Token::Name(_)) => tok,
tok @ (Token::Handle(_) | Token::NewExpr(_)) => tok, tok @ (Token::Handle(_) | Token::NewExpr(_)) => tok,
Token::NS(n, b) => Token::NS(n, Box::new(recur(*b, f))), Token::NS(n, b) => Token::NS(n, Box::new(recur(*b, f))),
Token::LambdaHead(arg) => Token::LambdaHead(arg) => Token::LambdaHead(Box::new(recur(*arg, f))),
Token::LambdaHead(arg.into_iter().map(|tt| recur(tt, f)).collect_vec()),
Token::S(p, b) => Token::S(p, b.into_iter().map(|tt| recur(tt, f)).collect_vec()), Token::S(p, b) => Token::S(p, b.into_iter().map(|tt| recur(tt, f)).collect_vec()),
}; };
TokTree { sr: range, tok } TokTree { sr: range, tok }
@@ -117,7 +116,7 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
NS(n => Tok::from_api(*n, i).await, NS(n => Tok::from_api(*n, i).await,
b => Box::new(Self::from_api(b, hctx, xctx, src, i).boxed_local().await)), b => Box::new(Self::from_api(b, hctx, xctx, src, i).boxed_local().await)),
Bottom(e => OrcErrv::from_api(e, i).await), Bottom(e => OrcErrv::from_api(e, i).await),
LambdaHead(arg => ttv_from_api(arg, hctx, xctx, src, i).await), LambdaHead(arg => Box::new(Self::from_api(arg, hctx, xctx, src, i).boxed_local().await)),
Name(n => Tok::from_api(*n, i).await), Name(n => Tok::from_api(*n, i).await),
S(*par, b => ttv_from_api(b, hctx, xctx, src, i).await), S(*par, b => ttv_from_api(b, hctx, xctx, src, i).await),
Comment(c.clone()), Comment(c.clone()),
@@ -137,7 +136,7 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
NS(n.to_api(), b => Box::new(b.into_api(hctx, xctx).boxed_local().await)), NS(n.to_api(), b => Box::new(b.into_api(hctx, xctx).boxed_local().await)),
Bottom(e.to_api()), Bottom(e.to_api()),
Comment(c.clone()), Comment(c.clone()),
LambdaHead(arg => ttv_into_api(arg, hctx, xctx).boxed_local().await), LambdaHead(arg => Box::new(arg.into_api(hctx, xctx).boxed_local().await)),
Name(nn.to_api()), Name(nn.to_api()),
S(p, b => ttv_into_api(b, hctx, xctx).boxed_local().await), S(p, b => ttv_into_api(b, hctx, xctx).boxed_local().await),
Handle(hand.into_api(hctx).await), Handle(hand.into_api(hctx).await),
@@ -150,21 +149,35 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
pub fn as_name(&self) -> Option<Tok<String>> { pub fn as_name(&self) -> Option<Tok<String>> {
if let Token::Name(n) = &self.tok { Some(n.clone()) } else { None } if let Token::Name(n) = &self.tok { Some(n.clone()) } else { None }
} }
pub fn as_multiname(&self) -> Result<VName, &TokTree<H, X>> {
let mut segs = VPath::new([]);
let mut cur = self;
loop {
match &cur.tok {
Token::Name(last) => return Ok(segs.name_with_suffix(last.clone())),
Token::NS(seg, inner) => {
segs = segs.suffix([seg.clone()]);
cur = inner;
},
_ => return Err(cur),
}
}
}
pub fn as_s(&self, par: Paren) -> Option<Snippet<'_, H, X>> { pub fn as_s(&self, par: Paren) -> Option<Snippet<'_, H, X>> {
self.tok.as_s(par).map(|slc| Snippet::new(self, slc)) self.tok.as_s(par).map(|slc| Snippet::new(self, slc))
} }
pub fn as_lambda(&self) -> Option<Snippet<'_, H, X>> { pub fn as_lambda(&self) -> Option<&Self> {
match &self.tok { match &self.tok {
Token::LambdaHead(arg) => Some(Snippet::new(self, arg)), Token::LambdaHead(arg) => Some(&**arg),
_ => None, _ => None,
} }
} }
pub fn is_fluff(&self) -> bool { matches!(self.tok, Token::Comment(_) | Token::BR) } pub fn is_fluff(&self) -> bool { matches!(self.tok, Token::Comment(_) | Token::BR) }
pub fn lambda(arg: Vec<Self>, mut body: Vec<Self>) -> Self { pub fn lambda(arg: Self, mut body: Vec<Self>) -> Self {
let arg_range = ttv_range(&arg).expect("Lambda with empty arg!"); let arg_range = arg.sr();
let mut s_range = arg_range.clone(); let mut s_range = arg_range.clone();
s_range.range.end = body.last().expect("Lambda with empty body!").sr.range.end; s_range.range.end = body.last().expect("Lambda with empty body!").sr.range.end;
body.insert(0, Token::LambdaHead(arg).at(arg_range)); body.insert(0, Token::LambdaHead(Box::new(arg)).at(arg_range));
Token::S(Paren::Round, body).at(s_range) Token::S(Paren::Round, body).at(s_range)
} }
pub fn sr(&self) -> SrcRange { self.sr.clone() } pub fn sr(&self) -> SrcRange { self.sr.clone() }
@@ -182,11 +195,11 @@ pub async fn ttv_from_api<H: ExprRepr, X: ExtraTok>(
src: &Sym, src: &Sym,
i: &Interner, i: &Interner,
) -> Vec<TokTree<H, X>> { ) -> Vec<TokTree<H, X>> {
stream! { stream(async |mut cx| {
for tok in tokv { for tok in tokv {
yield TokTree::<H, X>::from_api(tok.borrow(), hctx, xctx, src, i).boxed_local().await cx.emit(TokTree::<H, X>::from_api(tok.borrow(), hctx, xctx, src, i).boxed_local().await).await
}
} }
})
.collect() .collect()
.await .await
} }
@@ -196,11 +209,11 @@ pub async fn ttv_into_api<H: ExprRepr, X: ExtraTok>(
hctx: &mut H::ToApiCtx<'_>, hctx: &mut H::ToApiCtx<'_>,
xctx: &mut X::ToApiCtx<'_>, xctx: &mut X::ToApiCtx<'_>,
) -> Vec<api::TokenTree> { ) -> Vec<api::TokenTree> {
stream! { stream(async |mut cx| {
for tok in tokv { for tok in tokv {
yield tok.into_api(hctx, xctx).await cx.emit(tok.into_api(hctx, xctx).await).await
}
} }
})
.collect() .collect()
.await .await
} }
@@ -230,7 +243,7 @@ pub enum Token<H: ExprRepr, X: ExtraTok> {
Comment(Rc<String>), Comment(Rc<String>),
/// The part of a lambda between `\` and `.` enclosing the argument. The body /// The part of a lambda between `\` and `.` enclosing the argument. The body
/// stretches to the end of the enclosing parens or the end of the const line /// stretches to the end of the enclosing parens or the end of the const line
LambdaHead(Vec<TokTree<H, X>>), LambdaHead(Box<TokTree<H, X>>),
/// A binding, operator, or a segment of a namespaced::name /// A binding, operator, or a segment of a namespaced::name
Name(Tok<String>), Name(Tok<String>),
/// A namespace prefix, like `my_ns::` followed by a token /// A namespace prefix, like `my_ns::` followed by a token
@@ -267,7 +280,7 @@ impl<H: ExprRepr, X: ExtraTok> Format for Token<H, X> {
Self::Comment(c) => format!("--[{c}]--").into(), Self::Comment(c) => format!("--[{c}]--").into(),
Self::LambdaHead(arg) => Self::LambdaHead(arg) =>
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("\\{0b}."))) tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("\\{0b}.")))
.units([ttv_fmt(arg, c).await]), .units([arg.print(c).boxed_local().await]),
Self::NS(n, b) => tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{0}::{1l}"))) Self::NS(n, b) => tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{0}::{1l}")))
.units([n.to_string().into(), b.print(c).boxed_local().await]), .units([n.to_string().into(), b.print(c).boxed_local().await]),
Self::Name(n) => format!("{n}").into(), Self::Name(n) => format!("{n}").into(),

View File

@@ -6,28 +6,36 @@ edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
ahash = "0.8.11" async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
async-lock = "3.4.1"
async-once-cell = "0.5.4" async-once-cell = "0.5.4"
async-std = "1.13.0"
async-stream = "0.3.6"
derive_destructure = "1.0.0" derive_destructure = "1.0.0"
dyn-clone = "1.0.17" dyn-clone = "1.0.20"
futures = "0.3.31" futures = { version = "0.3.31", features = [
hashbrown = "0.15.2" "std",
"async-await",
], default-features = false }
hashbrown = "0.16.0"
include_dir = { version = "0.7.4", optional = true } include_dir = { version = "0.7.4", optional = true }
itertools = "0.14.0" itertools = "0.14.0"
konst = "0.3.16" konst = "0.4.1"
lazy_static = "1.5.0" lazy_static = "1.5.0"
memo-map = "0.3.3" memo-map = "0.3.3"
never = "0.1.0" never = "0.1.0"
once_cell = "1.20.2" once_cell = "1.21.3"
orchid-api = { version = "0.1.0", path = "../orchid-api" } orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" } orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-base = { version = "0.1.0", path = "../orchid-base" } orchid-base = { version = "0.1.0", path = "../orchid-base" }
ordered-float = "5.0.0" ordered-float = "5.0.0"
pastey = "0.1.0" pastey = "0.1.1"
some_executor = "0.5.1" some_executor = "0.6.1"
substack = "1.1.1" substack = "1.1.1"
tokio = { version = "1.46.1", optional = true } tokio = { version = "1.47.1", optional = true, features = [] }
tokio-util = { version = "0.7.16", optional = true, features = ["compat"] }
trait-set = "0.3.0" trait-set = "0.3.0"
[features]
tokio = ["dep:tokio", "dep:tokio-util"]
default = ["tokio"]

View File

@@ -1,4 +1,5 @@
use std::any::{Any, TypeId, type_name}; use std::any::{Any, TypeId, type_name};
use std::collections::HashMap;
use std::fmt; use std::fmt;
use std::future::Future; use std::future::Future;
use std::num::NonZeroU32; use std::num::NonZeroU32;
@@ -6,16 +7,13 @@ use std::ops::Deref;
use std::pin::Pin; use std::pin::Pin;
use std::rc::Rc; use std::rc::Rc;
use ahash::HashMap;
use async_std::io::{Read, Write};
use async_std::stream;
use dyn_clone::{DynClone, clone_box}; use dyn_clone::{DynClone, clone_box};
use futures::future::LocalBoxFuture; use futures::future::LocalBoxFuture;
use futures::{FutureExt, StreamExt}; use futures::{AsyncRead, AsyncWrite, FutureExt, StreamExt, stream};
use orchid_api_derive::Coding; use orchid_api_derive::Coding;
use orchid_api_traits::{Coding, Decode, Encode, Request, enc_vec}; use orchid_api_traits::{Coding, Decode, Encode, Request, enc_vec};
use orchid_base::clone; use orchid_base::clone;
use orchid_base::error::{OrcErr, OrcRes, mk_err}; use orchid_base::error::{OrcErrv, OrcRes, mk_errv, mk_errv_floating};
use orchid_base::format::{FmtCtx, FmtUnit, Format}; use orchid_base::format::{FmtCtx, FmtUnit, Format};
use orchid_base::interner::Interner; use orchid_base::interner::Interner;
use orchid_base::location::Pos; use orchid_base::location::Pos;
@@ -24,6 +22,7 @@ use orchid_base::reqnot::Requester;
use trait_set::trait_set; use trait_set::trait_set;
use crate::api; use crate::api;
use crate::conv::ToExpr;
// use crate::error::{ProjectError, ProjectResult}; // use crate::error::{ProjectError, ProjectResult};
use crate::expr::{Expr, ExprData, ExprHandle, ExprKind}; use crate::expr::{Expr, ExprData, ExprHandle, ExprKind};
use crate::gen_expr::GExpr; use crate::gen_expr::GExpr;
@@ -92,7 +91,7 @@ pub struct ForeignAtom {
} }
impl ForeignAtom { impl ForeignAtom {
pub fn pos(&self) -> Pos { self.pos.clone() } pub fn pos(&self) -> Pos { self.pos.clone() }
pub fn ctx(&self) -> SysCtx { self.expr.ctx.clone() } pub fn ctx(&self) -> &SysCtx { &self.expr.ctx }
pub fn ex(self) -> Expr { pub fn ex(self) -> Expr {
let (handle, pos) = (self.expr.clone(), self.pos.clone()); let (handle, pos) = (self.expr.clone(), self.pos.clone());
let data = ExprData { pos, kind: ExprKind::Atom(ForeignAtom { ..self }) }; let data = ExprData { pos, kind: ExprKind::Atom(ForeignAtom { ..self }) };
@@ -110,6 +109,9 @@ impl ForeignAtom {
.await?; .await?;
Some(M::Response::decode(Pin::new(&mut &rep[..])).await) Some(M::Response::decode(Pin::new(&mut &rep[..])).await)
} }
pub async fn downcast<T: AtomicFeatures>(self) -> Result<TypAtom<T>, NotTypAtom> {
TypAtom::downcast(self.ex().handle()).await
}
} }
impl fmt::Display for ForeignAtom { impl fmt::Display for ForeignAtom {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Atom::{:?}", self.atom) } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Atom::{:?}", self.atom) }
@@ -122,6 +124,9 @@ impl Format for ForeignAtom {
FmtUnit::from_api(&self.ctx().reqnot().request(api::ExtAtomPrint(self.atom.clone())).await) FmtUnit::from_api(&self.ctx().reqnot().request(api::ExtAtomPrint(self.atom.clone())).await)
} }
} }
impl ToExpr for ForeignAtom {
async fn to_expr(self) -> GExpr { self.ex().to_expr().await }
}
pub struct NotTypAtom { pub struct NotTypAtom {
pub pos: Pos, pub pos: Pos,
@@ -130,11 +135,11 @@ pub struct NotTypAtom {
pub ctx: SysCtx, pub ctx: SysCtx,
} }
impl NotTypAtom { impl NotTypAtom {
pub async fn mk_err(&self) -> OrcErr { pub async fn mk_err(&self) -> OrcErrv {
mk_err( mk_errv(
self.ctx.i().i("Not the expected type").await, self.ctx.i().i("Not the expected type").await,
format!("This expression is not a {}", self.typ.name()), format!("This expression is not a {}", self.typ.name()),
[self.pos.clone().into()], [self.pos.clone()],
) )
} }
} }
@@ -150,8 +155,8 @@ trait_set! {
trait AtomReqCb<A> = for<'a> Fn( trait AtomReqCb<A> = for<'a> Fn(
&'a A, &'a A,
SysCtx, SysCtx,
Pin<&'a mut dyn Read>, Pin<&'a mut dyn AsyncRead>,
Pin<&'a mut dyn Write>, Pin<&'a mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, ()> ) -> LocalBoxFuture<'a, ()>
} }
@@ -166,17 +171,19 @@ impl<A: AtomCard> MethodSetBuilder<A> {
assert!(!M::NAME.is_empty(), "AtomMethod::NAME cannoot be empty"); assert!(!M::NAME.is_empty(), "AtomMethod::NAME cannoot be empty");
self.handlers.push(( self.handlers.push((
M::NAME, M::NAME,
Rc::new(move |a: &A, ctx: SysCtx, req: Pin<&mut dyn Read>, rep: Pin<&mut dyn Write>| { Rc::new(
move |a: &A, ctx: SysCtx, req: Pin<&mut dyn AsyncRead>, rep: Pin<&mut dyn AsyncWrite>| {
async { Supports::<M>::handle(a, ctx, M::decode(req).await).await.encode(rep).await } async { Supports::<M>::handle(a, ctx, M::decode(req).await).await.encode(rep).await }
.boxed_local() .boxed_local()
}), },
),
)); ));
self self
} }
pub async fn pack(&self, ctx: SysCtx) -> MethodSet<A> { pub async fn pack(&self, ctx: SysCtx) -> MethodSet<A> {
MethodSet { MethodSet {
handlers: stream::from_iter(self.handlers.iter()) handlers: stream::iter(self.handlers.iter())
.then(|(k, v)| { .then(|(k, v)| {
clone!(ctx; async move { clone!(ctx; async move {
(Sym::parse(k, ctx.i()).await.unwrap(), v.clone()) (Sym::parse(k, ctx.i()).await.unwrap(), v.clone())
@@ -197,8 +204,8 @@ impl<A: AtomCard> MethodSet<A> {
atom: &'a A, atom: &'a A,
ctx: SysCtx, ctx: SysCtx,
key: Sym, key: Sym,
req: Pin<&'a mut dyn Read>, req: Pin<&'a mut dyn AsyncRead>,
rep: Pin<&'a mut dyn Write>, rep: Pin<&'a mut dyn AsyncWrite>,
) -> bool { ) -> bool {
match self.handlers.get(&key) { match self.handlers.get(&key) {
None => false, None => false,
@@ -216,10 +223,12 @@ impl<A: AtomCard> Default for MethodSetBuilder<A> {
#[derive(Clone)] #[derive(Clone)]
pub struct TypAtom<A: AtomicFeatures> { pub struct TypAtom<A: AtomicFeatures> {
pub data: ForeignAtom, pub untyped: ForeignAtom,
pub value: A::Data, pub value: A::Data,
} }
impl<A: AtomicFeatures> TypAtom<A> { impl<A: AtomicFeatures> TypAtom<A> {
pub fn ctx(&self) -> &SysCtx { self.untyped.ctx() }
pub fn i(&self) -> &Interner { self.ctx().i() }
pub async fn downcast(expr: Rc<ExprHandle>) -> Result<Self, NotTypAtom> { pub async fn downcast(expr: Rc<ExprHandle>) -> Result<Self, NotTypAtom> {
match Expr::from_handle(expr).atom().await { match Expr::from_handle(expr).atom().await {
Err(expr) => Err(NotTypAtom { Err(expr) => Err(NotTypAtom {
@@ -242,9 +251,9 @@ impl<A: AtomicFeatures> TypAtom<A> {
pub async fn request<M: AtomMethod>(&self, req: M) -> M::Response pub async fn request<M: AtomMethod>(&self, req: M) -> M::Response
where A: Supports<M> { where A: Supports<M> {
M::Response::decode(Pin::new( M::Response::decode(Pin::new(
&mut &(self.data.ctx().reqnot().request(api::Fwd( &mut &(self.untyped.ctx().reqnot().request(api::Fwd(
self.data.atom.clone(), self.untyped.atom.clone(),
Sym::parse(M::NAME, self.data.ctx().i()).await.unwrap().tok().to_api(), Sym::parse(M::NAME, self.untyped.ctx().i()).await.unwrap().tok().to_api(),
enc_vec(&req).await, enc_vec(&req).await,
))) )))
.await .await
@@ -257,6 +266,9 @@ impl<A: AtomicFeatures> Deref for TypAtom<A> {
type Target = A::Data; type Target = A::Data;
fn deref(&self) -> &Self::Target { &self.value } fn deref(&self) -> &Self::Target { &self.value }
} }
impl<A: AtomicFeatures> ToExpr for TypAtom<A> {
async fn to_expr(self) -> GExpr { self.untyped.to_expr().await }
}
pub struct AtomCtx<'a>(pub &'a [u8], pub Option<api::AtomId>, pub SysCtx); pub struct AtomCtx<'a>(pub &'a [u8], pub Option<api::AtomId>, pub SysCtx);
impl FmtCtx for AtomCtx<'_> { impl FmtCtx for AtomCtx<'_> {
@@ -274,14 +286,14 @@ pub trait AtomDynfo: 'static {
&'a self, &'a self,
ctx: AtomCtx<'a>, ctx: AtomCtx<'a>,
key: Sym, key: Sym,
req: Pin<&'b mut dyn Read>, req: Pin<&'b mut dyn AsyncRead>,
rep: Pin<&'c mut dyn Write>, rep: Pin<&'c mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, bool>; ) -> LocalBoxFuture<'a, bool>;
fn command<'a>(&'a self, ctx: AtomCtx<'a>) -> LocalBoxFuture<'a, OrcRes<Option<GExpr>>>; fn command<'a>(&'a self, ctx: AtomCtx<'a>) -> LocalBoxFuture<'a, OrcRes<Option<GExpr>>>;
fn serialize<'a, 'b: 'a>( fn serialize<'a, 'b: 'a>(
&'a self, &'a self,
ctx: AtomCtx<'a>, ctx: AtomCtx<'a>,
write: Pin<&'b mut dyn Write>, write: Pin<&'b mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, Option<Vec<Expr>>>; ) -> LocalBoxFuture<'a, Option<Vec<Expr>>>;
fn deserialize<'a>( fn deserialize<'a>(
&'a self, &'a self,
@@ -317,10 +329,10 @@ impl Format for AtomFactory {
} }
} }
pub async fn err_not_callable(i: &Interner) -> OrcErr { pub async fn err_not_callable(i: &Interner) -> OrcErrv {
mk_err(i.i("This atom is not callable").await, "Attempted to apply value as function", []) mk_errv_floating(i.i("This atom is not callable").await, "Attempted to apply value as function")
} }
pub async fn err_not_command(i: &Interner) -> OrcErr { pub async fn err_not_command(i: &Interner) -> OrcErrv {
mk_err(i.i("This atom is not a command").await, "Settled on an inactionable value", []) mk_errv_floating(i.i("This atom is not a command").await, "Settled on an inactionable value")
} }

View File

@@ -6,24 +6,23 @@ use std::ops::Deref;
use std::pin::Pin; use std::pin::Pin;
use std::sync::atomic::AtomicU64; use std::sync::atomic::AtomicU64;
use async_lock::{RwLock, RwLockReadGuard};
use async_once_cell::OnceCell; use async_once_cell::OnceCell;
use async_std::io::{Read, Write}; use dyn_clone::{DynClone, clone_box};
use async_std::sync::{RwLock, RwLockReadGuard};
use futures::FutureExt;
use futures::future::{LocalBoxFuture, ready}; use futures::future::{LocalBoxFuture, ready};
use futures::{AsyncRead, AsyncWrite, FutureExt};
use itertools::Itertools; use itertools::Itertools;
use memo_map::MemoMap; use memo_map::MemoMap;
use never::Never; use never::Never;
use orchid_api::AtomId;
use orchid_api_traits::{Decode, Encode, enc_vec}; use orchid_api_traits::{Decode, Encode, enc_vec};
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use orchid_base::format::{FmtCtx, FmtCtxImpl, FmtUnit}; use orchid_base::format::{FmtCtx, FmtCtxImpl, FmtUnit, take_first};
use orchid_base::name::Sym; use orchid_base::name::Sym;
use crate::api; use crate::api;
use crate::atom::{ use crate::atom::{
AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet, AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
MethodSetBuilder, err_not_callable, err_not_command, get_info, MethodSetBuilder, TypAtom, err_not_callable, err_not_command, get_info,
}; };
use crate::expr::Expr; use crate::expr::Expr;
use crate::gen_expr::{GExpr, bot}; use crate::gen_expr::{GExpr, bot};
@@ -41,9 +40,10 @@ impl<A: OwnedAtom + Atomic<Variant = OwnedVariant>> AtomicFeaturesImpl<OwnedVari
let (typ_id, _) = get_info::<A>(ctx.get::<CtedObj>().inst().card()); let (typ_id, _) = get_info::<A>(ctx.get::<CtedObj>().inst().card());
let mut data = enc_vec(&typ_id).await; let mut data = enc_vec(&typ_id).await;
self.encode(Pin::<&mut Vec<u8>>::new(&mut data)).await; self.encode(Pin::<&mut Vec<u8>>::new(&mut data)).await;
ctx.get_or_default::<ObjStore>().objects.read().await.insert(atom_id, Box::new(self)); let g = ctx.get_or_default::<ObjStore>().objects.read().await;
eprintln!("Created atom {:?} of type {}", atom_id, type_name::<A>()); g.insert(atom_id, Box::new(self));
api::Atom { drop: Some(atom_id), data, owner: ctx.sys_id() } std::mem::drop(g);
api::Atom { drop: Some(atom_id), data: api::AtomData(data), owner: ctx.sys_id() }
}) })
} }
fn _info() -> Self::_Info { OwnedAtomDynfo { msbuild: A::reg_reqs(), ms: OnceCell::new() } } fn _info() -> Self::_Info { OwnedAtomDynfo { msbuild: A::reg_reqs(), ms: OnceCell::new() } }
@@ -53,13 +53,15 @@ impl<A: OwnedAtom + Atomic<Variant = OwnedVariant>> AtomicFeaturesImpl<OwnedVari
/// While an atom read guard is held, no atom can be removed. /// While an atom read guard is held, no atom can be removed.
pub(crate) struct AtomReadGuard<'a> { pub(crate) struct AtomReadGuard<'a> {
id: api::AtomId, id: api::AtomId,
guard: RwLockReadGuard<'a, MemoMap<AtomId, Box<dyn DynOwnedAtom>>>, guard: RwLockReadGuard<'a, MemoMap<api::AtomId, Box<dyn DynOwnedAtom>>>,
} }
impl<'a> AtomReadGuard<'a> { impl<'a> AtomReadGuard<'a> {
async fn new(id: api::AtomId, ctx: &'a SysCtx) -> Self { async fn new(id: api::AtomId, ctx: &'a SysCtx) -> Self {
let guard = ctx.get_or_default::<ObjStore>().objects.read().await; let guard = ctx.get_or_default::<ObjStore>().objects.read().await;
if guard.get(&id).is_none() {
let valid = guard.iter().map(|i| i.0).collect_vec(); let valid = guard.iter().map(|i| i.0).collect_vec();
assert!(guard.get(&id).is_some(), "Received invalid atom ID: {id:?} not in {valid:?}"); panic!("Received invalid atom ID: {id:?} not in {valid:?}");
}
Self { id, guard } Self { id, guard }
} }
} }
@@ -68,6 +70,7 @@ impl Deref for AtomReadGuard<'_> {
fn deref(&self) -> &Self::Target { &**self.guard.get(&self.id).unwrap() } fn deref(&self) -> &Self::Target { &**self.guard.get(&self.id).unwrap() }
} }
/// Remove an atom from the store
pub(crate) async fn take_atom(id: api::AtomId, ctx: &SysCtx) -> Box<dyn DynOwnedAtom> { pub(crate) async fn take_atom(id: api::AtomId, ctx: &SysCtx) -> Box<dyn DynOwnedAtom> {
let mut g = ctx.get_or_default::<ObjStore>().objects.write().await; let mut g = ctx.get_or_default::<ObjStore>().objects.write().await;
g.remove(&id).unwrap_or_else(|| panic!("Received invalid atom ID: {}", id.0)) g.remove(&id).unwrap_or_else(|| panic!("Received invalid atom ID: {}", id.0))
@@ -104,8 +107,8 @@ impl<T: OwnedAtom> AtomDynfo for OwnedAtomDynfo<T> {
&'a self, &'a self,
AtomCtx(_, id, ctx): AtomCtx, AtomCtx(_, id, ctx): AtomCtx,
key: Sym, key: Sym,
req: Pin<&'b mut dyn Read>, req: Pin<&'b mut dyn AsyncRead>,
rep: Pin<&'c mut dyn Write>, rep: Pin<&'c mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, bool> { ) -> LocalBoxFuture<'a, bool> {
Box::pin(async move { Box::pin(async move {
let a = AtomReadGuard::new(id.unwrap(), &ctx).await; let a = AtomReadGuard::new(id.unwrap(), &ctx).await;
@@ -125,7 +128,7 @@ impl<T: OwnedAtom> AtomDynfo for OwnedAtomDynfo<T> {
fn serialize<'a, 'b: 'a>( fn serialize<'a, 'b: 'a>(
&'a self, &'a self,
AtomCtx(_, id, ctx): AtomCtx<'a>, AtomCtx(_, id, ctx): AtomCtx<'a>,
mut write: Pin<&'b mut dyn Write>, mut write: Pin<&'b mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, Option<Vec<Expr>>> { ) -> LocalBoxFuture<'a, Option<Vec<Expr>>> {
Box::pin(async move { Box::pin(async move {
let id = id.unwrap(); let id = id.unwrap();
@@ -216,7 +219,7 @@ pub trait OwnedAtom: Atomic<Variant = OwnedVariant> + Any + Clone + 'static {
fn val(&self) -> impl Future<Output = Cow<'_, Self::Data>>; fn val(&self) -> impl Future<Output = Cow<'_, Self::Data>>;
#[allow(unused_variables)] #[allow(unused_variables)]
fn call_ref(&self, arg: Expr) -> impl Future<Output = GExpr> { fn call_ref(&self, arg: Expr) -> impl Future<Output = GExpr> {
async move { bot([err_not_callable(arg.ctx().i()).await]) } async move { bot(err_not_callable(arg.ctx().i()).await) }
} }
fn call(self, arg: Expr) -> impl Future<Output = GExpr> { fn call(self, arg: Expr) -> impl Future<Output = GExpr> {
async { async {
@@ -228,19 +231,19 @@ pub trait OwnedAtom: Atomic<Variant = OwnedVariant> + Any + Clone + 'static {
} }
#[allow(unused_variables)] #[allow(unused_variables)]
fn command(self, ctx: SysCtx) -> impl Future<Output = OrcRes<Option<GExpr>>> { fn command(self, ctx: SysCtx) -> impl Future<Output = OrcRes<Option<GExpr>>> {
async move { Err(err_not_command(ctx.i()).await.into()) } async move { Err(err_not_command(ctx.i()).await) }
} }
#[allow(unused_variables)] #[allow(unused_variables)]
fn free(self, ctx: SysCtx) -> impl Future<Output = ()> { async {} } fn free(self, ctx: SysCtx) -> impl Future<Output = ()> { async {} }
#[allow(unused_variables)] #[allow(unused_variables)]
fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> impl Future<Output = FmtUnit> { fn print_atom<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> impl Future<Output = FmtUnit> {
async { format!("OwnedAtom({})", type_name::<Self>()).into() } async { format!("OwnedAtom({})", type_name::<Self>()).into() }
} }
#[allow(unused_variables)] #[allow(unused_variables)]
fn serialize( fn serialize(
&self, &self,
ctx: SysCtx, ctx: SysCtx,
write: Pin<&mut (impl Write + ?Sized)>, write: Pin<&mut (impl AsyncWrite + ?Sized)>,
) -> impl Future<Output = Self::Refs> { ) -> impl Future<Output = Self::Refs> {
assert_serializable::<Self>(); assert_serializable::<Self>();
async { panic!("Either implement serialize or set Refs to Never for {}", type_name::<Self>()) } async { panic!("Either implement serialize or set Refs to Never for {}", type_name::<Self>()) }
@@ -259,10 +262,10 @@ fn assert_serializable<T: OwnedAtom>() {
assert_ne!(TypeId::of::<T::Refs>(), TypeId::of::<Never>(), "{MSG}"); assert_ne!(TypeId::of::<T::Refs>(), TypeId::of::<Never>(), "{MSG}");
} }
pub trait DynOwnedAtom: 'static { pub trait DynOwnedAtom: DynClone + 'static {
fn atom_tid(&self) -> TypeId; fn atom_tid(&self) -> TypeId;
fn as_any_ref(&self) -> &dyn Any; fn as_any_ref(&self) -> &dyn Any;
fn encode<'a>(&'a self, buffer: Pin<&'a mut dyn Write>) -> LocalBoxFuture<'a, ()>; fn encode<'a>(&'a self, buffer: Pin<&'a mut dyn AsyncWrite>) -> LocalBoxFuture<'a, ()>;
fn dyn_call_ref(&self, arg: Expr) -> LocalBoxFuture<'_, GExpr>; fn dyn_call_ref(&self, arg: Expr) -> LocalBoxFuture<'_, GExpr>;
fn dyn_call(self: Box<Self>, arg: Expr) -> LocalBoxFuture<'static, GExpr>; fn dyn_call(self: Box<Self>, arg: Expr) -> LocalBoxFuture<'static, GExpr>;
fn dyn_command(self: Box<Self>, ctx: SysCtx) -> LocalBoxFuture<'static, OrcRes<Option<GExpr>>>; fn dyn_command(self: Box<Self>, ctx: SysCtx) -> LocalBoxFuture<'static, OrcRes<Option<GExpr>>>;
@@ -271,13 +274,13 @@ pub trait DynOwnedAtom: 'static {
fn dyn_serialize<'a>( fn dyn_serialize<'a>(
&'a self, &'a self,
ctx: SysCtx, ctx: SysCtx,
sink: Pin<&'a mut dyn Write>, sink: Pin<&'a mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, Option<Vec<Expr>>>; ) -> LocalBoxFuture<'a, Option<Vec<Expr>>>;
} }
impl<T: OwnedAtom> DynOwnedAtom for T { impl<T: OwnedAtom> DynOwnedAtom for T {
fn atom_tid(&self) -> TypeId { TypeId::of::<T>() } fn atom_tid(&self) -> TypeId { TypeId::of::<T>() }
fn as_any_ref(&self) -> &dyn Any { self } fn as_any_ref(&self) -> &dyn Any { self }
fn encode<'a>(&'a self, buffer: Pin<&'a mut dyn Write>) -> LocalBoxFuture<'a, ()> { fn encode<'a>(&'a self, buffer: Pin<&'a mut dyn AsyncWrite>) -> LocalBoxFuture<'a, ()> {
async { self.val().await.as_ref().encode(buffer).await }.boxed_local() async { self.val().await.as_ref().encode(buffer).await }.boxed_local()
} }
fn dyn_call_ref(&self, arg: Expr) -> LocalBoxFuture<'_, GExpr> { fn dyn_call_ref(&self, arg: Expr) -> LocalBoxFuture<'_, GExpr> {
@@ -293,12 +296,12 @@ impl<T: OwnedAtom> DynOwnedAtom for T {
self.free(ctx).boxed_local() self.free(ctx).boxed_local()
} }
fn dyn_print(&self, ctx: SysCtx) -> LocalBoxFuture<'_, FmtUnit> { fn dyn_print(&self, ctx: SysCtx) -> LocalBoxFuture<'_, FmtUnit> {
async move { self.print(&FmtCtxImpl { i: ctx.i() }).await }.boxed_local() async move { self.print_atom(&FmtCtxImpl { i: ctx.i() }).await }.boxed_local()
} }
fn dyn_serialize<'a>( fn dyn_serialize<'a>(
&'a self, &'a self,
ctx: SysCtx, ctx: SysCtx,
sink: Pin<&'a mut dyn Write>, sink: Pin<&'a mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, Option<Vec<Expr>>> { ) -> LocalBoxFuture<'a, Option<Vec<Expr>>> {
match TypeId::of::<Never>() == TypeId::of::<<Self as OwnedAtom>::Refs>() { match TypeId::of::<Never>() == TypeId::of::<<Self as OwnedAtom>::Refs>() {
true => ready(None).boxed_local(), true => ready(None).boxed_local(),
@@ -308,8 +311,38 @@ impl<T: OwnedAtom> DynOwnedAtom for T {
} }
#[derive(Default)] #[derive(Default)]
struct ObjStore { pub(crate) struct ObjStore {
next_id: AtomicU64, pub(crate) next_id: AtomicU64,
objects: RwLock<MemoMap<api::AtomId, Box<dyn DynOwnedAtom>>>, pub(crate) objects: RwLock<MemoMap<api::AtomId, Box<dyn DynOwnedAtom>>>,
} }
impl SysCtxEntry for ObjStore {} impl SysCtxEntry for ObjStore {}
pub async fn own<A: OwnedAtom>(typ: TypAtom<A>) -> A {
let ctx = typ.untyped.ctx();
let g = ctx.get_or_default::<ObjStore>().objects.read().await;
let atom_id = typ.untyped.atom.drop.expect("Owned atoms always have a drop ID");
let dyn_atom =
g.get(&atom_id).expect("Atom ID invalid; atom type probably not owned by this crate");
dyn_atom.as_any_ref().downcast_ref().cloned().expect("The ID should imply a type as well")
}
pub async fn debug_print_obj_store(ctx: &SysCtx, show_atoms: bool) {
let store = ctx.get_or_default::<ObjStore>();
let keys = store.objects.read().await.keys().cloned().collect_vec();
let mut message = "Atoms in store:".to_string();
if !show_atoms {
message += &keys.iter().map(|k| format!(" {:?}", k)).join("");
} else {
for k in keys {
let g = store.objects.read().await;
let Some(atom) = g.get(&k) else {
message += &format!("\n{k:?} has since been deleted");
continue;
};
let atom = clone_box(&**atom);
std::mem::drop(g);
message += &format!("\n{k:?} -> {}", take_first(&atom.dyn_print(ctx.clone()).await, true));
}
}
eprintln!("{message}")
}

View File

@@ -3,9 +3,8 @@ use std::future::Future;
use std::pin::Pin; use std::pin::Pin;
use async_once_cell::OnceCell; use async_once_cell::OnceCell;
use async_std::io::{Read, Write};
use futures::FutureExt;
use futures::future::LocalBoxFuture; use futures::future::LocalBoxFuture;
use futures::{AsyncRead, AsyncWrite, FutureExt};
use orchid_api_traits::{Coding, enc_vec}; use orchid_api_traits::{Coding, enc_vec};
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use orchid_base::format::FmtUnit; use orchid_base::format::FmtUnit;
@@ -29,7 +28,7 @@ impl<A: ThinAtom + Atomic<Variant = ThinVariant>> AtomicFeaturesImpl<ThinVariant
let (id, _) = get_info::<A>(ctx.get::<CtedObj>().inst().card()); let (id, _) = get_info::<A>(ctx.get::<CtedObj>().inst().card());
let mut buf = enc_vec(&id).await; let mut buf = enc_vec(&id).await;
self.encode(Pin::new(&mut buf)).await; self.encode(Pin::new(&mut buf)).await;
api::Atom { drop: None, data: buf, owner: ctx.sys_id() } api::Atom { drop: None, data: api::AtomData(buf), owner: ctx.sys_id() }
}) })
} }
fn _info() -> Self::_Info { ThinAtomDynfo { msbuild: Self::reg_reqs(), ms: OnceCell::new() } } fn _info() -> Self::_Info { ThinAtomDynfo { msbuild: Self::reg_reqs(), ms: OnceCell::new() } }
@@ -59,8 +58,8 @@ impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
&'a self, &'a self,
AtomCtx(buf, _, sys): AtomCtx<'a>, AtomCtx(buf, _, sys): AtomCtx<'a>,
key: Sym, key: Sym,
req: Pin<&'m1 mut dyn Read>, req: Pin<&'m1 mut dyn AsyncRead>,
rep: Pin<&'m2 mut dyn Write>, rep: Pin<&'m2 mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, bool> { ) -> LocalBoxFuture<'a, bool> {
Box::pin(async move { Box::pin(async move {
let ms = self.ms.get_or_init(self.msbuild.pack(sys.clone())).await; let ms = self.ms.get_or_init(self.msbuild.pack(sys.clone())).await;
@@ -76,7 +75,7 @@ impl<T: ThinAtom> AtomDynfo for ThinAtomDynfo<T> {
fn serialize<'a, 'b: 'a>( fn serialize<'a, 'b: 'a>(
&'a self, &'a self,
ctx: AtomCtx<'a>, ctx: AtomCtx<'a>,
write: Pin<&'b mut dyn Write>, write: Pin<&'b mut dyn AsyncWrite>,
) -> LocalBoxFuture<'a, Option<Vec<Expr>>> { ) -> LocalBoxFuture<'a, Option<Vec<Expr>>> {
Box::pin(async { Box::pin(async {
T::decode(Pin::new(&mut &ctx.0[..])).await.encode(write).await; T::decode(Pin::new(&mut &ctx.0[..])).await.encode(write).await;
@@ -105,11 +104,11 @@ pub trait ThinAtom:
{ {
#[allow(unused_variables)] #[allow(unused_variables)]
fn call(&self, arg: Expr) -> impl Future<Output = GExpr> { fn call(&self, arg: Expr) -> impl Future<Output = GExpr> {
async move { bot([err_not_callable(arg.ctx().i()).await]) } async move { bot(err_not_callable(arg.ctx().i()).await) }
} }
#[allow(unused_variables)] #[allow(unused_variables)]
fn command(&self, ctx: SysCtx) -> impl Future<Output = OrcRes<Option<GExpr>>> { fn command(&self, ctx: SysCtx) -> impl Future<Output = OrcRes<Option<GExpr>>> {
async move { Err(err_not_command(ctx.i()).await.into()) } async move { Err(err_not_command(ctx.i()).await) }
} }
#[allow(unused_variables)] #[allow(unused_variables)]
fn print(&self, ctx: SysCtx) -> impl Future<Output = FmtUnit> { fn print(&self, ctx: SysCtx) -> impl Future<Output = FmtUnit> {

View File

@@ -1,13 +1,14 @@
use std::future::Future; use std::future::Future;
use orchid_base::error::{OrcErr, OrcRes, mk_err}; use never::Never;
use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
use orchid_base::interner::Interner; use orchid_base::interner::Interner;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use crate::atom::{AtomicFeatures, ToAtom, TypAtom}; use crate::atom::{AtomicFeatures, ForeignAtom, ToAtom, TypAtom};
use crate::expr::Expr; use crate::expr::Expr;
use crate::gen_expr::{GExpr, atom, bot}; use crate::gen_expr::{GExpr, atom, bot};
use crate::system::downcast_atom; use crate::system::{SysCtx, downcast_atom};
pub trait TryFromExpr: Sized { pub trait TryFromExpr: Sized {
fn try_from_expr(expr: Expr) -> impl Future<Output = OrcRes<Self>>; fn try_from_expr(expr: Expr) -> impl Future<Output = OrcRes<Self>>;
@@ -23,46 +24,61 @@ impl<T: TryFromExpr, U: TryFromExpr> TryFromExpr for (T, U) {
} }
} }
async fn err_not_atom(pos: Pos, i: &Interner) -> OrcErr { async fn err_not_atom(pos: Pos, i: &Interner) -> OrcErrv {
mk_err(i.i("Expected an atom").await, "This expression is not an atom", [pos.into()]) mk_errv(i.i("Expected an atom").await, "This expression is not an atom", [pos])
} }
async fn err_type(pos: Pos, i: &Interner) -> OrcErr { async fn err_type(pos: Pos, i: &Interner) -> OrcErrv {
mk_err(i.i("Type error").await, "The atom is a different type than expected", [pos.into()]) mk_errv(i.i("Type error").await, "The atom is a different type than expected", [pos])
}
impl TryFromExpr for ForeignAtom {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
match expr.atom().await {
Err(ex) => Err(err_not_atom(ex.data().await.pos.clone(), ex.ctx().i()).await),
Ok(f) => Ok(f),
}
}
} }
impl<A: AtomicFeatures> TryFromExpr for TypAtom<A> { impl<A: AtomicFeatures> TryFromExpr for TypAtom<A> {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> { async fn try_from_expr(expr: Expr) -> OrcRes<Self> {
match expr.atom().await { let f = ForeignAtom::try_from_expr(expr).await?;
Err(ex) => Err(err_not_atom(ex.data().await.pos.clone(), ex.ctx().i()).await.into()), match downcast_atom::<A>(f).await {
Ok(f) => match downcast_atom::<A>(f).await {
Ok(a) => Ok(a), Ok(a) => Ok(a),
Err(f) => Err(err_type(f.pos(), f.ctx().i()).await.into()), Err(f) => Err(err_type(f.pos(), f.ctx().i()).await),
},
} }
} }
} }
impl TryFromExpr for SysCtx {
async fn try_from_expr(expr: Expr) -> OrcRes<Self> { Ok(expr.ctx()) }
}
pub trait ToExpr { pub trait ToExpr {
fn to_expr(self) -> GExpr; fn to_expr(self) -> impl Future<Output = GExpr>;
} }
impl ToExpr for GExpr { impl ToExpr for GExpr {
fn to_expr(self) -> GExpr { self } async fn to_expr(self) -> GExpr { self }
} }
impl ToExpr for Expr { impl ToExpr for Expr {
fn to_expr(self) -> GExpr { self.slot() } async fn to_expr(self) -> GExpr { self.slot() }
} }
impl<T: ToExpr> ToExpr for OrcRes<T> { impl<T: ToExpr> ToExpr for OrcRes<T> {
fn to_expr(self) -> GExpr { async fn to_expr(self) -> GExpr {
match self { match self {
Err(e) => bot(e), Err(e) => bot(e),
Ok(t) => t.to_expr(), Ok(t) => t.to_expr().await,
} }
} }
} }
impl<A: ToAtom> ToExpr for A { impl<A: ToAtom> ToExpr for A {
fn to_expr(self) -> GExpr { atom(self) } async fn to_expr(self) -> GExpr { atom(self) }
}
impl ToExpr for Never {
async fn to_expr(self) -> GExpr { match self {} }
} }

View File

@@ -0,0 +1,104 @@
use std::borrow::Cow;
use std::marker::PhantomData;
use std::rc::Rc;
use futures::channel::mpsc::{Sender, channel};
use futures::lock::Mutex;
use futures::stream::{self, LocalBoxStream};
use futures::{FutureExt, SinkExt, StreamExt};
use never::Never;
use orchid_base::error::OrcRes;
use orchid_base::format::{FmtCtx, FmtUnit};
use crate::atom::Atomic;
use crate::atom_owned::{OwnedAtom, OwnedVariant};
use crate::conv::{ToExpr, TryFromExpr};
use crate::expr::Expr;
use crate::gen_expr::{GExpr, arg, call, lambda, seq};
enum Command {
Execute(GExpr, Sender<Expr>),
Register(GExpr, Sender<Expr>),
Halt(GExpr),
}
struct BuilderCoroutineData {
name: Option<String>,
receiver: Mutex<LocalBoxStream<'static, Command>>,
}
#[derive(Clone)]
struct BuilderCoroutine(Rc<BuilderCoroutineData>);
impl BuilderCoroutine {
pub async fn run(self) -> GExpr {
let cmd = self.0.receiver.lock().await.next().await;
match cmd {
None => panic!("Before the stream ends, we should have gotten a Halt"),
Some(Command::Halt(expr)) => expr,
Some(Command::Execute(expr, reply)) => call([
lambda(0, [seq([
arg(0),
call([Replier { reply, builder: self }.to_expr().await, arg(0)]),
])]),
expr,
]),
Some(Command::Register(expr, reply)) =>
call([Replier { reply, builder: self }.to_expr().await, expr]),
}
}
}
#[derive(Clone)]
pub struct Replier {
reply: Sender<Expr>,
builder: BuilderCoroutine,
}
impl Atomic for Replier {
type Data = ();
type Variant = OwnedVariant;
}
impl OwnedAtom for Replier {
type Refs = Never;
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
async fn call(mut self, arg: Expr) -> GExpr {
self.reply.send(arg).await.expect("What the heck");
std::mem::drop(self.reply);
self.builder.run().await
}
async fn print_atom<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
match &self.builder.0.name {
None => "BuilderCoroutine".into(),
Some(name) => format!("BuilderCoroutine({name})").into(),
}
}
}
pub async fn exec<R: ToExpr>(
debug: impl AsRef<str>,
f: impl for<'a> AsyncFnOnce(ExecHandle<'a>) -> R + 'static,
) -> GExpr {
let (cmd_snd, cmd_recv) = channel(0);
let halt = async { Command::Halt(f(ExecHandle(cmd_snd, PhantomData)).await.to_expr().await) }
.into_stream();
let coro = BuilderCoroutine(Rc::new(BuilderCoroutineData {
name: Some(debug.as_ref().to_string()),
receiver: Mutex::new(stream::select(halt, cmd_recv).boxed_local()),
}));
coro.run().await
}
static WEIRD_DROP_ERR: &str = "Coroutine dropped while we are being polled somehow";
pub struct ExecHandle<'a>(Sender<Command>, PhantomData<&'a ()>);
impl ExecHandle<'_> {
pub async fn exec<T: TryFromExpr>(&mut self, val: impl ToExpr) -> OrcRes<T> {
let (reply_snd, mut reply_recv) = channel(1);
self.0.send(Command::Execute(val.to_expr().await, reply_snd)).await.expect(WEIRD_DROP_ERR);
T::try_from_expr(reply_recv.next().await.expect(WEIRD_DROP_ERR)).await
}
pub async fn register(&mut self, val: impl ToExpr) -> Expr {
let (reply_snd, mut reply_recv) = channel(1);
self.0.send(Command::Register(val.to_expr().await, reply_snd)).await.expect(WEIRD_DROP_ERR);
reply_recv.next().await.expect(WEIRD_DROP_ERR)
}
}

View File

@@ -5,18 +5,18 @@ use std::num::NonZero;
use std::pin::Pin; use std::pin::Pin;
use std::rc::Rc; use std::rc::Rc;
use async_std::channel::{self, Receiver, Sender}; use async_lock::RwLock;
use async_std::stream; use futures::channel::mpsc::{Receiver, Sender, channel};
use async_std::sync::Mutex;
use futures::future::{LocalBoxFuture, join_all}; use futures::future::{LocalBoxFuture, join_all};
use futures::{FutureExt, StreamExt, stream_select}; use futures::lock::Mutex;
use futures::{FutureExt, SinkExt, StreamExt, stream, stream_select};
use hashbrown::HashMap; use hashbrown::HashMap;
use itertools::Itertools; use itertools::Itertools;
use orchid_api::{ExtMsgSet, IntReq};
use orchid_api_traits::{Decode, UnderRoot, enc_vec}; use orchid_api_traits::{Decode, UnderRoot, enc_vec};
use orchid_base::builtin::{ExtInit, ExtPort, Spawner}; use orchid_base::builtin::{ExtInit, ExtPort, Spawner};
use orchid_base::char_filter::{char_filter_match, char_filter_union, mk_char_filter}; use orchid_base::char_filter::{char_filter_match, char_filter_union, mk_char_filter};
use orchid_base::clone; use orchid_base::clone;
use orchid_base::error::Reporter;
use orchid_base::interner::{Interner, Tok}; use orchid_base::interner::{Interner, Tok};
use orchid_base::logging::Logger; use orchid_base::logging::Logger;
use orchid_base::name::Sym; use orchid_base::name::Sym;
@@ -29,12 +29,12 @@ use trait_set::trait_set;
use crate::api; use crate::api;
use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId}; use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId};
use crate::atom_owned::take_atom; use crate::atom_owned::take_atom;
use crate::expr::{Expr, ExprHandle}; use crate::expr::{BorrowedExprStore, Expr, ExprHandle};
use crate::lexer::{LexContext, err_cascade, err_not_applicable}; use crate::lexer::{LexContext, ekey_cascade, ekey_not_applicable};
use crate::parser::{ParsCtx, get_const, linev_into_api}; use crate::parser::{PTokTree, ParsCtx, get_const, linev_into_api};
use crate::system::{SysCtx, atom_by_idx}; use crate::system::{SysCtx, atom_by_idx};
use crate::system_ctor::{CtedObj, DynSystemCtor}; use crate::system_ctor::{CtedObj, DynSystemCtor};
use crate::tree::{GenTok, GenTokTree, LazyMemberFactory, TreeIntoApiCtxImpl}; use crate::tree::{LazyMemberFactory, TreeIntoApiCtxImpl};
pub type ExtReq<'a> = RequestHandle<'a, api::ExtMsgSet>; pub type ExtReq<'a> = RequestHandle<'a, api::ExtMsgSet>;
pub type ExtReqNot = ReqNot<api::ExtMsgSet>; pub type ExtReqNot = ReqNot<api::ExtMsgSet>;
@@ -55,7 +55,7 @@ pub enum MemberRecord {
} }
pub struct SystemRecord { pub struct SystemRecord {
lazy_members: HashMap<api::TreeId, MemberRecord>, lazy_members: Mutex<HashMap<api::TreeId, MemberRecord>>,
ctx: SysCtx, ctx: SysCtx,
} }
@@ -73,7 +73,7 @@ pub async fn with_atom_record<'a, F: Future<Output = SysCtx>, T>(
atom: &'a api::Atom, atom: &'a api::Atom,
cb: impl WithAtomRecordCallback<'a, T>, cb: impl WithAtomRecordCallback<'a, T>,
) -> T { ) -> T {
let mut data = &atom.data[..]; let mut data = &atom.data.0[..];
let ctx = get_sys_ctx(atom.owner).await; let ctx = get_sys_ctx(atom.owner).await;
let inst = ctx.get::<CtedObj>().inst(); let inst = ctx.get::<CtedObj>().inst();
let id = AtomTypeId::decode(Pin::new(&mut data)).await; let id = AtomTypeId::decode(Pin::new(&mut data)).await;
@@ -83,17 +83,17 @@ pub async fn with_atom_record<'a, F: Future<Output = SysCtx>, T>(
pub struct ExtensionOwner { pub struct ExtensionOwner {
_interner_cell: Rc<RefCell<Option<Interner>>>, _interner_cell: Rc<RefCell<Option<Interner>>>,
_systems_lock: Rc<Mutex<HashMap<api::SysId, SystemRecord>>>, _systems_lock: Rc<RwLock<HashMap<api::SysId, SystemRecord>>>,
out_recv: Receiver<Vec<u8>>, out_recv: Mutex<Receiver<Vec<u8>>>,
out_send: Sender<Vec<u8>>, out_send: Sender<Vec<u8>>,
} }
impl ExtPort for ExtensionOwner { impl ExtPort for ExtensionOwner {
fn send<'a>(&'a self, msg: &'a [u8]) -> LocalBoxFuture<'a, ()> { fn send<'a>(&'a self, msg: &'a [u8]) -> LocalBoxFuture<'a, ()> {
Box::pin(async { self.out_send.send(msg.to_vec()).boxed_local().await.unwrap() }) Box::pin(async { self.out_send.clone().send(msg.to_vec()).boxed_local().await.unwrap() })
} }
fn recv(&self) -> LocalBoxFuture<'_, Option<Vec<u8>>> { fn recv(&self) -> LocalBoxFuture<'_, Option<Vec<u8>>> {
Box::pin(async { (self.out_recv.recv().await).ok() }) Box::pin(async { self.out_recv.lock().await.next().await })
} }
} }
@@ -107,11 +107,11 @@ pub fn extension_init(
.map(|(id, sys)| (u16::try_from(id).expect("more than u16max system ctors"), sys)) .map(|(id, sys)| (u16::try_from(id).expect("more than u16max system ctors"), sys))
.map(|(id, sys)| sys.decl(api::SysDeclId(NonZero::new(id + 1).unwrap()))) .map(|(id, sys)| sys.decl(api::SysDeclId(NonZero::new(id + 1).unwrap())))
.collect_vec(); .collect_vec();
let systems_lock = Rc::new(Mutex::new(HashMap::<api::SysId, SystemRecord>::new())); let systems_lock = Rc::new(RwLock::new(HashMap::<api::SysId, SystemRecord>::new()));
let ext_header = api::ExtensionHeader { name: data.name.to_string(), systems: decls.clone() }; let ext_header = api::ExtensionHeader { name: data.name.to_string(), systems: decls.clone() };
let (out_send, in_recv) = channel::bounded::<Vec<u8>>(1); let (out_send, in_recv) = channel::<Vec<u8>>(1);
let (in_send, out_recv) = channel::bounded::<Vec<u8>>(1); let (in_send, out_recv) = channel::<Vec<u8>>(1);
let (exit_send, exit_recv) = channel::bounded(1); let (exit_send, exit_recv) = channel(1);
let logger = Logger::new(log_strategy); let logger = Logger::new(log_strategy);
let msg_logger = Logger::new(msg_logs); let msg_logger = Logger::new(msg_logs);
let interner_cell = Rc::new(RefCell::new(None::<Interner>)); let interner_cell = Rc::new(RefCell::new(None::<Interner>));
@@ -120,11 +120,11 @@ pub fn extension_init(
let get_ctx = clone!(systems_weak; move |id: api::SysId| clone!(systems_weak; async move { let get_ctx = clone!(systems_weak; move |id: api::SysId| clone!(systems_weak; async move {
let systems = let systems =
systems_weak.upgrade().expect("System table dropped before request processing done"); systems_weak.upgrade().expect("System table dropped before request processing done");
systems.lock().await.get(&id).expect("System not found").ctx.clone() systems.read().await.get(&id).expect("System not found").ctx.clone()
})); }));
let init_ctx = { let init_ctx = {
clone!(interner_weak, spawner, logger); clone!(interner_weak, spawner, logger);
move |id: api::SysId, cted: CtedObj, reqnot: ReqNot<ExtMsgSet>| { move |id: api::SysId, cted: CtedObj, reqnot: ReqNot<api::ExtMsgSet>| {
clone!(interner_weak, spawner, logger; async move { clone!(interner_weak, spawner, logger; async move {
let interner_rc = let interner_rc =
interner_weak.upgrade().expect("System construction order while shutting down"); interner_weak.upgrade().expect("System construction order while shutting down");
@@ -135,22 +135,25 @@ pub fn extension_init(
}; };
let rn = ReqNot::<api::ExtMsgSet>::new( let rn = ReqNot::<api::ExtMsgSet>::new(
msg_logger.clone(), msg_logger.clone(),
move |a, _| clone!(in_send; Box::pin(async move { in_send.send(a.to_vec()).await.unwrap() })), move |a, _| {
clone!(systems_weak, exit_send, get_ctx; move |n, _| { clone!(in_send mut);
clone!(systems_weak, exit_send, get_ctx; async move { Box::pin(async move { in_send.send(a.to_vec()).await.unwrap() })
match n { },
api::HostExtNotif::Exit => exit_send.send(()).await.unwrap(), {
api::HostExtNotif::SystemDrop(api::SystemDrop(sys_id)) => clone!(exit_send);
if let Some(rc) = systems_weak.upgrade() { move |n, _| {
mem::drop(rc.lock().await.remove(&sys_id)) clone!(exit_send mut);
async move {
match n {
api::HostExtNotif::Exit => {
eprintln!("Exit received");
exit_send.send(()).await.unwrap()
}, },
api::HostExtNotif::AtomDrop(api::AtomDrop(sys_id, atom)) => {
let ctx = get_ctx(sys_id).await;
take_atom(atom, &ctx).await.dyn_free(ctx.clone()).await
} }
} }
}.boxed_local()) .boxed_local()
}), }
},
{ {
clone!(logger, get_ctx, init_ctx, systems_weak, interner_weak, decls, msg_logger); clone!(logger, get_ctx, init_ctx, systems_weak, interner_weak, decls, msg_logger);
move |hand, req| { move |hand, req| {
@@ -158,8 +161,22 @@ pub fn extension_init(
async move { async move {
let interner_cell = interner_weak.upgrade().expect("Interner dropped before request"); let interner_cell = interner_weak.upgrade().expect("Interner dropped before request");
let i = interner_cell.borrow().clone().expect("Request arrived before interner set"); let i = interner_cell.borrow().clone().expect("Request arrived before interner set");
if !matches!(req, api::HostExtReq::AtomReq(api::AtomReq::AtomPrint(_))) {
writeln!(msg_logger, "{} extension received request {req:?}", data.name); writeln!(msg_logger, "{} extension received request {req:?}", data.name);
}
match req { match req {
api::HostExtReq::SystemDrop(sys_drop) => {
if let Some(rc) = systems_weak.upgrade() {
mem::drop(rc.write().await.remove(&sys_drop.0))
}
hand.handle(&sys_drop, &()).await
},
api::HostExtReq::AtomDrop(atom_drop @ api::AtomDrop(sys_id, atom)) => {
let ctx = get_ctx(sys_id).await;
take_atom(atom, &ctx).await.dyn_free(ctx.clone()).await;
hand.handle(&atom_drop, &()).await
},
api::HostExtReq::Ping(ping @ api::Ping) => hand.handle(&ping, &()).await, api::HostExtReq::Ping(ping @ api::Ping) => hand.handle(&ping, &()).await,
api::HostExtReq::Sweep(sweep @ api::Sweep) => api::HostExtReq::Sweep(sweep @ api::Sweep) =>
hand.handle(&sweep, &i.sweep_replica().await).await, hand.handle(&sweep, &i.sweep_replica().await).await,
@@ -171,36 +188,42 @@ pub fn extension_init(
cted.inst().dyn_lexers().iter().fold(api::CharFilter(vec![]), |cf, lx| { cted.inst().dyn_lexers().iter().fold(api::CharFilter(vec![]), |cf, lx| {
char_filter_union(&cf, &mk_char_filter(lx.char_filter().iter().cloned())) char_filter_union(&cf, &mk_char_filter(lx.char_filter().iter().cloned()))
}); });
let lazy_mems = Mutex::new(HashMap::new()); let lazy_members = Mutex::new(HashMap::new());
let ctx = init_ctx(new_sys.id, cted.clone(), hand.reqnot()).await; let ctx = init_ctx(new_sys.id, cted.clone(), hand.reqnot()).await;
let const_root = stream::from_iter(cted.inst().dyn_env()) let const_root = stream::iter(cted.inst().dyn_env())
.then(|mem| { .then(|mem| {
let (req, lazy_mems) = (&hand, &lazy_mems); let lazy_mems = &lazy_members;
clone!(i, ctx; async move { clone!(i, ctx; async move {
let mut tia_ctx = TreeIntoApiCtxImpl { let mut tia_ctx = TreeIntoApiCtxImpl {
lazy_members: &mut *lazy_mems.lock().await, lazy_members: &mut *lazy_mems.lock().await,
sys: ctx, sys: ctx,
basepath: &[], basepath: &[],
path: Substack::Bottom, path: Substack::Bottom,
req
}; };
(i.i(&mem.name).await.to_api(), mem.kind.into_api(&mut tia_ctx).await) (i.i(&mem.name).await.to_api(), mem.kind.into_api(&mut tia_ctx).await)
}) })
}) })
.collect() .collect()
.await; .await;
let record = SystemRecord { ctx, lazy_members: lazy_mems.into_inner() }; let prelude =
cted.inst().dyn_prelude(&i).await.iter().map(|sym| sym.to_api()).collect();
let record = SystemRecord { ctx, lazy_members };
let systems = systems_weak.upgrade().expect("System constructed during shutdown"); let systems = systems_weak.upgrade().expect("System constructed during shutdown");
systems.lock().await.insert(new_sys.id, record); systems.write().await.insert(new_sys.id, record);
let response = api::NewSystemResponse { lex_filter, const_root, line_types: vec![] }; let line_types = join_all(
(cted.inst().dyn_parsers().iter())
.map(|p| async { i.i(p.line_head()).await.to_api() }),
)
.await;
let response = api::NewSystemResponse { lex_filter, const_root, line_types, prelude };
hand.handle(&new_sys, &response).await hand.handle(&new_sys, &response).await
}, },
api::HostExtReq::GetMember(get_tree @ api::GetMember(sys_id, tree_id)) => { api::HostExtReq::GetMember(get_tree @ api::GetMember(sys_id, tree_id)) => {
let sys_ctx = get_ctx(sys_id).await; let sys_ctx = get_ctx(sys_id).await;
let systems = systems_weak.upgrade().expect("Member queried during shutdown"); let systems = systems_weak.upgrade().expect("Member queried during shutdown");
let mut systems_g = systems.lock().await; let systems_g = systems.read().await;
let SystemRecord { lazy_members, .. } = let mut lazy_members =
systems_g.get_mut(&sys_id).expect("System not found"); systems_g.get(&sys_id).expect("System not found").lazy_members.lock().await;
let (path, cb) = match lazy_members.insert(tree_id, MemberRecord::Res) { let (path, cb) = match lazy_members.insert(tree_id, MemberRecord::Res) {
None => panic!("Tree for ID not found"), None => panic!("Tree for ID not found"),
Some(MemberRecord::Res) => panic!("This tree has already been transmitted"), Some(MemberRecord::Res) => panic!("This tree has already been transmitted"),
@@ -211,8 +234,7 @@ pub fn extension_init(
sys: sys_ctx, sys: sys_ctx,
path: Substack::Bottom, path: Substack::Bottom,
basepath: &path, basepath: &path,
lazy_members, lazy_members: &mut lazy_members,
req: &hand,
}; };
hand.handle(&get_tree, &tree.into_api(&mut tia_ctx).await).await hand.handle(&get_tree, &tree.into_api(&mut tia_ctx).await).await
}, },
@@ -223,56 +245,73 @@ pub fn extension_init(
sys.dyn_request(hand, payload).await sys.dyn_request(hand, payload).await
}, },
api::HostExtReq::LexExpr(lex @ api::LexExpr { sys, src, text, pos, id }) => { api::HostExtReq::LexExpr(lex @ api::LexExpr { sys, src, text, pos, id }) => {
let sys_ctx = get_ctx(sys).await; let mut sys_ctx = get_ctx(sys).await;
let text = Tok::from_api(text, &i).await; let text = Tok::from_api(text, &i).await;
let src = Sym::from_api(src, sys_ctx.i()).await; let src = Sym::from_api(src, sys_ctx.i()).await;
let ctx = LexContext { id, pos, text: &text, src, ctx: sys_ctx.clone() }; let rep = Reporter::new();
let expr_store = BorrowedExprStore::new();
let trigger_char = text.chars().nth(pos as usize).unwrap(); let trigger_char = text.chars().nth(pos as usize).unwrap();
let err_na = err_not_applicable(&i).await; let ekey_na = ekey_not_applicable(&i).await;
let err_cascade = err_cascade(&i).await; let ekey_cascade = ekey_cascade(&i).await;
let lexers = sys_ctx.cted().inst().dyn_lexers(); let lexers = sys_ctx.cted().inst().dyn_lexers();
for lx in lexers.iter().filter(|l| char_filter_match(l.char_filter(), trigger_char)) { for lx in lexers.iter().filter(|l| char_filter_match(l.char_filter(), trigger_char)) {
let ctx = LexContext {
id,
pos,
text: &text,
src: src.clone(),
ctx: sys_ctx.clone(),
rep: &rep,
exprs: &expr_store,
};
match lx.lex(&text[pos as usize..], &ctx).await { match lx.lex(&text[pos as usize..], &ctx).await {
Err(e) if e.any(|e| *e == err_na) => continue, Err(e) if e.any(|e| *e == ekey_na) => continue,
Err(e) => { Err(e) => {
let eopt = e.keep_only(|e| *e != err_cascade).map(|e| Err(e.to_api())); let eopt = e.keep_only(|e| *e != ekey_cascade).map(|e| Err(e.to_api()));
expr_store.dispose().await;
return hand.handle(&lex, &eopt).await; return hand.handle(&lex, &eopt).await;
}, },
Ok((s, expr)) => { Ok((s, expr)) => {
let expr = expr.into_api(&mut (), &mut (sys_ctx, &hand)).await; let expr = expr.into_api(&mut (), &mut sys_ctx).await;
let pos = (text.len() - s.len()) as u32; let pos = (text.len() - s.len()) as u32;
expr_store.dispose().await;
return hand.handle(&lex, &Some(Ok(api::LexedExpr { pos, expr }))).await; return hand.handle(&lex, &Some(Ok(api::LexedExpr { pos, expr }))).await;
}, },
} }
} }
writeln!(logger, "Got notified about n/a character '{trigger_char}'"); writeln!(logger, "Got notified about n/a character '{trigger_char}'");
expr_store.dispose().await;
hand.handle(&lex, &None).await hand.handle(&lex, &None).await
}, },
api::HostExtReq::ParseLine(pline) => { api::HostExtReq::ParseLine(pline) => {
let api::ParseLine { module, src, exported, comments, sys, line } = &pline; let api::ParseLine { module, src, exported, comments, sys, line, idx } = &pline;
let mut ctx = get_ctx(*sys).await; let ctx = get_ctx(*sys).await;
let parsers = ctx.cted().inst().dyn_parsers(); let parsers = ctx.cted().inst().dyn_parsers();
let src = Sym::from_api(*src, ctx.i()).await; let src = Sym::from_api(*src, ctx.i()).await;
let comments = let comments =
join_all(comments.iter().map(|c| Comment::from_api(c, src.clone(), &i))).await; join_all(comments.iter().map(|c| Comment::from_api(c, src.clone(), &i))).await;
let line: Vec<GenTokTree> = ttv_from_api(line, &mut ctx, &mut (), &src, &i).await; let expr_store = BorrowedExprStore::new();
let mut from_api_ctx = (ctx.clone(), &expr_store);
let line: Vec<PTokTree> =
ttv_from_api(line, &mut from_api_ctx, &mut (), &src, &i).await;
let snip = Snippet::new(line.first().expect("Empty line"), &line); let snip = Snippet::new(line.first().expect("Empty line"), &line);
let (head, tail) = snip.pop_front().unwrap(); let parser = parsers[*idx as usize];
let name = if let GenTok::Name(n) = &head.tok { n } else { panic!("No line head") };
let parser =
parsers.iter().find(|p| p.line_head() == **name).expect("No parser candidate");
let module = Sym::from_api(*module, ctx.i()).await; let module = Sym::from_api(*module, ctx.i()).await;
let pctx = ParsCtx::new(ctx.clone(), module); let reporter = Reporter::new();
let o_line = match parser.parse(pctx, *exported, comments, tail).await { let pctx = ParsCtx::new(ctx.clone(), module, &reporter);
let parse_res = parser.parse(pctx, *exported, comments, snip).await;
let o_line = match reporter.merge(parse_res) {
Err(e) => Err(e.to_api()), Err(e) => Err(e.to_api()),
Ok(t) => Ok(linev_into_api(t, ctx.clone(), &hand).await), Ok(t) => Ok(linev_into_api(t, ctx.clone()).await),
}; };
mem::drop(line);
expr_store.dispose().await;
hand.handle(&pline, &o_line).await hand.handle(&pline, &o_line).await
}, },
api::HostExtReq::FetchParsedConst(ref fpc @ api::FetchParsedConst { id, sys }) => { api::HostExtReq::FetchParsedConst(ref fpc @ api::FetchParsedConst(sys, id)) => {
let ctx = get_ctx(sys).await; let ctx = get_ctx(sys).await;
let cnst = get_const(id, ctx.clone()).await; let cnst = get_const(id, ctx.clone()).await;
hand.handle(fpc, &cnst.api_return(ctx, &hand).await).await hand.handle(fpc, &cnst.api_return(ctx).await).await
}, },
api::HostExtReq::AtomReq(atom_req) => { api::HostExtReq::AtomReq(atom_req) => {
let atom = atom_req.get_atom(); let atom = atom_req.get_atom();
@@ -309,23 +348,29 @@ pub fn extension_init(
hand.handle(fwded, &some.then_some(reply)).await hand.handle(fwded, &some.then_some(reply)).await
}, },
api::AtomReq::CallRef(call @ api::CallRef(_, arg)) => { api::AtomReq::CallRef(call @ api::CallRef(_, arg)) => {
// SAFETY: function calls own their argument implicitly let expr_store = BorrowedExprStore::new();
let expr_handle = unsafe { ExprHandle::from_args(ctx.clone(), *arg) }; let expr_handle = ExprHandle::borrowed(ctx.clone(), *arg, &expr_store);
let ret = nfo.call_ref(actx, Expr::from_handle(Rc::new(expr_handle))).await; let ret = nfo.call_ref(actx, Expr::from_handle(expr_handle.clone())).await;
hand.handle(call, &ret.api_return(ctx.clone(), &hand).await).await let api_expr = ret.api_return(ctx.clone()).await;
mem::drop(expr_handle);
expr_store.dispose().await;
hand.handle(call, &api_expr).await
}, },
api::AtomReq::FinalCall(call @ api::FinalCall(_, arg)) => { api::AtomReq::FinalCall(call @ api::FinalCall(_, arg)) => {
// SAFETY: function calls own their argument implicitly let expr_store = BorrowedExprStore::new();
let expr_handle = unsafe { ExprHandle::from_args(ctx.clone(), *arg) }; let expr_handle = ExprHandle::borrowed(ctx.clone(), *arg, &expr_store);
let ret = nfo.call(actx, Expr::from_handle(Rc::new(expr_handle))).await; let ret = nfo.call(actx, Expr::from_handle(expr_handle.clone())).await;
hand.handle(call, &ret.api_return(ctx.clone(), &hand).await).await let api_expr = ret.api_return(ctx.clone()).await;
mem::drop(expr_handle);
expr_store.dispose().await;
hand.handle(call, &api_expr).await
}, },
api::AtomReq::Command(cmd @ api::Command(_)) => match nfo.command(actx).await { api::AtomReq::Command(cmd @ api::Command(_)) => match nfo.command(actx).await {
Err(e) => hand.handle(cmd, &Err(e.to_api())).await, Err(e) => hand.handle(cmd, &Err(e.to_api())).await,
Ok(opt) => match opt { Ok(opt) => match opt {
None => hand.handle(cmd, &Ok(api::NextStep::Halt)).await, None => hand.handle(cmd, &Ok(api::NextStep::Halt)).await,
Some(cont) => { Some(cont) => {
let cont = cont.api_return(ctx.clone(), &hand).await; let cont = cont.api_return(ctx.clone()).await;
hand.handle(cmd, &Ok(api::NextStep::Continue(cont))).await hand.handle(cmd, &Ok(api::NextStep::Continue(cont))).await
}, },
}, },
@@ -340,8 +385,7 @@ pub fn extension_init(
let ctx = get_ctx(*sys).await; let ctx = get_ctx(*sys).await;
// SAFETY: deserialization implicitly grants ownership to previously owned exprs // SAFETY: deserialization implicitly grants ownership to previously owned exprs
let refs = (refs.iter()) let refs = (refs.iter())
.map(|tk| unsafe { ExprHandle::from_args(ctx.clone(), *tk) }) .map(|tk| Expr::from_handle(ExprHandle::deserialize(ctx.clone(), *tk)))
.map(|handle| Expr::from_handle(Rc::new(handle)))
.collect_vec(); .collect_vec();
let id = AtomTypeId::decode(Pin::new(&mut read)).await; let id = AtomTypeId::decode(Pin::new(&mut read)).await;
let inst = ctx.cted().inst(); let inst = ctx.cted().inst();
@@ -355,7 +399,7 @@ pub fn extension_init(
}, },
); );
*interner_cell.borrow_mut() = *interner_cell.borrow_mut() =
Some(Interner::new_replica(rn.clone().map(|ir: IntReq| ir.into_root()))); Some(Interner::new_replica(rn.clone().map(|ir: api::IntReq| ir.into_root())));
spawner(Box::pin(clone!(spawner; async move { spawner(Box::pin(clone!(spawner; async move {
let mut streams = stream_select! { in_recv.map(Some), exit_recv.map(|_| None) }; let mut streams = stream_select! { in_recv.map(Some), exit_recv.map(|_| None) };
while let Some(item) = streams.next().await { while let Some(item) = streams.next().await {
@@ -368,7 +412,7 @@ pub fn extension_init(
ExtInit { ExtInit {
header: ext_header, header: ext_header,
port: Box::new(ExtensionOwner { port: Box::new(ExtensionOwner {
out_recv, out_recv: Mutex::new(out_recv),
out_send, out_send,
_interner_cell: interner_cell, _interner_cell: interner_cell,
_systems_lock: systems_lock, _systems_lock: systems_lock,

View File

@@ -1,9 +1,11 @@
use std::cell::RefCell;
use std::fmt; use std::fmt;
use std::hash::Hash;
use std::rc::Rc; use std::rc::Rc;
use async_once_cell::OnceCell; use async_once_cell::OnceCell;
use derive_destructure::destructure; use derive_destructure::destructure;
use orchid_api::ExtAtomPrint; use hashbrown::HashSet;
use orchid_base::error::OrcErrv; use orchid_base::error::OrcErrv;
use orchid_base::format::{FmtCtx, FmtUnit, Format}; use orchid_base::format::{FmtCtx, FmtUnit, Format};
use orchid_base::location::Pos; use orchid_base::location::Pos;
@@ -14,26 +16,71 @@ use crate::atom::ForeignAtom;
use crate::gen_expr::{GExpr, GExprKind}; use crate::gen_expr::{GExpr, GExprKind};
use crate::system::SysCtx; use crate::system::SysCtx;
pub struct BorrowedExprStore(RefCell<Option<HashSet<Rc<ExprHandle>>>>);
impl BorrowedExprStore {
pub(crate) fn new() -> Self { Self(RefCell::new(Some(HashSet::new()))) }
pub async fn dispose(self) {
let elements = self.0.borrow_mut().take().unwrap();
for handle in elements {
handle.drop_one().await
}
}
}
impl Drop for BorrowedExprStore {
fn drop(&mut self) {
if self.0.borrow().is_some() {
panic!("This should always be explicitly disposed")
}
}
}
#[derive(destructure)] #[derive(destructure)]
pub struct ExprHandle { pub struct ExprHandle {
pub tk: api::ExprTicket, pub tk: api::ExprTicket,
pub ctx: SysCtx, pub ctx: SysCtx,
} }
impl ExprHandle { impl ExprHandle {
/// # Safety /// This function does not signal to take ownership of the expr.
/// pub fn borrowed(ctx: SysCtx, tk: api::ExprTicket, store: &BorrowedExprStore) -> Rc<Self> {
/// This function does not signal to take ownership of the expr. It must only let this = Rc::new(Self { ctx, tk });
/// be called on tickets that are already implicitly owned. store.0.borrow_mut().as_mut().unwrap().insert(this.clone());
pub unsafe fn from_args(ctx: SysCtx, tk: api::ExprTicket) -> Self { Self { ctx, tk } } this
}
pub fn deserialize(ctx: SysCtx, tk: api::ExprTicket) -> Rc<Self> { Rc::new(Self { ctx, tk }) }
pub fn get_ctx(&self) -> SysCtx { self.ctx.clone() } pub fn get_ctx(&self) -> SysCtx { self.ctx.clone() }
pub async fn clone(&self) -> Self { /// Drop one instance of the handle silently; if it's the last one, do
self.ctx.reqnot().notify(api::Acquire(self.ctx.sys_id(), self.tk)).await; /// nothing, otherwise send an Acquire
Self { ctx: self.ctx.clone(), tk: self.tk } pub async fn drop_one(self: Rc<Self>) {
match Rc::try_unwrap(self) {
Err(rc) => {
eprintln!("Extending lifetime for {:?}", rc.tk);
rc.ctx.reqnot().notify(api::Acquire(rc.ctx.sys_id(), rc.tk)).await
},
Ok(hand) => {
// avoid calling destructor
hand.destructure();
},
}
} }
/// Drop the handle and get the ticket without a release notification. /// Drop the handle and get the ticket without a release notification.
/// Use this with messages that imply ownership transfer. This function is /// Use this with messages that imply ownership transfer. This function is
/// safe because abusing it is a memory leak. /// safe because abusing it is a memory leak.
pub fn into_tk(self) -> api::ExprTicket { self.destructure().0 } pub fn serialize(self) -> api::ExprTicket {
eprintln!("Skipping destructor for {:?}", self.tk);
self.destructure().0
}
}
impl Eq for ExprHandle {}
impl PartialEq for ExprHandle {
fn eq(&self, other: &Self) -> bool {
self.ctx.sys_id() == other.ctx.sys_id() && self.tk == other.tk
}
}
impl Hash for ExprHandle {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.ctx.sys_id().hash(state);
self.tk.hash(state);
}
} }
impl fmt::Debug for ExprHandle { impl fmt::Debug for ExprHandle {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -92,8 +139,9 @@ impl Format for Expr {
match &self.data().await.kind { match &self.data().await.kind {
ExprKind::Opaque => "OPAQUE".to_string().into(), ExprKind::Opaque => "OPAQUE".to_string().into(),
ExprKind::Bottom(b) => format!("Bottom({b})").into(), ExprKind::Bottom(b) => format!("Bottom({b})").into(),
ExprKind::Atom(a) => ExprKind::Atom(a) => FmtUnit::from_api(
FmtUnit::from_api(&self.handle.ctx.reqnot().request(ExtAtomPrint(a.atom.clone())).await), &self.handle.ctx.reqnot().request(api::ExtAtomPrint(a.atom.clone())).await,
),
} }
} }
} }

View File

@@ -1,13 +1,13 @@
use std::any::TypeId;
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::HashMap; use std::collections::HashMap;
use std::future::Future; use std::future::Future;
use std::pin::Pin; use std::pin::Pin;
use std::rc::Rc; use std::rc::Rc;
use async_std::io::Write;
use async_std::sync::Mutex;
use futures::FutureExt;
use futures::future::LocalBoxFuture; use futures::future::LocalBoxFuture;
use futures::lock::Mutex;
use futures::{AsyncWrite, FutureExt};
use itertools::Itertools; use itertools::Itertools;
use never::Never; use never::Never;
use orchid_api_traits::Encode; use orchid_api_traits::Encode;
@@ -20,6 +20,7 @@ use trait_set::trait_set;
use crate::atom::Atomic; use crate::atom::Atomic;
use crate::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant}; use crate::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
use crate::conv::ToExpr; use crate::conv::ToExpr;
use crate::coroutine_exec::{ExecHandle, exec};
use crate::expr::Expr; use crate::expr::Expr;
use crate::gen_expr::GExpr; use crate::gen_expr::GExpr;
use crate::system::{SysCtx, SysCtxEntry}; use crate::system::{SysCtx, SysCtxEntry};
@@ -29,13 +30,40 @@ trait_set! {
} }
pub trait ExprFunc<I, O>: Clone + 'static { pub trait ExprFunc<I, O>: Clone + 'static {
const ARITY: u8; fn argtyps() -> &'static [TypeId];
fn apply(&self, v: Vec<Expr>) -> impl Future<Output = OrcRes<GExpr>>; fn apply<'a>(&self, hand: ExecHandle<'a>, v: Vec<Expr>) -> impl Future<Output = OrcRes<GExpr>>;
} }
#[derive(Default)] #[derive(Default)]
struct FunsCtx(Mutex<HashMap<Sym, (u8, Rc<dyn FunCB>)>>); struct FunsCtx(Mutex<HashMap<Sym, FunRecord>>);
impl SysCtxEntry for FunsCtx {} impl SysCtxEntry for FunsCtx {}
#[derive(Clone)]
struct FunRecord {
argtyps: &'static [TypeId],
fun: Rc<dyn FunCB>,
}
async fn process_args<I, O, F: ExprFunc<I, O>>(
debug: impl AsRef<str> + Clone + 'static,
f: F,
) -> FunRecord {
let argtyps = F::argtyps();
let fun = Rc::new(move |v: Vec<Expr>| {
clone!(f, v mut);
exec(debug.clone(), async move |mut hand| {
let mut norm_args = Vec::with_capacity(v.len());
for (expr, typ) in v.into_iter().zip(argtyps) {
if *typ != TypeId::of::<Expr>() {
norm_args.push(hand.exec(expr).await?);
}
}
f.apply(hand, norm_args).await
})
.map(Ok)
.boxed_local()
});
FunRecord { argtyps, fun }
}
/// An Atom representing a partially applied named native function. These /// An Atom representing a partially applied named native function. These
/// partial calls are serialized into the name of the native function and the /// partial calls are serialized into the name of the native function and the
@@ -46,23 +74,22 @@ impl SysCtxEntry for FunsCtx {}
pub(crate) struct Fun { pub(crate) struct Fun {
path: Sym, path: Sym,
args: Vec<Expr>, args: Vec<Expr>,
arity: u8, record: FunRecord,
fun: Rc<dyn FunCB>,
} }
impl Fun { impl Fun {
pub async fn new<I, O, F: ExprFunc<I, O>>(path: Sym, ctx: SysCtx, f: F) -> Self { pub async fn new<I, O, F: ExprFunc<I, O>>(path: Sym, ctx: SysCtx, f: F) -> Self {
let funs: &FunsCtx = ctx.get_or_default(); let funs: &FunsCtx = ctx.get_or_default();
let mut fung = funs.0.lock().await; let mut fung = funs.0.lock().await;
let fun = if let Some(x) = fung.get(&path) { let record = if let Some(record) = fung.get(&path) {
x.1.clone() record.clone()
} else { } else {
let fun = Rc::new(move |v| clone!(f; async move { f.apply(v).await }.boxed_local())); let record = process_args(path.to_string(), f).await;
fung.insert(path.clone(), (F::ARITY, fun.clone())); fung.insert(path.clone(), record.clone());
fun record
}; };
Self { args: vec![], arity: F::ARITY, path, fun } Self { args: vec![], path, record }
} }
pub fn arity(&self) -> u8 { self.arity } pub fn arity(&self) -> u8 { self.record.argtyps.len() as u8 }
} }
impl Atomic for Fun { impl Atomic for Fun {
type Data = (); type Data = ();
@@ -72,28 +99,28 @@ impl OwnedAtom for Fun {
type Refs = Vec<Expr>; type Refs = Vec<Expr>;
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) } async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
async fn call_ref(&self, arg: Expr) -> GExpr { async fn call_ref(&self, arg: Expr) -> GExpr {
std::io::Write::flush(&mut std::io::stderr()).unwrap();
let new_args = self.args.iter().cloned().chain([arg]).collect_vec(); let new_args = self.args.iter().cloned().chain([arg]).collect_vec();
if new_args.len() == self.arity.into() { if new_args.len() == self.record.argtyps.len() {
(self.fun)(new_args).await.to_expr() (self.record.fun)(new_args).await.to_expr().await
} else { } else {
Self { args: new_args, arity: self.arity, fun: self.fun.clone(), path: self.path.clone() } Self { args: new_args, record: self.record.clone(), path: self.path.clone() }.to_expr().await
.to_expr()
} }
} }
async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await } async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await }
async fn serialize(&self, _: SysCtx, write: Pin<&mut (impl Write + ?Sized)>) -> Self::Refs { async fn serialize(&self, _: SysCtx, write: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
self.path.to_api().encode(write).await; self.path.to_api().encode(write).await;
self.args.clone() self.args.clone()
} }
async fn deserialize(mut ctx: impl DeserializeCtx, args: Self::Refs) -> Self { async fn deserialize(mut ctx: impl DeserializeCtx, args: Self::Refs) -> Self {
let sys = ctx.sys(); let sys = ctx.sys();
let path = Sym::from_api(ctx.decode().await, sys.i()).await; let path = Sym::from_api(ctx.decode().await, sys.i()).await;
let (arity, fun) = sys.get_or_default::<FunsCtx>().0.lock().await.get(&path).unwrap().clone(); let record = (sys.get::<FunsCtx>().0.lock().await.get(&path))
Self { args, arity, path, fun } .expect("Function missing during deserialization")
.clone();
Self { args, path, record }
} }
async fn print<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print_atom<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
format!("{}:{}/{}", self.path, self.args.len(), self.arity).into() format!("{}:{}/{}", self.path, self.args.len(), self.arity()).into()
} }
} }
@@ -104,13 +131,14 @@ impl OwnedAtom for Fun {
#[derive(Clone)] #[derive(Clone)]
pub struct Lambda { pub struct Lambda {
args: Vec<Expr>, args: Vec<Expr>,
arity: u8, record: FunRecord,
fun: Rc<dyn FunCB>,
} }
impl Lambda { impl Lambda {
pub fn new<I, O, F: ExprFunc<I, O>>(f: F) -> Self { pub async fn new<I, O, F: ExprFunc<I, O>>(
let fun = Rc::new(move |v| clone!(f; async move { f.apply(v).await }.boxed_local())); debug: impl AsRef<str> + Clone + 'static,
Self { args: vec![], arity: F::ARITY, fun } f: F,
) -> Self {
Self { args: vec![], record: process_args(debug, f).await }
} }
} }
impl Atomic for Lambda { impl Atomic for Lambda {
@@ -122,53 +150,59 @@ impl OwnedAtom for Lambda {
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) } async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
async fn call_ref(&self, arg: Expr) -> GExpr { async fn call_ref(&self, arg: Expr) -> GExpr {
let new_args = self.args.iter().cloned().chain([arg]).collect_vec(); let new_args = self.args.iter().cloned().chain([arg]).collect_vec();
if new_args.len() == self.arity.into() { if new_args.len() == self.record.argtyps.len() {
(self.fun)(new_args).await.to_expr() (self.record.fun)(new_args).await.to_expr().await
} else { } else {
Self { args: new_args, arity: self.arity, fun: self.fun.clone() }.to_expr() Self { args: new_args, record: self.record.clone() }.to_expr().await
} }
} }
async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await } async fn call(self, arg: Expr) -> GExpr { self.call_ref(arg).await }
} }
mod expr_func_derives { mod expr_func_derives {
use std::any::TypeId;
use std::sync::OnceLock;
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use super::ExprFunc; use super::ExprFunc;
use crate::conv::{ToExpr, TryFromExpr}; use crate::conv::{ToExpr, TryFromExpr};
use crate::func_atom::Expr; use crate::func_atom::{ExecHandle, Expr};
use crate::gen_expr::GExpr; use crate::gen_expr::GExpr;
macro_rules! expr_func_derive { macro_rules! expr_func_derive {
($arity: tt, $($t:ident),*) => { ($($t:ident),*) => {
pastey::paste!{ pastey::paste!{
impl< impl<
$($t: TryFromExpr, )* $($t: TryFromExpr + 'static, )*
Out: ToExpr, Out: ToExpr,
Func: AsyncFn($($t,)*) -> Out + Clone + Send + Sync + 'static Func: AsyncFn($($t,)*) -> Out + Clone + Send + Sync + 'static
> ExprFunc<($($t,)*), Out> for Func { > ExprFunc<($($t,)*), Out> for Func {
const ARITY: u8 = $arity; fn argtyps() -> &'static [TypeId] {
async fn apply(&self, v: Vec<Expr>) -> OrcRes<GExpr> { static STORE: OnceLock<Vec<TypeId>> = OnceLock::new();
assert_eq!(v.len(), Self::ARITY.into(), "Arity mismatch"); &*STORE.get_or_init(|| vec![$(TypeId::of::<$t>()),*])
}
async fn apply<'a>(&self, _: ExecHandle<'a>, v: Vec<Expr>) -> OrcRes<GExpr> {
assert_eq!(v.len(), Self::argtyps().len(), "Arity mismatch");
let [$([< $t:lower >],)*] = v.try_into().unwrap_or_else(|_| panic!("Checked above")); let [$([< $t:lower >],)*] = v.try_into().unwrap_or_else(|_| panic!("Checked above"));
Ok(self($($t::try_from_expr([< $t:lower >]).await?,)*).await.to_expr()) Ok(self($($t::try_from_expr([< $t:lower >]).await?,)*).await.to_expr().await)
} }
} }
} }
}; };
} }
expr_func_derive!(1, A); expr_func_derive!(A);
expr_func_derive!(2, A, B); expr_func_derive!(A, B);
expr_func_derive!(3, A, B, C); expr_func_derive!(A, B, C);
expr_func_derive!(4, A, B, C, D); expr_func_derive!(A, B, C, D);
expr_func_derive!(5, A, B, C, D, E); expr_func_derive!(A, B, C, D, E);
expr_func_derive!(6, A, B, C, D, E, F); // expr_func_derive!(A, B, C, D, E, F);
expr_func_derive!(7, A, B, C, D, E, F, G); // expr_func_derive!(A, B, C, D, E, F, G);
expr_func_derive!(8, A, B, C, D, E, F, G, H); // expr_func_derive!(A, B, C, D, E, F, G, H);
expr_func_derive!(9, A, B, C, D, E, F, G, H, I); // expr_func_derive!(A, B, C, D, E, F, G, H, I);
expr_func_derive!(10, A, B, C, D, E, F, G, H, I, J); // expr_func_derive!(A, B, C, D, E, F, G, H, I, J);
expr_func_derive!(11, A, B, C, D, E, F, G, H, I, J, K); // expr_func_derive!(A, B, C, D, E, F, G, H, I, J, K);
expr_func_derive!(12, A, B, C, D, E, F, G, H, I, J, K, L); // expr_func_derive!(A, B, C, D, E, F, G, H, I, J, K, L);
expr_func_derive!(13, A, B, C, D, E, F, G, H, I, J, K, L, M); // expr_func_derive!(A, B, C, D, E, F, G, H, I, J, K, L, M);
expr_func_derive!(14, A, B, C, D, E, F, G, H, I, J, K, L, M, N); // expr_func_derive!(A, B, C, D, E, F, G, H, I, J, K, L, M, N);
} }

View File

@@ -1,3 +1,4 @@
use std::mem;
use std::rc::Rc; use std::rc::Rc;
use futures::FutureExt; use futures::FutureExt;
@@ -5,14 +6,11 @@ use orchid_base::error::{OrcErr, OrcErrv};
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants}; use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::reqnot::ReqHandlish;
use orchid_base::{match_mapping, tl_cache}; use orchid_base::{match_mapping, tl_cache};
use crate::api; use crate::api;
use crate::atom::{AtomFactory, ToAtom}; use crate::atom::{AtomFactory, ToAtom};
use crate::conv::{ToExpr, TryFromExpr};
use crate::expr::Expr; use crate::expr::Expr;
use crate::func_atom::Lambda;
use crate::system::SysCtx; use crate::system::SysCtx;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@@ -21,20 +19,25 @@ pub struct GExpr {
pub pos: Pos, pub pos: Pos,
} }
impl GExpr { impl GExpr {
pub async fn api_return(self, ctx: SysCtx, hand: &impl ReqHandlish) -> api::Expression { pub async fn api_return(self, ctx: SysCtx) -> api::Expression {
if let GExprKind::Slot(ex) = self.kind { if let GExprKind::Slot(ex) = self.kind {
hand.defer_drop(ex.handle()); let hand = ex.handle();
mem::drop(ex);
api::Expression { api::Expression {
location: api::Location::SlotTarget, location: api::Location::SlotTarget,
kind: api::ExpressionKind::Slot(ex.handle().tk), kind: match Rc::try_unwrap(hand) {
Ok(h) => api::ExpressionKind::Slot { tk: h.serialize(), by_value: true },
Err(rc) => api::ExpressionKind::Slot { tk: rc.tk, by_value: false },
},
} }
} else { } else {
api::Expression { api::Expression {
location: api::Location::Inherit, location: api::Location::Inherit,
kind: self.kind.api_return(ctx, hand).boxed_local().await, kind: self.kind.api_return(ctx).boxed_local().await,
} }
} }
} }
pub fn at(self, pos: Pos) -> Self { GExpr { pos, kind: self.kind } }
} }
impl Format for GExpr { impl Format for GExpr {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
@@ -54,20 +57,19 @@ pub enum GExprKind {
Bottom(OrcErrv), Bottom(OrcErrv),
} }
impl GExprKind { impl GExprKind {
pub async fn api_return(self, ctx: SysCtx, hand: &impl ReqHandlish) -> api::ExpressionKind { pub async fn api_return(self, ctx: SysCtx) -> api::ExpressionKind {
match_mapping!(self, Self => api::ExpressionKind { match_mapping!(self, Self => api::ExpressionKind {
Call( Call(
f => Box::new(f.api_return(ctx.clone(), hand).await), f => Box::new(f.api_return(ctx.clone()).await),
x => Box::new(x.api_return(ctx, hand).await) x => Box::new(x.api_return(ctx).await)
), ),
Seq( Seq(
a => Box::new(a.api_return(ctx.clone(), hand).await), a => Box::new(a.api_return(ctx.clone()).await),
b => Box::new(b.api_return(ctx, hand).await) b => Box::new(b.api_return(ctx).await)
), ),
Lambda(arg, body => Box::new(body.api_return(ctx, hand).await)), Lambda(arg, body => Box::new(body.api_return(ctx).await)),
Arg(arg), Arg(arg),
Const(name.to_api()), Const(name.to_api()),
Const(name.to_api()),
Bottom(err.to_api()), Bottom(err.to_api()),
NewAtom(fac.clone().build(ctx).await), NewAtom(fac.clone().build(ctx).await),
} { } {
@@ -129,10 +131,3 @@ pub fn call(v: impl IntoIterator<Item = GExpr>) -> GExpr {
pub fn bot(ev: impl IntoIterator<Item = OrcErr>) -> GExpr { pub fn bot(ev: impl IntoIterator<Item = OrcErr>) -> GExpr {
inherit(GExprKind::Bottom(OrcErrv::new(ev).unwrap())) inherit(GExprKind::Bottom(OrcErrv::new(ev).unwrap()))
} }
pub fn with<I: TryFromExpr, O: ToExpr>(
expr: GExpr,
cont: impl AsyncFn(I) -> O + Clone + Send + Sync + 'static,
) -> GExpr {
call([lambda(0, [seq([arg(0), call([Lambda::new(cont).to_expr(), arg(0)])])]), expr])
}

View File

@@ -4,48 +4,64 @@ use std::ops::RangeInclusive;
use futures::FutureExt; use futures::FutureExt;
use futures::future::LocalBoxFuture; use futures::future::LocalBoxFuture;
use orchid_base::error::{OrcErr, OrcRes, mk_err}; use orchid_base::error::{OrcErrv, OrcRes, Reporter, mk_errv};
use orchid_base::interner::{Interner, Tok}; use orchid_base::interner::{Interner, Tok};
use orchid_base::location::{Pos, SrcRange}; use orchid_base::location::{Pos, SrcRange};
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::parse::ParseCtx;
use orchid_base::reqnot::Requester; use orchid_base::reqnot::Requester;
use crate::api; use crate::api;
use crate::expr::BorrowedExprStore;
use crate::parser::PTokTree;
use crate::system::SysCtx; use crate::system::SysCtx;
use crate::tree::GenTokTree; use crate::tree::GenTokTree;
pub async fn err_cascade(i: &Interner) -> OrcErr { pub async fn ekey_cascade(i: &Interner) -> Tok<String> {
mk_err( i.i("An error cascading from a recursive call").await
i.i("An error cascading from a recursive call").await, }
"This error is a sentinel for the extension library.\ pub async fn ekey_not_applicable(i: &Interner) -> Tok<String> {
it should not be emitted by the extension.", i.i("Pseudo-error to communicate that the current branch in a dispatch doesn't apply").await
[Pos::None.into()], }
) const MSG_INTERNAL_ERROR: &str = "This error is a sentinel for the extension library.\
it should not be emitted by the extension.";
pub async fn err_cascade(i: &Interner) -> OrcErrv {
mk_errv(ekey_cascade(i).await, MSG_INTERNAL_ERROR, [Pos::None])
} }
pub async fn err_not_applicable(i: &Interner) -> OrcErr { pub async fn err_not_applicable(i: &Interner) -> OrcErrv {
mk_err( mk_errv(ekey_not_applicable(i).await, MSG_INTERNAL_ERROR, [Pos::None])
i.i("Pseudo-error to communicate that the current branch in a dispatch doesn't apply").await,
&*err_cascade(i).await.message,
[Pos::None.into()],
)
} }
pub struct LexContext<'a> { pub struct LexContext<'a> {
pub(crate) exprs: &'a BorrowedExprStore,
pub ctx: SysCtx, pub ctx: SysCtx,
pub text: &'a Tok<String>, pub text: &'a Tok<String>,
pub id: api::ParsId, pub id: api::ParsId,
pub pos: u32, pub pos: u32,
pub src: Sym, pub(crate) src: Sym,
pub(crate) rep: &'a Reporter,
} }
impl<'a> LexContext<'a> { impl<'a> LexContext<'a> {
pub async fn recurse(&self, tail: &'a str) -> OrcRes<(&'a str, GenTokTree)> { pub fn src(&self) -> &Sym { &self.src }
/// This function returns [PTokTree] because it can never return
/// [orchid_base::tree::Token::NewExpr]. You can use
/// [crate::parser::p_tree2gen] to convert this to [crate::tree::GenTokTree]
/// for embedding in the return value.
pub async fn recurse(&self, tail: &'a str) -> OrcRes<(&'a str, PTokTree)> {
let start = self.pos(tail); let start = self.pos(tail);
let Some(lx) = self.ctx.reqnot().request(api::SubLex { pos: start, id: self.id }).await else { let Some(lx) = self.ctx.reqnot().request(api::SubLex { pos: start, id: self.id }).await else {
return Err(err_cascade(self.ctx.i()).await.into()); return Err(err_cascade(self.ctx.i()).await);
}; };
let tree = let tree = PTokTree::from_api(
GenTokTree::from_api(&lx.tree, &mut self.ctx.clone(), &mut (), &self.src, self.ctx.i()).await; &lx.tree,
&mut (self.ctx.clone(), self.exprs),
&mut (),
&self.src,
self.ctx.i(),
)
.await;
Ok((&self.text[lx.pos as usize..], tree)) Ok((&self.text[lx.pos as usize..], tree))
} }
@@ -57,8 +73,10 @@ impl<'a> LexContext<'a> {
pub fn pos_lt(&self, len: impl TryInto<u32, Error: fmt::Debug>, tail: &'a str) -> SrcRange { pub fn pos_lt(&self, len: impl TryInto<u32, Error: fmt::Debug>, tail: &'a str) -> SrcRange {
SrcRange::new(self.pos(tail) - len.try_into().unwrap()..self.pos(tail), &self.src) SrcRange::new(self.pos(tail) - len.try_into().unwrap()..self.pos(tail), &self.src)
} }
}
pub fn i(&self) -> &Interner { self.ctx.i() } impl ParseCtx for LexContext<'_> {
fn i(&self) -> &Interner { self.ctx.i() }
fn rep(&self) -> &Reporter { self.rep }
} }
pub trait Lexer: Send + Sync + Sized + Default + 'static { pub trait Lexer: Send + Sync + Sized + Default + 'static {

View File

@@ -4,14 +4,16 @@ pub mod atom;
pub mod atom_owned; pub mod atom_owned;
pub mod atom_thin; pub mod atom_thin;
pub mod conv; pub mod conv;
pub mod coroutine_exec;
pub mod entrypoint; pub mod entrypoint;
pub mod expr; pub mod expr;
pub mod func_atom; pub mod func_atom;
pub mod gen_expr; pub mod gen_expr;
pub mod lexer; pub mod lexer;
pub mod msg; // pub mod msg;
pub mod other_system; pub mod other_system;
pub mod parser; pub mod parser;
pub mod reflection;
pub mod system; pub mod system;
pub mod system_ctor; pub mod system_ctor;
pub mod tokio; pub mod tokio;

View File

@@ -1,12 +1,9 @@
use std::pin::pin; use std::pin::pin;
use async_once_cell::OnceCell; use async_once_cell::OnceCell;
use async_std::io::{self, Stdout}; use futures::lock::Mutex;
use async_std::sync::Mutex;
use orchid_base::msg::{recv_msg, send_msg}; use orchid_base::msg::{recv_msg, send_msg};
static STDOUT: OnceCell<Mutex<Stdout>> = OnceCell::new();
pub async fn send_parent_msg(msg: &[u8]) -> io::Result<()> { pub async fn send_parent_msg(msg: &[u8]) -> io::Result<()> {
let stdout_lk = STDOUT.get_or_init(async { Mutex::new(io::stdout()) }).await; let stdout_lk = STDOUT.get_or_init(async { Mutex::new(io::stdout()) }).await;
let mut stdout_g = stdout_lk.lock().await; let mut stdout_g = stdout_lk.lock().await;

View File

@@ -1,25 +1,47 @@
use std::marker::PhantomData; use std::marker::PhantomData;
use futures::FutureExt; use async_fn_stream::stream;
use futures::future::{LocalBoxFuture, join_all}; use futures::future::{LocalBoxFuture, join_all};
use futures::{FutureExt, Stream, StreamExt};
use itertools::Itertools; use itertools::Itertools;
use orchid_api::ResolveNames; use never::Never;
use orchid_base::error::OrcRes; use orchid_base::error::{OrcErrv, OrcRes, Reporter};
use orchid_base::id_store::IdStore; use orchid_base::id_store::IdStore;
use orchid_base::interner::Tok; use orchid_base::interner::{Interner, Tok};
use orchid_base::location::SrcRange; use orchid_base::location::SrcRange;
use orchid_base::match_mapping;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::parse::{Comment, Snippet}; use orchid_base::parse::{Comment, ParseCtx, Snippet};
use orchid_base::reqnot::{ReqHandlish, Requester}; use orchid_base::reqnot::Requester;
use orchid_base::tree::ttv_into_api; use orchid_base::tree::{TokTree, Token, ttv_into_api};
use crate::api; use crate::api;
use crate::conv::ToExpr;
use crate::expr::Expr; use crate::expr::Expr;
use crate::gen_expr::GExpr; use crate::gen_expr::GExpr;
use crate::system::{SysCtx, SysCtxEntry}; use crate::system::{SysCtx, SysCtxEntry};
use crate::tree::GenTokTree; use crate::tree::{GenTok, GenTokTree};
pub type GenSnippet<'a> = Snippet<'a, Expr, GExpr>; pub type PTok = Token<Expr, Never>;
pub type PTokTree = TokTree<Expr, Never>;
pub type PSnippet<'a> = Snippet<'a, Expr, Never>;
pub fn p_tok2gen(tok: PTok) -> GenTok {
match_mapping!(tok, PTok => GenTok {
Comment(s), Name(n), BR, Handle(ex), Bottom(err),
LambdaHead(arg => Box::new(p_tree2gen(*arg))),
NS(n, arg => Box::new(p_tree2gen(*arg))),
S(p, body () p_v2gen),
} {
PTok::NewExpr(never) => match never {}
})
}
pub fn p_tree2gen(tree: PTokTree) -> GenTokTree {
TokTree { tok: p_tok2gen(tree.tok), sr: tree.sr }
}
pub fn p_v2gen(v: impl IntoIterator<Item = PTokTree>) -> Vec<GenTokTree> {
v.into_iter().map(p_tree2gen).collect_vec()
}
pub trait Parser: Send + Sync + Sized + Default + 'static { pub trait Parser: Send + Sync + Sized + Default + 'static {
const LINE_HEAD: &'static str; const LINE_HEAD: &'static str;
@@ -27,7 +49,7 @@ pub trait Parser: Send + Sync + Sized + Default + 'static {
ctx: ParsCtx<'a>, ctx: ParsCtx<'a>,
exported: bool, exported: bool,
comments: Vec<Comment>, comments: Vec<Comment>,
line: GenSnippet<'a>, line: PSnippet<'a>,
) -> impl Future<Output = OrcRes<Vec<ParsedLine>>> + 'a; ) -> impl Future<Output = OrcRes<Vec<ParsedLine>>> + 'a;
} }
@@ -38,7 +60,7 @@ pub trait DynParser: Send + Sync + 'static {
ctx: ParsCtx<'a>, ctx: ParsCtx<'a>,
exported: bool, exported: bool,
comments: Vec<Comment>, comments: Vec<Comment>,
line: GenSnippet<'a>, line: PSnippet<'a>,
) -> LocalBoxFuture<'a, OrcRes<Vec<ParsedLine>>>; ) -> LocalBoxFuture<'a, OrcRes<Vec<ParsedLine>>>;
} }
@@ -49,7 +71,7 @@ impl<T: Parser> DynParser for T {
ctx: ParsCtx<'a>, ctx: ParsCtx<'a>,
exported: bool, exported: bool,
comments: Vec<Comment>, comments: Vec<Comment>,
line: GenSnippet<'a>, line: PSnippet<'a>,
) -> LocalBoxFuture<'a, OrcRes<Vec<ParsedLine>>> { ) -> LocalBoxFuture<'a, OrcRes<Vec<ParsedLine>>> {
Box::pin(async move { Self::parse(ctx, exported, comments, line).await }) Box::pin(async move { Self::parse(ctx, exported, comments, line).await })
} }
@@ -61,18 +83,25 @@ pub struct ParsCtx<'a> {
_parse: PhantomData<&'a mut ()>, _parse: PhantomData<&'a mut ()>,
ctx: SysCtx, ctx: SysCtx,
module: Sym, module: Sym,
reporter: &'a Reporter,
} }
impl ParsCtx<'_> { impl<'a> ParsCtx<'a> {
pub(crate) fn new(ctx: SysCtx, module: Sym) -> Self { Self { _parse: PhantomData, ctx, module } } pub(crate) fn new(ctx: SysCtx, module: Sym, reporter: &'a Reporter) -> Self {
Self { _parse: PhantomData, ctx, module, reporter }
}
pub fn ctx(&self) -> &SysCtx { &self.ctx } pub fn ctx(&self) -> &SysCtx { &self.ctx }
pub fn module(&self) -> Sym { self.module.clone() } pub fn module(&self) -> Sym { self.module.clone() }
} }
impl ParseCtx for ParsCtx<'_> {
fn i(&self) -> &Interner { self.ctx.i() }
fn rep(&self) -> &Reporter { self.reporter }
}
type BoxConstCallback = Box<dyn FnOnce(ConstCtx) -> LocalBoxFuture<'static, GExpr>>; type BoxConstCallback = Box<dyn FnOnce(ConstCtx) -> LocalBoxFuture<'static, GExpr>>;
#[derive(Default)] #[derive(Default)]
struct ParsedConstCtxEntry { pub(crate) struct ParsedConstCtxEntry {
consts: IdStore<BoxConstCallback>, pub(crate) consts: IdStore<BoxConstCallback>,
} }
impl SysCtxEntry for ParsedConstCtxEntry {} impl SysCtxEntry for ParsedConstCtxEntry {}
@@ -82,7 +111,32 @@ pub struct ParsedLine {
pub kind: ParsedLineKind, pub kind: ParsedLineKind,
} }
impl ParsedLine { impl ParsedLine {
pub async fn into_api(self, ctx: SysCtx, hand: &dyn ReqHandlish) -> api::ParsedLine { pub fn cnst<'a, R: ToExpr + 'static, F: AsyncFnOnce(ConstCtx) -> R + 'static>(
sr: &SrcRange,
comments: impl IntoIterator<Item = &'a Comment>,
exported: bool,
name: Tok<String>,
f: F,
) -> Self {
let cb = Box::new(|ctx| async move { f(ctx).await.to_expr().await }.boxed_local());
let kind = ParsedLineKind::Mem(ParsedMem { name, exported, kind: ParsedMemKind::Const(cb) });
let comments = comments.into_iter().cloned().collect();
ParsedLine { comments, sr: sr.clone(), kind }
}
pub fn module<'a>(
sr: &SrcRange,
comments: impl IntoIterator<Item = &'a Comment>,
exported: bool,
name: &Tok<String>,
use_prelude: bool,
lines: impl IntoIterator<Item = ParsedLine>,
) -> Self {
let mem_kind = ParsedMemKind::Mod { lines: lines.into_iter().collect(), use_prelude };
let line_kind = ParsedLineKind::Mem(ParsedMem { name: name.clone(), exported, kind: mem_kind });
let comments = comments.into_iter().cloned().collect();
ParsedLine { comments, sr: sr.clone(), kind: line_kind }
}
pub async fn into_api(self, mut ctx: SysCtx) -> api::ParsedLine {
api::ParsedLine { api::ParsedLine {
comments: self.comments.into_iter().map(|c| c.to_api()).collect(), comments: self.comments.into_iter().map(|c| c.to_api()).collect(),
source_range: self.sr.to_api(), source_range: self.sr.to_api(),
@@ -94,23 +148,21 @@ impl ParsedLine {
ParsedMemKind::Const(cb) => api::ParsedMemberKind::Constant(api::ParsedConstId( ParsedMemKind::Const(cb) => api::ParsedMemberKind::Constant(api::ParsedConstId(
ctx.get_or_default::<ParsedConstCtxEntry>().consts.add(cb).id(), ctx.get_or_default::<ParsedConstCtxEntry>().consts.add(cb).id(),
)), )),
ParsedMemKind::Mod(plv) => ParsedMemKind::Mod { lines, use_prelude } => api::ParsedMemberKind::Module {
api::ParsedMemberKind::Module(linev_into_api(plv, ctx, hand).boxed_local().await), lines: linev_into_api(lines, ctx).boxed_local().await,
use_prelude,
},
}, },
}), }),
ParsedLineKind::Rec(tv) => ParsedLineKind::Rec(tv) =>
api::ParsedLineKind::Recursive(ttv_into_api(tv, &mut (), &mut (ctx, hand)).await), api::ParsedLineKind::Recursive(ttv_into_api(tv, &mut (), &mut ctx).await),
}, },
} }
} }
} }
pub(crate) async fn linev_into_api( pub(crate) async fn linev_into_api(v: Vec<ParsedLine>, ctx: SysCtx) -> Vec<api::ParsedLine> {
v: Vec<ParsedLine>, join_all(v.into_iter().map(|l| l.into_api(ctx.clone()))).await
ctx: SysCtx,
hand: &dyn ReqHandlish,
) -> Vec<api::ParsedLine> {
join_all(v.into_iter().map(|l| l.into_api(ctx.clone(), hand))).await
} }
pub enum ParsedLineKind { pub enum ParsedLineKind {
@@ -119,60 +171,50 @@ pub enum ParsedLineKind {
} }
pub struct ParsedMem { pub struct ParsedMem {
name: Tok<String>, pub name: Tok<String>,
exported: bool, pub exported: bool,
kind: ParsedMemKind, pub kind: ParsedMemKind,
} }
pub enum ParsedMemKind { pub enum ParsedMemKind {
Const(BoxConstCallback), Const(BoxConstCallback),
Mod(Vec<ParsedLine>), Mod { lines: Vec<ParsedLine>, use_prelude: bool },
} }
impl ParsedMemKind { #[derive(Clone)]
pub fn cnst<F: AsyncFnOnce(ConstCtx) -> GExpr + 'static>(f: F) -> Self {
Self::Const(Box::new(|ctx| Box::pin(f(ctx))))
}
}
/* TODO: how the macro runner uses the multi-stage loader
Since the macro runner actually has to invoke the interpreter,
it'll run at const-time and not at postparse-time anyway.
pasing stage establishes the role of every constant as a macro keyword
postparse / const load links up constants with every macro they can directly invoke
the constants representing the keywords might not actually be postparsed,
\ the connection is instead made by detecting in the macro system that the
\ resolved name is owned by a macro
the returned constant from this call is always an entrypoint call to
\ the macro system
the constants representing the keywords resolve to panic
execute relies on these links detected in the extension to dispatch relevant macros
*/
pub struct ConstCtx { pub struct ConstCtx {
ctx: SysCtx, ctx: SysCtx,
constid: api::ParsedConstId, constid: api::ParsedConstId,
} }
impl ConstCtx { impl ConstCtx {
pub async fn names<const N: usize>(&self, names: [&Sym; N]) -> [Option<Sym>; N] { pub fn ctx(&self) -> &SysCtx { &self.ctx }
let resolve_names = ResolveNames { pub fn i(&self) -> &Interner { self.ctx.i() }
pub fn names<'b>(
&'b self,
names: impl IntoIterator<Item = &'b Sym> + 'b,
) -> impl Stream<Item = OrcRes<Sym>> + 'b {
let resolve_names = api::ResolveNames {
constid: self.constid, constid: self.constid,
sys: self.ctx.sys_id(), sys: self.ctx.sys_id(),
names: names.into_iter().map(|n| n.to_api()).collect_vec(), names: names.into_iter().map(|n| n.to_api()).collect_vec(),
}; };
let names = self.ctx.reqnot().request(resolve_names).await; stream(async |mut cx| {
let mut results = [const { None }; N]; for name_opt in self.ctx.reqnot().request(resolve_names).await {
for (i, name) in names.into_iter().enumerate().filter_map(|(i, n)| Some((i, n?))) { cx.emit(match name_opt {
results[i] = Some(Sym::from_api(name, self.ctx.i()).await); Err(e) => Err(OrcErrv::from_api(&e, self.ctx.i()).await),
Ok(name) => Ok(Sym::from_api(name, self.ctx.i()).await),
})
.await
} }
results })
}
pub async fn names_n<const N: usize>(&self, names: [&Sym; N]) -> [OrcRes<Sym>; N] {
self.names(names).collect::<Vec<_>>().await.try_into().expect("Lengths must match")
} }
} }
pub(crate) async fn get_const(id: api::ParsedConstId, ctx: SysCtx) -> GExpr { pub(crate) async fn get_const(id: api::ParsedConstId, ctx: SysCtx) -> GExpr {
let ent = ctx.get::<ParsedConstCtxEntry>(); let ent = ctx.get_or_default::<ParsedConstCtxEntry>();
let rec = ent.consts.get(id.0).expect("Bad ID or double read of parsed const"); let rec = ent.consts.get(id.0).expect("Bad ID or double read of parsed const");
let ctx = ConstCtx { constid: id, ctx: ctx.clone() }; let ctx = ConstCtx { constid: id, ctx: ctx.clone() };
rec.remove()(ctx).await rec.remove()(ctx).await

View File

@@ -0,0 +1,163 @@
use std::cell::OnceCell;
use std::rc::Rc;
use futures::FutureExt;
use futures::lock::Mutex;
use memo_map::MemoMap;
use orchid_base::interner::Tok;
use orchid_base::name::{NameLike, VPath};
use orchid_base::reqnot::Requester;
use crate::api;
use crate::system::{SysCtx, SysCtxEntry, WeakSysCtx};
pub struct ReflMemData {
// None for inferred steps
public: OnceCell<bool>,
kind: ReflMemKind,
}
#[derive(Clone)]
pub struct ReflMem(Rc<ReflMemData>);
impl ReflMem {
pub fn kind(&self) -> ReflMemKind { self.0.kind.clone() }
}
#[derive(Clone)]
pub enum ReflMemKind {
Const,
Mod(ReflMod),
}
pub struct ReflModData {
inferred: Mutex<bool>,
path: VPath,
ctx: WeakSysCtx,
members: MemoMap<Tok<String>, ReflMem>,
}
#[derive(Clone)]
pub struct ReflMod(Rc<ReflModData>);
impl ReflMod {
fn ctx(&self) -> SysCtx {
self.0.ctx.upgrade().expect("ReflectedModule accessed after context drop")
}
pub fn path(&self) -> &[Tok<String>] { &self.0.path[..] }
pub fn is_root(&self) -> bool { self.0.path.is_empty() }
async fn try_populate(&self) -> Result<(), api::LsModuleError> {
let ctx = self.ctx();
let path_tok = ctx.i().i(&self.0.path[..]).await;
let reply = match ctx.reqnot().request(api::LsModule(ctx.sys_id(), path_tok.to_api())).await {
Err(api::LsModuleError::TreeUnavailable) =>
panic!("Reflected tree accessed outside an interpreter call. This extension is faulty."),
Err(err) => return Err(err),
Ok(details) => details,
};
for (k, v) in reply.members {
let k = ctx.i().ex(k).await;
let mem = match self.0.members.get(&k) {
Some(mem) => mem,
None => {
let path = self.0.path.clone().name_with_suffix(k.clone()).to_sym(ctx.i()).await;
let kind = match v.kind {
api::MemberInfoKind::Constant => ReflMemKind::Const,
api::MemberInfoKind::Module =>
ReflMemKind::Mod(default_module(&ctx, VPath::new(path.segs()))),
};
self.0.members.get_or_insert(&k, || default_member(self.is_root(), kind))
},
};
let _ = mem.0.public.set(v.public);
}
Ok(())
}
pub async fn get_child(&self, key: &Tok<String>) -> Option<ReflMem> {
let inferred_g = self.0.inferred.lock().await;
if let Some(mem) = self.0.members.get(key) {
return Some(mem.clone());
}
if !*inferred_g {
return None;
}
match self.try_populate().await {
Err(api::LsModuleError::InvalidPath) =>
panic!("Path became invalid since module was created"),
Err(api::LsModuleError::IsConstant) =>
panic!("Path previously contained a module but now contains a constant"),
Err(api::LsModuleError::TreeUnavailable) => unreachable!(),
Ok(()) => (),
}
self.0.members.get(key).cloned()
}
pub async fn get_by_path(&self, path: &[Tok<String>]) -> Result<ReflMem, InvalidPathError> {
let ctx = self.ctx();
let (next, tail) = path.split_first().expect("Attempted to walk by empty path");
let inferred_g = self.0.inferred.lock().await;
if let Some(next) = self.0.members.get(next) {
return if tail.is_empty() {
Ok(next.clone())
} else {
match next.kind() {
ReflMemKind::Const => Err(InvalidPathError { keep_ancestry: true }),
ReflMemKind::Mod(m) => m.get_by_path(tail).boxed_local().await,
}
};
}
if !*inferred_g {
return Err(InvalidPathError { keep_ancestry: true });
}
let candidate = default_module(&ctx, self.0.path.clone().suffix([next.clone()]));
if tail.is_empty() {
return match candidate.try_populate().await {
Ok(()) => {
let tgt_mem = default_member(self.is_root(), ReflMemKind::Mod(candidate));
self.0.members.insert(next.clone(), tgt_mem.clone());
Ok(tgt_mem)
},
Err(api::LsModuleError::InvalidPath) => Err(InvalidPathError { keep_ancestry: false }),
Err(api::LsModuleError::IsConstant) => {
let const_mem = default_member(self.is_root(), ReflMemKind::Const);
self.0.members.insert(next.clone(), const_mem);
Err(InvalidPathError { keep_ancestry: true })
},
Err(api::LsModuleError::TreeUnavailable) => unreachable!(),
};
}
match candidate.get_by_path(tail).boxed_local().await {
e @ Err(InvalidPathError { keep_ancestry: false }) => e,
res @ Err(InvalidPathError { keep_ancestry: true }) | res @ Ok(_) => {
let tgt_mem = default_member(self.is_root(), ReflMemKind::Mod(candidate));
self.0.members.insert(next.clone(), tgt_mem);
res
},
}
}
}
struct ReflRoot(ReflMod);
impl SysCtxEntry for ReflRoot {}
pub struct InvalidPathError {
keep_ancestry: bool,
}
fn default_module(ctx: &SysCtx, path: VPath) -> ReflMod {
ReflMod(Rc::new(ReflModData {
ctx: ctx.downgrade(),
inferred: Mutex::new(true),
path,
members: MemoMap::new(),
}))
}
fn default_member(is_root: bool, kind: ReflMemKind) -> ReflMem {
ReflMem(Rc::new(ReflMemData {
public: if is_root { true.into() } else { OnceCell::new() },
kind,
}))
}
fn get_root(ctx: &SysCtx) -> &ReflRoot {
ctx.get_or_insert(|| ReflRoot(default_module(ctx, VPath::new([]))))
}
pub fn refl(ctx: &SysCtx) -> ReflMod { get_root(ctx).0.clone() }

View File

@@ -3,22 +3,23 @@ use std::fmt;
use std::future::Future; use std::future::Future;
use std::num::NonZero; use std::num::NonZero;
use std::pin::Pin; use std::pin::Pin;
use std::rc::Rc; use std::rc::{Rc, Weak};
use futures::future::LocalBoxFuture; use futures::future::LocalBoxFuture;
use memo_map::MemoMap; use memo_map::MemoMap;
use orchid_api::ExtMsgSet;
use orchid_api_traits::{Coding, Decode}; use orchid_api_traits::{Coding, Decode};
use orchid_base::boxed_iter::BoxedIter; use orchid_base::boxed_iter::BoxedIter;
use orchid_base::builtin::Spawner; use orchid_base::builtin::Spawner;
use orchid_base::interner::Interner; use orchid_base::interner::Interner;
use orchid_base::logging::Logger; use orchid_base::logging::Logger;
use orchid_base::name::Sym;
use orchid_base::reqnot::{Receipt, ReqNot}; use orchid_base::reqnot::{Receipt, ReqNot};
use crate::api; use crate::api;
use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId, AtomicFeatures, ForeignAtom, TypAtom, get_info}; use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId, AtomicFeatures, ForeignAtom, TypAtom, get_info};
use crate::coroutine_exec::Replier;
use crate::entrypoint::ExtReq; use crate::entrypoint::ExtReq;
use crate::func_atom::Fun; use crate::func_atom::{Fun, Lambda};
use crate::lexer::LexerObj; use crate::lexer::LexerObj;
use crate::parser::ParserObj; use crate::parser::ParserObj;
use crate::system_ctor::{CtedObj, SystemCtor}; use crate::system_ctor::{CtedObj, SystemCtor};
@@ -35,14 +36,14 @@ pub trait DynSystemCard: Send + Sync + 'static {
fn name(&self) -> &'static str; fn name(&self) -> &'static str;
/// Atoms explicitly defined by the system card. Do not rely on this for /// Atoms explicitly defined by the system card. Do not rely on this for
/// querying atoms as it doesn't include the general atom types /// querying atoms as it doesn't include the general atom types
fn atoms(&self) -> BoxedIter<Option<Box<dyn AtomDynfo>>>; fn atoms(&'_ self) -> BoxedIter<'_, Option<Box<dyn AtomDynfo>>>;
} }
/// Atoms supported by this package which may appear in all extensions. /// Atoms supported by this package which may appear in all extensions.
/// The indices of these are bitwise negated, such that the MSB of an atom index /// The indices of these are bitwise negated, such that the MSB of an atom index
/// marks whether it belongs to this package (0) or the importer (1) /// marks whether it belongs to this package (0) or the importer (1)
fn general_atoms() -> impl Iterator<Item = Option<Box<dyn AtomDynfo>>> { fn general_atoms() -> impl Iterator<Item = Option<Box<dyn AtomDynfo>>> {
[Some(Fun::dynfo())].into_iter() [Some(Fun::dynfo()), Some(Lambda::dynfo()), Some(Replier::dynfo())].into_iter()
} }
pub fn atom_info_for( pub fn atom_info_for(
@@ -70,17 +71,20 @@ pub async fn resolv_atom(
sys: &(impl DynSystemCard + ?Sized), sys: &(impl DynSystemCard + ?Sized),
atom: &api::Atom, atom: &api::Atom,
) -> Box<dyn AtomDynfo> { ) -> Box<dyn AtomDynfo> {
let tid = AtomTypeId::decode(Pin::new(&mut &atom.data[..])).await; let tid = AtomTypeId::decode(Pin::new(&mut &atom.data.0[..])).await;
atom_by_idx(sys, tid).expect("Value of nonexistent type found") atom_by_idx(sys, tid).expect("Value of nonexistent type found")
} }
impl<T: SystemCard> DynSystemCard for T { impl<T: SystemCard> DynSystemCard for T {
fn name(&self) -> &'static str { T::Ctor::NAME } fn name(&self) -> &'static str { T::Ctor::NAME }
fn atoms(&self) -> BoxedIter<Option<Box<dyn AtomDynfo>>> { Box::new(Self::atoms().into_iter()) } fn atoms(&'_ self) -> BoxedIter<'_, Option<Box<dyn AtomDynfo>>> {
Box::new(Self::atoms().into_iter())
}
} }
/// System as defined by author /// System as defined by author
pub trait System: Send + Sync + SystemCard + 'static { pub trait System: Send + Sync + SystemCard + 'static {
fn prelude(i: &Interner) -> impl Future<Output = Vec<Sym>>;
fn env() -> Vec<GenMember>; fn env() -> Vec<GenMember>;
fn lexers() -> Vec<LexerObj>; fn lexers() -> Vec<LexerObj>;
fn parsers() -> Vec<ParserObj>; fn parsers() -> Vec<ParserObj>;
@@ -88,7 +92,8 @@ pub trait System: Send + Sync + SystemCard + 'static {
} }
pub trait DynSystem: Send + Sync + DynSystemCard + 'static { pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
fn dyn_env(&self) -> Vec<GenMember>; fn dyn_prelude<'a>(&'a self, i: &'a Interner) -> LocalBoxFuture<'a, Vec<Sym>>;
fn dyn_env(&'_ self) -> Vec<GenMember>;
fn dyn_lexers(&self) -> Vec<LexerObj>; fn dyn_lexers(&self) -> Vec<LexerObj>;
fn dyn_parsers(&self) -> Vec<ParserObj>; fn dyn_parsers(&self) -> Vec<ParserObj>;
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>>; fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>>;
@@ -96,7 +101,10 @@ pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
} }
impl<T: System> DynSystem for T { impl<T: System> DynSystem for T {
fn dyn_env(&self) -> Vec<GenMember> { Self::env() } fn dyn_prelude<'a>(&'a self, i: &'a Interner) -> LocalBoxFuture<'a, Vec<Sym>> {
Box::pin(Self::prelude(i))
}
fn dyn_env(&'_ self) -> Vec<GenMember> { Self::env() }
fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() } fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() }
fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() } fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() }
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>> { fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>> {
@@ -109,7 +117,7 @@ impl<T: System> DynSystem for T {
pub async fn downcast_atom<A>(foreign: ForeignAtom) -> Result<TypAtom<A>, ForeignAtom> pub async fn downcast_atom<A>(foreign: ForeignAtom) -> Result<TypAtom<A>, ForeignAtom>
where A: AtomicFeatures { where A: AtomicFeatures {
let mut data = &foreign.atom.data[..]; let mut data = &foreign.atom.data.0[..];
let ctx = foreign.ctx().clone(); let ctx = foreign.ctx().clone();
let value = AtomTypeId::decode(Pin::new(&mut data)).await; let value = AtomTypeId::decode(Pin::new(&mut data)).await;
let own_inst = ctx.get::<CtedObj>().inst(); let own_inst = ctx.get::<CtedObj>().inst();
@@ -126,24 +134,14 @@ where A: AtomicFeatures {
} }
let val = dynfo.decode(AtomCtx(data, foreign.atom.drop, ctx)).await; let val = dynfo.decode(AtomCtx(data, foreign.atom.drop, ctx)).await;
let value = *val.downcast::<A::Data>().expect("atom decode returned wrong type"); let value = *val.downcast::<A::Data>().expect("atom decode returned wrong type");
Ok(TypAtom { value, data: foreign }) Ok(TypAtom { value, untyped: foreign })
} }
// #[derive(Clone)] #[derive(Clone)]
// pub struct SysCtx { pub struct WeakSysCtx(Weak<MemoMap<TypeId, Box<dyn Any>>>);
// pub reqnot: ReqNot<api::ExtMsgSet>, impl WeakSysCtx {
// pub spawner: Spawner, pub fn upgrade(&self) -> Option<SysCtx> { Some(SysCtx(self.0.upgrade()?)) }
// pub id: api::SysId, }
// pub cted: CtedObj,
// pub logger: Logger,
// pub obj_store: ObjStore,
// pub i: Rc<Interner>,
// }
// impl fmt::Debug for SysCtx {
// fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// write!(f, "SysCtx({:?})", self.id)
// }
// }
#[derive(Clone)] #[derive(Clone)]
pub struct SysCtx(Rc<MemoMap<TypeId, Box<dyn Any>>>); pub struct SysCtx(Rc<MemoMap<TypeId, Box<dyn Any>>>);
@@ -151,7 +149,7 @@ impl SysCtx {
pub fn new( pub fn new(
id: api::SysId, id: api::SysId,
i: Interner, i: Interner,
reqnot: ReqNot<ExtMsgSet>, reqnot: ReqNot<api::ExtMsgSet>,
spawner: Spawner, spawner: Spawner,
logger: Logger, logger: Logger,
cted: CtedObj, cted: CtedObj,
@@ -160,6 +158,7 @@ impl SysCtx {
this.add(id).add(i).add(reqnot).add(spawner).add(logger).add(cted); this.add(id).add(i).add(reqnot).add(spawner).add(logger).add(cted);
this this
} }
pub fn downgrade(&self) -> WeakSysCtx { WeakSysCtx(Rc::downgrade(&self.0)) }
pub fn add<T: SysCtxEntry>(&self, t: T) -> &Self { pub fn add<T: SysCtxEntry>(&self, t: T) -> &Self {
assert!(self.0.insert(TypeId::of::<T>(), Box::new(t)), "Key already exists"); assert!(self.0.insert(TypeId::of::<T>(), Box::new(t)), "Key already exists");
self self
@@ -168,13 +167,7 @@ impl SysCtx {
(self.0.get_or_insert_owned(TypeId::of::<T>(), || Box::new(f())).downcast_ref()) (self.0.get_or_insert_owned(TypeId::of::<T>(), || Box::new(f())).downcast_ref())
.expect("Keyed by TypeId") .expect("Keyed by TypeId")
} }
pub fn get_or_default<T: SysCtxEntry + Default>(&self) -> &T { pub fn get_or_default<T: SysCtxEntry + Default>(&self) -> &T { self.get_or_insert(T::default) }
self.get_or_insert(|| {
let rc_id = self.0.as_ref() as *const _ as *const () as usize;
eprintln!("Default-initializing {} in {}", type_name::<T>(), rc_id);
T::default()
})
}
pub fn try_get<T: SysCtxEntry>(&self) -> Option<&T> { pub fn try_get<T: SysCtxEntry>(&self) -> Option<&T> {
Some(self.0.get(&TypeId::of::<T>())?.downcast_ref().expect("Keyed by TypeId")) Some(self.0.get(&TypeId::of::<T>())?.downcast_ref().expect("Keyed by TypeId"))
} }
@@ -184,7 +177,7 @@ impl SysCtx {
/// Shorthand to get the [Interner] instance /// Shorthand to get the [Interner] instance
pub fn i(&self) -> &Interner { self.get::<Interner>() } pub fn i(&self) -> &Interner { self.get::<Interner>() }
/// Shorthand to get the messaging link /// Shorthand to get the messaging link
pub fn reqnot(&self) -> &ReqNot<ExtMsgSet> { self.get::<ReqNot<ExtMsgSet>>() } pub fn reqnot(&self) -> &ReqNot<api::ExtMsgSet> { self.get::<ReqNot<api::ExtMsgSet>>() }
/// Shorthand to get the system ID /// Shorthand to get the system ID
pub fn sys_id(&self) -> api::SysId { *self.get::<api::SysId>() } pub fn sys_id(&self) -> api::SysId { *self.get::<api::SysId>() }
/// Shorthand to get the task spawner callback /// Shorthand to get the task spawner callback

View File

@@ -62,7 +62,7 @@ pub trait SystemCtor: Send + Sync + 'static {
type Instance: System; type Instance: System;
const NAME: &'static str; const NAME: &'static str;
const VERSION: f64; const VERSION: f64;
fn inst() -> Option<Self::Instance>; fn inst(deps: <Self::Deps as DepDef>::Sat) -> Self::Instance;
} }
pub trait DynSystemCtor: Send + Sync + 'static { pub trait DynSystemCtor: Send + Sync + 'static {
@@ -81,8 +81,8 @@ impl<T: SystemCtor> DynSystemCtor for T {
} }
fn new_system(&self, api::NewSystem { system: _, id: _, depends }: &api::NewSystem) -> CtedObj { fn new_system(&self, api::NewSystem { system: _, id: _, depends }: &api::NewSystem) -> CtedObj {
let mut ids = depends.iter().copied(); let mut ids = depends.iter().copied();
let inst = Arc::new(T::inst().expect("Constructor did not create system"));
let deps = T::Deps::create(&mut || ids.next().unwrap()); let deps = T::Deps::create(&mut || ids.next().unwrap());
let inst = Arc::new(T::inst(deps.clone()));
Arc::new(Cted::<T> { deps, inst }) Arc::new(Cted::<T> { deps, inst })
} }
} }

View File

@@ -2,25 +2,28 @@ use crate::entrypoint::ExtensionData;
#[cfg(feature = "tokio")] #[cfg(feature = "tokio")]
pub async fn tokio_main(data: ExtensionData) { pub async fn tokio_main(data: ExtensionData) {
use std::io::Write; use std::io::{ErrorKind, Write};
use std::mem; use std::mem;
use std::pin::Pin; use std::pin::{Pin, pin};
use std::rc::Rc; use std::rc::Rc;
use async_std::io; use async_once_cell::OnceCell;
use futures::StreamExt; use futures::StreamExt;
use futures::future::LocalBoxFuture; use futures::future::LocalBoxFuture;
use futures::lock::Mutex;
use futures::stream::FuturesUnordered; use futures::stream::FuturesUnordered;
use orchid_api_traits::{Decode, Encode}; use orchid_api_traits::{Decode, Encode};
use orchid_base::msg::{recv_msg, send_msg};
use tokio::io::{Stdout, stdin, stdout};
use tokio::task::{LocalSet, spawn_local}; use tokio::task::{LocalSet, spawn_local};
use tokio_util::compat::{Compat, TokioAsyncReadCompatExt, TokioAsyncWriteCompatExt};
use crate::api; use crate::api;
use crate::entrypoint::extension_init; use crate::entrypoint::extension_init;
use crate::msg::{recv_parent_msg, send_parent_msg};
let local_set = LocalSet::new(); let local_set = LocalSet::new();
local_set.spawn_local(async { local_set.spawn_local(async {
let host_header = api::HostHeader::decode(Pin::new(&mut async_std::io::stdin())).await; let host_header = api::HostHeader::decode(Pin::new(&mut stdin().compat())).await;
let init = let init =
Rc::new(extension_init(data, host_header, Rc::new(|fut| mem::drop(spawn_local(fut))))); Rc::new(extension_init(data, host_header, Rc::new(|fut| mem::drop(spawn_local(fut)))));
let mut buf = Vec::new(); let mut buf = Vec::new();
@@ -32,17 +35,20 @@ pub async fn tokio_main(data: ExtensionData) {
let mut io = FuturesUnordered::<LocalBoxFuture<()>>::new(); let mut io = FuturesUnordered::<LocalBoxFuture<()>>::new();
io.push(Box::pin(async { io.push(Box::pin(async {
loop { loop {
match recv_parent_msg().await { match recv_msg(pin!(stdin().compat())).await {
Ok(msg) => init.send(&msg[..]).await, Ok(msg) => init.send(&msg[..]).await,
Err(e) if e.kind() == io::ErrorKind::BrokenPipe => break, Err(e) if e.kind() == ErrorKind::BrokenPipe => break,
Err(e) if e.kind() == io::ErrorKind::UnexpectedEof => break, Err(e) if e.kind() == ErrorKind::UnexpectedEof => break,
Err(e) => panic!("{e}"), Err(e) => panic!("{e}"),
} }
} }
})); }));
io.push(Box::pin(async { io.push(Box::pin(async {
while let Some(msg) = init.recv().await { while let Some(msg) = init.recv().await {
send_parent_msg(&msg[..]).await.unwrap(); static STDOUT: OnceCell<Mutex<Compat<Stdout>>> = OnceCell::new();
let stdout_lk = STDOUT.get_or_init(async { Mutex::new(stdout().compat_write()) }).await;
let mut stdout_g = stdout_lk.lock().await;
send_msg(pin!(&mut *stdout_g), &msg[..]).await.expect("Parent pipe broken");
} }
})); }));
io.next().await; io.next().await;

View File

@@ -1,7 +1,6 @@
use std::num::NonZero; use std::num::NonZero;
use std::rc::Rc;
use async_stream::stream; use async_fn_stream::stream;
use dyn_clone::{DynClone, clone_box}; use dyn_clone::{DynClone, clone_box};
use futures::future::{LocalBoxFuture, join_all}; use futures::future::{LocalBoxFuture, join_all};
use futures::{FutureExt, StreamExt}; use futures::{FutureExt, StreamExt};
@@ -10,7 +9,6 @@ use itertools::Itertools;
use orchid_base::interner::{Interner, Tok}; use orchid_base::interner::{Interner, Tok};
use orchid_base::location::SrcRange; use orchid_base::location::SrcRange;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::reqnot::ReqHandlish;
use orchid_base::tree::{TokTree, Token, TokenVariant}; use orchid_base::tree::{TokTree, Token, TokenVariant};
use substack::Substack; use substack::Substack;
use trait_set::trait_set; use trait_set::trait_set;
@@ -18,9 +16,9 @@ use trait_set::trait_set;
use crate::api; use crate::api;
use crate::conv::ToExpr; use crate::conv::ToExpr;
use crate::entrypoint::MemberRecord; use crate::entrypoint::MemberRecord;
use crate::expr::{Expr, ExprHandle}; use crate::expr::{BorrowedExprStore, Expr, ExprHandle};
use crate::func_atom::{ExprFunc, Fun}; use crate::func_atom::{ExprFunc, Fun};
use crate::gen_expr::{GExpr, arg, call, lambda, seq, sym_ref}; use crate::gen_expr::{GExpr, sym_ref};
use crate::system::SysCtx; use crate::system::SysCtx;
pub type GenTokTree = TokTree<Expr, GExpr>; pub type GenTokTree = TokTree<Expr, GExpr>;
@@ -28,7 +26,7 @@ pub type GenTok = Token<Expr, GExpr>;
impl TokenVariant<api::Expression> for GExpr { impl TokenVariant<api::Expression> for GExpr {
type FromApiCtx<'a> = (); type FromApiCtx<'a> = ();
type ToApiCtx<'a> = (SysCtx, &'a dyn ReqHandlish); type ToApiCtx<'a> = SysCtx;
async fn from_api( async fn from_api(
_: &api::Expression, _: &api::Expression,
_: &mut Self::FromApiCtx<'_>, _: &mut Self::FromApiCtx<'_>,
@@ -37,45 +35,44 @@ impl TokenVariant<api::Expression> for GExpr {
) -> Self { ) -> Self {
panic!("Received new expression from host") panic!("Received new expression from host")
} }
async fn into_api(self, (ctx, hand): &mut Self::ToApiCtx<'_>) -> api::Expression { async fn into_api(self, ctx: &mut Self::ToApiCtx<'_>) -> api::Expression {
self.api_return(ctx.clone(), hand).await self.api_return(ctx.clone()).await
} }
} }
impl TokenVariant<api::ExprTicket> for Expr { impl TokenVariant<api::ExprTicket> for Expr {
type FromApiCtx<'a> = SysCtx; type FromApiCtx<'a> = (SysCtx, &'a BorrowedExprStore);
async fn from_api( async fn from_api(
api: &api::ExprTicket, api: &api::ExprTicket,
ctx: &mut Self::FromApiCtx<'_>, (ctx, exprs): &mut Self::FromApiCtx<'_>,
_: SrcRange, _: SrcRange,
_: &Interner, _: &Interner,
) -> Self { ) -> Self {
// SAFETY: receiving trees from sublexers implies ownership transfer // SAFETY: receiving trees from sublexers implies borrowing
Expr::from_handle(Rc::new(unsafe { ExprHandle::from_args(ctx.clone(), *api) })) Expr::from_handle(ExprHandle::borrowed(ctx.clone(), *api, exprs))
} }
type ToApiCtx<'a> = (); type ToApiCtx<'a> = ();
async fn into_api(self, (): &mut Self::ToApiCtx<'_>) -> api::ExprTicket { async fn into_api(self, (): &mut Self::ToApiCtx<'_>) -> api::ExprTicket { self.handle().tk }
let hand = self.handle();
std::mem::drop(self);
let h = match Rc::try_unwrap(hand) {
Ok(h) => h,
Err(h) => h.as_ref().clone().await,
};
h.into_tk()
}
} }
pub fn x_tok(x: impl ToExpr) -> GenTok { GenTok::NewExpr(x.to_expr()) } pub async fn x_tok(x: impl ToExpr) -> GenTok { GenTok::NewExpr(x.to_expr().await) }
pub fn ref_tok(path: Sym) -> GenTok { GenTok::NewExpr(sym_ref(path)) } pub async fn ref_tok(path: Sym) -> GenTok { GenTok::NewExpr(sym_ref(path)) }
pub fn cnst(public: bool, name: &str, value: impl ToExpr) -> Vec<GenMember> { pub fn lazy(
public: bool,
name: &str,
cb: impl AsyncFnOnce(Sym, SysCtx) -> MemKind + Clone + 'static,
) -> Vec<GenMember> {
vec![GenMember { vec![GenMember {
name: name.to_string(), name: name.to_string(),
kind: MemKind::Const(value.to_expr()), kind: MemKind::Lazy(LazyMemberFactory::new(cb)),
comments: vec![], comments: vec![],
public, public,
}] }]
} }
pub fn cnst(public: bool, name: &str, value: impl ToExpr + Clone + 'static) -> Vec<GenMember> {
lazy(public, name, async |_, _| MemKind::Const(value.to_expr().await))
}
pub fn module( pub fn module(
public: bool, public: bool,
name: &str, name: &str,
@@ -89,19 +86,8 @@ pub fn root_mod(name: &str, mems: impl IntoIterator<Item = Vec<GenMember>>) -> (
(name.to_string(), kind) (name.to_string(), kind)
} }
pub fn fun<I, O>(public: bool, name: &str, xf: impl ExprFunc<I, O>) -> Vec<GenMember> { pub fn fun<I, O>(public: bool, name: &str, xf: impl ExprFunc<I, O>) -> Vec<GenMember> {
let fac = LazyMemberFactory::new(move |sym, ctx| async { let fac = LazyMemberFactory::new(async move |sym, ctx| {
return MemKind::Const(build_lambdas(Fun::new(sym, ctx, xf).await, 0)); MemKind::Const(Fun::new(sym, ctx, xf).await.to_expr().await)
fn build_lambdas(fun: Fun, i: u64) -> GExpr {
if i < fun.arity().into() {
return lambda(i, [build_lambdas(fun, i + 1)]);
}
let arity = fun.arity();
seq(
(0..arity)
.map(|i| arg(i as u64))
.chain([call([fun.to_expr()].into_iter().chain((0..arity).map(|i| arg(i as u64))))]),
)
}
}); });
vec![GenMember { name: name.to_string(), kind: MemKind::Lazy(fac), public, comments: vec![] }] vec![GenMember { name: name.to_string(), kind: MemKind::Lazy(fac), public, comments: vec![] }]
} }
@@ -201,9 +187,15 @@ impl MemKind {
pub async fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::MemberKind { pub async fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::MemberKind {
match self { match self {
Self::Lazy(lazy) => api::MemberKind::Lazy(ctx.with_lazy(lazy)), Self::Lazy(lazy) => api::MemberKind::Lazy(ctx.with_lazy(lazy)),
Self::Const(c) => api::MemberKind::Const(c.api_return(ctx.sys(), ctx.req()).await), Self::Const(c) => api::MemberKind::Const(c.api_return(ctx.sys()).await),
Self::Mod { members } => api::MemberKind::Module(api::Module { Self::Mod { members } => api::MemberKind::Module(api::Module {
members: Box::pin(stream! { for m in members { yield m.into_api(ctx).await } }.collect()) members: stream(async |mut cx| {
for m in members {
cx.emit(m.into_api(ctx).await).await
}
})
.collect()
.boxed_local()
.await, .await,
}), }),
} }
@@ -214,22 +206,19 @@ pub trait TreeIntoApiCtx {
fn sys(&self) -> SysCtx; fn sys(&self) -> SysCtx;
fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId; fn with_lazy(&mut self, fac: LazyMemberFactory) -> api::TreeId;
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx; fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx;
fn req(&self) -> &impl ReqHandlish;
} }
pub struct TreeIntoApiCtxImpl<'a, 'b, RH: ReqHandlish> { pub struct TreeIntoApiCtxImpl<'a, 'b> {
pub sys: SysCtx, pub sys: SysCtx,
pub basepath: &'a [Tok<String>], pub basepath: &'a [Tok<String>],
pub path: Substack<'a, Tok<String>>, pub path: Substack<'a, Tok<String>>,
pub lazy_members: &'b mut HashMap<api::TreeId, MemberRecord>, pub lazy_members: &'b mut HashMap<api::TreeId, MemberRecord>,
pub req: &'a RH,
} }
impl<RH: ReqHandlish> TreeIntoApiCtx for TreeIntoApiCtxImpl<'_, '_, RH> { impl TreeIntoApiCtx for TreeIntoApiCtxImpl<'_, '_> {
fn sys(&self) -> SysCtx { self.sys.clone() } fn sys(&self) -> SysCtx { self.sys.clone() }
fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx { fn push_path(&mut self, seg: Tok<String>) -> impl TreeIntoApiCtx {
TreeIntoApiCtxImpl { TreeIntoApiCtxImpl {
req: self.req,
lazy_members: self.lazy_members, lazy_members: self.lazy_members,
sys: self.sys.clone(), sys: self.sys.clone(),
basepath: self.basepath, basepath: self.basepath,
@@ -242,5 +231,4 @@ impl<RH: ReqHandlish> TreeIntoApiCtx for TreeIntoApiCtxImpl<'_, '_, RH> {
self.lazy_members.insert(id, MemberRecord::Gen(path, fac)); self.lazy_members.insert(id, MemberRecord::Gen(path, fac));
id id
} }
fn req(&self) -> &impl ReqHandlish { self.req }
} }

View File

@@ -6,14 +6,14 @@ edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
async-lock = "3.4.1"
async-once-cell = "0.5.4" async-once-cell = "0.5.4"
async-process = "2.3.0" async-process = "2.4.0"
async-std = "1.13.0"
async-stream = "0.3.6"
bound = "0.6.0" bound = "0.6.0"
derive_destructure = "1.0.0" derive_destructure = "1.0.0"
futures = "0.3.31" futures = { version = "0.3.31", features = ["std"], default-features = false }
hashbrown = "0.15.2" hashbrown = "0.16.0"
itertools = "0.14.0" itertools = "0.14.0"
lazy_static = "1.5.0" lazy_static = "1.5.0"
memo-map = "0.3.3" memo-map = "0.3.3"
@@ -23,7 +23,7 @@ orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-base = { version = "0.1.0", path = "../orchid-base" } orchid-base = { version = "0.1.0", path = "../orchid-base" }
ordered-float = "5.0.0" ordered-float = "5.0.0"
paste = "1.0.15" pastey = "0.1.1"
substack = "1.1.1" substack = "1.1.1"
test_executors = "0.3.2" test_executors = "0.3.5"
trait-set = "0.3.0" trait-set = "0.3.0"

View File

@@ -1,6 +1,7 @@
use std::fmt; use std::fmt;
use std::rc::{Rc, Weak}; use std::rc::{Rc, Weak};
use async_lock::OnceCell;
use derive_destructure::destructure; use derive_destructure::destructure;
use orchid_base::format::{FmtCtx, FmtUnit, Format, take_first_fmt}; use orchid_base::format::{FmtCtx, FmtUnit, Format, take_first_fmt};
use orchid_base::location::Pos; use orchid_base::location::Pos;
@@ -9,7 +10,7 @@ use orchid_base::tree::AtomRepr;
use crate::api; use crate::api;
use crate::ctx::Ctx; use crate::ctx::Ctx;
use crate::expr::Expr; use crate::expr::{Expr, ExprParseCtx, PathSetBuilder};
use crate::extension::Extension; use crate::extension::Extension;
use crate::system::System; use crate::system::System;
@@ -18,16 +19,17 @@ pub struct AtomData {
owner: System, owner: System,
drop: Option<api::AtomId>, drop: Option<api::AtomId>,
data: Vec<u8>, data: Vec<u8>,
pub(crate) display: OnceCell<FmtUnit>,
} }
impl AtomData { impl AtomData {
#[must_use] #[must_use]
fn api(self) -> api::Atom { fn api(self) -> api::Atom {
let (owner, drop, data) = self.destructure(); let (owner, drop, data, _display) = self.destructure();
api::Atom { data, drop, owner: owner.id() } api::Atom { data: api::AtomData(data), drop, owner: owner.id() }
} }
#[must_use] #[must_use]
fn api_ref(&self) -> api::Atom { fn api_ref(&self) -> api::Atom {
api::Atom { data: self.data.clone(), drop: self.drop, owner: self.owner.id() } api::Atom { data: api::AtomData(self.data.clone()), drop: self.drop, owner: self.owner.id() }
} }
} }
impl Drop for AtomData { impl Drop for AtomData {
@@ -50,35 +52,22 @@ impl fmt::Debug for AtomData {
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct AtomHand(Rc<AtomData>); pub struct AtomHand(Rc<AtomData>);
impl AtomHand { impl AtomHand {
#[must_use] pub(crate) fn new(data: Vec<u8>, owner: System, drop: Option<api::AtomId>) -> Self {
pub(crate) async fn new(api::Atom { data, drop, owner }: api::Atom, ctx: &Ctx) -> Self { Self(Rc::new(AtomData { owner, drop, data, display: OnceCell::new() }))
let create = || async {
let owner = ctx.system_inst(owner).await.expect("Dropped system created atom");
AtomHand(Rc::new(AtomData { data, owner, drop }))
};
if let Some(id) = drop {
let mut owned_g = ctx.owned_atoms.write().await;
if let Some(data) = owned_g.get(&id) {
if let Some(atom) = data.upgrade() {
return atom;
}
}
let new = create().await;
owned_g.insert(id, new.downgrade());
new
} else {
create().await
}
} }
#[must_use] #[must_use]
pub async fn call(self, arg: Expr) -> api::Expression { pub async fn call(self, arg: Expr) -> Expr {
let owner_sys = self.0.owner.clone(); let owner_sys = self.0.owner.clone();
let reqnot = owner_sys.reqnot(); let reqnot = owner_sys.reqnot();
owner_sys.ext().exprs().give_expr(arg.clone()); owner_sys.ext().exprs().give_expr(arg.clone());
match Rc::try_unwrap(self.0) { let ret = match Rc::try_unwrap(self.0) {
Ok(data) => reqnot.request(api::FinalCall(data.api(), arg.id())).await, Ok(data) => reqnot.request(api::FinalCall(data.api(), arg.id())).await,
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), arg.id())).await, Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), arg.id())).await,
} };
let mut parse_ctx = ExprParseCtx { ctx: owner_sys.ctx(), exprs: owner_sys.ext().exprs() };
let val = Expr::from_api(&ret, PathSetBuilder::new(), &mut parse_ctx).await;
owner_sys.ext().exprs().take_expr(arg.id());
val
} }
#[must_use] #[must_use]
pub fn sys(&self) -> &System { &self.0.owner } pub fn sys(&self) -> &System { &self.0.owner }
@@ -96,13 +85,23 @@ impl AtomHand {
} }
impl Format for AtomHand { impl Format for AtomHand {
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
(self.0.display.get_or_init(|| async {
FmtUnit::from_api(&self.0.owner.reqnot().request(api::AtomPrint(self.0.api_ref())).await) FmtUnit::from_api(&self.0.owner.reqnot().request(api::AtomPrint(self.0.api_ref())).await)
}))
.await
.clone()
} }
} }
impl AtomRepr for AtomHand { impl AtomRepr for AtomHand {
type Ctx = Ctx; type Ctx = Ctx;
async fn from_api(atom: &orchid_api::Atom, _: Pos, ctx: &mut Self::Ctx) -> Self { async fn from_api(atom: &api::Atom, _: Pos, ctx: &mut Self::Ctx) -> Self {
Self::new(atom.clone(), ctx).await let api::Atom { data, drop, owner } = atom.clone();
let sys = ctx.system_inst(owner).await.expect("Dropped system created atom");
if let Some(id) = drop {
sys.new_atom(data.0, id).await
} else {
AtomHand::new(data.0, sys, drop)
}
} }
async fn to_api(&self) -> orchid_api::Atom { self.api_ref() } async fn to_api(&self) -> orchid_api::Atom { self.api_ref() }
} }

View File

@@ -3,14 +3,12 @@ use std::num::{NonZero, NonZeroU16};
use std::rc::{Rc, Weak}; use std::rc::{Rc, Weak};
use std::{fmt, ops}; use std::{fmt, ops};
use async_std::sync::RwLock; use async_lock::RwLock;
use hashbrown::HashMap; use hashbrown::HashMap;
use orchid_api::SysId;
use orchid_base::builtin::Spawner; use orchid_base::builtin::Spawner;
use orchid_base::interner::Interner; use orchid_base::interner::Interner;
use crate::api; use crate::api;
use crate::atom::WeakAtomHand;
use crate::expr_store::ExprStore; use crate::expr_store::ExprStore;
use crate::system::{System, WeakSystem}; use crate::system::{System, WeakSystem};
use crate::tree::WeakRoot; use crate::tree::WeakRoot;
@@ -20,7 +18,6 @@ pub struct CtxData {
pub spawn: Spawner, pub spawn: Spawner,
pub systems: RwLock<HashMap<api::SysId, WeakSystem>>, pub systems: RwLock<HashMap<api::SysId, WeakSystem>>,
pub system_id: RefCell<NonZeroU16>, pub system_id: RefCell<NonZeroU16>,
pub owned_atoms: RwLock<HashMap<api::AtomId, WeakAtomHand>>,
pub common_exprs: ExprStore, pub common_exprs: ExprStore,
pub root: RwLock<WeakRoot>, pub root: RwLock<WeakRoot>,
} }
@@ -46,7 +43,6 @@ impl Ctx {
i: Interner::default(), i: Interner::default(),
systems: RwLock::default(), systems: RwLock::default(),
system_id: RefCell::new(NonZero::new(1).unwrap()), system_id: RefCell::new(NonZero::new(1).unwrap()),
owned_atoms: RwLock::default(),
common_exprs: ExprStore::default(), common_exprs: ExprStore::default(),
root: RwLock::default(), root: RwLock::default(),
})) }))
@@ -59,7 +55,7 @@ impl Ctx {
pub(crate) fn next_sys_id(&self) -> api::SysId { pub(crate) fn next_sys_id(&self) -> api::SysId {
let mut g = self.system_id.borrow_mut(); let mut g = self.system_id.borrow_mut();
*g = g.checked_add(1).unwrap_or(NonZeroU16::new(1).unwrap()); *g = g.checked_add(1).unwrap_or(NonZeroU16::new(1).unwrap());
SysId(*g) api::SysId(*g)
} }
#[must_use] #[must_use]
pub fn downgrade(&self) -> WeakCtx { WeakCtx(Rc::downgrade(&self.0)) } pub fn downgrade(&self) -> WeakCtx { WeakCtx(Rc::downgrade(&self.0)) }

View File

@@ -1,6 +1,6 @@
use hashbrown::HashSet; use hashbrown::HashSet;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::{OrcErr, OrcRes, Reporter, mk_err, mk_errv}; use orchid_base::error::{OrcErrv, OrcRes, Reporter, mk_errv};
use orchid_base::interner::{Interner, Tok}; use orchid_base::interner::{Interner, Tok};
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::name::VName; use orchid_base::name::VName;
@@ -16,7 +16,7 @@ pub enum AbsPathError {
RootPath, RootPath,
} }
impl AbsPathError { impl AbsPathError {
pub async fn err_obj(self, i: &Interner, pos: Pos, path: &str) -> OrcErr { pub async fn err_obj(self, i: &Interner, pos: Pos, path: &str) -> OrcErrv {
let (descr, msg) = match self { let (descr, msg) = match self {
AbsPathError::RootPath => ( AbsPathError::RootPath => (
i.i("Path ends on root module").await, i.i("Path ends on root module").await,
@@ -30,7 +30,7 @@ impl AbsPathError {
format!("{path} is leading outside the root."), format!("{path} is leading outside the root."),
), ),
}; };
mk_err(descr, msg, [pos.into()]) mk_errv(descr, msg, [pos])
} }
} }

View File

@@ -1,6 +1,6 @@
use std::mem; use std::mem;
use async_std::sync::RwLockWriteGuard; use async_lock::RwLockWriteGuard;
use bound::Bound; use bound::Bound;
use futures::FutureExt; use futures::FutureExt;
use orchid_base::error::OrcErrv; use orchid_base::error::OrcErrv;
@@ -9,7 +9,7 @@ use orchid_base::location::Pos;
use orchid_base::logging::Logger; use orchid_base::logging::Logger;
use crate::ctx::Ctx; use crate::ctx::Ctx;
use crate::expr::{Expr, ExprKind, ExprParseCtx, PathSet, PathSetBuilder, Step}; use crate::expr::{Expr, ExprKind, PathSet, Step};
use crate::tree::Root; use crate::tree::Root;
type ExprGuard = Bound<RwLockWriteGuard<'static, ExprKind>, Expr>; type ExprGuard = Bound<RwLockWriteGuard<'static, ExprKind>, Expr>;
@@ -109,11 +109,8 @@ impl ExecCtx {
ExprKind::Call(f, x) if !self.did_pop => (ExprKind::Call(f.clone(), x), StackOp::Push(f)), ExprKind::Call(f, x) if !self.did_pop => (ExprKind::Call(f.clone(), x), StackOp::Push(f)),
ExprKind::Call(f, x) => match f.try_into_owned_atom().await { ExprKind::Call(f, x) => match f.try_into_owned_atom().await {
Ok(atom) => { Ok(atom) => {
let ext = atom.sys().ext().clone();
let x_norm = self.unpack_ident(&x).await; let x_norm = self.unpack_ident(&x).await;
let mut parse_ctx = ExprParseCtx { ctx: &self.ctx, exprs: ext.exprs() }; let val = atom.call(x_norm).await;
let val =
Expr::from_api(&atom.call(x_norm).await, PathSetBuilder::new(), &mut parse_ctx).await;
(ExprKind::Identity(val.clone()), StackOp::Swap(val)) (ExprKind::Identity(val.clone()), StackOp::Swap(val))
}, },
Err(f) => match &*f.kind().read().await { Err(f) => match &*f.kind().read().await {
@@ -121,15 +118,9 @@ impl ExecCtx {
panic!("This should not appear outside function bodies"), panic!("This should not appear outside function bodies"),
ExprKind::Missing => panic!("Should have been replaced"), ExprKind::Missing => panic!("Should have been replaced"),
ExprKind::Atom(a) => { ExprKind::Atom(a) => {
let ext = a.sys().ext().clone();
let x_norm = self.unpack_ident(&x).await; let x_norm = self.unpack_ident(&x).await;
let val = Expr::from_api( let ret = a.clone().call(x_norm).await;
&a.clone().call(x_norm).await, (ExprKind::Identity(ret.clone()), StackOp::Swap(ret))
PathSetBuilder::new(),
&mut ExprParseCtx { ctx: ext.ctx(), exprs: ext.exprs() },
)
.await;
(ExprKind::Identity(val.clone()), StackOp::Swap(val))
}, },
ExprKind::Bottom(exprv) => (ExprKind::Bottom(exprv.clone()), StackOp::Pop), ExprKind::Bottom(exprv) => (ExprKind::Bottom(exprv.clone()), StackOp::Pop),
ExprKind::Lambda(None, body) => ExprKind::Lambda(None, body) =>

View File

@@ -4,9 +4,8 @@ use std::num::NonZeroU64;
use std::rc::{Rc, Weak}; use std::rc::{Rc, Weak};
use std::{fmt, mem}; use std::{fmt, mem};
use async_std::sync::RwLock; use async_lock::RwLock;
use futures::FutureExt; use futures::FutureExt;
use hashbrown::HashSet;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::OrcErrv; use orchid_base::error::OrcErrv;
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants}; use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
@@ -90,7 +89,10 @@ impl Expr {
}, },
api::ExpressionKind::NewAtom(a) => api::ExpressionKind::NewAtom(a) =>
ExprKind::Atom(AtomHand::from_api(a, pos.clone(), &mut ctx.ctx.clone()).await), ExprKind::Atom(AtomHand::from_api(a, pos.clone(), &mut ctx.ctx.clone()).await),
api::ExpressionKind::Slot(tk) => return ctx.exprs.get_expr(*tk).expect("Invalid slot"), api::ExpressionKind::Slot { tk, by_value: false } =>
return ctx.exprs.get_expr(*tk).expect("Invalid slot"),
api::ExpressionKind::Slot { tk, by_value: true } =>
return ctx.exprs.take_expr(*tk).expect("Invalid slot"),
api::ExpressionKind::Seq(a, b) => { api::ExpressionKind::Seq(a, b) => {
let (apsb, bpsb) = psb.split(); let (apsb, bpsb) = psb.split();
ExprKind::Seq( ExprKind::Seq(
@@ -116,19 +118,18 @@ impl Expr {
} }
impl Format for Expr { impl Format for Expr {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
return print_expr(self, c, &mut HashSet::new()).await; return print_expr(self, c, Substack::Bottom).await;
} }
} }
async fn print_expr<'a>( async fn print_expr<'a>(
expr: &'a Expr, expr: &'a Expr,
c: &'a (impl FmtCtx + ?Sized + 'a), c: &'a (impl FmtCtx + ?Sized + 'a),
visited: &mut HashSet<api::ExprTicket>, visited: Substack<'_, api::ExprTicket>,
) -> FmtUnit { ) -> FmtUnit {
if visited.contains(&expr.id()) { if visited.iter().any(|id| id == &expr.id()) {
return "CYCLIC_EXPR".to_string().into(); return "CYCLIC_EXPR".to_string().into();
} }
visited.insert(expr.id()); print_exprkind(&*expr.kind().read().await, c, visited.push(expr.id())).boxed_local().await
print_exprkind(&*expr.kind().read().await, c, visited).boxed_local().await
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@@ -152,13 +153,13 @@ impl ExprKind {
} }
impl Format for ExprKind { impl Format for ExprKind {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
print_exprkind(self, c, &mut HashSet::new()).await print_exprkind(self, c, Substack::Bottom).await
} }
} }
async fn print_exprkind<'a>( async fn print_exprkind<'a>(
ek: &ExprKind, ek: &ExprKind,
c: &'a (impl FmtCtx + ?Sized + 'a), c: &'a (impl FmtCtx + ?Sized + 'a),
visited: &mut HashSet<api::ExprTicket>, visited: Substack<'_, api::ExprTicket>,
) -> FmtUnit { ) -> FmtUnit {
match &ek { match &ek {
ExprKind::Arg => "Arg".to_string().into(), ExprKind::Arg => "Arg".to_string().into(),

View File

@@ -2,6 +2,7 @@ use std::cell::RefCell;
use std::fmt; use std::fmt;
use std::rc::Rc; use std::rc::Rc;
use bound::Bound;
use hashbrown::HashMap; use hashbrown::HashMap;
use hashbrown::hash_map::Entry; use hashbrown::hash_map::Entry;
@@ -12,15 +13,29 @@ use crate::expr::Expr;
pub struct ExprStoreData { pub struct ExprStoreData {
exprs: RefCell<HashMap<api::ExprTicket, (u32, Expr)>>, exprs: RefCell<HashMap<api::ExprTicket, (u32, Expr)>>,
parent: Option<ExprStore>, parent: Option<ExprStore>,
tracking_parent: bool,
} }
#[derive(Clone, Default)] #[derive(Clone, Default)]
pub struct ExprStore(Rc<ExprStoreData>); pub struct ExprStore(Rc<ExprStoreData>);
impl ExprStore { impl ExprStore {
/// If tracking_parent is false, get_expr can fall back to the parent if none
/// is found here.
///
/// If tracking_parent is true, get_expr can still fall back to the parent,
/// but operations on the parent can access the child exprs too until this
/// store is dropped.
#[must_use] #[must_use]
pub fn derive(&self) -> Self { pub fn derive(&self, tracking_parent: bool) -> Self {
Self(Rc::new(ExprStoreData { exprs: RefCell::default(), parent: Some(self.clone()) })) Self(Rc::new(ExprStoreData {
exprs: RefCell::default(),
parent: Some(self.clone()),
tracking_parent,
}))
} }
pub fn give_expr(&self, expr: Expr) { pub fn give_expr(&self, expr: Expr) {
if self.0.tracking_parent {
self.0.parent.as_ref().unwrap().give_expr(expr.clone());
}
match self.0.exprs.borrow_mut().entry(expr.id()) { match self.0.exprs.borrow_mut().entry(expr.id()) {
Entry::Occupied(mut oe) => oe.get_mut().0 += 1, Entry::Occupied(mut oe) => oe.get_mut().0 += 1,
Entry::Vacant(v) => { Entry::Vacant(v) => {
@@ -28,15 +43,29 @@ impl ExprStore {
}, },
} }
} }
pub fn take_expr(&self, ticket: api::ExprTicket) { pub fn take_expr(&self, ticket: api::ExprTicket) -> Option<Expr> {
(self.0.exprs.borrow_mut().entry(ticket)) if self.0.tracking_parent {
.and_replace_entry_with(|_, (rc, rt)| (1 < rc).then_some((rc - 1, rt))); self.0.parent.as_ref().unwrap().take_expr(ticket);
}
match self.0.exprs.borrow_mut().entry(ticket) {
Entry::Vacant(_) => panic!("Attempted to double-take expression"),
Entry::Occupied(oe) if oe.get().0 == 1 => Some(oe.remove().1),
Entry::Occupied(mut oe) => {
oe.get_mut().0 -= 1;
Some(oe.get().1.clone())
},
}
} }
#[must_use] #[must_use]
pub fn get_expr(&self, ticket: api::ExprTicket) -> Option<Expr> { pub fn get_expr(&self, ticket: api::ExprTicket) -> Option<Expr> {
(self.0.exprs.borrow().get(&ticket).map(|(_, expr)| expr.clone())) (self.0.exprs.borrow().get(&ticket).map(|(_, expr)| expr.clone()))
.or_else(|| self.0.parent.as_ref()?.get_expr(ticket)) .or_else(|| self.0.parent.as_ref()?.get_expr(ticket))
} }
pub fn iter(&self) -> impl Iterator<Item = (u32, Expr)> {
let r = Bound::new(self.clone(), |this| this.0.exprs.borrow());
let mut iter = Bound::new(r, |r| r.values());
std::iter::from_fn(move || iter.wrapped_mut().next().cloned())
}
} }
impl fmt::Display for ExprStore { impl fmt::Display for ExprStore {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -45,3 +74,19 @@ impl fmt::Display for ExprStore {
write!(f, "Store holding {rc} refs to {} exprs", r.len()) write!(f, "Store holding {rc} refs to {} exprs", r.len())
} }
} }
impl Drop for ExprStore {
fn drop(&mut self) {
if 1 < Rc::strong_count(&self.0) {
return;
}
if !self.0.tracking_parent {
return;
}
let parent = self.0.parent.as_ref().unwrap();
for (id, (count, _)) in self.0.exprs.borrow().iter() {
for _ in 0..*count {
parent.take_expr(*id);
}
}
}
}

View File

@@ -5,12 +5,12 @@ use std::num::NonZeroU64;
use std::pin::pin; use std::pin::pin;
use std::rc::{Rc, Weak}; use std::rc::{Rc, Weak};
use async_std::channel::{self, Sender}; use async_fn_stream::stream;
use async_std::sync::Mutex;
use async_stream::stream;
use derive_destructure::destructure; use derive_destructure::destructure;
use futures::channel::mpsc::{Sender, channel};
use futures::future::{join, join_all}; use futures::future::{join, join_all};
use futures::{StreamExt, stream}; use futures::lock::Mutex;
use futures::{SinkExt, StreamExt, stream};
use hashbrown::HashMap; use hashbrown::HashMap;
use itertools::Itertools; use itertools::Itertools;
use orchid_api_traits::Request; use orchid_api_traits::Request;
@@ -18,9 +18,11 @@ use orchid_base::builtin::ExtInit;
use orchid_base::clone; use orchid_base::clone;
use orchid_base::format::{FmtCtxImpl, Format}; use orchid_base::format::{FmtCtxImpl, Format};
use orchid_base::interner::Tok; use orchid_base::interner::Tok;
use orchid_base::location::Pos;
use orchid_base::logging::Logger; use orchid_base::logging::Logger;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::reqnot::{DynRequester, ReqNot, Requester as _}; use orchid_base::reqnot::{DynRequester, ReqNot, Requester as _};
use orchid_base::tree::AtomRepr;
use crate::api; use crate::api;
use crate::atom::AtomHand; use crate::atom::AtomHand;
@@ -38,6 +40,7 @@ pub struct ReqPair<R: Request>(R, Sender<R::Response>);
/// upgrading fails. /// upgrading fails.
#[derive(destructure)] #[derive(destructure)]
pub struct ExtensionData { pub struct ExtensionData {
name: String,
ctx: Ctx, ctx: Ctx,
reqnot: ReqNot<api::HostMsgSet>, reqnot: ReqNot<api::HostMsgSet>,
systems: Vec<SystemCtor>, systems: Vec<SystemCtor>,
@@ -45,12 +48,12 @@ pub struct ExtensionData {
next_pars: RefCell<NonZeroU64>, next_pars: RefCell<NonZeroU64>,
exprs: ExprStore, exprs: ExprStore,
exiting_snd: Sender<()>, exiting_snd: Sender<()>,
lex_recur: Mutex<HashMap<api::ParsId, channel::Sender<ReqPair<api::SubLex>>>>, lex_recur: Mutex<HashMap<api::ParsId, Sender<ReqPair<api::SubLex>>>>,
} }
impl Drop for ExtensionData { impl Drop for ExtensionData {
fn drop(&mut self) { fn drop(&mut self) {
let reqnot = self.reqnot.clone(); let reqnot = self.reqnot.clone();
let exiting_snd = self.exiting_snd.clone(); let mut exiting_snd = self.exiting_snd.clone();
(self.ctx.spawn)(Box::pin(async move { (self.ctx.spawn)(Box::pin(async move {
reqnot.notify(api::HostExtNotif::Exit).await; reqnot.notify(api::HostExtNotif::Exit).await;
exiting_snd.send(()).await.unwrap() exiting_snd.send(()).await.unwrap()
@@ -64,9 +67,15 @@ impl Extension {
pub fn new(init: ExtInit, logger: Logger, msg_logger: Logger, ctx: Ctx) -> io::Result<Self> { pub fn new(init: ExtInit, logger: Logger, msg_logger: Logger, ctx: Ctx) -> io::Result<Self> {
Ok(Self(Rc::new_cyclic(|weak: &Weak<ExtensionData>| { Ok(Self(Rc::new_cyclic(|weak: &Weak<ExtensionData>| {
let init = Rc::new(init); let init = Rc::new(init);
let (exiting_snd, exiting_rcv) = channel::bounded::<()>(1); let (exiting_snd, exiting_rcv) = channel::<()>(0);
(ctx.spawn)(clone!(init, weak, ctx; Box::pin(async move { (ctx.spawn)({
let rcv_stream = stream! { loop { yield init.recv().await } }; clone!(init, weak, ctx);
Box::pin(async move {
let rcv_stream = stream(async |mut cx| {
loop {
cx.emit(init.recv().await).await
}
});
let mut event_stream = pin!(stream::select(exiting_rcv.map(|()| None), rcv_stream)); let mut event_stream = pin!(stream::select(exiting_rcv.map(|()| None), rcv_stream));
while let Some(Some(msg)) = event_stream.next().await { while let Some(Some(msg)) = event_stream.next().await {
if let Some(reqnot) = weak.upgrade().map(|rc| rc.reqnot.clone()) { if let Some(reqnot) = weak.upgrade().map(|rc| rc.reqnot.clone()) {
@@ -76,10 +85,12 @@ impl Extension {
})) }))
} }
} }
}))); })
});
ExtensionData { ExtensionData {
name: init.name.clone(),
exiting_snd, exiting_snd,
exprs: ctx.common_exprs.derive(), exprs: ctx.common_exprs.derive(false),
ctx: ctx.clone(), ctx: ctx.clone(),
systems: (init.systems.iter().cloned()) systems: (init.systems.iter().cloned())
.map(|decl| SystemCtor { decl, ext: WeakExtension(weak.clone()) }) .map(|decl| SystemCtor { decl, ext: WeakExtension(weak.clone()) })
@@ -93,17 +104,26 @@ impl Extension {
clone!(weak; move |notif, _| { clone!(weak; move |notif, _| {
clone!(weak; Box::pin(async move { clone!(weak; Box::pin(async move {
let this = Extension(weak.upgrade().unwrap()); let this = Extension(weak.upgrade().unwrap());
if !matches!(notif, api::ExtHostNotif::Log(_)) {
writeln!(this.reqnot().logger(), "Host received notif {notif:?}");
}
match notif { match notif {
api::ExtHostNotif::ExprNotif(api::ExprNotif::Acquire(acq)) => { api::ExtHostNotif::ExprNotif(api::ExprNotif::Acquire(acq)) => {
let target = this.0.exprs.get_expr(acq.1).expect("Invalid ticket"); let target = this.0.exprs.get_expr(acq.1).expect("Invalid ticket");
this.0.exprs.give_expr(target) this.0.exprs.give_expr(target)
} }
api::ExtHostNotif::ExprNotif(api::ExprNotif::Release(rel)) => { api::ExtHostNotif::ExprNotif(api::ExprNotif::Release(rel)) => {
this.assert_own_sys(rel.0).await; if this.is_own_sys(rel.0).await {
this.0.exprs.take_expr(rel.1) this.0.exprs.take_expr(rel.1);
} else {
writeln!(this.reqnot().logger(), "Not our system {:?}", rel.0)
}
} }
api::ExtHostNotif::ExprNotif(api::ExprNotif::Move(mov)) => { api::ExtHostNotif::ExprNotif(api::ExprNotif::Move(mov)) => {
this.assert_own_sys(mov.dec).await; if !this.is_own_sys(mov.dec).await {
writeln!(this.reqnot().logger(), "Not our system {:?}", mov.dec);
return;
}
let recp = this.ctx().system_inst(mov.inc).await.expect("invallid recipient sys id"); let recp = this.ctx().system_inst(mov.inc).await.expect("invallid recipient sys id");
let expr = this.0.exprs.get_expr(mov.expr).expect("invalid ticket"); let expr = this.0.exprs.get_expr(mov.expr).expect("invalid ticket");
recp.ext().0.exprs.give_expr(expr); recp.ext().0.exprs.give_expr(expr);
@@ -118,7 +138,9 @@ impl Extension {
clone!(weak, ctx); clone!(weak, ctx);
Box::pin(async move { Box::pin(async move {
let this = Self(weak.upgrade().unwrap()); let this = Self(weak.upgrade().unwrap());
if !matches!(req, api::ExtHostReq::ExtAtomPrint(_)) {
writeln!(this.reqnot().logger(), "Host received request {req:?}"); writeln!(this.reqnot().logger(), "Host received request {req:?}");
}
let i = this.ctx().i.clone(); let i = this.ctx().i.clone();
match req { match req {
api::ExtHostReq::Ping(ping) => hand.handle(&ping, &()).await, api::ExtHostReq::Ping(ping) => hand.handle(&ping, &()).await,
@@ -147,13 +169,14 @@ impl Extension {
hand.handle(fw, &sys.request(body.clone()).await).await hand.handle(fw, &sys.request(body.clone()).await).await
}, },
api::ExtHostReq::SubLex(sl) => { api::ExtHostReq::SubLex(sl) => {
let (rep_in, rep_out) = channel::bounded(1); let (rep_in, mut rep_out) = channel(0);
{ {
let lex_g = this.0.lex_recur.lock().await; let lex_g = this.0.lex_recur.lock().await;
let req_in = lex_g.get(&sl.id).expect("Sublex for nonexistent lexid"); let mut req_in =
lex_g.get(&sl.id).cloned().expect("Sublex for nonexistent lexid");
req_in.send(ReqPair(sl.clone(), rep_in)).await.unwrap(); req_in.send(ReqPair(sl.clone(), rep_in)).await.unwrap();
} }
hand.handle(&sl, &rep_out.recv().await.unwrap()).await hand.handle(&sl, &rep_out.next().await.unwrap()).await
}, },
api::ExtHostReq::ExprReq(api::ExprReq::Inspect( api::ExtHostReq::ExprReq(api::ExprReq::Inspect(
ins @ api::Inspect { target }, ins @ api::Inspect { target },
@@ -172,8 +195,8 @@ impl Extension {
let path = i.ex(path).await; let path = i.ex(path).await;
let root = (ctx.root.read().await.upgrade()) let root = (ctx.root.read().await.upgrade())
.expect("LSModule called when root isn't in context"); .expect("LSModule called when root isn't in context");
let root_data = &mut *root.0.write().await; let root_data = &*root.0.read().await;
let mut walk_ctx = (ctx.clone(), &mut root_data.consts); let mut walk_ctx = (ctx.clone(), &root_data.consts);
let module = let module =
match walk(&root_data.root, false, path.iter().cloned(), &mut walk_ctx) match walk(&root_data.root, false, path.iter().cloned(), &mut walk_ctx)
.await .await
@@ -188,7 +211,7 @@ impl Extension {
}; };
let mut members = std::collections::HashMap::new(); let mut members = std::collections::HashMap::new();
for (k, v) in &module.members { for (k, v) in &module.members {
let kind = match v.kind(ctx.clone(), &mut root_data.consts).await { let kind = match v.kind(ctx.clone(), &root_data.consts).await {
MemberKind::Const => api::MemberInfoKind::Constant, MemberKind::Const => api::MemberInfoKind::Constant,
MemberKind::Module(_) => api::MemberInfoKind::Module, MemberKind::Module(_) => api::MemberInfoKind::Module,
}; };
@@ -206,16 +229,21 @@ impl Extension {
let sys = weak_sys.upgrade().expect("ResolveNames after sys drop"); let sys = weak_sys.upgrade().expect("ResolveNames after sys drop");
sys.name_resolver(*constid).await sys.name_resolver(*constid).await
}; };
let mut responses = vec![const { None }; names.len()]; let responses = stream(async |mut cx| {
for (i, name) in names.iter().enumerate() { for name in names {
if let Some(abs) = resolver(&ctx.i.ex(*name).await[..]).await { cx.emit(match resolver(&ctx.i.ex(*name).await[..]).await {
responses[i] = Some(abs.to_sym(&ctx.i).await.to_api()) Ok(abs) => Ok(abs.to_sym(&ctx.i).await.to_api()),
} Err(e) => Err(e.to_api()),
})
.await
} }
})
.collect()
.await;
hand.handle(rn, &responses).await hand.handle(rn, &responses).await
}, },
api::ExtHostReq::ExtAtomPrint(ref eap @ api::ExtAtomPrint(ref atom)) => { api::ExtHostReq::ExtAtomPrint(ref eap @ api::ExtAtomPrint(ref atom)) => {
let atom = AtomHand::new(atom.clone(), &ctx).await; let atom = AtomHand::from_api(atom, Pos::None, &mut ctx.clone()).await;
let unit = atom.print(&FmtCtxImpl { i: &this.ctx().i }).await; let unit = atom.print(&FmtCtxImpl { i: &this.ctx().i }).await;
hand.handle(eap, &unit.to_api()).await hand.handle(eap, &unit.to_api()).await
}, },
@@ -227,8 +255,9 @@ impl Extension {
} }
}))) })))
} }
pub fn name(&self) -> &String { &self.0.name }
#[must_use] #[must_use]
pub(crate) fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { &self.0.reqnot } pub fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { &self.0.reqnot }
#[must_use] #[must_use]
pub fn ctx(&self) -> &Ctx { &self.0.ctx } pub fn ctx(&self) -> &Ctx { &self.0.ctx }
#[must_use] #[must_use]
@@ -238,12 +267,12 @@ impl Extension {
pub fn exprs(&self) -> &ExprStore { &self.0.exprs } pub fn exprs(&self) -> &ExprStore { &self.0.exprs }
#[must_use] #[must_use]
pub async fn is_own_sys(&self, id: api::SysId) -> bool { pub async fn is_own_sys(&self, id: api::SysId) -> bool {
let sys = self.ctx().system_inst(id).await.expect("invalid sender sys id"); let Some(sys) = self.ctx().system_inst(id).await else {
writeln!(self.logger(), "Invalid system ID {id:?}");
return false;
};
Rc::ptr_eq(&self.0, &sys.ext().0) Rc::ptr_eq(&self.0, &sys.ext().0)
} }
pub async fn assert_own_sys(&self, id: api::SysId) {
assert!(self.is_own_sys(id).await, "Incoming message impersonates separate system");
}
#[must_use] #[must_use]
pub fn next_pars(&self) -> NonZeroU64 { pub fn next_pars(&self) -> NonZeroU64 {
let mut next_pars = self.0.next_pars.borrow_mut(); let mut next_pars = self.0.next_pars.borrow_mut();
@@ -261,7 +290,7 @@ impl Extension {
// get unique lex ID // get unique lex ID
let id = api::ParsId(self.next_pars()); let id = api::ParsId(self.next_pars());
// create and register channel // create and register channel
let (req_in, req_out) = channel::bounded(1); let (req_in, mut req_out) = channel(0);
self.0.lex_recur.lock().await.insert(id, req_in); // lex_recur released self.0.lex_recur.lock().await.insert(id, req_in); // lex_recur released
let (ret, ()) = join( let (ret, ()) = join(
async { async {
@@ -273,7 +302,7 @@ impl Extension {
res res
}, },
async { async {
while let Ok(ReqPair(sublex, rep_in)) = req_out.recv().await { while let Some(ReqPair(sublex, mut rep_in)) = req_out.next().await {
(rep_in.send(r(sublex.pos).await).await) (rep_in.send(r(sublex.pos).await).await)
.expect("Response channel dropped while request pending") .expect("Response channel dropped while request pending")
} }
@@ -285,7 +314,7 @@ impl Extension {
pub fn system_drop(&self, id: api::SysId) { pub fn system_drop(&self, id: api::SysId) {
let rc = self.clone(); let rc = self.clone();
(self.ctx().spawn)(Box::pin(async move { (self.ctx().spawn)(Box::pin(async move {
rc.reqnot().notify(api::SystemDrop(id)).await; rc.reqnot().request(api::SystemDrop(id)).await;
rc.ctx().systems.write().await.remove(&id); rc.ctx().systems.write().await.remove(&id);
})) }))
} }
@@ -293,6 +322,7 @@ impl Extension {
pub fn downgrade(&self) -> WeakExtension { WeakExtension(Rc::downgrade(&self.0)) } pub fn downgrade(&self) -> WeakExtension { WeakExtension(Rc::downgrade(&self.0)) }
} }
#[derive(Clone)]
pub struct WeakExtension(Weak<ExtensionData>); pub struct WeakExtension(Weak<ExtensionData>);
impl WeakExtension { impl WeakExtension {
#[must_use] #[must_use]

View File

@@ -1,7 +1,8 @@
use std::rc::Rc; use std::rc::Rc;
use async_std::sync::Mutex;
use futures::FutureExt; use futures::FutureExt;
use futures::lock::Mutex;
use orchid_base::clone;
use orchid_base::error::{OrcErrv, OrcRes, mk_errv}; use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
use orchid_base::interner::Tok; use orchid_base::interner::Tok;
use orchid_base::location::SrcRange; use orchid_base::location::SrcRange;
@@ -12,8 +13,9 @@ use orchid_base::tree::recur;
use crate::api; use crate::api;
use crate::ctx::Ctx; use crate::ctx::Ctx;
use crate::expr::{Expr, ExprParseCtx, ExprWillPanic}; use crate::expr::{Expr, ExprParseCtx};
use crate::parsed::{ParsTok, ParsTokTree}; use crate::expr_store::ExprStore;
use crate::parsed::{ParsTok, ParsTokTree, tt_to_api};
use crate::system::System; use crate::system::System;
pub struct LexCtx<'a> { pub struct LexCtx<'a> {
@@ -53,21 +55,14 @@ impl<'a> LexCtx<'a> {
false false
} }
#[must_use] #[must_use]
pub async fn ser_subtree(&mut self, subtree: ParsTokTree) -> api::TokenTree { pub async fn ser_subtree(&mut self, subtree: ParsTokTree, exprs: ExprStore) -> api::TokenTree {
let mut exprs = self.ctx.common_exprs.clone(); tt_to_api(&mut { exprs }, subtree).await
let without_new_expr = recur(subtree, &|tt, r| {
if let ParsTok::NewExpr(expr) = tt.tok {
return ParsTok::Handle(expr).at(tt.sr);
}
r(tt)
});
without_new_expr.into_api(&mut exprs, &mut ExprWillPanic).await
} }
#[must_use] #[must_use]
pub async fn des_subtree(&mut self, tree: &api::TokenTree) -> ParsTokTree { pub async fn des_subtree(&mut self, tree: &api::TokenTree, exprs: ExprStore) -> ParsTokTree {
ParsTokTree::from_api( ParsTokTree::from_api(
tree, tree,
&mut self.ctx.common_exprs.clone(), &mut { exprs },
&mut ExprParseCtx { ctx: self.ctx, exprs: &self.ctx.common_exprs }, &mut ExprParseCtx { ctx: self.ctx, exprs: &self.ctx.common_exprs },
self.path, self.path,
&self.ctx.i, &self.ctx.i,
@@ -125,21 +120,12 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
let end = tail.find(['\n', '\r']).map_or(tail.len(), |n| n - 1); let end = tail.find(['\n', '\r']).map_or(tail.len(), |n| n - 1);
ctx.push_pos(end as u32); ctx.push_pos(end as u32);
ParsTok::Comment(Rc::new(tail[2..end].to_string())) ParsTok::Comment(Rc::new(tail[2..end].to_string()))
} else if ctx.strip_char('\\') { } else if let Some(tail) = ctx.tail.strip_prefix('\\').filter(|t| t.starts_with(name_start)) {
let mut arg = Vec::new(); // fanciness like \$placeh in templates is resolved in the macro engine.
ctx.set_tail(tail);
let arg = lex_once(ctx).boxed_local().await?;
ctx.trim_ws(); ctx.trim_ws();
while !ctx.strip_char('.') { ParsTok::LambdaHead(Box::new(arg))
if ctx.tail.is_empty() {
return Err(mk_errv(
ctx.ctx.i.i("Unclosed lambda").await,
"Lambdae started with \\ should separate arguments from body with .",
[SrcRange::new(start..start + 1, ctx.path)],
));
}
arg.push(lex_once(ctx).boxed_local().await?);
ctx.trim_ws();
}
ParsTok::LambdaHead(arg)
} else if let Some((lp, rp, paren)) = PARENS.iter().find(|(lp, ..)| ctx.strip_char(*lp)) { } else if let Some((lp, rp, paren)) = PARENS.iter().find(|(lp, ..)| ctx.strip_char(*lp)) {
let mut body = Vec::new(); let mut body = Vec::new();
ctx.trim_ws(); ctx.trim_ws();
@@ -162,16 +148,24 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
let (source, pos, path) = (ctx.source.clone(), ctx.get_pos(), ctx.path.clone()); let (source, pos, path) = (ctx.source.clone(), ctx.get_pos(), ctx.path.clone());
let ctx_lck = &Mutex::new(&mut *ctx); let ctx_lck = &Mutex::new(&mut *ctx);
let errors_lck = &Mutex::new(&mut errors); let errors_lck = &Mutex::new(&mut errors);
let temp_store = sys.ext().exprs().derive(true);
let temp_store_cb = temp_store.clone();
let lx = sys let lx = sys
.lex(source, path, pos, |pos| async move { .lex(source, path, pos, |pos| {
clone!(temp_store_cb);
async move {
let mut ctx_g = ctx_lck.lock().await; let mut ctx_g = ctx_lck.lock().await;
match lex_once(&mut ctx_g.push(pos)).boxed_local().await { match lex_once(&mut ctx_g.push(pos)).boxed_local().await {
Ok(t) => Some(api::SubLexed { pos: t.sr.end(), tree: ctx_g.ser_subtree(t).await }), Ok(t) => Some(api::SubLexed {
pos: t.sr.end(),
tree: ctx_g.ser_subtree(t, temp_store_cb.clone()).await,
}),
Err(e) => { Err(e) => {
errors_lck.lock().await.push(e); errors_lck.lock().await.push(e);
None None
}, },
} }
}
}) })
.await; .await;
match lx { match lx {
@@ -181,7 +175,14 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
), ),
Ok(Some(lexed)) => { Ok(Some(lexed)) => {
ctx.set_pos(lexed.pos); ctx.set_pos(lexed.pos);
return Ok(ctx.des_subtree(&lexed.expr).await); let lexed_tree = ctx.des_subtree(&lexed.expr, temp_store).await;
let stable_tree = recur(lexed_tree, &|tt, r| {
if let ParsTok::NewExpr(expr) = tt.tok {
return ParsTok::Handle(expr).at(tt.sr);
}
r(tt)
});
return Ok(stable_tree);
}, },
Ok(None) => match errors.into_iter().reduce(|a, b| a + b) { Ok(None) => match errors.into_iter().reduce(|a, b| a + b) {
Some(errors) => return Err(errors), Some(errors) => return Err(errors),

View File

@@ -11,5 +11,6 @@ pub mod lex;
pub mod parse; pub mod parse;
pub mod parsed; pub mod parsed;
pub mod subprocess; pub mod subprocess;
mod sys_parser;
pub mod system; pub mod system;
pub mod tree; pub mod tree;

View File

@@ -1,10 +1,9 @@
use futures::FutureExt;
use futures::future::join_all; use futures::future::join_all;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::{OrcRes, Reporter, mk_errv}; use orchid_base::error::{OrcRes, Reporter, mk_errv};
use orchid_base::format::fmt; use orchid_base::format::fmt;
use orchid_base::interner::{Interner, Tok}; use orchid_base::interner::{Interner, Tok};
use orchid_base::name::{Sym, VPath}; use orchid_base::name::Sym;
use orchid_base::parse::{ use orchid_base::parse::{
Comment, Import, ParseCtx, Parsed, Snippet, expect_end, line_items, parse_multiname, Comment, Import, ParseCtx, Parsed, Snippet, expect_end, line_items, parse_multiname,
try_pop_no_fluff, try_pop_no_fluff,
@@ -13,7 +12,7 @@ use orchid_base::tree::{Paren, TokTree, Token};
use substack::Substack; use substack::Substack;
use crate::ctx::Ctx; use crate::ctx::Ctx;
use crate::expr::{Expr, ExprKind, PathSetBuilder}; use crate::expr::Expr;
use crate::parsed::{Item, ItemKind, ParsedMember, ParsedMemberKind, ParsedModule}; use crate::parsed::{Item, ItemKind, ParsedMember, ParsedMemberKind, ParsedModule};
use crate::system::System; use crate::system::System;
@@ -27,13 +26,14 @@ pub struct HostParseCtxImpl<'a> {
} }
impl ParseCtx for HostParseCtxImpl<'_> { impl ParseCtx for HostParseCtxImpl<'_> {
fn reporter(&self) -> &Reporter { self.rep } fn rep(&self) -> &Reporter { self.rep }
fn i(&self) -> &Interner { &self.ctx.i } fn i(&self) -> &Interner { &self.ctx.i }
} }
impl HostParseCtx for HostParseCtxImpl<'_> { impl HostParseCtx for HostParseCtxImpl<'_> {
fn ctx(&self) -> &Ctx { &self.ctx } fn ctx(&self) -> &Ctx { &self.ctx }
fn systems(&self) -> impl Iterator<Item = &System> { self.systems.iter() } fn systems(&self) -> impl Iterator<Item = &System> { self.systems.iter() }
fn src_path(&self) -> Sym { self.src.clone() }
} }
pub trait HostParseCtx: ParseCtx { pub trait HostParseCtx: ParseCtx {
@@ -41,6 +41,8 @@ pub trait HostParseCtx: ParseCtx {
fn ctx(&self) -> &Ctx; fn ctx(&self) -> &Ctx;
#[must_use] #[must_use]
fn systems(&self) -> impl Iterator<Item = &System>; fn systems(&self) -> impl Iterator<Item = &System>;
#[must_use]
fn src_path(&self) -> Sym;
} }
pub async fn parse_items( pub async fn parse_items(
@@ -110,12 +112,9 @@ pub async fn parse_exportable_item<'a>(
let kind = if discr == ctx.i().i("mod").await { let kind = if discr == ctx.i().i("mod").await {
let (name, body) = parse_module(ctx, path, tail).await?; let (name, body) = parse_module(ctx, path, tail).await?;
ItemKind::Member(ParsedMember { name, exported, kind: ParsedMemberKind::Mod(body) }) ItemKind::Member(ParsedMember { name, exported, kind: ParsedMemberKind::Mod(body) })
} else if discr == ctx.i().i("const").await { } else if let Some(parser) = ctx.systems().find_map(|s| s.get_parser(discr.clone())) {
let (name, expr) = parse_const(ctx, tail, path.clone()).await?; return parser
ItemKind::Member(ParsedMember { name, exported, kind: ParsedMemberKind::ParsedConst(expr) }) .parse(ctx, path, tail.to_vec(), exported, comments, &mut async |stack, lines| {
} else if let Some(sys) = ctx.systems().find(|s| s.can_parse(discr.clone())) {
return sys
.parse(path, tail.to_vec(), exported, comments, &mut async |stack, lines| {
let source = Snippet::new(lines.first().unwrap(), &lines); let source = Snippet::new(lines.first().unwrap(), &lines);
parse_items(ctx, stack, source).await parse_items(ctx, stack, source).await
}) })
@@ -124,7 +123,7 @@ pub async fn parse_exportable_item<'a>(
let ext_lines = ctx.systems().flat_map(System::line_types).join(", "); let ext_lines = ctx.systems().flat_map(System::line_types).join(", ");
return Err(mk_errv( return Err(mk_errv(
ctx.i().i("Unrecognized line type").await, ctx.i().i("Unrecognized line type").await,
format!("Line types are: const, mod, macro, grammar, {ext_lines}"), format!("Line types are: mod, {ext_lines}"),
[tail.prev().sr()], [tail.prev().sr()],
)); ));
}; };
@@ -156,107 +155,5 @@ pub async fn parse_module<'a>(
)); ));
}; };
let path = path.push(name.clone()); let path = path.push(name.clone());
Ok((name, ParsedModule::new(parse_items(ctx, path, body).await?))) Ok((name, ParsedModule::new(true, parse_items(ctx, path, body).await?)))
}
pub async fn parse_const<'a>(
ctx: &impl HostParseCtx,
tail: ParsSnippet<'a>,
path: Substack<'_, Tok<String>>,
) -> OrcRes<(Tok<String>, Expr)> {
let Parsed { output, tail } = try_pop_no_fluff(ctx, tail).await?;
let Some(name) = output.as_name() else {
return Err(mk_errv(
ctx.i().i("Missing module name").await,
format!("A name was expected, {} was found", fmt(output, ctx.i()).await),
[output.sr()],
));
};
let Parsed { output, tail } = try_pop_no_fluff(ctx, tail).await?;
if !output.is_kw(ctx.i().i("=").await) {
return Err(mk_errv(
ctx.i().i("Missing = separator").await,
format!("Expected = , found {}", fmt(output, ctx.i()).await),
[output.sr()],
));
}
try_pop_no_fluff(ctx, tail).await?;
// ctx.save_const(path, tail[..].to_vec()).await;
let final_path =
VPath::new(path.unreverse()).name_with_suffix(name.clone()).to_sym(ctx.i()).await;
let val = parse_expr(ctx, final_path, PathSetBuilder::new(), tail).await?;
Ok((name, val))
}
pub async fn parse_expr(
ctx: &impl HostParseCtx,
path: Sym,
psb: PathSetBuilder<'_, Tok<String>>,
tail: ParsSnippet<'_>,
) -> OrcRes<Expr> {
let Some((last_idx, _)) = (tail.iter().enumerate().find(|(_, tt)| tt.as_lambda().is_some()))
.or_else(|| tail.iter().enumerate().rev().find(|(_, tt)| !tt.is_fluff()))
else {
return Err(mk_errv(ctx.i().i("Empty expression").await, "Expression ends abruptly here", [
tail.sr(),
]));
};
let (function, value) = tail.split_at(last_idx as u32);
let pos = tail.sr().pos();
if !function.iter().all(TokTree::is_fluff) {
let (f_psb, x_psb) = psb.split();
let x_expr = parse_expr(ctx, path.clone(), x_psb, value).boxed_local().await?;
let f_expr = parse_expr(ctx, path, f_psb, function).boxed_local().await?;
return Ok(ExprKind::Call(f_expr, x_expr).at(pos));
}
let Parsed { output: head, tail } = try_pop_no_fluff(ctx, value).await?;
match &head.tok {
Token::BR | Token::Comment(_) => panic!("Fluff skipped"),
Token::Bottom(b) => Ok(ExprKind::Bottom(b.clone()).at(pos.clone())),
Token::Handle(expr) => Ok(expr.clone()),
Token::NS(n, nametail) => {
let mut nametail = nametail;
let mut segments = vec![n.clone()];
while let Token::NS(n, newtail) = &nametail.tok {
segments.push(n.clone());
nametail = newtail;
}
let Token::Name(n) = &nametail.tok else {
return Err(mk_errv(
ctx.i().i("Loose namespace prefix in constant").await,
"Namespace prefixes in constants must be followed by names",
[pos],
));
};
segments.push(n.clone());
Ok(ExprKind::Const(Sym::new(segments, ctx.i()).await.unwrap()).at(pos.clone()))
},
Token::LambdaHead(h) => {
let [TokTree { tok: Token::Name(arg), .. }] = &h[..] else {
return Err(mk_errv(
ctx.i().i("Complex lambda binding in constant").await,
"Lambda args in constants must be identified by a single name",
[pos],
));
};
let lambda_builder = psb.lambda(arg);
let body = parse_expr(ctx, path.clone(), lambda_builder.stack(), tail).boxed_local().await?;
Ok(ExprKind::Lambda(lambda_builder.collect(), body).at(pos.clone()))
},
Token::S(Paren::Round, body) =>
parse_expr(ctx, path, psb, Snippet::new(head, body)).boxed_local().await,
Token::S(..) =>
return Err(mk_errv(
ctx.i().i("Constants may only contain (), not [] or {}").await,
"It seems like you are trying to call a macro. Consider a 'let' line",
[pos],
)),
Token::Name(n) =>
if psb.register_arg(n) {
Ok(ExprKind::Arg.at(pos))
} else {
Ok(ExprKind::Const(Sym::new([n.clone()], ctx.i()).await.unwrap()).at(pos))
},
Token::NewExpr(ex) => Ok(ex.clone()),
}
} }

View File

@@ -1,8 +1,8 @@
use std::fmt::Debug; use std::fmt::{self, Debug};
use std::rc::Rc; use std::rc::Rc;
use futures::FutureExt; use futures::FutureExt;
use futures::future::join_all; use futures::future::{LocalBoxFuture, join_all};
use hashbrown::HashSet; use hashbrown::HashSet;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants}; use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
@@ -10,11 +10,12 @@ use orchid_base::interner::Tok;
use orchid_base::location::SrcRange; use orchid_base::location::SrcRange;
use orchid_base::parse::{Comment, Import}; use orchid_base::parse::{Comment, Import};
use orchid_base::tl_cache; use orchid_base::tl_cache;
use orchid_base::tree::{TokTree, Token}; use orchid_base::tree::{TokTree, Token, recur};
use crate::api; use crate::api;
use crate::dealias::{ChildErrorKind, ChildResult, Tree}; use crate::dealias::{ChildErrorKind, ChildResult, Tree};
use crate::expr::Expr; use crate::expr::{Expr, ExprWillPanic};
use crate::expr_store::ExprStore;
use crate::system::System; use crate::system::System;
pub type ParsTokTree = TokTree<Expr, Expr>; pub type ParsTokTree = TokTree<Expr, Expr>;
@@ -26,6 +27,11 @@ pub struct Item {
pub comments: Vec<Comment>, pub comments: Vec<Comment>,
pub kind: ItemKind, pub kind: ItemKind,
} }
impl Item {
pub fn new(sr: SrcRange, kind: impl Into<ItemKind>) -> Self {
Self { sr, comments: Vec::new(), kind: kind.into() }
}
}
#[derive(Debug)] #[derive(Debug)]
pub enum ItemKind { pub enum ItemKind {
@@ -36,6 +42,12 @@ impl ItemKind {
#[must_use] #[must_use]
pub fn at(self, sr: SrcRange) -> Item { Item { comments: vec![], sr, kind: self } } pub fn at(self, sr: SrcRange) -> Item { Item { comments: vec![], sr, kind: self } }
} }
impl From<ParsedMember> for ItemKind {
fn from(value: ParsedMember) -> Self { Self::Member(value) }
}
impl From<Import> for ItemKind {
fn from(value: Import) -> Self { Self::Import(value) }
}
impl Format for Item { impl Format for Item {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
@@ -43,10 +55,7 @@ impl Format for Item {
let item_text = match &self.kind { let item_text = match &self.kind {
ItemKind::Import(i) => format!("import {i}").into(), ItemKind::Import(i) => format!("import {i}").into(),
ItemKind::Member(mem) => match &mem.kind { ItemKind::Member(mem) => match &mem.kind {
ParsedMemberKind::ParsedConst(expr) => ParsedMemberKind::Const(_, sys) =>
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("const {0} = {1l}")))
.units([mem.name.rc().into(), expr.print(c).await]),
ParsedMemberKind::DeferredConst(_, sys) =>
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("const {0} via {1}"))) tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("const {0} via {1}")))
.units([mem.name.rc().into(), sys.print(c).await]), .units([mem.name.rc().into(), sys.print(c).await]),
ParsedMemberKind::Mod(module) => ParsedMemberKind::Mod(module) =>
@@ -67,6 +76,9 @@ pub struct ParsedMember {
impl ParsedMember { impl ParsedMember {
#[must_use] #[must_use]
pub fn name(&self) -> Tok<String> { self.name.clone() } pub fn name(&self) -> Tok<String> { self.name.clone() }
pub fn new(exported: bool, name: Tok<String>, kind: impl Into<ParsedMemberKind>) -> Self {
Self { exported, name, kind: kind.into() }
}
} }
impl Debug for ParsedMember { impl Debug for ParsedMember {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@@ -77,36 +89,52 @@ impl Debug for ParsedMember {
} }
} }
#[derive(Debug)] pub(crate) type ParsedExprCallback =
pub enum ParsedMemberKind { Rc<dyn for<'a> Fn(&'a [Tok<String>]) -> LocalBoxFuture<'a, Expr>>;
DeferredConst(api::ParsedConstId, System),
ParsedConst(Expr), pub struct ParsedExpr {
Mod(ParsedModule), pub(crate) debug: String,
pub(crate) callback: ParsedExprCallback,
}
impl ParsedExpr {
pub async fn run(self, imported_names: &[Tok<String>]) -> Expr {
(self.callback)(imported_names).await
}
}
impl fmt::Debug for ParsedExpr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.debug) }
} }
// TODO: cannot determine alias origin at this stage; parsed tree is never #[derive(Debug)]
// walkable! pub enum ParsedMemberKind {
Const(api::ParsedConstId, System),
Mod(ParsedModule),
}
impl From<ParsedModule> for ParsedMemberKind {
fn from(value: ParsedModule) -> Self { Self::Mod(value) }
}
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub struct ParsedModule { pub struct ParsedModule {
pub exports: Vec<Tok<String>>, pub exports: Vec<Tok<String>>,
pub items: Vec<Item>, pub items: Vec<Item>,
pub use_prelude: bool,
} }
impl ParsedModule { impl ParsedModule {
#[must_use] #[must_use]
pub fn new(items: impl IntoIterator<Item = Item>) -> Self { pub fn new(use_prelude: bool, items: impl IntoIterator<Item = Item>) -> Self {
let items = items.into_iter().collect_vec(); let items = items.into_iter().collect_vec();
let exports = (items.iter()) let exports = (items.iter())
.filter_map(|i| if let ItemKind::Member(m) = &i.kind { Some(m) } else { None }) .filter_map(|i| if let ItemKind::Member(m) = &i.kind { Some(m) } else { None })
.filter(|m| m.exported) .filter(|m| m.exported)
.map(|m| m.name.clone()) .map(|m| m.name.clone())
.collect_vec(); .collect_vec();
Self { exports, items } Self { exports, items, use_prelude }
} }
pub fn merge(&mut self, other: ParsedModule) { pub fn merge(&mut self, other: ParsedModule) {
let mut swap = ParsedModule::default(); let mut swap = ParsedModule::default();
std::mem::swap(self, &mut swap); std::mem::swap(self, &mut swap);
*self = ParsedModule::new(swap.items.into_iter().chain(other.items)) assert_eq!(self.use_prelude, other.use_prelude, "merging modules that disagree on prelude");
*self = ParsedModule::new(self.use_prelude, swap.items.into_iter().chain(other.items))
} }
#[must_use] #[must_use]
pub fn get_imports(&self) -> impl IntoIterator<Item = &Import> { pub fn get_imports(&self) -> impl IntoIterator<Item = &Import> {
@@ -134,8 +162,7 @@ impl Tree for ParsedModule {
.find(|m| m.name == key) .find(|m| m.name == key)
{ {
match &member.kind { match &member.kind {
ParsedMemberKind::DeferredConst(..) | ParsedMemberKind::ParsedConst(_) => ParsedMemberKind::Const(..) => return ChildResult::Err(ChildErrorKind::Constant),
return ChildResult::Err(ChildErrorKind::Constant),
ParsedMemberKind::Mod(m) => return ChildResult::Ok(m), ParsedMemberKind::Mod(m) => return ChildResult::Ok(m),
} }
} }
@@ -164,15 +191,6 @@ impl Format for ParsedModule {
} }
} }
/// TODO:
///
/// idea, does the host need an IR here or can we figure out a way to transcribe
/// these? Should we spin off a new stage for value parsing so that ParsTokTree
/// doesn't appear in the interpreter's ingress?
pub struct Const {
pub source: Option<Vec<ParsTokTree>>,
}
/// Selects a code element /// Selects a code element
/// ///
/// Either the steps point to a constant and rule_loc is None, or the steps /// Either the steps point to a constant and rule_loc is None, or the steps
@@ -185,3 +203,13 @@ impl ConstPath {
#[must_use] #[must_use]
pub fn to_const(steps: Tok<Vec<Tok<String>>>) -> Self { Self { steps } } pub fn to_const(steps: Tok<Vec<Tok<String>>>) -> Self { Self { steps } }
} }
pub async fn tt_to_api(exprs: &mut ExprStore, subtree: ParsTokTree) -> api::TokenTree {
let without_new_expr = recur(subtree, &|tt, r| {
if let ParsTok::NewExpr(expr) = tt.tok {
return ParsTok::Handle(expr).at(tt.sr);
}
r(tt)
});
without_new_expr.into_api(exprs, &mut ExprWillPanic).await
}

View File

@@ -1,12 +1,12 @@
use std::cell::RefCell; use std::cell::RefCell;
use std::io::Write; use std::io::{self, Write};
use std::pin::Pin; use std::pin::Pin;
use async_process::{self, Child, ChildStdin, ChildStdout}; use async_process::{self, Child, ChildStdin, ChildStdout};
use async_std::io::{self, BufReadExt, BufReader};
use async_std::sync::Mutex;
use futures::AsyncWriteExt;
use futures::future::LocalBoxFuture; use futures::future::LocalBoxFuture;
use futures::io::BufReader;
use futures::lock::Mutex;
use futures::{self, AsyncBufReadExt, AsyncWriteExt};
use orchid_api_traits::{Decode, Encode}; use orchid_api_traits::{Decode, Encode};
use orchid_base::builtin::{ExtInit, ExtPort}; use orchid_base::builtin::{ExtInit, ExtPort};
use orchid_base::logging::Logger; use orchid_base::logging::Logger;

View File

@@ -0,0 +1,116 @@
use futures::FutureExt;
use futures::future::join_all;
use itertools::Itertools;
use orchid_base::error::{OrcErrv, OrcRes};
use orchid_base::interner::{Interner, Tok};
use orchid_base::location::SrcRange;
use orchid_base::name::Sym;
use orchid_base::parse::Comment;
use orchid_base::reqnot::Requester;
use orchid_base::tree::ttv_from_api;
use substack::Substack;
use crate::api;
use crate::expr::ExprParseCtx;
use crate::expr_store::ExprStore;
use crate::parse::HostParseCtx;
use crate::parsed::{
Item, ItemKind, ParsTokTree, ParsedMember, ParsedMemberKind, ParsedModule, tt_to_api,
};
use crate::system::System;
pub struct Parser {
pub(crate) system: System,
pub(crate) idx: u16,
}
type ModPath<'a> = Substack<'a, Tok<String>>;
impl Parser {
pub async fn parse(
&self,
ctx: &impl HostParseCtx,
path: ModPath<'_>,
line: Vec<ParsTokTree>,
exported: bool,
comments: Vec<Comment>,
callback: &mut impl AsyncFnMut(ModPath<'_>, Vec<ParsTokTree>) -> OrcRes<Vec<Item>>,
) -> OrcRes<Vec<Item>> {
let mut temp_store = self.system.ext().exprs().derive(true);
let src_path = line.first().expect("cannot be empty").sr.path();
let line =
join_all((line.into_iter()).map(|t| async { tt_to_api(&mut temp_store.clone(), t).await }))
.await;
let mod_path = ctx.src_path().suffix(path.unreverse(), self.system.i()).await;
let comments = comments.iter().map(Comment::to_api).collect_vec();
let req = api::ParseLine {
idx: self.idx,
module: mod_path.to_api(),
src: src_path.to_api(),
exported,
sys: self.system.id(),
comments,
line,
};
match self.system.reqnot().request(req).await {
Ok(parsed_v) =>
conv(parsed_v, path, callback, &mut ConvCtx {
i: self.system.i(),
mod_path: &mod_path,
ext_exprs: &mut temp_store,
pctx: &mut ExprParseCtx { ctx: self.system.ctx(), exprs: self.system.ext().exprs() },
src_path: &src_path,
sys: &self.system,
})
.await,
Err(e) => Err(OrcErrv::from_api(&e, &self.system.ctx().i).await),
}
}
}
struct ConvCtx<'a> {
sys: &'a System,
mod_path: &'a Sym,
src_path: &'a Sym,
i: &'a Interner,
ext_exprs: &'a mut ExprStore,
pctx: &'a mut ExprParseCtx<'a>,
}
/// Recursively convert the [api::ParsedLine]s returned by an extension's
/// parser into host-side [Item]s.
///
/// `module` tracks the module path relative to the parsed file. Lines of kind
/// [api::ParsedLineKind::Recursive] are raw token trees which are handed back
/// to `callback` so they pass through the host parsing pipeline again.
async fn conv(
  parsed_v: Vec<api::ParsedLine>,
  module: Substack<'_, Tok<String>>,
  callback: &'_ mut impl AsyncFnMut(Substack<'_, Tok<String>>, Vec<ParsTokTree>) -> OrcRes<Vec<Item>>,
  ctx: &mut ConvCtx<'_>,
) -> OrcRes<Vec<Item>> {
  let mut items = Vec::new();
  for parsed in parsed_v {
    let (name, exported, kind) = match parsed.kind {
      api::ParsedLineKind::Member(api::ParsedMember { name, exported, kind }) =>
        (name, exported, kind),
      // Recursive output: re-parse the tokens via the callback and splice the
      // resulting items in directly; they carry no member of their own.
      api::ParsedLineKind::Recursive(rec) => {
        let tokens = ttv_from_api(rec, ctx.ext_exprs, ctx.pctx, ctx.src_path, ctx.i).await;
        items.extend(callback(module.clone(), tokens).await?);
        continue;
      },
    };
    let name = ctx.i.ex(name).await;
    let mkind = match kind {
      api::ParsedMemberKind::Module { lines, use_prelude } => {
        // boxed_local avoids the infinitely sized future of direct async recursion
        let items = conv(lines, module.push(name.clone()), callback, ctx).boxed_local().await?;
        ParsedMemberKind::Mod(ParsedModule::new(use_prelude, items))
      },
      api::ParsedMemberKind::Constant(cid) => {
        // Record the constant's absolute path so later requests that refer to
        // this id can be resolved against it.
        ctx.sys.0.const_paths.insert(cid, ctx.mod_path.suffix(module.unreverse(), ctx.i).await);
        ParsedMemberKind::Const(cid, ctx.sys.clone())
      },
    };
    items.push(Item {
      comments: join_all(
        parsed.comments.iter().map(|c| Comment::from_api(c, ctx.src_path.clone(), ctx.i)),
      )
      .await,
      sr: SrcRange::from_api(&parsed.source_range, ctx.i).await,
      kind: ItemKind::Member(ParsedMember { name, exported, kind: mkind }),
    })
  }
  Ok(items)
}

View File

@@ -3,35 +3,32 @@ use std::fmt;
use std::future::Future; use std::future::Future;
use std::rc::{Rc, Weak}; use std::rc::{Rc, Weak};
use async_lock::RwLock;
use derive_destructure::destructure; use derive_destructure::destructure;
use futures::FutureExt;
use futures::future::join_all; use futures::future::join_all;
use hashbrown::HashMap; use hashbrown::HashMap;
use itertools::Itertools; use itertools::Itertools;
use memo_map::MemoMap; use memo_map::MemoMap;
use orchid_base::char_filter::char_filter_match; use orchid_base::char_filter::char_filter_match;
use orchid_base::error::{OrcErrv, OrcRes}; use orchid_base::error::{OrcRes, mk_errv_floating};
use orchid_base::format::{FmtCtx, FmtUnit, Format}; use orchid_base::format::{FmtCtx, FmtUnit, Format};
use orchid_base::interner::{Interner, Tok}; use orchid_base::interner::{Interner, Tok};
use orchid_base::location::SrcRange; use orchid_base::iter_utils::IteratorPrint;
use orchid_base::name::{NameLike, Sym, VName}; use orchid_base::name::{NameLike, Sym, VName, VPath};
use orchid_base::parse::Comment;
use orchid_base::reqnot::{ReqNot, Requester}; use orchid_base::reqnot::{ReqNot, Requester};
use orchid_base::tree::ttv_from_api;
use ordered_float::NotNan; use ordered_float::NotNan;
use substack::{Stackframe, Substack}; use substack::{Stackframe, Substack};
use crate::api; use crate::api;
use crate::atom::{AtomHand, WeakAtomHand};
use crate::ctx::Ctx; use crate::ctx::Ctx;
use crate::dealias::{absolute_path, walk}; use crate::dealias::walk;
use crate::expr::{ExprParseCtx, ExprWillPanic};
use crate::expr_store::ExprStore;
use crate::extension::{Extension, WeakExtension}; use crate::extension::{Extension, WeakExtension};
use crate::parsed::{Item, ItemKind, ParsTokTree, ParsedMember, ParsedMemberKind, ParsedModule}; use crate::sys_parser::Parser;
use crate::tree::Root; use crate::tree::Root;
#[derive(destructure)] #[derive(destructure)]
struct SystemInstData { pub(crate) struct SystemInstData {
deps: Vec<System>, deps: Vec<System>,
ctx: Ctx, ctx: Ctx,
ext: Extension, ext: Extension,
@@ -39,6 +36,8 @@ struct SystemInstData {
lex_filter: api::CharFilter, lex_filter: api::CharFilter,
id: api::SysId, id: api::SysId,
line_types: Vec<Tok<String>>, line_types: Vec<Tok<String>>,
prelude: Vec<Sym>,
owned_atoms: RwLock<HashMap<api::AtomId, WeakAtomHand>>,
pub(crate) const_paths: MemoMap<api::ParsedConstId, Sym>, pub(crate) const_paths: MemoMap<api::ParsedConstId, Sym>,
} }
impl Drop for SystemInstData { impl Drop for SystemInstData {
@@ -56,8 +55,12 @@ impl fmt::Debug for SystemInstData {
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct System(Rc<SystemInstData>); pub struct System(pub(crate) Rc<SystemInstData>);
impl System { impl System {
#[must_use]
pub async fn atoms(&self) -> impl std::ops::Deref<Target = HashMap<api::AtomId, WeakAtomHand>> {
self.0.owned_atoms.read().await
}
#[must_use] #[must_use]
pub fn id(&self) -> api::SysId { self.0.id } pub fn id(&self) -> api::SysId { self.0.id }
#[must_use] #[must_use]
@@ -69,6 +72,11 @@ impl System {
#[must_use] #[must_use]
pub fn deps(&self) -> &[System] { &self.0.deps } pub fn deps(&self) -> &[System] { &self.0.deps }
#[must_use] #[must_use]
pub fn ctor(&self) -> SystemCtor {
(self.0.ext.system_ctors().find(|c| c.decl.id == self.0.decl_id).cloned())
.expect("Ctor was used to create ext")
}
#[must_use]
pub(crate) fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { self.0.ext.reqnot() } pub(crate) fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { self.0.ext.reqnot() }
#[must_use] #[must_use]
pub async fn get_tree(&self, id: api::TreeId) -> api::MemberKind { pub async fn get_tree(&self, id: api::TreeId) -> api::MemberKind {
@@ -78,6 +86,8 @@ impl System {
pub fn has_lexer(&self) -> bool { !self.0.lex_filter.0.is_empty() } pub fn has_lexer(&self) -> bool { !self.0.lex_filter.0.is_empty() }
#[must_use] #[must_use]
pub fn can_lex(&self, c: char) -> bool { char_filter_match(&self.0.lex_filter, c) } pub fn can_lex(&self, c: char) -> bool { char_filter_match(&self.0.lex_filter, c) }
#[must_use]
pub fn prelude(&self) -> Vec<Sym> { self.0.prelude.clone() }
/// Have this system lex a part of the source. It is assumed that /// Have this system lex a part of the source. It is assumed that
/// [Self::can_lex] was called and returned true. /// [Self::can_lex] was called and returned true.
pub async fn lex<F: Future<Output = Option<api::SubLexed>>>( pub async fn lex<F: Future<Output = Option<api::SubLexed>>>(
@@ -90,120 +100,76 @@ impl System {
self.0.ext.lex_req(source, src, pos, self.id(), r).await self.0.ext.lex_req(source, src, pos, self.id(), r).await
} }
#[must_use] #[must_use]
pub fn can_parse(&self, ltyp: Tok<String>) -> bool { self.0.line_types.contains(&ltyp) } pub fn get_parser(&self, ltyp: Tok<String>) -> Option<Parser> {
(self.0.line_types.iter().enumerate())
.find(|(_, txt)| *txt == &ltyp)
.map(|(idx, _)| Parser { idx: idx as u16, system: self.clone() })
}
pub fn line_types(&self) -> impl Iterator<Item = &Tok<String>> + '_ { self.0.line_types.iter() } pub fn line_types(&self) -> impl Iterator<Item = &Tok<String>> + '_ { self.0.line_types.iter() }
pub async fn parse(
&self,
path: Substack<'_, Tok<String>>,
line: Vec<ParsTokTree>,
exported: bool,
comments: Vec<Comment>,
callback: &mut impl AsyncFnMut(Substack<'_, Tok<String>>, Vec<ParsTokTree>) -> OrcRes<Vec<Item>>,
) -> OrcRes<Vec<Item>> {
let src_path = line.first().expect("cannot be empty").sr.path();
let line = join_all(line.into_iter().map(|t| async {
let mut expr_store = self.0.ext.exprs().clone();
t.into_api(&mut expr_store, &mut ExprWillPanic).await
}))
.await;
let comments = comments.iter().map(Comment::to_api).collect_vec();
let req = api::ParseLine {
module: self.i().i(&path.unreverse()).await.to_api(),
src: src_path.to_api(),
exported,
sys: self.id(),
comments,
line,
};
match self.reqnot().request(req).await {
Ok(parsed_v) => {
let mut ext_exprs = self.ext().exprs().clone();
struct ConvCtx<'a> {
sys: &'a System,
src_path: &'a Sym,
i: &'a Interner,
ext_exprs: &'a mut ExprStore,
pctx: &'a mut ExprParseCtx<'a>,
}
async fn conv(
parsed_v: Vec<api::ParsedLine>,
module: Substack<'_, Tok<String>>,
callback: &'_ mut impl AsyncFnMut(
Substack<'_, Tok<String>>,
Vec<ParsTokTree>,
) -> OrcRes<Vec<Item>>,
ctx: &mut ConvCtx<'_>,
) -> OrcRes<Vec<Item>> {
let mut items = Vec::new();
for parsed in parsed_v {
let (name, exported, kind) = match parsed.kind {
api::ParsedLineKind::Member(api::ParsedMember { name, exported, kind }) =>
(name, exported, kind),
api::ParsedLineKind::Recursive(rec) => {
let tokens = ttv_from_api(rec, ctx.ext_exprs, ctx.pctx, ctx.src_path, ctx.i).await;
items.extend(callback(module.clone(), tokens).await?);
continue;
},
};
let name = ctx.i.ex(name).await;
let mkind = match kind {
api::ParsedMemberKind::Module(items) => {
let items =
conv(items, module.push(name.clone()), callback, ctx).boxed_local().await?;
ParsedMemberKind::Mod(ParsedModule::new(items))
},
api::ParsedMemberKind::Constant(cid) =>
ParsedMemberKind::DeferredConst(cid, ctx.sys.clone()),
};
items.push(Item {
comments: join_all(
parsed.comments.iter().map(|c| Comment::from_api(c, ctx.src_path.clone(), ctx.i)),
)
.await,
sr: SrcRange::from_api(&parsed.source_range, ctx.i).await,
kind: ItemKind::Member(ParsedMember { name, exported, kind: mkind }),
})
}
Ok(items)
}
conv(parsed_v, path, callback, &mut ConvCtx {
i: self.i(),
ext_exprs: &mut ext_exprs,
pctx: &mut ExprParseCtx { ctx: self.ctx(), exprs: self.ext().exprs() },
src_path: &src_path,
sys: self,
})
.await
},
Err(e) => Err(OrcErrv::from_api(&e, &self.ctx().i).await),
}
}
#[must_use] #[must_use]
pub async fn request(&self, req: Vec<u8>) -> Vec<u8> { pub async fn request(&self, req: Vec<u8>) -> Vec<u8> {
self.reqnot().request(api::SysFwded(self.id(), req)).await self.reqnot().request(api::SysFwded(self.id(), req)).await
} }
pub(crate) fn drop_atom(&self, drop: api::AtomId) { pub(crate) async fn new_atom(&self, data: Vec<u8>, id: api::AtomId) -> AtomHand {
let mut owned_g = self.0.owned_atoms.write().await;
if let Some(data) = owned_g.get(&id)
&& let Some(atom) = data.upgrade()
{
return atom;
}
let new = AtomHand::new(data, self.clone(), Some(id));
owned_g.insert(id, new.downgrade());
new
}
pub(crate) fn drop_atom(&self, dropped_atom_id: api::AtomId) {
let this = self.0.clone(); let this = self.0.clone();
(self.0.ctx.spawn)(Box::pin(async move { (self.0.ctx.spawn)(Box::pin(async move {
this.ctx.owned_atoms.write().await.remove(&drop); this.ext.reqnot().request(api::AtomDrop(this.id, dropped_atom_id)).await;
this.owned_atoms.write().await.remove(&dropped_atom_id);
})) }))
} }
#[must_use] #[must_use]
pub fn downgrade(&self) -> WeakSystem { WeakSystem(Rc::downgrade(&self.0)) } pub fn downgrade(&self) -> WeakSystem {
WeakSystem(Rc::downgrade(&self.0), self.0.decl_id, self.ext().downgrade())
}
/// Implementation of [api::ResolveNames] /// Implementation of [api::ResolveNames]
pub(crate) async fn name_resolver( pub(crate) async fn name_resolver(
&self, &self,
orig: api::ParsedConstId, orig: api::ParsedConstId,
) -> impl AsyncFnMut(&[Tok<String>]) -> Option<VName> + use<> { ) -> impl AsyncFnMut(&[Tok<String>]) -> OrcRes<VName> + use<> {
let root = self.0.ctx.root.read().await.upgrade().expect("find_names when root not in context"); let root = self.0.ctx.root.read().await.upgrade().expect("find_names when root not in context");
let orig = self.0.const_paths.get(&orig).expect("origin for find_names invalid").clone(); let orig = self.0.const_paths.get(&orig).expect("origin for find_names invalid").clone();
let ctx = self.0.ctx.clone(); let ctx = self.0.ctx.clone();
async move |rel| { async move |rel| {
let cwd = orig.split_last().1; let cwd = orig.split_last_seg().1;
let abs = absolute_path(cwd, rel, &ctx.i).await.ok()?;
let root_data = &mut *root.0.write().await; let root_data = &mut *root.0.write().await;
let walk_ctx = &mut (ctx.clone(), &mut root_data.consts); let walk_ctx = &mut (ctx.clone(), &root_data.consts);
walk(&root_data.root, false, abs.iter(), walk_ctx).await.is_ok().then_some(abs) let cmod = (walk(&root_data.root, false, cwd.iter().cloned(), walk_ctx).await)
.expect("the parent module of a constant should exist");
let (selector, tail) = rel.split_first().expect("Names cannot be empty");
if cmod.members.get(selector).is_some() {
return Ok(VName::new(cwd.iter().chain(rel).cloned()).unwrap());
}
match cmod.imports.get(selector) {
Some(Ok(dest)) => return Ok(dest.target.to_vname().suffix(tail.iter().cloned())),
Some(Err(dests)) =>
return Err(mk_errv_floating(
ctx.i.i("Ambiguous name").await,
format!(
"{selector} could refer to {}",
dests.iter().map(|ri| &ri.target).display("or")
),
)),
None => (),
}
if tail.is_empty() {
return Ok(VPath::new(cwd.iter().cloned()).name_with_suffix(selector.clone()));
}
Err(mk_errv_floating(
ctx.i.i("Invalid name").await,
format!("{selector} doesn't refer to a module"),
))
} }
} }
} }
@@ -215,12 +181,17 @@ impl Format for System {
} }
} }
pub struct WeakSystem(Weak<SystemInstData>); pub struct WeakSystem(Weak<SystemInstData>, api::SysDeclId, WeakExtension);
impl WeakSystem { impl WeakSystem {
#[must_use] #[must_use]
pub fn upgrade(&self) -> Option<System> { self.0.upgrade().map(System) } pub fn upgrade(&self) -> Option<System> { self.0.upgrade().map(System) }
pub fn ext(&self) -> Option<Extension> { self.2.upgrade() }
pub fn ctor(&self) -> Option<SystemCtor> {
self.ext()?.system_ctors().find(|ctor| ctor.decl.id == self.1).cloned()
}
} }
#[derive(Clone)]
pub struct SystemCtor { pub struct SystemCtor {
pub(crate) decl: api::SystemDecl, pub(crate) decl: api::SystemDecl,
pub(crate) ext: WeakExtension, pub(crate) ext: WeakExtension,
@@ -228,6 +199,10 @@ pub struct SystemCtor {
impl SystemCtor { impl SystemCtor {
#[must_use] #[must_use]
pub fn name(&self) -> &str { &self.decl.name } pub fn name(&self) -> &str { &self.decl.name }
pub async fn name_tok(&self) -> Sym {
(Sym::parse(&self.decl.name, &self.ext.upgrade().expect("ext dropped early").ctx().i).await)
.expect("System cannot have empty name")
}
#[must_use] #[must_use]
pub fn priority(&self) -> NotNan<f64> { self.decl.priority } pub fn priority(&self) -> NotNan<f64> { self.decl.priority }
#[must_use] #[must_use]
@@ -252,6 +227,8 @@ impl SystemCtor {
line_types: join_all(sys_inst.line_types.iter().map(|m| Tok::from_api(*m, &ext.ctx().i))) line_types: join_all(sys_inst.line_types.iter().map(|m| Tok::from_api(*m, &ext.ctx().i)))
.await, .await,
id, id,
prelude: join_all(sys_inst.prelude.iter().map(|tok| Sym::from_api(*tok, &ext.ctx().i))).await,
owned_atoms: RwLock::new(HashMap::new()),
const_paths: MemoMap::new(), const_paths: MemoMap::new(),
})); }));
let api_module_root = api::Module { let api_module_root = api::Module {

View File

@@ -2,19 +2,21 @@
//! once //! once
use std::cell::RefCell; use std::cell::RefCell;
use std::rc::{Rc, Weak}; use std::rc::{Rc, Weak};
use std::slice;
use async_lock::RwLock;
use async_once_cell::OnceCell; use async_once_cell::OnceCell;
use async_std::sync::RwLock; use derive_destructure::destructure;
use futures::{FutureExt, StreamExt, stream}; use futures::{FutureExt, StreamExt, stream};
use hashbrown::HashMap; use hashbrown::HashMap;
use hashbrown::hash_map::Entry; use hashbrown::hash_map::Entry;
use itertools::Itertools; use itertools::Itertools;
use orchid_api::FetchParsedConst; use memo_map::MemoMap;
use orchid_base::clone; use orchid_base::clone;
use orchid_base::error::{OrcRes, Reporter, mk_err, mk_errv}; use orchid_base::error::{OrcRes, Reporter, mk_errv};
use orchid_base::interner::Tok; use orchid_base::interner::Tok;
use orchid_base::location::{Pos, SrcRange}; use orchid_base::location::{CodeGenInfo, Pos};
use orchid_base::name::{Sym, VPath}; use orchid_base::name::{NameLike, Sym, VPath};
use orchid_base::reqnot::Requester; use orchid_base::reqnot::Requester;
use crate::api; use crate::api;
@@ -26,7 +28,7 @@ use crate::system::System;
pub struct RootData { pub struct RootData {
pub root: Module, pub root: Module,
pub consts: HashMap<Sym, Expr>, pub consts: MemoMap<Sym, Expr>,
pub ctx: Ctx, pub ctx: Ctx,
} }
#[derive(Clone)] #[derive(Clone)]
@@ -36,23 +38,25 @@ impl Root {
pub fn new(ctx: Ctx) -> Self { pub fn new(ctx: Ctx) -> Self {
Root(Rc::new(RwLock::new(RootData { Root(Rc::new(RwLock::new(RootData {
root: Module::default(), root: Module::default(),
consts: HashMap::default(), consts: MemoMap::default(),
ctx, ctx,
}))) })))
} }
#[must_use] #[must_use]
pub async fn from_api(api: api::Module, sys: &System) -> Self { pub async fn from_api(api: api::Module, sys: &System) -> Self {
let mut consts = HashMap::new(); let consts = MemoMap::new();
let mut tfac = TreeFromApiCtx { consts: &mut consts, path: sys.i().i(&[][..]).await, sys }; let mut tfac = TreeFromApiCtx { consts: &consts, path: sys.i().i(&[][..]).await, sys };
let root = Module::from_api(api, &mut tfac).await; let root = Module::from_api(api, &mut tfac).await;
Root(Rc::new(RwLock::new(RootData { root, consts, ctx: sys.ctx().clone() }))) Root(Rc::new(RwLock::new(RootData { root, consts, ctx: sys.ctx().clone() })))
} }
pub async fn merge(&self, new: &Root) -> Result<Self, MergeErr> { pub async fn merge(&self, new: &Root) -> Result<Self, MergeErr> {
let this = self.0.read().await; let this = self.0.write().await;
let that = new.0.read().await; let that = new.0.write().await;
let mut consts = let consts = MemoMap::new();
this.consts.iter().chain(&that.consts).map(|(k, v)| (k.clone(), v.clone())).collect(); for (k, v) in this.consts.iter().chain(that.consts.iter()) {
let root = this.root.merge(&that.root, this.ctx.clone(), &mut consts).await?; consts.insert(k.clone(), v.clone());
}
let root = this.root.merge(&that.root, this.ctx.clone(), &consts).await?;
Ok(Self(Rc::new(RwLock::new(RootData { root, consts, ctx: this.ctx.clone() })))) Ok(Self(Rc::new(RwLock::new(RootData { root, consts, ctx: this.ctx.clone() }))))
} }
#[must_use] #[must_use]
@@ -60,11 +64,12 @@ impl Root {
let mut ref_this = self.0.write().await; let mut ref_this = self.0.write().await;
let this = &mut *ref_this; let this = &mut *ref_this;
let mut deferred_consts = HashMap::new(); let mut deferred_consts = HashMap::new();
let consts = this.consts.clone();
let mut tfpctx = FromParsedCtx { let mut tfpctx = FromParsedCtx {
pars_root: parsed, pars_root: parsed,
deferred_consts: &mut deferred_consts, deferred_consts: &mut deferred_consts,
consts: &consts,
pars_prefix: pars_prefix.clone(), pars_prefix: pars_prefix.clone(),
consts: &mut this.consts,
root: &this.root, root: &this.root,
ctx: &this.ctx, ctx: &this.ctx,
rep, rep,
@@ -78,14 +83,13 @@ impl Root {
)]); )]);
module = Module { imports: HashMap::new(), members } module = Module { imports: HashMap::new(), members }
} }
let mut consts = this.consts.clone(); let root = (this.root.merge(&module, this.ctx.clone(), &consts).await)
let root = (this.root.merge(&module, this.ctx.clone(), &mut consts).await)
.expect("Merge conflict between parsed and existing module"); .expect("Merge conflict between parsed and existing module");
let new = Root(Rc::new(RwLock::new(RootData { root, consts, ctx: this.ctx.clone() }))); let new = Root(Rc::new(RwLock::new(RootData { root, consts, ctx: this.ctx.clone() })));
*this.ctx.root.write().await = new.downgrade(); *this.ctx.root.write().await = new.downgrade();
for (path, (sys_id, pc_id)) in deferred_consts { for (path, (sys_id, pc_id)) in deferred_consts {
let sys = this.ctx.system_inst(sys_id).await.expect("System dropped since parsing"); let sys = this.ctx.system_inst(sys_id).await.expect("System dropped since parsing");
let api_expr = sys.reqnot().request(FetchParsedConst { id: pc_id, sys: sys.id() }).await; let api_expr = sys.reqnot().request(api::FetchParsedConst(sys.id(), pc_id)).await;
let mut xp_ctx = ExprParseCtx { ctx: &this.ctx, exprs: sys.ext().exprs() }; let mut xp_ctx = ExprParseCtx { ctx: &this.ctx, exprs: sys.ext().exprs() };
let expr = Expr::from_api(&api_expr, PathSetBuilder::new(), &mut xp_ctx).await; let expr = Expr::from_api(&api_expr, PathSetBuilder::new(), &mut xp_ctx).await;
new.0.write().await.consts.insert(path, expr); new.0.write().await.consts.insert(path, expr);
@@ -93,7 +97,7 @@ impl Root {
new new
} }
pub async fn get_const_value(&self, name: Sym, pos: Pos) -> OrcRes<Expr> { pub async fn get_const_value(&self, name: Sym, pos: Pos) -> OrcRes<Expr> {
let this = &mut *self.0.write().await; let this = &*self.0.read().await;
// shortcut for previously visited // shortcut for previously visited
if let Some(val) = this.consts.get(&name) { if let Some(val) = this.consts.get(&name) {
return Ok(val.clone()); return Ok(val.clone());
@@ -101,7 +105,7 @@ impl Root {
// load the node, then check if this "walk" call added it to the map // load the node, then check if this "walk" call added it to the map
let ctx = this.ctx.clone(); let ctx = this.ctx.clone();
let module = let module =
walk(&this.root, false, name.iter().cloned(), &mut (ctx.clone(), &mut this.consts)).await; walk(&this.root, false, name.iter().cloned(), &mut (ctx.clone(), &this.consts)).await;
if let Some(val) = this.consts.get(&name) { if let Some(val) = this.consts.get(&name) {
return Ok(val.clone()); return Ok(val.clone());
} }
@@ -140,21 +144,21 @@ impl Default for WeakRoot {
pub struct TreeFromApiCtx<'a> { pub struct TreeFromApiCtx<'a> {
pub sys: &'a System, pub sys: &'a System,
pub consts: &'a mut HashMap<Sym, Expr>, pub consts: &'a MemoMap<Sym, Expr>,
pub path: Tok<Vec<Tok<String>>>, pub path: Tok<Vec<Tok<String>>>,
} }
impl<'a> TreeFromApiCtx<'a> { impl<'a> TreeFromApiCtx<'a> {
#[must_use] #[must_use]
pub async fn push<'c>(&'c mut self, name: Tok<String>) -> TreeFromApiCtx<'c> { pub async fn push<'c>(&'c self, name: Tok<String>) -> TreeFromApiCtx<'c> {
let path = self.sys.ctx().i.i(&self.path.iter().cloned().chain([name]).collect_vec()).await; let path = self.sys.ctx().i.i(&self.path.iter().cloned().chain([name]).collect_vec()).await;
TreeFromApiCtx { path, consts: &mut *self.consts, sys: self.sys } TreeFromApiCtx { path, consts: self.consts, sys: self.sys }
} }
} }
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub struct ResolvedImport { pub struct ResolvedImport {
target: Sym, pub target: Sym,
sr: SrcRange, pub pos: Pos,
} }
#[derive(Clone, Default)] #[derive(Clone, Default)]
@@ -208,7 +212,7 @@ impl Module {
Ok(abs_path) => { Ok(abs_path) => {
let names_res = match abs_path.strip_prefix(&ctx.pars_prefix[..]) { let names_res = match abs_path.strip_prefix(&ctx.pars_prefix[..]) {
None => { None => {
let mut tree_ctx = (ctx.ctx.clone(), &mut *ctx.consts); let mut tree_ctx = (ctx.ctx.clone(), ctx.consts);
resolv_glob(&path, ctx.root, &abs_path, pos, &ctx.ctx.i, &mut tree_ctx).await resolv_glob(&path, ctx.root, &abs_path, pos, &ctx.ctx.i, &mut tree_ctx).await
}, },
Some(sub_tgt) => { Some(sub_tgt) => {
@@ -234,6 +238,22 @@ impl Module {
} }
} }
let mut imports = HashMap::new(); let mut imports = HashMap::new();
if parsed.use_prelude {
let systems = ctx.ctx.systems.read().await;
for sys in systems.values().flat_map(|weak| weak.upgrade()) {
for prelude_item in sys.prelude() {
imports.insert(
prelude_item.last_seg(),
Ok(ResolvedImport {
target: prelude_item,
pos: CodeGenInfo::new_details(sys.ctor().name_tok().await, "In prelude", &ctx.ctx.i)
.await
.pos(),
}),
);
}
}
}
let conflicting_imports_msg = ctx.ctx.i.i("Conflicting imports").await; let conflicting_imports_msg = ctx.ctx.i.i("Conflicting imports").await;
for (key, values) in imports_by_name { for (key, values) in imports_by_name {
if values.len() == 1 { if values.len() == 1 {
@@ -243,16 +263,16 @@ impl Module {
match abs_path_res { match abs_path_res {
Err(e) => ctx.rep.report(e.err_obj(&ctx.ctx.i, sr.pos(), &import.to_string()).await), Err(e) => ctx.rep.report(e.err_obj(&ctx.ctx.i, sr.pos(), &import.to_string()).await),
Ok(abs_path) => { Ok(abs_path) => {
imports let target = abs_path.to_sym(&ctx.ctx.i).await;
.insert(key, Ok(ResolvedImport { target: abs_path.to_sym(&ctx.ctx.i).await, sr })); imports.insert(key, Ok(ResolvedImport { target, pos: sr.pos() }));
}, },
} }
} else { } else {
for item in values { for item in values {
ctx.rep.report(mk_err( ctx.rep.report(mk_errv(
conflicting_imports_msg.clone(), conflicting_imports_msg.clone(),
format!("{key} is imported multiple times from different modules"), format!("{key} is imported multiple times from different modules"),
[item.sr.pos().into()], [item.sr.pos()],
)); ));
} }
} }
@@ -263,7 +283,10 @@ impl Module {
let values = stream::iter(values) let values = stream::iter(values)
.then(|(n, sr)| { .then(|(n, sr)| {
clone!(key; async move { clone!(key; async move {
ResolvedImport { target: n.to_vname().suffix([key.clone()]).to_sym(i).await, sr } ResolvedImport {
target: n.to_vname().suffix([key.clone()]).to_sym(i).await,
pos: sr.pos(),
}
}) })
}) })
.collect::<Vec<_>>() .collect::<Vec<_>>()
@@ -274,11 +297,12 @@ impl Module {
let self_referential_msg = ctx.ctx.i.i("Self-referential import").await; let self_referential_msg = ctx.ctx.i.i("Self-referential import").await;
for (key, value) in imports.iter() { for (key, value) in imports.iter() {
let Ok(import) = value else { continue }; let Ok(import) = value else { continue };
if import.target.strip_prefix(&path[..]).is_some_and(|t| t.starts_with(&[key.clone()])) { if import.target.strip_prefix(&path[..]).is_some_and(|t| t.starts_with(slice::from_ref(key)))
ctx.rep.report(mk_err( {
ctx.rep.report(mk_errv(
self_referential_msg.clone(), self_referential_msg.clone(),
format!("import {} points to itself or a path within itself", &import.target), format!("import {} points to itself or a path within itself", &import.target),
[import.sr.pos().into()], [import.pos.clone()],
)); ));
} }
} }
@@ -302,7 +326,7 @@ impl Module {
&self, &self,
other: &Module, other: &Module,
ctx: Ctx, ctx: Ctx,
consts: &mut HashMap<Sym, Expr>, consts: &MemoMap<Sym, Expr>,
) -> Result<Module, MergeErr> { ) -> Result<Module, MergeErr> {
if !self.imports.is_empty() || !other.imports.is_empty() { if !self.imports.is_empty() || !other.imports.is_empty() {
return Err(MergeErr { path: VPath::new([]), kind: MergeErrKind::Imports }); return Err(MergeErr { path: VPath::new([]), kind: MergeErrKind::Imports });
@@ -363,14 +387,14 @@ pub struct FromParsedCtx<'a> {
pars_prefix: Sym, pars_prefix: Sym,
pars_root: &'a ParsedModule, pars_root: &'a ParsedModule,
root: &'a Module, root: &'a Module,
consts: &'a mut HashMap<Sym, Expr>,
rep: &'a Reporter, rep: &'a Reporter,
ctx: &'a Ctx, ctx: &'a Ctx,
consts: &'a MemoMap<Sym, Expr>,
deferred_consts: &'a mut HashMap<Sym, (api::SysId, api::ParsedConstId)>, deferred_consts: &'a mut HashMap<Sym, (api::SysId, api::ParsedConstId)>,
} }
impl Tree for Module { impl Tree for Module {
type Ctx<'a> = (Ctx, &'a mut HashMap<Sym, Expr>); type Ctx<'a> = (Ctx, &'a MemoMap<Sym, Expr>);
async fn child( async fn child(
&self, &self,
key: Tok<String>, key: Tok<String>,
@@ -400,7 +424,7 @@ pub struct Member {
} }
impl Member { impl Member {
#[must_use] #[must_use]
pub async fn kind<'a>(&'a self, ctx: Ctx, consts: &mut HashMap<Sym, Expr>) -> &'a MemberKind { pub async fn kind<'a>(&'a self, ctx: Ctx, consts: &MemoMap<Sym, Expr>) -> &'a MemberKind {
(self.kind.get_or_init(async { (self.kind.get_or_init(async {
let handle = self.lazy.borrow_mut().take().expect("If kind is uninit, lazy must be Some"); let handle = self.lazy.borrow_mut().take().expect("If kind is uninit, lazy must be Some");
handle.run(ctx, consts).await handle.run(ctx, consts).await
@@ -417,11 +441,7 @@ impl MemberKind {
#[must_use] #[must_use]
async fn from_parsed(parsed: &ParsedMemberKind, path: Sym, ctx: &mut FromParsedCtx<'_>) -> Self { async fn from_parsed(parsed: &ParsedMemberKind, path: Sym, ctx: &mut FromParsedCtx<'_>) -> Self {
match parsed { match parsed {
ParsedMemberKind::ParsedConst(expr) => { ParsedMemberKind::Const(id, sys) => {
ctx.consts.insert(path, expr.clone());
MemberKind::Const
},
ParsedMemberKind::DeferredConst(id, sys) => {
ctx.deferred_consts.insert(path, (sys.id(), *id)); ctx.deferred_consts.insert(path, (sys.id(), *id));
MemberKind::Const MemberKind::Const
}, },
@@ -431,6 +451,7 @@ impl MemberKind {
} }
} }
#[derive(destructure)]
pub struct LazyMemberHandle { pub struct LazyMemberHandle {
id: api::TreeId, id: api::TreeId,
sys: api::SysId, sys: api::SysId,
@@ -438,18 +459,26 @@ pub struct LazyMemberHandle {
} }
impl LazyMemberHandle { impl LazyMemberHandle {
#[must_use] #[must_use]
pub async fn run(self, ctx: Ctx, consts: &mut HashMap<Sym, Expr>) -> MemberKind { pub async fn run(mut self, ctx: Ctx, consts: &MemoMap<Sym, Expr>) -> MemberKind {
let sys = ctx.system_inst(self.sys).await.expect("Missing system for lazy member"); let sys = ctx.system_inst(self.sys).await.expect("Missing system for lazy member");
match sys.get_tree(self.id).await { match sys.get_tree(self.id).await {
api::MemberKind::Const(c) => { api::MemberKind::Const(c) => {
let mut pctx = ExprParseCtx { ctx: &ctx, exprs: sys.ext().exprs() }; let mut pctx = ExprParseCtx { ctx: &ctx, exprs: sys.ext().exprs() };
consts.insert(self.path, Expr::from_api(&c, PathSetBuilder::new(), &mut pctx).await); let expr = Expr::from_api(&c, PathSetBuilder::new(), &mut pctx).await;
let (.., path) = self.destructure();
consts.insert(path, expr);
MemberKind::Const MemberKind::Const
}, },
api::MemberKind::Module(m) => MemberKind::Module( api::MemberKind::Module(m) => {
Module::from_api(m, &mut TreeFromApiCtx { sys: &sys, consts, path: self.path.tok() }).await, let (.., path) = self.destructure();
), MemberKind::Module(
api::MemberKind::Lazy(id) => Self { id, ..self }.run(ctx, consts).boxed_local().await, Module::from_api(m, &mut TreeFromApiCtx { sys: &sys, consts, path: path.tok() }).await,
)
},
api::MemberKind::Lazy(id) => {
self.id = id;
self.run(ctx, consts).boxed_local().await
},
} }
} }
#[must_use] #[must_use]

View File

@@ -1,6 +0,0 @@
[package]
name = "orchid-macros"
version = "0.1.0"
edition = "2024"
[dependencies]

View File

@@ -1,3 +0,0 @@
fn main() {
println!("Hello, world!");
}

View File

@@ -5,13 +5,11 @@ edition = "2024"
[dependencies] [dependencies]
async-once-cell = "0.5.4" async-once-cell = "0.5.4"
async-std = "1.13.0" futures = { version = "0.3.31", features = ["std"], default-features = false }
async-stream = "0.3.6" hashbrown = "0.16.0"
futures = "0.3.31"
hashbrown = "0.15.2"
itertools = "0.14.0" itertools = "0.14.0"
never = "0.1.0" never = "0.1.0"
once_cell = "1.20.2" once_cell = "1.21.3"
orchid-api = { version = "0.1.0", path = "../orchid-api" } orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" } orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" } orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
@@ -20,8 +18,9 @@ orchid-extension = { version = "0.1.0", path = "../orchid-extension", features =
"tokio", "tokio",
] } ] }
ordered-float = "5.0.0" ordered-float = "5.0.0"
rust_decimal = "1.36.0" rust_decimal = "1.37.2"
tokio = { version = "1.43.0", features = ["full"] } substack = "1.1.1"
tokio = { version = "1.47.1", features = ["full"] }
[dev-dependencies] [dev-dependencies]
test_executors = "0.3.2" test_executors = "0.3.5"

View File

@@ -1,6 +1,9 @@
// mod macros; mod macros;
mod std; mod std;
pub use std::number::num_atom::{Float, HomoArray, Int, Num}; pub use std::number::num_atom::{Float, HomoArray, Int, Num};
pub use std::std_system::StdSystem; pub use std::std_system::StdSystem;
pub use std::string::str_atom::OrcString; pub use std::string::str_atom::OrcString;
pub use macros::macro_system::MacroSystem;
pub use macros::mactree::{MacTok, MacTree};

View File

@@ -0,0 +1,49 @@
use std::borrow::Cow;
use never::Never;
use orchid_extension::atom::{Atomic, TypAtom};
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant, own};
use orchid_extension::conv::{ToExpr, TryFromExpr};
use orchid_extension::expr::Expr;
use orchid_extension::gen_expr::GExpr;
use crate::macros::mactree::{MacTok, MacTree, map_mactree};
/// Curried call that substitutes collected arguments into the
/// [MacTok::Slot] placeholders of a macro template once every expected
/// argument has arrived.
#[derive(Clone)]
pub struct InstantiateTplCall {
  // Template whose Slot tokens are to be filled in
  pub(crate) tpl: MacTree,
  // Number of arguments expected before instantiation fires
  pub(crate) argc: usize,
  // Arguments received so far; compared against argc on every call
  pub(crate) argv: Vec<MacTree>,
}
impl Atomic for InstantiateTplCall {
  type Variant = OwnedVariant;
  // Carries no associated data; all state lives in the owned atom itself.
  type Data = ();
}
impl OwnedAtom for InstantiateTplCall {
  type Refs = Never;
  async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
  // Technically must be supported but shouldn't actually ever be called
  async fn call_ref(&self, arg: Expr) -> GExpr {
    eprintln!(
      "Copying partially applied instantiate_tpl call. This is an internal value.\
      \nIt should be fully consumed within generated code."
    );
    self.clone().call(arg).await
  }
  /// Accept one more argument; once `argc` arguments have been gathered,
  /// substitute them into the template's slots and return the result.
  async fn call(mut self, arg: Expr) -> GExpr {
    // The argument must itself be a macro tree, otherwise surface the error.
    let tree = match TypAtom::<MacTree>::try_from_expr(arg).await {
      Ok(t) => own(t).await,
      Err(e) => return Err::<Never, _>(e).to_expr().await,
    };
    self.argv.push(tree);
    if self.argc > self.argv.len() {
      // Not saturated yet: stay curried and wait for further calls.
      return self.to_expr().await;
    }
    // Saturated: consume the collected arguments, filling slots in order.
    let mut remaining = self.argv.into_iter();
    let filled = map_mactree(&self.tpl, &mut false, &mut |node| match node.tok() {
      MacTok::Slot => Some(remaining.next().expect("Not enough arguments to fill all slots")),
      _ => None,
    });
    assert!(remaining.next().is_none(), "Too many arguments for all slots");
    filled.to_expr().await
  }
}

View File

@@ -0,0 +1,120 @@
use std::pin::pin;
use futures::{FutureExt, StreamExt, stream};
use hashbrown::HashMap;
use itertools::Itertools;
use orchid_base::error::{OrcRes, Reporter};
use orchid_base::name::Sym;
use orchid_base::parse::{
Comment, ParseCtx, Parsed, Snippet, expect_tok, token_errv, try_pop_no_fluff,
};
use orchid_base::sym;
use orchid_base::tree::Paren;
use orchid_extension::gen_expr::{atom, call, sym_ref};
use orchid_extension::parser::{ConstCtx, PSnippet, PTok, PTokTree, ParsCtx, ParsedLine, Parser};
use crate::macros::mactree::{MacTok, MacTree, glossary_v, map_mactree_v};
/// Parses `let <name> = <tokens...>` lines into a constant whose value is the
/// macro-resolved, lowered form of the right-hand side.
#[derive(Default)]
pub struct LetLine;
impl Parser for LetLine {
  const LINE_HEAD: &'static str = "let";
  async fn parse<'a>(
    ctx: ParsCtx<'a>,
    exported: bool,
    comments: Vec<Comment>,
    line: PSnippet<'a>,
  ) -> OrcRes<Vec<ParsedLine>> {
    let sr = line.sr();
    // The first meaningful token must be the constant's name.
    let Parsed { output: name_tok, tail } = try_pop_no_fluff(&ctx, line).await?;
    let Some(name) = name_tok.as_name() else {
      let err = token_errv(&ctx, name_tok, "Constant must have a name", |t| {
        format!("Expected a name but found {t}")
      });
      return Err(err.await);
    };
    let Parsed { tail, .. } = expect_tok(&ctx, tail, ctx.i().i("=").await).await?;
    // Everything after `=` is tokenized into MacTrees eagerly...
    let aliased = parse_tokv(tail, &ctx).await;
    Ok(vec![ParsedLine::cnst(&line.sr(), &comments, exported, name, async move |ctx| {
      // ...but name resolution is deferred to constant evaluation, when the
      // module's import set is available through ConstCtx.
      let rep = Reporter::new();
      let dealiased = dealias_mac_v(aliased, &ctx, &rep).await;
      // Wrap the token sequence in a () group so it forms one expression.
      let macro_input = MacTok::S(Paren::Round, dealiased).at(sr.pos());
      if let Some(e) = rep.errv() {
        return Err(e);
      }
      // Generated value: macros::lower (macros::resolve <input>)
      Ok(call([
        sym_ref(sym!(macros::lower; ctx.i()).await),
        call([sym_ref(sym!(macros::resolve; ctx.i()).await), atom(macro_input)]),
      ]))
    })])
  }
}
/// Replaces every name occurring in `aliased` with its canonical (absolute)
/// form; resolution failures are reported and the local name is kept.
pub async fn dealias_mac_v(aliased: Vec<MacTree>, ctx: &ConstCtx, rep: &Reporter) -> Vec<MacTree> {
  let keys = glossary_v(&aliased).collect_vec();
  let mut names: HashMap<_, _> = HashMap::new();
  // NOTE(review): assumes ctx.names yields exactly one item per key, in key
  // order, so zipping with the key list pairs them correctly — confirm.
  let mut stream = pin!(ctx.names(&keys).zip(stream::iter(&keys)));
  while let Some((canonical, local)) = stream.next().await {
    match canonical {
      Err(e) => rep.report(e),
      Ok(name) => {
        names.insert(local.clone(), name);
      },
    }
  }
  // Names absent from the map (resolution failed) are left untouched; the
  // errors reported above carry the detail.
  map_mactree_v(&aliased, &mut false, &mut |tree| match &*tree.tok {
    MacTok::Name(n) => names.get(n).map(|new_n| MacTok::Name(new_n.clone()).at(tree.pos())),
    _ => None,
  })
}
/// Converts a token snippet into MacTrees, folding lambda syntax: everything
/// after the first lambda head becomes that lambda's body (recursively).
pub async fn parse_tokv(line: PSnippet<'_>, ctx: &impl ParseCtx) -> Vec<MacTree> {
  if let Some((idx, arg)) = line.iter().enumerate().find_map(|(i, x)| Some((i, x.as_lambda()?))) {
    // `head` precedes the first lambda head, so it contains no lambdas itself.
    let (head, lambda) = line.split_at(idx as u32);
    let (_, body) = lambda.pop_front().unwrap();
    let body = parse_tokv(body, ctx).boxed_local().await;
    let mut all = parse_tokv_no_lambdas(&head, ctx).await;
    match parse_tok(arg, ctx).await {
      Some(arg) => all.push(MacTok::Lambda(arg, body).at(lambda.sr().pos())),
      // A fluff-only lambda argument is reported and the lambda is dropped.
      None => ctx.rep().report(
        token_errv(ctx, arg, "Lambda argument fluff", |arg| {
          format!("Lambda arguments must be a valid token, found meaningless fragment {arg}")
        })
        .await,
      ),
    };
    all
  } else {
    parse_tokv_no_lambdas(&line, ctx).await
  }
}
/// Converts a flat run of token trees (known to contain no lambda heads) into
/// MacTrees, silently dropping fluff tokens.
async fn parse_tokv_no_lambdas(line: &[PTokTree], ctx: &impl ParseCtx) -> Vec<MacTree> {
  let mut out = Vec::new();
  for tree in line {
    if let Some(parsed) = parse_tok(tree, ctx).await {
      out.push(parsed);
    }
  }
  out
}
/// Converts a single parsed token tree into a MacTree. Returns None for fluff
/// (line breaks and comments), which carries no meaning in an expression.
pub async fn parse_tok(tree: &PTokTree, ctx: &impl ParseCtx) -> Option<MacTree> {
  let tok = match &tree.tok {
    PTok::Bottom(errv) => MacTok::Bottom(errv.clone()),
    PTok::BR | PTok::Comment(_) => return None,
    // A lone name becomes a single-segment symbol, dealiased later.
    PTok::Name(n) => MacTok::Name(Sym::new([n.clone()], ctx.i()).await.unwrap()),
    PTok::NS(..) => match tree.as_multiname() {
      Ok(mn) => MacTok::Name(mn.to_sym(ctx.i()).await),
      Err(nested) => {
        // Report the malformed :: chain but keep parsing the inner token so a
        // single error doesn't cascade.
        ctx.rep().report(
          token_errv(ctx, tree, ":: can only be followed by a name in an expression", |tok| {
            format!("Expected name, found {tok}")
          })
          .await,
        );
        return parse_tok(nested, ctx).boxed_local().await;
      },
    },
    PTok::Handle(expr) => MacTok::Value(expr.clone()),
    PTok::NewExpr(never) => match *never {},
    // Lambda heads are consumed by parse_tokv before this is reached.
    PTok::LambdaHead(_) => panic!("Lambda-head handled in the sequence parser"),
    PTok::S(p, body) =>
      MacTok::S(*p, parse_tokv(Snippet::new(tree, body), ctx).boxed_local().await),
  };
  Some(tok.at(tree.sr().pos()))
}

View File

@@ -0,0 +1,83 @@
use hashbrown::HashMap;
use itertools::Itertools;
use orchid_base::error::Reporter;
use orchid_base::sym;
use orchid_extension::atom::TypAtom;
use orchid_extension::atom_owned::own;
use orchid_extension::conv::ToExpr;
use orchid_extension::coroutine_exec::exec;
use orchid_extension::gen_expr::{atom, call, sym_ref};
use orchid_extension::reflection::{ReflMemKind, refl};
use orchid_extension::tree::{GenMember, comments, fun, prefix};
use substack::Substack;
use crate::Int;
use crate::macros::instantiate_tpl::InstantiateTplCall;
use crate::macros::macro_line::{Macro, Matcher};
use crate::macros::mactree::{LowerCtx, MacTree};
use crate::macros::recur_state::RecurState;
use crate::macros::resolve::{ResolveCtx, resolve};
pub fn gen_macro_lib() -> Vec<GenMember> {
prefix("macros", [
comments(
["This is an internal function, you can't obtain a value of its argument type.", "hidden"],
fun(true, "instantiate_tpl", |tpl: TypAtom<MacTree>, right: Int| async move {
InstantiateTplCall {
tpl: own(tpl).await,
argc: right.0.try_into().unwrap(),
argv: Vec::new(),
}
}),
),
fun(true, "resolve", |tpl: TypAtom<MacTree>| async move {
call([
sym_ref(sym!(macros::resolve_recur; tpl.untyped.ctx().i()).await),
atom(RecurState::Bottom),
tpl.untyped.ex().to_expr().await,
])
}),
fun(true, "lower", |tpl: TypAtom<MacTree>| async move {
let ctx = LowerCtx { sys: tpl.untyped.ctx().clone(), rep: &Reporter::new() };
let res = own(tpl).await.lower(ctx, Substack::Bottom).await;
if let Some(e) = Reporter::new().errv() { Err(e) } else { Ok(res) }
}),
fun(true, "resolve_recur", |state: TypAtom<RecurState>, tpl: TypAtom<MacTree>| async move {
exec("macros::resolve_recur", async move |mut h| {
let ctx = tpl.ctx().clone();
let root = refl(&ctx);
let tpl = own(tpl.clone()).await;
let mut macros = HashMap::new();
for n in tpl.glossary() {
if let Ok(ReflMemKind::Const) = root.get_by_path(n).await.map(|m| m.kind()) {
let Ok(mac) = h.exec::<TypAtom<Macro>>(sym_ref(n.clone())).await else { continue };
let mac = own(mac).await;
macros.entry(mac.0.own_kws[0].clone()).or_insert(mac);
}
}
let mut named = HashMap::new();
let mut priod = Vec::new();
for (_, mac) in macros.iter() {
for rule in mac.0.rules.iter() {
if rule.glossary.is_subset(tpl.glossary()) {
match &rule.pattern {
Matcher::Named(m) =>
named.entry(m.head()).or_insert(Vec::new()).push((m, mac, rule)),
Matcher::Priod(p) => priod.push((mac.0.prio, (p, mac, rule))),
}
}
}
}
let priod = priod.into_iter().sorted_unstable_by_key(|(p, _)| *p).map(|(_, r)| r).collect();
let mut rctx = ResolveCtx { h, recur: own(state).await, ctx: ctx.clone(), named, priod };
let resolve_res = resolve(&mut rctx, &tpl).await;
std::mem::drop(rctx);
match resolve_res {
Some(out_tree) => out_tree.to_expr().await,
None => tpl.to_expr().await,
}
})
.await
}),
])
}

View File

@@ -0,0 +1,228 @@
use std::borrow::Cow;
use std::cell::RefCell;
use std::rc::Rc;
use async_once_cell::OnceCell;
use futures::{StreamExt, stream};
use hashbrown::{HashMap, HashSet};
use itertools::Itertools;
use never::Never;
use orchid_base::error::{OrcRes, Reporter, mk_errv};
use orchid_base::interner::Tok;
use orchid_base::location::Pos;
use orchid_base::name::Sym;
use orchid_base::parse::{
Comment, ParseCtx, Parsed, Snippet, expect_end, expect_tok, line_items, token_errv,
try_pop_no_fluff,
};
use orchid_base::tree::{Paren, Token};
use orchid_base::{clone, sym};
use orchid_extension::atom::{Atomic, TypAtom};
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
use orchid_extension::conv::{ToExpr, TryFromExpr};
use orchid_extension::gen_expr::{atom, call, sym_ref};
use orchid_extension::parser::{PSnippet, ParsCtx, ParsedLine, Parser};
use crate::macros::let_line::{dealias_mac_v, parse_tokv};
use crate::macros::mactree::{glossary_v, map_mactree_v};
use crate::macros::recur_state::{RecurState, RulePath};
use crate::macros::rule::matcher::{NamedMatcher, PriodMatcher};
use crate::{Int, MacTok};
/// Parses `macro [priority] ( keywords ...; rule ... => ...; ... )` blocks
/// into: one body constant per rule, plus one `Macro`-atom constant per
/// declared keyword (all sharing a lazily built `MacroData`).
#[derive(Default)]
pub struct MacroLine;
impl Parser for MacroLine {
  const LINE_HEAD: &'static str = "macro";
  async fn parse<'a>(
    ctx: ParsCtx<'a>,
    exported: bool,
    comments: Vec<Comment>,
    line: PSnippet<'a>,
  ) -> OrcRes<Vec<ParsedLine>> {
    // `export macro` is rejected outright; macros are implicitly exported.
    if exported {
      return Err(mk_errv(
        ctx.i().i("macros are always exported").await,
        "The export keyword is forbidden here to avoid confusion\n\
        because macros are exported by default",
        [line.sr()],
      ));
    }
    let module = ctx.module();
    let Parsed { output, tail } = try_pop_no_fluff(&ctx, line).await?;
    let bad_first_item_err = || {
      token_errv(&ctx, output, "Expected priority or block", |s| {
        format!("Expected a priority number or a () block, found {s}")
      })
    };
    // The header is either `( ... )` (named macro) or `<prio> ( ... )`
    // (prioritized macro).
    let (prio, body) = match &output.tok {
      Token::S(Paren::Round, body) => (None, body),
      Token::Handle(expr) => match TypAtom::<Int>::try_from_expr(expr.clone()).await {
        Err(e) => {
          return Err(e + bad_first_item_err().await);
        },
        Ok(prio) => {
          // NOTE(review): `output.tok` is known to be `Token::Handle` in this
          // arm, so this `let` can never match and every prioritized macro
          // errors out here. Presumably the () block should be popped from
          // `tail` instead — confirm.
          let Token::S(Paren::Round, block) = &output.tok else {
            return Err(
              token_errv(&ctx, output, "Expected () block", |s| {
                format!("Expected a () block, found {s}")
              })
              .await,
            );
          };
          (Some(prio), block)
        },
      },
      _ => return Err(bad_first_item_err().await),
    };
    expect_end(&ctx, tail).await?;
    let lines = line_items(&ctx, Snippet::new(output, body)).await;
    // First line inside the block must be `keywords <name>...`.
    let Some((kw_line, rule_lines)) = lines.split_first() else { return Ok(Vec::new()) };
    let mut keywords = HashMap::new();
    let Parsed { tail: kw_tail, .. } =
      expect_tok(&ctx, kw_line.tail, ctx.i().i("keywords").await).await?;
    for kw_tok in kw_tail.iter().filter(|kw| !kw.is_fluff()) {
      match kw_tok.as_name() {
        Some(kw) => {
          keywords.insert(kw, kw_tok.sr());
        },
        None => ctx.rep().report(
          token_errv(&ctx, kw_tok, "invalid macro keywords list", |tok| {
            format!("The keywords list must be a sequence of names; received {tok}")
          })
          .await,
        ),
      }
    }
    // The first keyword doubles as the macro block's canonical identity.
    let Some(macro_name) = keywords.keys().next().cloned() else {
      return Err(mk_errv(
        ctx.i().i("macro with no keywords").await,
        "Macros must define at least one macro of their own.",
        [kw_line.tail.sr()],
      ));
    };
    let mut rules = Vec::new();
    let mut lines = Vec::new();
    for (idx, line) in rule_lines.iter().enumerate().map(|(n, v)| (n as u32, v)) {
      let path = RulePath { module: module.clone(), main_kw: macro_name.clone(), rule: idx };
      let sr = line.tail.sr();
      let name = ctx.i().i(&path.name()).await;
      let Parsed { tail, .. } = expect_tok(&ctx, line.tail, ctx.i().i("rule").await).await?;
      let arrow_token = ctx.i().i("=>").await;
      let Some((pattern, body)) = tail.split_once(|tok| tok.is_kw(arrow_token.clone())) else {
        ctx.rep().report(mk_errv(
          ctx.i().i("Missing => in rule").await,
          "Rule lines are of the form `rule ...pattern => ...body`",
          [line.tail.sr()],
        ));
        continue;
      };
      let pattern = parse_tokv(pattern, &ctx).await;
      // Collect placeholders in pattern order; the mapping callback only
      // records, it never rewrites (always returns None).
      let mut placeholders = Vec::new();
      map_mactree_v(&pattern, &mut false, &mut |tok| {
        if let MacTok::Ph(ph) = tok.tok() {
          placeholders.push((ph.clone(), tok.pos()))
        }
        None
      });
      let mut body_mactree = parse_tokv(body, &ctx).await;
      // Wrap the body in one lambda per placeholder (innermost = last), so
      // matched fragments can be passed as plain call arguments.
      for (ph, ph_pos) in placeholders.iter().rev() {
        let name = ctx.module().suffix([ph.name.clone()], ctx.i()).await;
        body_mactree = vec![
          MacTok::Lambda(MacTok::Name(name).at(ph_pos.clone()), body_mactree).at(ph_pos.clone()),
        ]
      }
      let body_sr = body.sr();
      // NOTE(review): `rules.len()` diverges from `idx` once a malformed rule
      // line is skipped above, while `path` (and so the body constant's name)
      // uses `idx` — confirm the two indices are meant to stay in sync.
      rules.push((name.clone(), placeholders, rules.len() as u32, sr.pos(), pattern));
      // The rule body constant: dealiased body wrapped in a resolve_recur call
      // seeded with this rule on the recursion stack.
      lines.push(ParsedLine::cnst(&sr, &line.output, true, name, async move |ctx| {
        let rep = Reporter::new();
        let body = dealias_mac_v(body_mactree, &ctx, &rep).await;
        let macro_input = MacTok::S(Paren::Round, body).at(body_sr.pos());
        if let Some(e) = rep.errv() {
          return Err(e);
        }
        Ok(call([
          sym_ref(sym!(macros::resolve_recur; ctx.i()).await),
          atom(RecurState::base(path)),
          macro_input.to_expr().await,
        ]))
      }))
    }
    // Every keyword constant shares one lazily-initialized Macro atom; the
    // rule list is moved out of the RefCell by whichever runs first.
    let mac_cell = Rc::new(OnceCell::new());
    let keywords = Rc::new(keywords);
    let rules = Rc::new(RefCell::new(Some(rules)));
    for (kw, sr) in &*keywords {
      clone!(mac_cell, keywords, rules, module, prio);
      lines.push(ParsedLine::cnst(&sr.clone(), &comments, true, kw.clone(), async move |cctx| {
        let mac = mac_cell
          .get_or_init(async {
            let rep = Reporter::new();
            let rules = rules.borrow_mut().take().expect("once cell initializer runs");
            let rules = stream::iter(rules)
              .then(|(body_name, placeholders, index, pos, pattern_macv)| {
                let cctx = &cctx;
                let rep = &rep;
                let prio = &prio;
                async move {
                  // Compile each pattern now that names can be dealiased.
                  let pattern_abs = dealias_mac_v(pattern_macv, cctx, rep).await;
                  let glossary = glossary_v(&pattern_abs).collect();
                  let pattern_res = match prio {
                    None => NamedMatcher::new(&pattern_abs, cctx.i()).await.map(Matcher::Named),
                    Some(_) => PriodMatcher::new(&pattern_abs, cctx.i()).await.map(Matcher::Priod),
                  };
                  let placeholders = placeholders.into_iter().map(|(ph, _)| ph.name).collect_vec();
                  match pattern_res {
                    Ok(pattern) =>
                      Some(Rule { index, pos, body_name, pattern, glossary, placeholders }),
                    Err(e) => {
                      // Rules with invalid patterns are reported and dropped.
                      rep.report(e);
                      None
                    },
                  }
                }
              })
              .flat_map(stream::iter)
              .collect::<Vec<_>>()
              .await;
            let own_kws = keywords.keys().cloned().collect_vec();
            Macro(Rc::new(MacroData { module, prio: prio.map(|i| i.0 as u64), rules, own_kws }))
          })
          .await;
        atom(mac.clone())
      }))
    }
    Ok(lines)
  }
}
/// Shared definition of one `macro` block, referenced by all of its keywords.
#[derive(Debug)]
pub struct MacroData {
  // Module the block was declared in; rule body constants live under it.
  pub module: Sym,
  // Priority for prioritized macros; None for named (keyword-anchored) ones.
  pub prio: Option<u64>,
  // Rules whose patterns compiled successfully.
  pub rules: Vec<Rule>,
  // Keywords the block declares; the first is its canonical identity.
  pub own_kws: Vec<Tok<String>>,
}
/// Refcounted handle to a macro block, exposed to usercode as an atom.
#[derive(Clone, Debug)]
pub struct Macro(pub Rc<MacroData>);
/// One compiled rewrite rule of a macro block.
#[derive(Debug)]
pub struct Rule {
  // Index of this rule within the block.
  pub index: u32,
  // Source location of the rule line, for error reporting.
  pub pos: Pos,
  // Compiled pattern matcher.
  pub pattern: Matcher,
  // All names the pattern mentions; the rule can only match input whose own
  // glossary is a superset of this set.
  pub glossary: HashSet<Sym>,
  // Placeholder names in pattern order; matched fragments are passed to the
  // rule body constant in this order.
  pub placeholders: Vec<Tok<String>>,
  // Interned name of the generated body constant.
  pub body_name: Tok<String>,
}
/// Pattern matcher flavor: keyword-anchored or priority-ordered.
#[derive(Debug)]
pub enum Matcher {
  Named(NamedMatcher),
  Priod(PriodMatcher),
}
impl Atomic for Macro {
  type Data = ();
  type Variant = OwnedVariant;
}
impl OwnedAtom for Macro {
  type Refs = Never;
  async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
}

View File

@@ -1,34 +1,49 @@
use never::Never; use never::Never;
use orchid_base::interner::Interner;
use orchid_base::name::Sym;
use orchid_base::reqnot::Receipt; use orchid_base::reqnot::Receipt;
use orchid_extension::atom::AtomDynfo; use orchid_extension::atom::{AtomDynfo, AtomicFeatures};
use orchid_extension::entrypoint::ExtReq; use orchid_extension::entrypoint::ExtReq;
use orchid_extension::fs::DeclFs;
use orchid_extension::lexer::LexerObj; use orchid_extension::lexer::LexerObj;
use orchid_extension::other_system::SystemHandle;
use orchid_extension::parser::ParserObj; use orchid_extension::parser::ParserObj;
use orchid_extension::system::{System, SystemCard}; use orchid_extension::system::{System, SystemCard};
use orchid_extension::system_ctor::SystemCtor; use orchid_extension::system_ctor::SystemCtor;
use orchid_extension::tree::GenMember; use orchid_extension::tree::GenMember;
use crate::macros::instantiate_tpl::InstantiateTplCall;
use crate::macros::let_line::LetLine;
use crate::macros::macro_lib::gen_macro_lib;
use crate::macros::macro_line::{Macro, MacroLine};
use crate::macros::mactree_lexer::MacTreeLexer; use crate::macros::mactree_lexer::MacTreeLexer;
use crate::macros::recur_state::RecurState;
use crate::{MacTree, StdSystem};
#[derive(Default)] #[derive(Default)]
pub struct MacroSystem; pub struct MacroSystem;
impl SystemCtor for MacroSystem { impl SystemCtor for MacroSystem {
type Deps = (); type Deps = StdSystem;
type Instance = Self; type Instance = Self;
const NAME: &'static str = "macros"; const NAME: &'static str = "orchid::macros";
const VERSION: f64 = 0.00_01; const VERSION: f64 = 0.00_01;
fn inst() -> Option<Self::Instance> { Some(Self) } fn inst(_: SystemHandle<StdSystem>) -> Self::Instance { Self }
} }
impl SystemCard for MacroSystem { impl SystemCard for MacroSystem {
type Ctor = Self; type Ctor = Self;
type Req = Never; type Req = Never;
fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>> { [] } fn atoms() -> impl IntoIterator<Item = Option<Box<dyn AtomDynfo>>> {
[
Some(InstantiateTplCall::dynfo()),
Some(MacTree::dynfo()),
Some(RecurState::dynfo()),
Some(Macro::dynfo()),
]
}
} }
impl System for MacroSystem { impl System for MacroSystem {
async fn request(_: ExtReq<'_>, req: Self::Req) -> Receipt<'_> { match req {} } async fn request(_: ExtReq<'_>, req: Self::Req) -> Receipt<'_> { match req {} }
fn vfs() -> orchid_extension::fs::DeclFs { DeclFs::Mod(&[]) } async fn prelude(_: &Interner) -> Vec<Sym> { vec![] }
fn lexers() -> Vec<LexerObj> { vec![&MacTreeLexer] } fn lexers() -> Vec<LexerObj> { vec![&MacTreeLexer] }
fn parsers() -> Vec<ParserObj> { vec![] } fn parsers() -> Vec<ParserObj> { vec![&LetLine, &MacroLine] }
fn env() -> Vec<GenMember> { vec![] } fn env() -> Vec<GenMember> { gen_macro_lib() }
} }

View File

@@ -2,23 +2,85 @@ use std::borrow::Cow;
use std::fmt::Display; use std::fmt::Display;
use std::rc::Rc; use std::rc::Rc;
use futures::FutureExt;
use futures::future::join_all; use futures::future::join_all;
use orchid_api::Paren; use hashbrown::HashSet;
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants}; use itertools::Itertools;
use orchid_base::error::{OrcErrv, Reporter, mk_errv};
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants, fmt};
use orchid_base::interner::Tok; use orchid_base::interner::Tok;
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::tl_cache; use orchid_base::tl_cache;
use orchid_base::tree::{Paren, indent};
use orchid_extension::atom::Atomic; use orchid_extension::atom::Atomic;
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant}; use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
use orchid_extension::conv::ToExpr;
use orchid_extension::expr::Expr; use orchid_extension::expr::Expr;
use orchid_extension::gen_expr::{GExpr, arg, bot, call, lambda, sym_ref};
use orchid_extension::system::SysCtx;
use substack::Substack;
#[derive(Clone)]
pub struct LowerCtx<'a> {
pub sys: SysCtx,
pub rep: &'a Reporter,
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct MacTree { pub struct MacTree {
pub pos: Pos, pub pos: Pos,
pub tok: Rc<MacTok>, pub tok: Rc<MacTok>,
pub glossary: Rc<HashSet<Sym>>,
}
impl MacTree {
pub fn tok(&self) -> &MacTok { &self.tok }
pub fn pos(&self) -> Pos { self.pos.clone() }
pub fn glossary(&self) -> &HashSet<Sym> { &self.glossary }
pub async fn lower(&self, ctx: LowerCtx<'_>, args: Substack<'_, Sym>) -> GExpr {
let expr = match self.tok() {
MacTok::Bottom(e) => bot(e.clone()),
MacTok::Lambda(arg, body) => {
let MacTok::Name(name) = &*arg.tok else {
let err = mk_errv(
ctx.sys.i().i("Syntax error after macros").await,
"This token ends up as a binding, consider replacing it with a name",
[arg.pos()],
);
ctx.rep.report(err.clone());
return bot(err);
};
lambda(args.len() as u64, lower_v(body, ctx, args.push(name.clone())).await)
},
MacTok::Name(name) => match args.iter().enumerate().find(|(_, n)| *n == name) {
None => sym_ref(name.clone()),
Some((i, _)) => arg((args.len() - i) as u64),
},
MacTok::Ph(ph) => {
let err = mk_errv(
ctx.sys.i().i("Placeholder in value").await,
format!("Placeholder {ph} is only supported in macro patterns"),
[self.pos()],
);
ctx.rep.report(err.clone());
return bot(err);
},
MacTok::S(Paren::Round, body) => call(lower_v(body, ctx, args).await),
MacTok::S(..) => {
let err = mk_errv(
ctx.sys.i().i("[] or {} after macros").await,
format!("{} didn't match any macro", fmt(self, ctx.sys.i()).await),
[self.pos()],
);
ctx.rep.report(err.clone());
return bot(err);
},
MacTok::Slot => panic!("Uninstantiated template should never be exposed"),
MacTok::Value(v) => v.clone().to_expr().await,
};
expr.at(self.pos())
}
} }
impl MacTree {}
impl Atomic for MacTree { impl Atomic for MacTree {
type Data = (); type Data = ();
type Variant = OwnedVariant; type Variant = OwnedVariant;
@@ -27,11 +89,20 @@ impl OwnedAtom for MacTree {
type Refs = (); type Refs = ();
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) } async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
async fn print_atom<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
self.tok.print(c).await
}
}
impl Format for MacTree {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
self.tok.print(c).await self.tok.print(c).await
} }
} }
pub async fn lower_v(v: &[MacTree], ctx: LowerCtx<'_>, args: Substack<'_, Sym>) -> Vec<GExpr> {
join_all(v.iter().map(|t| t.lower(ctx.clone(), args.clone())).collect::<Vec<_>>()).await
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum MacTok { pub enum MacTok {
S(Paren, Vec<MacTree>), S(Paren, Vec<MacTree>),
@@ -39,8 +110,25 @@ pub enum MacTok {
/// Only permitted in arguments to `instantiate_tpl` /// Only permitted in arguments to `instantiate_tpl`
Slot, Slot,
Value(Expr), Value(Expr),
Lambda(Vec<MacTree>, Vec<MacTree>), Lambda(MacTree, Vec<MacTree>),
/// Only permitted in "pattern" values produced by macro blocks, which are
/// never accessed as variables by usercode
Ph(Ph), Ph(Ph),
Bottom(OrcErrv),
}
impl MacTok {
pub fn build_glossary(&self) -> HashSet<Sym> {
match self {
MacTok::Bottom(_) | MacTok::Ph(_) | MacTok::Slot | MacTok::Value(_) => HashSet::new(),
MacTok::Name(sym) => HashSet::from([sym.clone()]),
MacTok::S(_, body) => body.iter().flat_map(|mt| &*mt.glossary).cloned().collect(),
MacTok::Lambda(arg, body) =>
body.iter().chain([arg]).flat_map(|mt| &*mt.glossary).cloned().collect(),
}
}
pub fn at(self, pos: impl Into<Pos>) -> MacTree {
MacTree { pos: pos.into(), glossary: Rc::new(self.build_glossary()), tok: Rc::new(self) }
}
} }
impl Format for MacTok { impl Format for MacTok {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
@@ -50,7 +138,7 @@ impl Format for MacTok {
tl_cache!(Rc<Variants>: Rc::new(Variants::default() tl_cache!(Rc<Variants>: Rc::new(Variants::default()
.unbounded("\\{0b}.{1l}") .unbounded("\\{0b}.{1l}")
.bounded("(\\{0b}.{1b})"))), .bounded("(\\{0b}.{1b})"))),
[mtreev_fmt(arg, c).await, mtreev_fmt(b, c).await], [arg.print(c).boxed_local().await, mtreev_fmt(b, c).await],
), ),
Self::Name(n) => format!("{n}").into(), Self::Name(n) => format!("{n}").into(),
Self::Ph(ph) => format!("{ph}").into(), Self::Ph(ph) => format!("{ph}").into(),
@@ -62,7 +150,9 @@ impl Format for MacTok {
}, },
[mtreev_fmt(body, c).await], [mtreev_fmt(body, c).await],
), ),
Self::Slot => "SLOT".into(), Self::Slot => "$SLOT".into(),
Self::Bottom(err) if err.len() == 1 => format!("Bottom({}) ", err.one().unwrap()).into(),
Self::Bottom(err) => format!("Botttom(\n{}) ", indent(&err.to_string())).into(),
} }
} }
} }
@@ -96,3 +186,46 @@ pub enum PhKind {
Scalar, Scalar,
Vector { at_least_one: bool, priority: u8 }, Vector { at_least_one: bool, priority: u8 },
} }
pub fn map_mactree<F: FnMut(MacTree) -> Option<MacTree>>(
src: &MacTree,
changed: &mut bool,
map: &mut F,
) -> MacTree {
let tok = match map(src.clone()) {
Some(new_tok) => {
*changed = true;
return new_tok;
},
None => match &*src.tok {
MacTok::Lambda(arg, body) => MacTok::Lambda(
ro(changed, |changed| map_mactree(arg, changed, map)),
map_mactree_v(body, changed, map),
),
MacTok::Name(_) | MacTok::Value(_) => return src.clone(),
MacTok::Slot | MacTok::Ph(_) | MacTok::Bottom(_) => return src.clone(),
MacTok::S(p, body) => MacTok::S(*p, map_mactree_v(body, changed, map)),
},
};
if *changed { tok.at(src.pos()) } else { src.clone() }
}
pub fn map_mactree_v<F: FnMut(MacTree) -> Option<MacTree>>(
src: &[MacTree],
changed: &mut bool,
map: &mut F,
) -> Vec<MacTree> {
src.iter().map(|tree| ro(changed, |changed| map_mactree(tree, changed, map))).collect_vec()
}
/// reverse "or". Inside, the flag is always false, but raising it will raise
/// the outside flag too.
fn ro<T>(flag: &mut bool, cb: impl FnOnce(&mut bool) -> T) -> T {
let mut new_flag = false;
let val = cb(&mut new_flag);
*flag |= new_flag;
val
}
pub fn glossary_v(src: &[MacTree]) -> impl Iterator<Item = Sym> {
src.iter().flat_map(|mt| mt.glossary()).cloned()
}

View File

@@ -1,12 +1,16 @@
use std::ops::RangeInclusive; use std::ops::RangeInclusive;
use std::rc::Rc;
use futures::FutureExt; use futures::FutureExt;
use orchid_base::error::{OrcRes, mk_errv}; use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::parse::ParseCtx;
use orchid_base::sym;
use orchid_base::tokens::PARENS; use orchid_base::tokens::PARENS;
use orchid_base::tree::Paren;
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable}; use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
use orchid_extension::tree::{GenTok, GenTokTree, x_tok}; use orchid_extension::parser::p_tree2gen;
use orchid_extension::tree::{GenTok, GenTokTree, ref_tok, x_tok};
use crate::macros::let_line::parse_tok;
use crate::macros::mactree::{MacTok, MacTree}; use crate::macros::mactree::{MacTok, MacTree};
#[derive(Default)] #[derive(Default)]
@@ -15,24 +19,41 @@ impl Lexer for MacTreeLexer {
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['\''..='\'']; const CHAR_FILTER: &'static [RangeInclusive<char>] = &['\''..='\''];
async fn lex<'a>(tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> { async fn lex<'a>(tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
let Some(tail2) = tail.strip_prefix('\'') else { let Some(tail2) = tail.strip_prefix('\'') else {
return Err(err_not_applicable(ctx.i()).await.into()); return Err(err_not_applicable(ctx.i()).await);
}; };
let tail3 = tail2.trim_start(); let tail3 = tail2.trim_start();
return match mac_tree(tail3, ctx).await { let mut args = Vec::new();
Ok((tail4, mactree)) => Ok((tail4, x_tok(mactree).at(ctx.pos_tt(tail, tail4)))), return match mac_tree(tail3, &mut args, ctx).await {
Ok((tail4, mactree)) => {
let range = ctx.pos_tt(tail, tail4);
let tok = match &args[..] {
[] => x_tok(mactree).await,
_ => {
let call = ([
ref_tok(sym!(macros::instantiate_tpl; ctx.i()).await).await.at(range.clone()),
x_tok(mactree).await.at(range.clone()),
]
.into_iter())
.chain(args.into_iter());
GenTok::S(Paren::Round, call.collect())
},
};
Ok((tail4, tok.at(range)))
},
Err(e) => Ok((tail2, GenTok::Bottom(e).at(ctx.pos_lt(1, tail2)))), Err(e) => Ok((tail2, GenTok::Bottom(e).at(ctx.pos_lt(1, tail2)))),
}; };
async fn mac_tree<'a>(tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, MacTree)> { async fn mac_tree<'a>(
tail: &'a str,
args: &mut Vec<GenTokTree>,
ctx: &'a LexContext<'a>,
) -> OrcRes<(&'a str, MacTree)> {
for (lp, rp, paren) in PARENS { for (lp, rp, paren) in PARENS {
let Some(mut body_tail) = tail.strip_prefix(*lp) else { continue }; let Some(mut body_tail) = tail.strip_prefix(*lp) else { continue };
let mut items = Vec::new(); let mut items = Vec::new();
return loop { return loop {
let tail2 = body_tail.trim(); let tail2 = body_tail.trim_start();
if let Some(tail3) = tail2.strip_prefix(*rp) { if let Some(tail3) = tail2.strip_prefix(*rp) {
break Ok((tail3, MacTree { break Ok((tail3, MacTok::S(*paren, items).at(ctx.pos_tt(tail, tail3).pos())));
pos: ctx.pos_tt(tail, tail3).pos(),
tok: Rc::new(MacTok::S(*paren, items)),
}));
} else if tail2.is_empty() { } else if tail2.is_empty() {
return Err(mk_errv( return Err(mk_errv(
ctx.i().i("Unclosed block").await, ctx.i().i("Unclosed block").await,
@@ -40,22 +61,36 @@ impl Lexer for MacTreeLexer {
[ctx.pos_lt(1, tail)], [ctx.pos_lt(1, tail)],
)); ));
} }
let (new_tail, new_item) = mac_tree(tail2, ctx).boxed_local().await?; let (new_tail, new_item) = mac_tree(tail2, args, ctx).boxed_local().await?;
body_tail = new_tail; body_tail = new_tail;
items.push(new_item); items.push(new_item);
}; };
} }
const INTERPOL: &[&str] = &["$", "..$"]; if let Some(tail2) = tail.strip_prefix("$") {
for pref in INTERPOL { let (tail3, sub) = ctx.recurse(tail2).await?;
let Some(code) = tail.strip_prefix(pref) else { continue }; let sr = ctx.pos_tt(tail, tail3);
todo!("Register parameter, and push this onto the argument stack held in the atom") args.push(p_tree2gen(sub));
return Ok((tail3, MacTok::Slot.at(sr.pos())));
}
if let Some(tail2) = tail.strip_prefix("\\") {
let tail2 = tail2.trim_start();
let (mut tail3, param) = mac_tree(tail2, args, ctx).boxed_local().await?;
let mut body = Vec::new();
loop {
let tail4 = tail3.trim_start();
if tail4.is_empty() || tail4.starts_with(|c| ")]}".contains(c)) {
break;
};
let (tail5, body_tok) = mac_tree(tail4, args, ctx).boxed_local().await?;
body.push(body_tok);
tail3 = tail5;
}
Ok((tail3, MacTok::Lambda(param, body).at(ctx.pos_tt(tail, tail3).pos())))
} else {
let (tail2, sub) = ctx.recurse(tail).await?;
let parsed = parse_tok(&sub, ctx).await.expect("Unexpected invalid token");
Ok((tail2, parsed))
} }
todo!("recursive lexer call");
return Err(mk_errv(
ctx.i().i("Expected token after '").await,
format!("Expected a token after ', found {tail:?}"),
[ctx.pos_lt(1, tail)],
));
} }
} }
} }

View File

@@ -1,6 +1,12 @@
mod macro_system; mod instantiate_tpl;
mod let_line;
mod macro_lib;
mod macro_line;
pub mod macro_system;
pub mod mactree; pub mod mactree;
mod mactree_lexer; mod mactree_lexer;
pub mod recur_state;
mod resolve;
mod rule; mod rule;
use mactree::{MacTok, MacTree}; use mactree::{MacTok, MacTree};

View File

@@ -0,0 +1,71 @@
use std::borrow::Cow;
use std::fmt;
use std::rc::Rc;
use never::Never;
use orchid_base::format::{FmtCtx, FmtUnit};
use orchid_base::interner::Tok;
use orchid_base::name::Sym;
use orchid_extension::atom::Atomic;
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
/// Globally unique identifier of a single macro rule: the module it lives in,
/// its block's canonical (first) keyword, and the rule's index in the block.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct RulePath {
  pub module: Sym,
  pub main_kw: Tok<String>,
  pub rule: u32,
}
impl RulePath {
  /// Module-relative name of the generated constant holding the rule's body.
  pub fn name(&self) -> String { format!("rule::{}::{}", self.main_kw, self.rule) }
}
impl fmt::Display for RulePath {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    write!(f, "Rule {}::({})::{}", self.module, self.main_kw, self.rule)
  }
}
/// Linked stack of the macro rules currently being expanded, used to detect
/// circular macro expansion.
#[derive(Clone)]
pub enum RecurState {
  // Empty stack: no rule application in progress.
  Bottom,
  // One rule application on top of the previous state.
  Recursive { path: RulePath, prev: Rc<RecurState> },
}
impl RecurState {
pub fn base(path: RulePath) -> Self {
RecurState::Recursive { path, prev: Rc::new(RecurState::Bottom) }
}
pub fn push(&self, new: RulePath) -> Option<Self> {
let mut cur = self;
while let Self::Recursive { path, prev } = cur {
if &new == path {
return None;
}
cur = prev;
}
Some(Self::Recursive { path: new, prev: Rc::new(self.clone()) })
}
}
impl Atomic for RecurState {
  // Exposed payload only distinguishes empty (None) from non-empty (Some).
  type Data = Option<()>;
  type Variant = OwnedVariant;
}
impl OwnedAtom for RecurState {
  type Refs = Never;
  async fn val(&self) -> Cow<'_, Self::Data> {
    Cow::Owned(match self {
      Self::Bottom => None,
      Self::Recursive { .. } => Some(()),
    })
  }
  async fn print_atom<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
    self.to_string().into()
  }
}
impl fmt::Display for RecurState {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    match self {
      Self::Bottom => write!(f, "RecurState::Bottom"),
      // Prints the stack top-down, one rule per line, ending with Bottom.
      Self::Recursive { path, prev } => write!(f, "{path}\n{prev}"),
    }
  }
}

View File

@@ -0,0 +1,110 @@
use futures::FutureExt;
use hashbrown::HashMap;
use itertools::Itertools;
use orchid_base::error::mk_errv;
use orchid_base::location::Pos;
use orchid_base::name::Sym;
use orchid_base::sym;
use orchid_base::tree::Paren;
use orchid_extension::conv::ToExpr;
use orchid_extension::coroutine_exec::ExecHandle;
use orchid_extension::gen_expr::{GExpr, bot, call, sym_ref};
use orchid_extension::system::SysCtx;
use crate::macros::macro_line::{Macro, Rule};
use crate::macros::recur_state::{RecurState, RulePath};
use crate::macros::rule::matcher::{NamedMatcher, PriodMatcher};
use crate::macros::rule::state::{MatchState, StateEntry};
use crate::{MacTok, MacTree};
/// Everything one `resolve_recur` invocation carries: the indexed applicable
/// rules, the recursion stack, and a handle for registering generated calls.
pub struct ResolveCtx<'a> {
  pub ctx: SysCtx,
  // Rules already being expanded, for cycle detection.
  pub recur: RecurState,
  // Coroutine handle used to register rule-body call expressions.
  pub h: ExecHandle<'a>,
  // Named (keyword-anchored) rules indexed by their head keyword.
  pub named: HashMap<Sym, Vec<(&'a NamedMatcher, &'a Macro, &'a Rule)>>,
  // Prioritized rules, pre-sorted by the caller.
  pub priod: Vec<(&'a PriodMatcher, &'a Macro, &'a Rule)>,
}
/// Applies macro rules recursively inside `value`. Returns the rewritten tree,
/// or None if nothing matched anywhere in the subtree.
pub async fn resolve(ctx: &mut ResolveCtx<'_>, value: &MacTree) -> Option<MacTree> {
  match value.tok() {
    // Pattern-only tokens must never appear in a value position.
    MacTok::Ph(_) | MacTok::Slot => panic!("Forbidden element in value mactree"),
    // Leaves contain no sequences for rules to match against.
    MacTok::Bottom(_) | MacTok::Value(_) | MacTok::Name(_) => None,
    MacTok::Lambda(arg, body) =>
      Some(MacTok::Lambda(arg.clone(), resolve_seq(ctx, body).await?).at(value.pos())),
    MacTok::S(ptyp, body) => Some(MacTok::S(*ptyp, resolve_seq(ctx, body).await?).at(value.pos())),
  }
}
/// Applies macro rules to a token sequence: first named rules (anchored on
/// keywords, scanning left to right), then the first applicable prioritized
/// rule, then recursion into remaining subtrees. None means nothing changed.
pub async fn resolve_seq(ctx: &mut ResolveCtx<'_>, val: &[MacTree]) -> Option<Vec<MacTree>> {
  let mut any_changed = false;
  let mut i = 0;
  let mut val = val.to_vec();
  'all_named: while i < val.len() {
    'one_named: {
      let MacTok::Name(key) = val[i].tok() else { break 'one_named };
      let Some(options) = ctx.named.get(key) else { break 'one_named };
      // Try every rule anchored on this keyword against the tail at i.
      let matches = (options.iter())
        .filter_map(|r| Some((r.1, r.2, r.0.apply(&val[i..], |_| false)?)))
        .collect_vec();
      match matches.len() {
        0 => break 'one_named,
        1 => {
          any_changed = true;
          let (mac, rule, (state, tail)) = matches.into_iter().exactly_one().unwrap();
          // `tail` is the unconsumed suffix: the match covered val[i..end].
          let end = val.len() - tail.len();
          let body_call = mk_body_call(mac, rule, &state, &ctx.ctx, ctx.recur.clone()).await;
          // Drop the match state before splicing: it borrows from `val`.
          std::mem::drop(state);
          val.splice(i..end, [MacTok::Value(ctx.h.register(body_call).await).at(Pos::None)]);
          // NOTE(review): after the splice the old tail begins at i + 1, so
          // `i = end` skips `end - i - 1` elements from the named-rule scan —
          // confirm this is intended rather than `i += 1`.
          i = end;
        },
        // Ambiguous named matches are not resolved yet.
        2.. => todo!("Named macros conflict!"),
      }
      continue 'all_named;
    }
    i += 1;
  }
  // Prioritized rules: the first (lowest-key in the pre-sorted list) matcher
  // that applies consumes the entire sequence.
  for (matcher, mac, rule) in &ctx.priod {
    let Some(state) = matcher.apply(&val, |_| false) else { continue };
    return Some(vec![
      MacTok::Value(
        ctx.h.register(mk_body_call(mac, rule, &state, &ctx.ctx, ctx.recur.clone()).await).await,
      )
      .at(Pos::None),
    ]);
  }
  // No sequence-level match: recurse into each element individually.
  for expr in val.iter_mut() {
    if let Some(new) = resolve(ctx, expr).boxed_local().await {
      *expr = new;
      any_changed = true;
    }
  }
  if any_changed { Some(val) } else { None }
}
/// Build the expression that evaluates a matched rule's body: a call of the
/// rule's body function with one argument per placeholder (in declaration
/// order) plus a final recursion-continuation argument. Returns a bottom
/// expression instead if pushing the rule onto the recursion state would
/// create a cycle.
async fn mk_body_call(
  mac: &Macro,
  rule: &Rule,
  state: &MatchState<'_>,
  ctx: &SysCtx,
  recur: RecurState,
) -> GExpr {
  // A rule is identified by its module, the macro's first own keyword, and
  // its index within the macro.
  let rule_path =
    RulePath { module: mac.0.module.clone(), main_kw: mac.0.own_kws[0].clone(), rule: rule.index };
  let Some(new_recur) = recur.push(rule_path.clone()) else {
    return bot(mk_errv(
      ctx.i().i("Circular macro dependency").await,
      format!("The definition of {rule_path} is circular"),
      [rule.pos.clone()],
    ));
  };
  // First argument position is the body function itself, referenced by name.
  let mut call_args = vec![sym_ref(mac.0.module.suffix([rule.body_name.clone()], ctx.i()).await)];
  for name in rule.placeholders.iter() {
    call_args.push(match state.get(name).expect("Missing state entry for placeholder") {
      StateEntry::Scalar(scal) => (**scal).clone().to_expr().await,
      // Vectorial captures are wrapped in a round-paren group to form one expr.
      StateEntry::Vec(vec) => MacTok::S(Paren::Round, vec.to_vec()).at(Pos::None).to_expr().await,
    });
  }
  // Last argument: the resolver re-entry point, pre-applied with the extended
  // recursion state so the body can continue expansion.
  call_args
    .push(call([sym_ref(sym!(macros::resolve_recur; ctx.i()).await), new_recur.to_expr().await]));
  call(call_args)
}

View File

@@ -1,5 +1,9 @@
use futures::FutureExt;
use futures::future::join_all;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::interner::Tok; use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::interner::{Interner, Tok};
use orchid_base::join_ok;
use orchid_base::side::Side; use orchid_base::side::Side;
use super::shared::{AnyMatcher, ScalMatcher, VecMatcher}; use super::shared::{AnyMatcher, ScalMatcher, VecMatcher};
@@ -12,7 +16,7 @@ pub type MaxVecSplit<'a> = (&'a [MacTree], (Tok<String>, u8, bool), &'a [MacTree
/// Derive the details of the central vectorial and the two sides from a /// Derive the details of the central vectorial and the two sides from a
/// slice of Expr's /// slice of Expr's
#[must_use] #[must_use]
fn split_at_max_vec(pattern: &[MacTree]) -> Option<MaxVecSplit> { fn split_at_max_vec(pattern: &'_ [MacTree]) -> Option<MaxVecSplit<'_>> {
let rngidx = pattern let rngidx = pattern
.iter() .iter()
.position_max_by_key(|expr| vec_attrs(expr).map(|attrs| attrs.1 as i64).unwrap_or(-1))?; .position_max_by_key(|expr| vec_attrs(expr).map(|attrs| attrs.1 as i64).unwrap_or(-1))?;
@@ -27,25 +31,29 @@ fn scal_cnt<'a>(iter: impl Iterator<Item = &'a MacTree>) -> usize {
iter.take_while(|expr| vec_attrs(expr).is_none()).count() iter.take_while(|expr| vec_attrs(expr).is_none()).count()
} }
#[must_use] pub async fn mk_any(pattern: &[MacTree], i: &Interner) -> OrcRes<AnyMatcher> {
pub fn mk_any(pattern: &[MacTree]) -> AnyMatcher {
let left_split = scal_cnt(pattern.iter()); let left_split = scal_cnt(pattern.iter());
if pattern.len() <= left_split { if pattern.len() <= left_split {
return AnyMatcher::Scalar(mk_scalv(pattern)); return Ok(AnyMatcher::Scalar(mk_scalv(pattern, i).await?));
} }
let (left, not_left) = pattern.split_at(left_split); let (left, not_left) = pattern.split_at(left_split);
let right_split = not_left.len() - scal_cnt(pattern.iter().rev()); let right_split = not_left.len() - scal_cnt(pattern.iter().rev());
let (mid, right) = not_left.split_at(right_split); let (mid, right) = not_left.split_at(right_split);
AnyMatcher::Vec { left: mk_scalv(left), mid: mk_vec(mid), right: mk_scalv(right) } join_ok! {
left = mk_scalv(left, i).await;
mid = mk_vec(mid, i).await;
right = mk_scalv(right, i).await;
}
Ok(AnyMatcher::Vec { left, mid, right })
} }
/// Pattern MUST NOT contain vectorial placeholders /// Pattern MUST NOT contain vectorial placeholders
#[must_use] async fn mk_scalv(pattern: &[MacTree], i: &Interner) -> OrcRes<Vec<ScalMatcher>> {
fn mk_scalv(pattern: &[MacTree]) -> Vec<ScalMatcher> { pattern.iter().map(mk_scalar).collect() } join_all(pattern.iter().map(|pat| mk_scalar(pat, i))).await.into_iter().collect()
}
/// Pattern MUST start and end with a vectorial placeholder /// Pattern MUST start and end with a vectorial placeholder
#[must_use] pub async fn mk_vec(pattern: &[MacTree], i: &Interner) -> OrcRes<VecMatcher> {
pub fn mk_vec(pattern: &[MacTree]) -> VecMatcher {
debug_assert!(!pattern.is_empty(), "pattern cannot be empty"); debug_assert!(!pattern.is_empty(), "pattern cannot be empty");
debug_assert!(pattern.first().map(vec_attrs).is_some(), "pattern must start with a vectorial"); debug_assert!(pattern.first().map(vec_attrs).is_some(), "pattern must start with a vectorial");
debug_assert!(pattern.last().map(vec_attrs).is_some(), "pattern must end with a vectorial"); debug_assert!(pattern.last().map(vec_attrs).is_some(), "pattern must end with a vectorial");
@@ -57,39 +65,56 @@ pub fn mk_vec(pattern: &[MacTree]) -> VecMatcher {
let (l_side, l_sep) = left.split_at(left.len() - l_sep_size); let (l_side, l_sep) = left.split_at(left.len() - l_sep_size);
let main = VecMatcher::Placeh { key: key.clone(), nonzero }; let main = VecMatcher::Placeh { key: key.clone(), nonzero };
match (left, right) { match (left, right) {
(&[], &[]) => VecMatcher::Placeh { key, nonzero }, (&[], &[]) => Ok(VecMatcher::Placeh { key, nonzero }),
(&[], _) => VecMatcher::Scan { (&[], _) => {
join_ok! {
sep = mk_scalv(r_sep, i).await;
right = mk_vec(r_side, i).boxed_local().await;
}
Ok(VecMatcher::Scan {
direction: Side::Left, direction: Side::Left,
left: Box::new(main), left: Box::new(main),
sep: mk_scalv(r_sep), sep,
right: Box::new(mk_vec(r_side)), right: Box::new(right),
})
}, },
(_, &[]) => VecMatcher::Scan { (_, &[]) => {
join_ok! {
left = mk_vec(l_side, i).boxed_local().await;
sep = mk_scalv(l_sep, i).await;
}
Ok(VecMatcher::Scan {
direction: Side::Right, direction: Side::Right,
left: Box::new(mk_vec(l_side)), left: Box::new(left),
sep: mk_scalv(l_sep), sep,
right: Box::new(main), right: Box::new(main),
})
}, },
(..) => { (..) => {
let mut key_order = let mut key_order =
l_side.iter().chain(r_side.iter()).filter_map(vec_attrs).collect::<Vec<_>>(); l_side.iter().chain(r_side.iter()).filter_map(vec_attrs).collect::<Vec<_>>();
key_order.sort_by_key(|(_, prio, _)| -(*prio as i64)); key_order.sort_by_key(|(_, prio, _)| -(*prio as i64));
VecMatcher::Middle { join_ok! {
left: Box::new(mk_vec(l_side)), left = mk_vec(l_side, i).boxed_local().await;
left_sep: mk_scalv(l_sep), left_sep = mk_scalv(l_sep, i).await;
mid: Box::new(main), right_sep = mk_scalv(r_sep, i).await;
right_sep: mk_scalv(r_sep), right = mk_vec(r_side, i).boxed_local().await;
right: Box::new(mk_vec(r_side)),
key_order: key_order.into_iter().map(|(n, ..)| n).collect(),
} }
Ok(VecMatcher::Middle {
left: Box::new(left),
left_sep,
mid: Box::new(main),
right_sep,
right: Box::new(right),
key_order: key_order.into_iter().map(|(n, ..)| n).collect(),
})
}, },
} }
} }
/// Pattern MUST NOT be a vectorial placeholder /// Pattern MUST NOT be a vectorial placeholder
#[must_use] async fn mk_scalar(pattern: &MacTree, i: &Interner) -> OrcRes<ScalMatcher> {
fn mk_scalar(pattern: &MacTree) -> ScalMatcher { Ok(match &*pattern.tok {
match &*pattern.tok {
MacTok::Name(n) => ScalMatcher::Name(n.clone()), MacTok::Name(n) => ScalMatcher::Name(n.clone()),
MacTok::Ph(Ph { name, kind }) => match kind { MacTok::Ph(Ph { name, kind }) => match kind {
PhKind::Vector { .. } => { PhKind::Vector { .. } => {
@@ -97,16 +122,20 @@ fn mk_scalar(pattern: &MacTree) -> ScalMatcher {
}, },
PhKind::Scalar => ScalMatcher::Placeh { key: name.clone() }, PhKind::Scalar => ScalMatcher::Placeh { key: name.clone() },
}, },
MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(body))), MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(body, i).boxed_local().await?)),
MacTok::Lambda(arg, body) => ScalMatcher::Lambda(Box::new(mk_any(arg)), Box::new(mk_any(body))), MacTok::Lambda(..) =>
return Err(mk_errv(
i.i("Lambda in matcher").await,
"Lambdas can't be matched for, only generated in templates",
[pattern.pos()],
)),
MacTok::Value(_) | MacTok::Slot => panic!("Only used for templating"), MacTok::Value(_) | MacTok::Slot => panic!("Only used for templating"),
} MacTok::Bottom(errv) => return Err(errv.clone()),
})
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use std::rc::Rc;
use orchid_base::interner::Interner; use orchid_base::interner::Interner;
use orchid_base::location::SrcRange; use orchid_base::location::SrcRange;
use orchid_base::sym; use orchid_base::sym;
@@ -114,15 +143,14 @@ mod test {
use test_executors::spin_on; use test_executors::spin_on;
use super::mk_any; use super::mk_any;
use crate::macros::MacTok;
use crate::macros::mactree::{Ph, PhKind}; use crate::macros::mactree::{Ph, PhKind};
use crate::macros::{MacTok, MacTree};
#[test] #[test]
fn test_scan() { fn test_scan() {
spin_on(async { spin_on(async {
let i = Interner::new_master(); let i = Interner::new_master();
let ex = let ex = |tok: MacTok| async { tok.at(SrcRange::mock(&i).await.pos()) };
|tok: MacTok| async { MacTree { tok: Rc::new(tok), pos: SrcRange::mock(&i).await.pos() } };
let pattern = vec![ let pattern = vec![
ex(MacTok::Ph(Ph { ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 0, at_least_one: false }, kind: PhKind::Vector { priority: 0, at_least_one: false },
@@ -150,7 +178,7 @@ mod test {
})) }))
.await, .await,
]; ];
let matcher = mk_any(&pattern); let matcher = mk_any(&pattern, &i).await.expect("This matcher isn't broken");
println!("{matcher}"); println!("{matcher}");
}) })
} }

View File

@@ -1,8 +1,8 @@
use std::fmt; use std::fmt;
use std::rc::Rc;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::interner::Interner; use orchid_base::error::OrcRes;
use orchid_base::interner::{Interner, Tok};
use orchid_base::location::Pos; use orchid_base::location::Pos;
use orchid_base::name::Sym; use orchid_base::name::Sym;
@@ -15,46 +15,47 @@ use super::vec_match::vec_match;
use crate::macros::mactree::{Ph, PhKind}; use crate::macros::mactree::{Ph, PhKind};
use crate::macros::{MacTok, MacTree}; use crate::macros::{MacTok, MacTree};
pub fn first_is_vec(pattern: &[MacTree]) -> bool { vec_attrs(pattern.first().unwrap()).is_some() } pub struct NamedMatcher {
pub fn last_is_vec(pattern: &[MacTree]) -> bool { vec_attrs(pattern.last().unwrap()).is_some() } inner: AnyMatcher,
head: Sym,
pub struct NamedMatcher(AnyMatcher); after_tok: Tok<String>,
}
impl NamedMatcher { impl NamedMatcher {
pub async fn new(pattern: &[MacTree], i: &Interner) -> Self { pub async fn new(pattern: &[MacTree], i: &Interner) -> OrcRes<Self> {
assert!( let head = match pattern.first().map(|tree| tree.tok()) {
matches!(pattern.first().map(|tree| &*tree.tok), Some(MacTok::Name(_))), Some(MacTok::Name(name)) => name.clone(),
"Named matchers must begin with a name" _ => panic!("Named matchers must begin with a name"),
); };
let after_tok = i.i("::after").await;
match last_is_vec(pattern) { let inner = match pattern.last().and_then(vec_attrs).is_some() {
true => Self(mk_any(pattern)), true => mk_any(pattern, i).await?,
false => { false => {
let kind = PhKind::Vector { priority: 0, at_least_one: false }; let kind = PhKind::Vector { priority: 0, at_least_one: false };
let tok = MacTok::Ph(Ph { name: i.i("::after").await, kind }); let suffix = [MacTok::Ph(Ph { name: after_tok.clone(), kind }).at(Pos::None)];
let suffix = [MacTree { pos: Pos::None, tok: Rc::new(tok) }]; mk_any(&pattern.iter().cloned().chain(suffix).collect_vec(), i).await?
Self(mk_any(&pattern.iter().chain(&suffix).cloned().collect_vec()))
}, },
};
Ok(Self { after_tok, inner, head })
} }
} pub fn head(&self) -> Sym { self.head.clone() }
/// Also returns the tail, if any, which should be matched further /// Also returns the tail, if any, which should be matched further
/// Note that due to how priod works below, the main usable information from /// Note that due to how priod works below, the main usable information from
/// the tail is its length /// the tail is its length
pub async fn apply<'a>( pub fn apply<'a>(
&self, &self,
seq: &'a [MacTree], seq: &'a [MacTree],
i: &Interner,
save_loc: impl Fn(Sym) -> bool, save_loc: impl Fn(Sym) -> bool,
) -> Option<(MatchState<'a>, &'a [MacTree])> { ) -> Option<(MatchState<'a>, &'a [MacTree])> {
let mut state = any_match(&self.0, seq, &save_loc)?; let mut state = any_match(&self.inner, seq, &save_loc)?;
match state.remove(i.i("::after").await) { match state.remove(self.after_tok.clone()) {
Some(StateEntry::Scalar(_)) => panic!("::after can never be a scalar entry!"), Some(StateEntry::Scalar(_)) => panic!("{} can never be a scalar entry!", self.after_tok),
Some(StateEntry::Vec(v)) => Some((state, v)), Some(StateEntry::Vec(v)) => Some((state, v)),
None => Some((state, &[][..])), None => Some((state, &[][..])),
} }
} }
} }
impl fmt::Display for NamedMatcher { impl fmt::Display for NamedMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) }
} }
impl fmt::Debug for NamedMatcher { impl fmt::Debug for NamedMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "NamedMatcher({self})") } fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "NamedMatcher({self})") }
@@ -62,12 +63,12 @@ impl fmt::Debug for NamedMatcher {
pub struct PriodMatcher(VecMatcher); pub struct PriodMatcher(VecMatcher);
impl PriodMatcher { impl PriodMatcher {
pub fn new(pattern: &[MacTree]) -> Self { pub async fn new(pattern: &[MacTree], i: &Interner) -> OrcRes<Self> {
assert!( assert!(
pattern.first().and_then(vec_attrs).is_some() && pattern.last().and_then(vec_attrs).is_some(), pattern.first().and_then(vec_attrs).is_some() && pattern.last().and_then(vec_attrs).is_some(),
"Prioritized matchers must start and end with a vectorial", "Prioritized matchers must start and end with a vectorial",
); );
Self(mk_vec(pattern)) Ok(Self(mk_vec(pattern, i).await?))
} }
/// tokens before the offset always match the prefix /// tokens before the offset always match the prefix
pub fn apply<'a>( pub fn apply<'a>(

View File

@@ -20,8 +20,6 @@ pub fn scal_match<'a>(
Some(MatchState::from_ph(key.clone(), StateEntry::Scalar(expr))), Some(MatchState::from_ph(key.clone(), StateEntry::Scalar(expr))),
(ScalMatcher::S(c1, b_mat), MacTok::S(c2, body)) if c1 == c2 => (ScalMatcher::S(c1, b_mat), MacTok::S(c2, body)) if c1 == c2 =>
any_match(b_mat, &body[..], save_loc), any_match(b_mat, &body[..], save_loc),
(ScalMatcher::Lambda(arg_mat, b_mat), MacTok::Lambda(arg, body)) =>
Some(any_match(arg_mat, arg, save_loc)?.combine(any_match(b_mat, body, save_loc)?)),
_ => None, _ => None,
} }
} }

View File

@@ -11,7 +11,6 @@ use orchid_base::tokens::{PARENS, Paren};
pub enum ScalMatcher { pub enum ScalMatcher {
Name(Sym), Name(Sym),
S(Paren, Box<AnyMatcher>), S(Paren, Box<AnyMatcher>),
Lambda(Box<AnyMatcher>, Box<AnyMatcher>),
Placeh { key: Tok<String> }, Placeh { key: Tok<String> },
} }
@@ -62,7 +61,6 @@ impl fmt::Display for ScalMatcher {
let (l, r, _) = PARENS.iter().find(|r| r.2 == *t).unwrap(); let (l, r, _) = PARENS.iter().find(|r| r.2 == *t).unwrap();
write!(f, "{l}{body}{r}") write!(f, "{l}{body}{r}")
}, },
Self::Lambda(arg, body) => write!(f, "\\{arg}.{body}"),
} }
} }
} }

View File

@@ -54,6 +54,7 @@ impl<'a> MatchState<'a> {
pub fn from_name(name: Sym, location: Pos) -> Self { pub fn from_name(name: Sym, location: Pos) -> Self {
Self { name_posv: HashMap::from([(name, vec![location])]), placeholders: HashMap::new() } Self { name_posv: HashMap::from([(name, vec![location])]), placeholders: HashMap::new() }
} }
pub fn get(&self, key: &Tok<String>) -> Option<&StateEntry<'a>> { self.placeholders.get(key) }
pub fn remove(&mut self, name: Tok<String>) -> Option<StateEntry<'a>> { pub fn remove(&mut self, name: Tok<String>) -> Option<StateEntry<'a>> {
self.placeholders.remove(&name) self.placeholders.remove(&name)
} }

View File

@@ -1,6 +1,8 @@
use orchid_extension::entrypoint::ExtensionData; use orchid_extension::entrypoint::ExtensionData;
use orchid_extension::tokio::tokio_main; use orchid_extension::tokio::tokio_main;
use orchid_std::StdSystem; use orchid_std::{MacroSystem, StdSystem};
#[tokio::main(flavor = "current_thread")] #[tokio::main(flavor = "current_thread")]
pub async fn main() { tokio_main(ExtensionData::new("orchid-std::main", &[&StdSystem])).await } pub async fn main() {
tokio_main(ExtensionData::new("orchid-std::main", &[&StdSystem, &MacroSystem])).await
}

View File

@@ -1,7 +1,7 @@
use std::ops::RangeInclusive; use std::ops::RangeInclusive;
use orchid_base::error::OrcRes; use orchid_base::error::OrcRes;
use orchid_base::number::{num_to_err, parse_num}; use orchid_base::number::{num_to_errv, parse_num};
use orchid_extension::atom::ToAtom; use orchid_extension::atom::ToAtom;
use orchid_extension::lexer::{LexContext, Lexer}; use orchid_extension::lexer::{LexContext, Lexer};
use orchid_extension::tree::{GenTokTree, x_tok}; use orchid_extension::tree::{GenTokTree, x_tok};
@@ -17,8 +17,8 @@ impl Lexer for NumLexer {
let (chars, tail) = all.split_at(ends_at.unwrap_or(all.len())); let (chars, tail) = all.split_at(ends_at.unwrap_or(all.len()));
let fac = match parse_num(chars) { let fac = match parse_num(chars) {
Ok(numeric) => Num(numeric).to_atom_factory(), Ok(numeric) => Num(numeric).to_atom_factory(),
Err(e) => return Err(num_to_err(e, ctx.pos(all), &ctx.src, ctx.ctx.i()).await.into()), Err(e) => return Err(num_to_errv(e, ctx.pos(all), ctx.src(), ctx.ctx.i()).await),
}; };
Ok((tail, x_tok(fac).at(ctx.pos_lt(chars.len(), tail)))) Ok((tail, x_tok(fac).await.at(ctx.pos_lt(chars.len(), tail))))
} }
} }

View File

@@ -1,5 +1,8 @@
use never::Never; use never::Never;
use orchid_base::interner::Interner;
use orchid_base::name::Sym;
use orchid_base::reqnot::Receipt; use orchid_base::reqnot::Receipt;
use orchid_base::sym;
use orchid_extension::atom::{AtomDynfo, AtomicFeatures}; use orchid_extension::atom::{AtomDynfo, AtomicFeatures};
use orchid_extension::entrypoint::ExtReq; use orchid_extension::entrypoint::ExtReq;
use orchid_extension::lexer::LexerObj; use orchid_extension::lexer::LexerObj;
@@ -22,7 +25,7 @@ impl SystemCtor for StdSystem {
type Instance = Self; type Instance = Self;
const NAME: &'static str = "orchid::std"; const NAME: &'static str = "orchid::std";
const VERSION: f64 = 0.00_01; const VERSION: f64 = 0.00_01;
fn inst() -> Option<Self::Instance> { Some(Self) } fn inst(_: ()) -> Self::Instance { Self }
} }
impl SystemCard for StdSystem { impl SystemCard for StdSystem {
type Ctor = Self; type Ctor = Self;
@@ -36,4 +39,5 @@ impl System for StdSystem {
fn lexers() -> Vec<LexerObj> { vec![&StringLexer, &NumLexer] } fn lexers() -> Vec<LexerObj> { vec![&StringLexer, &NumLexer] }
fn parsers() -> Vec<ParserObj> { vec![] } fn parsers() -> Vec<ParserObj> { vec![] }
fn env() -> Vec<GenMember> { merge_trivial([gen_num_lib(), gen_str_lib()]) } fn env() -> Vec<GenMember> { merge_trivial([gen_num_lib(), gen_str_lib()]) }
async fn prelude(i: &Interner) -> Vec<Sym> { vec![sym!(std; i).await] }
} }

View File

@@ -3,7 +3,7 @@ use std::ops::Deref;
use std::pin::Pin; use std::pin::Pin;
use std::rc::Rc; use std::rc::Rc;
use async_std::io::Write; use futures::AsyncWrite;
use orchid_api_derive::Coding; use orchid_api_derive::Coding;
use orchid_api_traits::{Encode, Request}; use orchid_api_traits::{Encode, Request};
use orchid_base::error::{OrcRes, mk_errv}; use orchid_base::error::{OrcRes, mk_errv};
@@ -46,10 +46,10 @@ impl Deref for StrAtom {
impl OwnedAtom for StrAtom { impl OwnedAtom for StrAtom {
type Refs = (); type Refs = ();
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) } async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
async fn serialize(&self, _: SysCtx, sink: Pin<&mut (impl Write + ?Sized)>) -> Self::Refs { async fn serialize(&self, _: SysCtx, sink: Pin<&mut (impl AsyncWrite + ?Sized)>) -> Self::Refs {
self.deref().encode(sink).await self.deref().encode(sink).await
} }
async fn print<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print_atom<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
format!("{:?}", &*self.0).into() format!("{:?}", &*self.0).into()
} }
async fn deserialize(mut ctx: impl DeserializeCtx, _: Self::Refs) -> Self { async fn deserialize(mut ctx: impl DeserializeCtx, _: Self::Refs) -> Self {
@@ -69,10 +69,10 @@ impl From<Tok<String>> for IntStrAtom {
impl OwnedAtom for IntStrAtom { impl OwnedAtom for IntStrAtom {
type Refs = (); type Refs = ();
async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(self.0.to_api()) } async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(self.0.to_api()) }
async fn print<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit { async fn print_atom<'a>(&'a self, _: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
format!("{:?}i", *self.0).into() format!("{:?}i", *self.0).into()
} }
async fn serialize(&self, _: SysCtx, write: Pin<&mut (impl Write + ?Sized)>) { async fn serialize(&self, _: SysCtx, write: Pin<&mut (impl AsyncWrite + ?Sized)>) {
self.0.encode(write).await self.0.encode(write).await
} }
async fn deserialize(mut ctx: impl DeserializeCtx, _: ()) -> Self { async fn deserialize(mut ctx: impl DeserializeCtx, _: ()) -> Self {
@@ -108,7 +108,7 @@ impl TryFromExpr for OrcString {
} }
let ctx = expr.ctx(); let ctx = expr.ctx();
match TypAtom::<IntStrAtom>::try_from_expr(expr).await { match TypAtom::<IntStrAtom>::try_from_expr(expr).await {
Ok(t) => Ok(OrcString { ctx: t.data.ctx(), kind: OrcStringKind::Int(t) }), Ok(t) => Ok(OrcString { ctx: t.untyped.ctx().clone(), kind: OrcStringKind::Int(t) }),
Err(e) => Err(mk_errv(ctx.i().i("A string was expected").await, "", e.pos_iter())), Err(e) => Err(mk_errv(ctx.i().i("A string was expected").await, "", e.pos_iter())),
} }
} }

View File

@@ -1,11 +1,13 @@
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::{OrcErr, OrcRes, mk_err, mk_errv}; use orchid_base::error::{OrcErr, OrcErrv, OrcRes, mk_errv};
use orchid_base::interner::Interner; use orchid_base::interner::Interner;
use orchid_base::location::SrcRange; use orchid_base::location::SrcRange;
use orchid_base::name::Sym; use orchid_base::name::Sym;
use orchid_base::parse::ParseCtx;
use orchid_base::sym; use orchid_base::sym;
use orchid_base::tree::wrap_tokv; use orchid_base::tree::wrap_tokv;
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable}; use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
use orchid_extension::parser::p_tree2gen;
use orchid_extension::tree::{GenTokTree, ref_tok, x_tok}; use orchid_extension::tree::{GenTokTree, ref_tok, x_tok};
use super::str_atom::IntStrAtom; use super::str_atom::IntStrAtom;
@@ -32,16 +34,16 @@ struct StringError {
impl StringError { impl StringError {
/// Convert into project error for reporting /// Convert into project error for reporting
pub async fn into_proj(self, path: &Sym, pos: u32, i: &Interner) -> OrcErr { pub async fn into_proj(self, path: &Sym, pos: u32, i: &Interner) -> OrcErrv {
let start = pos + self.pos; let start = pos + self.pos;
mk_err( mk_errv(
i.i("Failed to parse string").await, i.i("Failed to parse string").await,
match self.kind { match self.kind {
StringErrorKind::NotHex => "Expected a hex digit", StringErrorKind::NotHex => "Expected a hex digit",
StringErrorKind::BadCodePoint => "The specified number is not a Unicode code point", StringErrorKind::BadCodePoint => "The specified number is not a Unicode code point",
StringErrorKind::BadEscSeq => "Unrecognized escape sequence", StringErrorKind::BadEscSeq => "Unrecognized escape sequence",
}, },
[SrcRange::new(start..start + 1, path).pos().into()], [SrcRange::new(start..start + 1, path).pos()],
) )
} }
} }
@@ -97,7 +99,7 @@ impl Lexer for StringLexer {
const CHAR_FILTER: &'static [std::ops::RangeInclusive<char>] = &['"'..='"', '`'..='`']; const CHAR_FILTER: &'static [std::ops::RangeInclusive<char>] = &['"'..='"', '`'..='`'];
async fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> { async fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
let Some(mut tail) = all.strip_prefix('"') else { let Some(mut tail) = all.strip_prefix('"') else {
return Err(err_not_applicable(ctx.ctx.i()).await.into()); return Err(err_not_applicable(ctx.ctx.i()).await);
}; };
let mut ret = None; let mut ret = None;
let mut cur = String::new(); let mut cur = String::new();
@@ -110,15 +112,17 @@ impl Lexer for StringLexer {
) -> GenTokTree { ) -> GenTokTree {
let str_val_res = parse_string(&str.split_off(0)); let str_val_res = parse_string(&str.split_off(0));
if let Err(e) = &str_val_res { if let Err(e) = &str_val_res {
err.push(e.clone().into_proj(&ctx.src, ctx.pos(tail) - str.len() as u32, ctx.i()).await); err.extend(e.clone().into_proj(ctx.src(), ctx.pos(tail) - str.len() as u32, ctx.i()).await);
} }
let str_val = str_val_res.unwrap_or_default(); let str_val = str_val_res.unwrap_or_default();
x_tok(IntStrAtom::from(ctx.i().i(&*str_val).await)).at(ctx.pos_lt(str.len() as u32, tail)) x_tok(IntStrAtom::from(ctx.i().i(&*str_val).await))
as GenTokTree .await
.at(ctx.pos_lt(str.len() as u32, tail)) as GenTokTree
} }
let add_frag = |prev: Option<GenTokTree>, new: GenTokTree| async { let add_frag = |prev: Option<GenTokTree>, new: GenTokTree| async {
let Some(prev) = prev else { return new }; let Some(prev) = prev else { return new };
let concat_fn = ref_tok(sym!(std::string::concat; ctx.i()).await) let concat_fn = ref_tok(sym!(std::string::concat; ctx.i()).await)
.await
.at(SrcRange::zw(prev.sr.path(), prev.sr.start())); .at(SrcRange::zw(prev.sr.path(), prev.sr.start()));
wrap_tokv([concat_fn, prev, new]) wrap_tokv([concat_fn, prev, new])
}; };
@@ -129,7 +133,7 @@ impl Lexer for StringLexer {
ret = Some(add_frag(ret, str_to_gen(&mut cur, tail, &mut errors, ctx).await).await); ret = Some(add_frag(ret, str_to_gen(&mut cur, tail, &mut errors, ctx).await).await);
let (new_tail, tree) = ctx.recurse(rest).await?; let (new_tail, tree) = ctx.recurse(rest).await?;
tail = new_tail; tail = new_tail;
ret = Some(add_frag(ret, tree).await); ret = Some(add_frag(ret, p_tree2gen(tree)).await);
} else if tail.starts_with('\\') { } else if tail.starts_with('\\') {
// parse_string will deal with it, we just have to skip the next char // parse_string will deal with it, we just have to skip the next char
tail = &tail[2..]; tail = &tail[2..];
@@ -143,7 +147,7 @@ impl Lexer for StringLexer {
return Err(mk_errv( return Err(mk_errv(
ctx.i().i("No string end").await, ctx.i().i("No string end").await,
"String never terminated with \"", "String never terminated with \"",
[SrcRange::new(range.clone(), &ctx.src)], [SrcRange::new(range.clone(), ctx.src())],
)); ));
} }
} }

View File

@@ -5,49 +5,76 @@
} }
], ],
"settings": { "settings": {
"editor.rulers": [
100 // Important; for accessibility reasons, code cannot be wider than 100ch
],
"[markdown]": { "[markdown]": {
// markdown denotes line breaks with trailing space
"diffEditor.ignoreTrimWhitespace": false,
// Disable editor gadgets in markdown
"editor.unicodeHighlight.ambiguousCharacters": false, "editor.unicodeHighlight.ambiguousCharacters": false,
"editor.unicodeHighlight.invisibleCharacters": false, "editor.unicodeHighlight.invisibleCharacters": false,
"diffEditor.ignoreTrimWhitespace": false, "editor.glyphMargin": false,
"editor.wordWrap": "bounded", "editor.guides.indentation": false,
"editor.wordWrapColumn": 80, "editor.lineNumbers": "off",
"editor.quickSuggestions": { "editor.quickSuggestions": {
"comments": "off", "comments": "off",
"strings": "off", "strings": "off",
"other": "off" "other": "off",
}, },
"editor.lineNumbers": "off",
"editor.glyphMargin": false,
"editor.rulers": [], "editor.rulers": [],
"editor.guides.indentation": false, "editor.wordWrap": "bounded",
"editor.wordWrapColumn": 80,
// wrap lines as we go
"editor.formatOnType": true, "editor.formatOnType": true,
"editor.detectIndentation": false,
"editor.insertSpaces": false,
}, },
// Orchid is a human-made project
"chat.commandCenter.enabled": false,
// use spaces for indentation for Rust for now due to a rustfmt bug
"editor.tabSize": 2,
"editor.stickyTabStops": true,
"editor.detectIndentation": false,
"editor.insertSpaces": true,
// Important; for accessibility reasons, code cannot be wider than 100ch
"editor.rulers": [ 100 ],
"editor.formatOnSave": true, "editor.formatOnSave": true,
"rust-analyzer.showUnlinkedFileNotification": false, "files.watcherExclude": {
"rust-analyzer.checkOnSave": true, "**/.git/objects/**": true,
"rust-analyzer.check.command": "clippy", "**/.git/subtree-cache/**": true,
"rust-analyzer.rustfmt.extraArgs": [ "**/.hg/store/**": true,
"+nightly" "target": true,
],
"rust-analyzer.cargo.features": "all",
"rust-analyzer.check.features": "all",
"files.associations": {
"*.mjsd": "markdown"
}, },
"git.confirmSync": false,
"git.enableSmartCommit": true,
"git.autofetch": true,
"rust-analyzer.assist.emitMustUse": true,
"rust-analyzer.assist.preferSelf": true,
"rust-analyzer.cargo.features": "all",
"rust-analyzer.check.command": "clippy",
"rust-analyzer.check.features": "all",
"rust-analyzer.checkOnSave": true,
"rust-analyzer.completion.fullFunctionSignatures.enable": true,
"rust-analyzer.completion.termSearch.enable": true,
"rust-analyzer.inlayHints.parameterHints.enable": false,
"rust-analyzer.inlayHints.typeHints.enable": false,
"rust-analyzer.rustfmt.extraArgs": [
"+nightly",
],
"rust-analyzer.showUnlinkedFileNotification": false,
"swissknife.notesEnabled": false, "swissknife.notesEnabled": false,
"todo-tree.filtering.excludeGlobs": [
"**/node_modules/*/**",
"orchidlang/**"
]
}, },
"extensions": { "extensions": {
"recommendations": [ "recommendations": [
"maptz.regionfolder", "fill-labs.dependi",
"tamasfe.even-better-toml",
"yzhang.markdown-all-in-one",
"gruntfuggly.todo-tree", "gruntfuggly.todo-tree",
"vadimcn.vscode-lldb", "maptz.regionfolder",
"rust-lang.rust-analyzer", "rust-lang.rust-analyzer",
"fill-labs.dependi" "tamasfe.even-better-toml",
"vadimcn.vscode-lldb",
"yzhang.markdown-all-in-one",
] ]
}, },
} }

View File

@@ -6,13 +6,13 @@ edition = "2024"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
async-std = "1.13.0" async-fn-stream = { version = "0.1.0", path = "../async-fn-stream" }
async-stream = "0.3.6"
camino = "1.1.9" camino = "1.1.9"
clap = { version = "4.5.24", features = ["derive", "env"] } clap = { version = "4.5.24", features = ["derive", "env"] }
ctrlc = "3.4.5" ctrlc = "3.4.5"
futures = "0.3.31" futures = "0.3.31"
itertools = "0.14.0" itertools = "0.14.0"
orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-base = { version = "0.1.0", path = "../orchid-base" } orchid-base = { version = "0.1.0", path = "../orchid-base" }
orchid-host = { version = "0.1.0", path = "../orchid-host" } orchid-host = { version = "0.1.0", path = "../orchid-host" }
substack = "1.1.1" substack = "1.1.1"

View File

@@ -7,27 +7,30 @@ use std::mem;
use std::process::{Command, ExitCode}; use std::process::{Command, ExitCode};
use std::rc::Rc; use std::rc::Rc;
use async_std::io::stdin; use async_fn_stream::try_stream;
use async_std::path::PathBuf;
use async_stream::try_stream;
use camino::Utf8PathBuf; use camino::Utf8PathBuf;
use clap::{Parser, Subcommand}; use clap::{Parser, Subcommand};
use futures::{Stream, TryStreamExt, io}; use futures::{Stream, TryStreamExt, io};
use itertools::Itertools;
use orchid_base::error::Reporter; use orchid_base::error::Reporter;
use orchid_base::format::{FmtCtxImpl, Format, take_first}; use orchid_base::format::{FmtCtxImpl, Format, take_first};
use orchid_base::location::SrcRange;
use orchid_base::logging::{LogStrategy, Logger}; use orchid_base::logging::{LogStrategy, Logger};
use orchid_base::parse::Snippet; use orchid_base::name::{NameLike, VPath};
use orchid_base::parse::{Import, Snippet};
use orchid_base::sym; use orchid_base::sym;
use orchid_base::tree::ttv_fmt; use orchid_base::tree::{Token, ttv_fmt};
use orchid_host::ctx::Ctx; use orchid_host::ctx::Ctx;
use orchid_host::execute::{ExecCtx, ExecResult}; use orchid_host::execute::{ExecCtx, ExecResult};
use orchid_host::expr::PathSetBuilder; use orchid_host::expr::ExprKind;
use orchid_host::extension::Extension; use orchid_host::extension::Extension;
use orchid_host::lex::lex; use orchid_host::lex::lex;
use orchid_host::parse::{HostParseCtxImpl, parse_expr, parse_items}; use orchid_host::parse::{HostParseCtxImpl, parse_item, parse_items};
use orchid_host::parsed::{Item, ItemKind, ParsTokTree, ParsedMember, ParsedModule};
use orchid_host::subprocess::ext_command; use orchid_host::subprocess::ext_command;
use orchid_host::system::init_systems; use orchid_host::system::init_systems;
use substack::Substack; use substack::Substack;
use tokio::io::{AsyncBufReadExt, BufReader, stdin};
use tokio::task::{LocalSet, spawn_local}; use tokio::task::{LocalSet, spawn_local};
use crate::parse_folder::parse_folder; use crate::parse_folder::parse_folder;
@@ -58,7 +61,7 @@ pub enum Commands {
file: Utf8PathBuf, file: Utf8PathBuf,
}, },
Repl, Repl,
Execute { Exec {
#[arg(long)] #[arg(long)]
proj: Option<Utf8PathBuf>, proj: Option<Utf8PathBuf>,
#[arg()] #[arg()]
@@ -72,19 +75,16 @@ fn get_all_extensions<'a>(
msg_logger: &'a Logger, msg_logger: &'a Logger,
ctx: &'a Ctx, ctx: &'a Ctx,
) -> impl Stream<Item = io::Result<Extension>> + 'a { ) -> impl Stream<Item = io::Result<Extension>> + 'a {
try_stream! { try_stream(async |mut cx| {
for ext_path in args.extension.iter() { for ext_path in args.extension.iter() {
let exe = if cfg!(windows) { let exe = if cfg!(windows) { ext_path.with_extension("exe") } else { ext_path.clone() };
ext_path.with_extension("exe") let init =
} else { ext_command(Command::new(exe.as_os_str()), logger.clone(), msg_logger.clone(), ctx.clone())
ext_path.clone() .await?;
}; cx.emit(Extension::new(init, logger.clone(), msg_logger.clone(), ctx.clone())?).await;
let init = ext_command(Command::new(exe.as_os_str()), logger.clone(), msg_logger.clone(), ctx.clone()).await
.unwrap();
let ext = Extension::new(init, logger.clone(), msg_logger.clone(), ctx.clone())?;
yield ext
}
} }
Ok(cx)
})
} }
#[tokio::main] #[tokio::main]
@@ -145,20 +145,29 @@ async fn main() -> io::Result<ExitCode> {
println!("{}", take_first(&item.print(&FmtCtxImpl { i }).await, true)) println!("{}", take_first(&item.print(&FmtCtxImpl { i }).await, true))
} }
}, },
Commands::Repl => loop { Commands::Repl => {
let (root, systems) = init_systems(&args.system, &extensions).await.unwrap(); let mut counter = 0;
let mut imports = Vec::new();
let usercode_path = sym!(usercode; i).await;
let mut stdin = BufReader::new(stdin());
loop {
counter += 1;
let (mut root, systems) = init_systems(&args.system, &extensions).await.unwrap();
print!("\\.> "); print!("\\.> ");
std::io::stdout().flush().unwrap(); std::io::stdout().flush().unwrap();
let mut prompt = String::new(); let mut prompt = String::new();
stdin().read_line(&mut prompt).await.unwrap(); stdin.read_line(&mut prompt).await.unwrap();
eprintln!("lexing"); let name = i.i(&format!("_{counter}")).await;
let lexemes = let path = usercode_path.suffix([name.clone()], i).await;
lex(i.i(prompt.trim()).await, sym!(usercode; i).await, &systems, ctx).await.unwrap(); let mut lexemes =
eprintln!("lexed"); lex(i.i(prompt.trim()).await, path.clone(), &systems, ctx).await.unwrap();
let Some(discr) = lexemes.first() else { continue };
if args.logs { if args.logs {
println!("lexed: {}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl { i }).await, true)); println!("lexed: {}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl { i }).await, true));
} }
let path = sym!(usercode; i).await; let prefix_sr = SrcRange::zw(path.clone(), 0);
let process_lexemes = async |lexemes: &[ParsTokTree]| {
let snippet = Snippet::new(&lexemes[0], lexemes);
let reporter = Reporter::new(); let reporter = Reporter::new();
let parse_ctx = HostParseCtxImpl { let parse_ctx = HostParseCtxImpl {
ctx: ctx.clone(), ctx: ctx.clone(),
@@ -166,37 +175,64 @@ async fn main() -> io::Result<ExitCode> {
src: path.clone(), src: path.clone(),
systems: &systems[..], systems: &systems[..],
}; };
let parse_res = parse_expr( let parse_result = parse_item(&parse_ctx, Substack::Bottom, vec![], snippet).await;
&parse_ctx, match reporter.merge(parse_result) {
path.clone(), Ok(items) => Some(items),
PathSetBuilder::new(),
Snippet::new(&lexemes[0], &lexemes),
)
.await;
eprintln!("parsed");
let expr = match reporter.merge(parse_res) {
Ok(expr) => expr,
Err(e) => { Err(e) => {
eprintln!("{e}"); eprintln!("{e}");
continue; None
}, },
}
}; };
let mut xctx = ExecCtx::new(ctx.clone(), logger.clone(), root.clone(), expr).await; let add_imports = |items: &mut Vec<Item>, imports: &[Import]| {
items.extend(imports.iter().map(|import| Item::new(import.sr.clone(), import.clone())));
};
if discr.is_kw(i.i("import").await) {
let Some(import_lines) = process_lexemes(&lexemes).await else { continue };
imports.extend(import_lines.into_iter().map(|it| match it.kind {
ItemKind::Import(imp) => imp,
_ => panic!("Expected imports from import line"),
}));
continue;
}
if !discr.is_kw(i.i("let").await) {
let prefix = [i.i("export").await, i.i("let").await, name.clone(), i.i("=").await];
lexemes.splice(0..0, prefix.map(|n| Token::Name(n).at(prefix_sr.clone())));
}
let Some(mut new_lines) = process_lexemes(&lexemes).await else { continue };
let const_decl = new_lines.iter().exactly_one().expect("Multiple lines from let");
let input_sr = const_decl.sr.map_range(|_| 0..0);
let const_name = match &const_decl.kind {
ItemKind::Member(ParsedMember { name: const_name, .. }) => const_name.clone(),
_ => panic!("Expected exactly one constant declaration from let"),
};
add_imports(&mut new_lines, &imports);
imports.push(Import::new(input_sr.clone(), VPath::new(path.segs()), const_name.clone()));
let new_module = ParsedModule::new(true, new_lines);
let reporter = Reporter::new();
root = root.add_parsed(&new_module, path.clone(), &reporter).await;
eprintln!("parsed");
let entrypoint =
ExprKind::Const(path.suffix([const_name.clone()], i).await).at(input_sr.pos());
let mut xctx = ExecCtx::new(ctx.clone(), logger.clone(), root.clone(), entrypoint).await;
eprintln!("executed"); eprintln!("executed");
xctx.set_gas(Some(1000)); xctx.set_gas(Some(1000));
xctx.execute().await; xctx.execute().await;
match xctx.result() { match xctx.result() {
ExecResult::Value(val) => ExecResult::Value(val) =>
println!("{}", take_first(&val.print(&FmtCtxImpl { i }).await, false)), println!("{const_name} = {}", take_first(&val.print(&FmtCtxImpl { i }).await, false)),
ExecResult::Err(e) => println!("error: {e}"), ExecResult::Err(e) => println!("error: {e}"),
ExecResult::Gas(_) => println!("Ran out of gas!"), ExecResult::Gas(_) => println!("Ran out of gas!"),
} }
}
}, },
Commands::Execute { proj, code } => { Commands::Exec { proj, code } => {
let reporter = Reporter::new(); let reporter = Reporter::new();
let path = sym!(usercode; i).await;
let prefix_sr = SrcRange::zw(path.clone(), 0);
let (mut root, systems) = init_systems(&args.system, &extensions).await.unwrap(); let (mut root, systems) = init_systems(&args.system, &extensions).await.unwrap();
if let Some(proj_path) = proj { if let Some(proj_path) = proj {
let path = PathBuf::from(proj_path.into_std_path_buf()); let path = proj_path.into_std_path_buf();
match parse_folder(&root, path, sym!(src; i).await, &reporter, ctx.clone()).await { match parse_folder(&root, path, sym!(src; i).await, &reporter, ctx.clone()).await {
Ok(r) => root = r, Ok(r) => root = r,
Err(e) => { Err(e) => {
@@ -206,33 +242,32 @@ async fn main() -> io::Result<ExitCode> {
}, },
} }
} }
let lexemes = let mut lexemes = lex(i.i(code.trim()).await, path.clone(), &systems, ctx).await.unwrap();
lex(i.i(code.trim()).await, sym!(usercode; i).await, &systems, ctx).await.unwrap();
if args.logs { if args.logs {
println!("lexed: {}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl { i }).await, true)); println!("lexed: {}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl { i }).await, true));
} }
let path = sym!(usercode; i).await;
let parse_ctx = HostParseCtxImpl { let parse_ctx = HostParseCtxImpl {
ctx: ctx.clone(), ctx: ctx.clone(),
rep: &reporter, rep: &reporter,
src: path.clone(), src: path.clone(),
systems: &systems[..], systems: &systems[..],
}; };
let parse_res = parse_expr( let prefix =
&parse_ctx, [i.i("export").await, i.i("let").await, i.i("entrypoint").await, i.i("=").await];
path.clone(), lexemes.splice(0..0, prefix.map(|n| Token::Name(n).at(prefix_sr.clone())));
PathSetBuilder::new(), let snippet = Snippet::new(&lexemes[0], &lexemes);
Snippet::new(&lexemes[0], &lexemes), let parse_res = parse_item(&parse_ctx, Substack::Bottom, vec![], snippet).await;
) let entrypoint = match reporter.merge(parse_res) {
.await; Ok(items) => ParsedModule::new(true, items),
let expr = match reporter.merge(parse_res) {
Ok(expr) => expr,
Err(e) => { Err(e) => {
eprintln!("{e}"); eprintln!("{e}");
*exit_code1.borrow_mut() = ExitCode::FAILURE; *exit_code1.borrow_mut() = ExitCode::FAILURE;
return; return;
}, },
}; };
let reporter = Reporter::new();
let root = root.add_parsed(&entrypoint, path.clone(), &reporter).await;
let expr = ExprKind::Const(sym!(usercode::entrypoint; i).await).at(prefix_sr.pos());
let mut xctx = ExecCtx::new(ctx.clone(), logger.clone(), root, expr).await; let mut xctx = ExecCtx::new(ctx.clone(), logger.clone(), root, expr).await;
xctx.set_gas(Some(1000)); xctx.set_gas(Some(1000));
xctx.execute().await; xctx.execute().await;

View File

@@ -1,10 +1,6 @@
use std::ffi::OsStr; use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use async_std::fs;
use async_std::fs::File;
use async_std::io::ReadExt;
use async_std::path::{Path, PathBuf};
use async_std::stream::StreamExt;
use futures::FutureExt; use futures::FutureExt;
use itertools::Itertools; use itertools::Itertools;
use orchid_base::error::{OrcRes, Reporter, async_io_err, mk_errv, os_str_to_string}; use orchid_base::error::{OrcRes, Reporter, async_io_err, mk_errv, os_str_to_string};
@@ -17,6 +13,8 @@ use orchid_host::parse::{HostParseCtxImpl, parse_items};
use orchid_host::parsed::ParsedModule; use orchid_host::parsed::ParsedModule;
use orchid_host::tree::Root; use orchid_host::tree::Root;
use substack::Substack; use substack::Substack;
use tokio::fs::{self, File};
use tokio::io::AsyncReadExt;
pub async fn parse_folder( pub async fn parse_folder(
root: &Root, root: &Root,
@@ -30,7 +28,7 @@ pub async fn parse_folder(
return Ok(root.add_parsed(&parsed_module, ns, rep).await); return Ok(root.add_parsed(&parsed_module, ns, rep).await);
async fn recur(path: &Path, ns: Sym, rep: &Reporter, ctx: Ctx) -> OrcRes<Option<ParsedModule>> { async fn recur(path: &Path, ns: Sym, rep: &Reporter, ctx: Ctx) -> OrcRes<Option<ParsedModule>> {
let sr = SrcRange::new(0..0, &ns); let sr = SrcRange::new(0..0, &ns);
if path.is_dir().await { if path.is_dir() {
let Some(name_os) = path.file_name() else { let Some(name_os) = path.file_name() else {
return Err(mk_errv( return Err(mk_errv(
ctx.i.i("Could not read directory name").await, ctx.i.i("Could not read directory name").await,
@@ -39,16 +37,17 @@ pub async fn parse_folder(
)); ));
}; };
let name = ctx.i.i(os_str_to_string(name_os, &ctx.i, [sr]).await?).await; let name = ctx.i.i(os_str_to_string(name_os, &ctx.i, [sr]).await?).await;
let ns = ns.push(name.clone(), &ctx.i).await; let ns = ns.suffix([name.clone()], &ctx.i).await;
let sr = SrcRange::new(0..0, &ns); let sr = SrcRange::new(0..0, &ns);
let mut items = Vec::new(); let mut items = Vec::new();
let mut stream = match fs::read_dir(path).await { let mut stream = match fs::read_dir(path).await {
Err(err) => return Err(async_io_err(err, &ctx.i, [sr]).await), Err(err) => return Err(async_io_err(err, &ctx.i, [sr]).await),
Ok(s) => s, Ok(s) => s,
}; };
while let Some(entry_res) = stream.next().await { loop {
let entry = match entry_res { let entry = match stream.next_entry().await {
Ok(ent) => ent, Ok(Some(ent)) => ent,
Ok(None) => break,
Err(err) => { Err(err) => {
rep.report(async_io_err(err, &ctx.i, [sr.clone()]).await); rep.report(async_io_err(err, &ctx.i, [sr.clone()]).await);
continue; continue;
@@ -63,11 +62,11 @@ pub async fn parse_folder(
Ok(Some(module)) => items.push(module.default_item(name.clone(), sr.clone())), Ok(Some(module)) => items.push(module.default_item(name.clone(), sr.clone())),
} }
} }
Ok(Some(ParsedModule::new(items))) Ok(Some(ParsedModule::new(false, items)))
} else if path.extension() == Some(OsStr::new("orc")) { } else if path.extension() == Some(OsStr::new("orc")) {
let name_os = path.file_stem().expect("If there is an extension, there must be a stem"); let name_os = path.file_stem().expect("If there is an extension, there must be a stem");
let name = ctx.i.i(os_str_to_string(name_os, &ctx.i, [sr]).await?).await; let name = ctx.i.i(os_str_to_string(name_os, &ctx.i, [sr]).await?).await;
let ns = ns.push(name, &ctx.i).await; let ns = ns.suffix([name], &ctx.i).await;
let sr = SrcRange::new(0..0, &ns); let sr = SrcRange::new(0..0, &ns);
let mut file = match File::open(path).await { let mut file = match File::open(path).await {
Err(e) => return Err(async_io_err(e, &ctx.i, [sr]).await), Err(e) => return Err(async_io_err(e, &ctx.i, [sr]).await),
@@ -81,9 +80,9 @@ pub async fn parse_folder(
ctx.systems.read().await.iter().filter_map(|(_, sys)| sys.upgrade()).collect_vec(); ctx.systems.read().await.iter().filter_map(|(_, sys)| sys.upgrade()).collect_vec();
let lexemes = lex(ctx.i.i(&text).await, ns.clone(), &systems, &ctx).await?; let lexemes = lex(ctx.i.i(&text).await, ns.clone(), &systems, &ctx).await?;
let hpctx = HostParseCtxImpl { ctx: ctx.clone(), rep, src: ns.clone(), systems: &systems }; let hpctx = HostParseCtxImpl { ctx: ctx.clone(), rep, src: ns.clone(), systems: &systems };
let Some(fst) = lexemes.first() else { return Ok(Some(ParsedModule::new([]))) }; let Some(fst) = lexemes.first() else { return Ok(Some(ParsedModule::new(false, []))) };
let items = parse_items(&hpctx, Substack::Bottom, Snippet::new(fst, &lexemes)).await?; let items = parse_items(&hpctx, Substack::Bottom, Snippet::new(fst, &lexemes)).await?;
Ok(Some(ParsedModule::new(items))) Ok(Some(ParsedModule::new(false, items)))
} else { } else {
Ok(None) Ok(None)
} }

View File

@@ -1 +0,0 @@
Upsending: [ff ff ff ff ff ff ff f7 00 00 00 00 00 00 00 08 22 75 73 65 72 21 22 69]

View File

@@ -20,7 +20,7 @@ pub fn check_api_refs(_args: &Args) -> io::Result<()> {
continue; continue;
} }
let dname = file.path().to_string_lossy().to_string(); let dname = file.path().to_string_lossy().to_string();
eprintln!("orchid_api imported in {dname} at {};{}", l + 1, c + 1) eprintln!("orchid_api imported in {dname}:{}:{}", l + 1, c + 1)
} }
} }
Ok(()) Ok(())

Some files were not shown because too many files have changed in this diff Show More