forked from Orchid/orchid
base and extension fully compiles, host in good shape
This commit is contained in:
98
orchid-host/src/atom.rs
Normal file
98
orchid-host/src/atom.rs
Normal file
@@ -0,0 +1,98 @@
|
||||
use std::fmt;
|
||||
use std::rc::{Rc, Weak};
|
||||
|
||||
use derive_destructure::destructure;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::reqnot::Requester;
|
||||
use orchid_base::tree::AtomRepr;
|
||||
|
||||
use crate::api;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::expr::Expr;
|
||||
use crate::system::System;
|
||||
|
||||
#[derive(destructure)]
|
||||
pub struct AtomData {
|
||||
owner: System,
|
||||
drop: Option<api::AtomId>,
|
||||
data: Vec<u8>,
|
||||
}
|
||||
impl AtomData {
|
||||
fn api(self) -> api::Atom {
|
||||
let (owner, drop, data) = self.destructure();
|
||||
api::Atom { data, drop, owner: owner.id() }
|
||||
}
|
||||
fn api_ref(&self) -> api::Atom {
|
||||
api::Atom { data: self.data.clone(), drop: self.drop, owner: self.owner.id() }
|
||||
}
|
||||
}
|
||||
impl Drop for AtomData {
|
||||
fn drop(&mut self) {
|
||||
if let Some(id) = self.drop {
|
||||
self.owner.drop_atom(id);
|
||||
}
|
||||
}
|
||||
}
|
||||
impl fmt::Debug for AtomData {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("AtomData")
|
||||
.field("drop", &self.drop)
|
||||
.field("data", &self.data)
|
||||
.field("owner", &self.owner.id())
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct AtomHand(Rc<AtomData>);
|
||||
impl AtomHand {
|
||||
pub(crate) async fn new(api::Atom { data, drop, owner }: api::Atom, ctx: &Ctx) -> Self {
|
||||
let create = || async {
|
||||
let owner = ctx.system_inst(owner).await.expect("Dropped system created atom");
|
||||
AtomHand(Rc::new(AtomData { data, owner, drop }))
|
||||
};
|
||||
if let Some(id) = drop {
|
||||
let mut owned_g = ctx.owned_atoms.write().await;
|
||||
if let Some(data) = owned_g.get(&id) {
|
||||
if let Some(atom) = data.upgrade() {
|
||||
return atom;
|
||||
}
|
||||
}
|
||||
let new = create().await;
|
||||
owned_g.insert(id, new.downgrade());
|
||||
new
|
||||
} else {
|
||||
create().await
|
||||
}
|
||||
}
|
||||
pub async fn call(self, arg: Expr) -> api::Expression {
|
||||
let owner_sys = self.0.owner.clone();
|
||||
let reqnot = owner_sys.reqnot();
|
||||
owner_sys.ext().exprs().give_expr(arg.clone());
|
||||
match Rc::try_unwrap(self.0) {
|
||||
Ok(data) => reqnot.request(api::FinalCall(data.api(), arg.id())).await,
|
||||
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), arg.id())).await,
|
||||
}
|
||||
}
|
||||
pub async fn req(&self, key: api::TStrv, req: Vec<u8>) -> Option<Vec<u8>> {
|
||||
self.0.owner.reqnot().request(api::Fwded(self.0.api_ref(), key, req)).await
|
||||
}
|
||||
pub fn api_ref(&self) -> api::Atom { self.0.api_ref() }
|
||||
pub async fn to_string(&self) -> String {
|
||||
self.0.owner.reqnot().request(api::AtomPrint(self.0.api_ref())).await
|
||||
}
|
||||
pub fn downgrade(&self) -> WeakAtomHand { WeakAtomHand(Rc::downgrade(&self.0)) }
|
||||
}
|
||||
impl AtomRepr for AtomHand {
|
||||
type Ctx = Ctx;
|
||||
async fn from_api(atom: &orchid_api::Atom, _: Pos, ctx: &mut Self::Ctx) -> Self {
|
||||
Self::new(atom.clone(), &ctx).await
|
||||
}
|
||||
async fn to_api(&self) -> orchid_api::Atom { self.api_ref() }
|
||||
async fn print(&self) -> String { self.to_string().await }
|
||||
}
|
||||
|
||||
pub struct WeakAtomHand(Weak<AtomData>);
|
||||
impl WeakAtomHand {
|
||||
pub fn upgrade(&self) -> Option<AtomHand> { self.0.upgrade().map(AtomHand) }
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
use std::sync::Mutex;
|
||||
use std::{fmt, io, mem, process};
|
||||
|
||||
use orchid_base::msg::{recv_msg, send_msg};
|
||||
|
||||
pub struct SharedChild {
|
||||
child: process::Child,
|
||||
stdin: Mutex<process::ChildStdin>,
|
||||
stdout: Mutex<process::ChildStdout>,
|
||||
debug: Option<(String, Mutex<Box<dyn fmt::Write>>)>,
|
||||
}
|
||||
impl SharedChild {
|
||||
pub fn new(
|
||||
command: &mut process::Command,
|
||||
debug: Option<(&str, impl fmt::Write + 'static)>,
|
||||
) -> io::Result<Self> {
|
||||
let mut child =
|
||||
command.stdin(process::Stdio::piped()).stdout(process::Stdio::piped()).spawn()?;
|
||||
let stdin = Mutex::new(child.stdin.take().expect("Piped stdin above"));
|
||||
let stdout = Mutex::new(child.stdout.take().expect("Piped stdout above"));
|
||||
let debug = debug.map(|(n, w)| (n.to_string(), Mutex::new(Box::new(w) as Box<dyn fmt::Write>)));
|
||||
Ok(Self { child, stdin, stdout, debug })
|
||||
}
|
||||
|
||||
pub fn send_msg(&self, msg: &[u8]) -> io::Result<()> {
|
||||
if let Some((n, dbg)) = &self.debug {
|
||||
let mut dbg = dbg.lock().unwrap();
|
||||
writeln!(dbg, "To {n}: {msg:?}").unwrap();
|
||||
}
|
||||
send_msg(&mut *self.stdin.lock().unwrap(), msg)
|
||||
}
|
||||
|
||||
pub fn recv_msg(&self) -> io::Result<Vec<u8>> {
|
||||
let msg = recv_msg(&mut *self.stdout.lock().unwrap());
|
||||
if let Some((n, dbg)) = &self.debug {
|
||||
let mut dbg = dbg.lock().unwrap();
|
||||
writeln!(dbg, "From {n}: {msg:?}").unwrap();
|
||||
}
|
||||
msg
|
||||
}
|
||||
}
|
||||
impl Drop for SharedChild {
|
||||
fn drop(&mut self) { mem::drop(self.child.kill()) }
|
||||
}
|
||||
46
orchid-host/src/ctx.rs
Normal file
46
orchid-host/src/ctx.rs
Normal file
@@ -0,0 +1,46 @@
|
||||
use std::cell::RefCell;
|
||||
use std::num::NonZeroU16;
|
||||
use std::rc::Rc;
|
||||
use std::{fmt, ops};
|
||||
|
||||
use async_std::sync::RwLock;
|
||||
use futures::task::LocalSpawn;
|
||||
use hashbrown::HashMap;
|
||||
use orchid_api::SysId;
|
||||
use orchid_base::interner::Interner;
|
||||
|
||||
use crate::api;
|
||||
use crate::atom::WeakAtomHand;
|
||||
use crate::system::{System, WeakSystem};
|
||||
|
||||
pub struct CtxData {
|
||||
pub i: Rc<Interner>,
|
||||
pub spawn: Rc<dyn LocalSpawn>,
|
||||
pub systems: RwLock<HashMap<api::SysId, WeakSystem>>,
|
||||
pub system_id: RefCell<NonZeroU16>,
|
||||
pub owned_atoms: RwLock<HashMap<api::AtomId, WeakAtomHand>>,
|
||||
}
|
||||
#[derive(Clone)]
|
||||
pub struct Ctx(Rc<CtxData>);
|
||||
impl ops::Deref for Ctx {
|
||||
type Target = CtxData;
|
||||
fn deref(&self) -> &Self::Target { &*self.0 }
|
||||
}
|
||||
impl Ctx {
|
||||
pub(crate) async fn system_inst(&self, id: api::SysId) -> Option<System> {
|
||||
self.systems.read().await.get(&id).and_then(WeakSystem::upgrade)
|
||||
}
|
||||
pub(crate) fn next_sys_id(&self) -> api::SysId {
|
||||
let mut g = self.system_id.borrow_mut();
|
||||
*g = g.checked_add(1).unwrap_or(NonZeroU16::new(1).unwrap());
|
||||
SysId(*g)
|
||||
}
|
||||
}
|
||||
impl fmt::Debug for Ctx {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("Ctx")
|
||||
.field("i", &self.i)
|
||||
.field("system_id", &self.system_id)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
@@ -1,10 +1,10 @@
|
||||
use std::collections::VecDeque;
|
||||
use std::num::NonZeroU64;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::{Arc, RwLock};
|
||||
use std::rc::{Rc, Weak};
|
||||
use std::sync::atomic::AtomicBool;
|
||||
|
||||
use hashbrown::HashMap;
|
||||
use lazy_static::lazy_static;
|
||||
use async_std::sync::RwLock;
|
||||
use futures::FutureExt;
|
||||
use orchid_base::error::OrcErrv;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::match_mapping;
|
||||
@@ -12,69 +12,56 @@ use orchid_base::name::Sym;
|
||||
use orchid_base::tree::AtomRepr;
|
||||
|
||||
use crate::api;
|
||||
use crate::extension::AtomHand;
|
||||
use crate::atom::AtomHand;
|
||||
use crate::extension::Extension;
|
||||
|
||||
pub type ExprParseCtx = ();
|
||||
pub type ExprParseCtx = Extension;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ExprData {
|
||||
is_canonical: AtomicBool,
|
||||
pos: Pos,
|
||||
kind: RwLock<ExprKind>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Expr {
|
||||
is_canonical: Arc<AtomicBool>,
|
||||
pos: Pos,
|
||||
kind: Arc<RwLock<ExprKind>>,
|
||||
}
|
||||
pub struct Expr(Rc<ExprData>);
|
||||
impl Expr {
|
||||
pub fn pos(&self) -> Pos { self.pos.clone() }
|
||||
pub fn pos(&self) -> Pos { self.0.pos.clone() }
|
||||
pub fn as_atom(&self) -> Option<AtomHand> { todo!() }
|
||||
pub fn strong_count(&self) -> usize { todo!() }
|
||||
pub fn id(&self) -> api::ExprTicket {
|
||||
api::ExprTicket(
|
||||
NonZeroU64::new(self.kind.as_ref() as *const RwLock<_> as usize as u64)
|
||||
NonZeroU64::new(self.0.as_ref() as *const ExprData as usize as u64)
|
||||
.expect("this is a ref, it cannot be null"),
|
||||
)
|
||||
}
|
||||
pub fn canonicalize(&self) -> api::ExprTicket {
|
||||
if !self.is_canonical.swap(true, Ordering::Relaxed) {
|
||||
KNOWN_EXPRS.write().unwrap().entry(self.id()).or_insert_with(|| self.clone());
|
||||
}
|
||||
self.id()
|
||||
}
|
||||
pub fn resolve(tk: api::ExprTicket) -> Option<Self> {
|
||||
KNOWN_EXPRS.read().unwrap().get(&tk).cloned()
|
||||
}
|
||||
pub fn from_api(api: &api::Expression, ctx: &mut ExprParseCtx) -> Self {
|
||||
// pub fn canonicalize(&self) -> api::ExprTicket {
|
||||
// if !self.is_canonical.swap(true, Ordering::Relaxed) {
|
||||
// KNOWN_EXPRS.write().unwrap().entry(self.id()).or_insert_with(||
|
||||
// self.clone()); }
|
||||
// self.id()
|
||||
// }
|
||||
// pub fn resolve(tk: api::ExprTicket) -> Option<Self> {
|
||||
// KNOWN_EXPRS.read().unwrap().get(&tk).cloned()
|
||||
// }
|
||||
pub async fn from_api(api: &api::Expression, ctx: &mut ExprParseCtx) -> Self {
|
||||
if let api::ExpressionKind::Slot(tk) = &api.kind {
|
||||
return Self::resolve(*tk).expect("Invalid slot");
|
||||
}
|
||||
Self {
|
||||
kind: Arc::new(RwLock::new(ExprKind::from_api(&api.kind, ctx))),
|
||||
is_canonical: Arc::default(),
|
||||
pos: Pos::from_api(&api.location),
|
||||
return ctx.exprs().get_expr(*tk).expect("Invalid slot");
|
||||
}
|
||||
let pos = Pos::from_api(&api.location, &ctx.ctx().i).await;
|
||||
let kind = RwLock::new(ExprKind::from_api(&api.kind, pos.clone(), ctx).boxed_local().await);
|
||||
Self(Rc::new(ExprData { is_canonical: AtomicBool::new(false), pos, kind }))
|
||||
}
|
||||
pub fn to_api(&self) -> api::InspectedKind {
|
||||
pub async fn to_api(&self) -> api::InspectedKind {
|
||||
use api::InspectedKind as K;
|
||||
match &*self.kind.read().unwrap() {
|
||||
ExprKind::Atom(a) => K::Atom(a.to_api()),
|
||||
match &*self.0.kind.read().await {
|
||||
ExprKind::Atom(a) => K::Atom(a.to_api().await),
|
||||
ExprKind::Bottom(b) => K::Bottom(b.to_api()),
|
||||
_ => K::Opaque,
|
||||
}
|
||||
}
|
||||
}
|
||||
impl Drop for Expr {
|
||||
fn drop(&mut self) {
|
||||
// If the only two references left are this and known, remove from known
|
||||
if Arc::strong_count(&self.kind) == 2 && self.is_canonical.load(Ordering::Relaxed) {
|
||||
// if known is poisoned, a leak is preferable to a panicking destructor
|
||||
if let Ok(mut w) = KNOWN_EXPRS.write() {
|
||||
w.remove(&self.id());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
static ref KNOWN_EXPRS: RwLock<HashMap<api::ExprTicket, Expr>> = RwLock::default();
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum ExprKind {
|
||||
@@ -87,16 +74,20 @@ pub enum ExprKind {
|
||||
Const(Sym),
|
||||
}
|
||||
impl ExprKind {
|
||||
pub fn from_api(api: &api::ExpressionKind, ctx: &mut ExprParseCtx) -> Self {
|
||||
pub async fn from_api(api: &api::ExpressionKind, pos: Pos, ctx: &mut ExprParseCtx) -> Self {
|
||||
match_mapping!(api, api::ExpressionKind => ExprKind {
|
||||
Lambda(id => PathSet::from_api(*id, api), b => Expr::from_api(b, ctx)),
|
||||
Bottom(b => OrcErrv::from_api(b)),
|
||||
Call(f => Expr::from_api(f, ctx), x => Expr::from_api(x, ctx)),
|
||||
Const(c => Sym::from_api(*c)),
|
||||
Seq(a => Expr::from_api(a, ctx), b => Expr::from_api(b, ctx)),
|
||||
Lambda(id => PathSet::from_api(*id, api), b => Expr::from_api(b, ctx).await),
|
||||
Bottom(b => OrcErrv::from_api(b, &ctx.ctx().i).await),
|
||||
Call(f => Expr::from_api(f, ctx).await, x => Expr::from_api(x, ctx).await),
|
||||
Const(c => Sym::from_api(*c, &ctx.ctx().i).await),
|
||||
Seq(a => Expr::from_api(a, ctx).await, b => Expr::from_api(b, ctx).await),
|
||||
} {
|
||||
api::ExpressionKind::Arg(_) => ExprKind::Arg,
|
||||
api::ExpressionKind::NewAtom(a) => ExprKind::Atom(AtomHand::from_api(a.clone())),
|
||||
api::ExpressionKind::NewAtom(a) => ExprKind::Atom(AtomHand::from_api(
|
||||
a,
|
||||
pos,
|
||||
&mut ctx.ctx().clone()
|
||||
).await),
|
||||
api::ExpressionKind::Slot(_) => panic!("Handled in Expr"),
|
||||
})
|
||||
}
|
||||
@@ -139,3 +130,8 @@ impl PathSet {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct WeakExpr(Weak<ExprData>);
|
||||
impl WeakExpr {
|
||||
pub fn upgrade(&self) -> Option<Expr> { self.0.upgrade().map(Expr) }
|
||||
}
|
||||
|
||||
35
orchid-host/src/expr_store.rs
Normal file
35
orchid-host/src/expr_store.rs
Normal file
@@ -0,0 +1,35 @@
|
||||
use std::cell::RefCell;
|
||||
use std::fmt;
|
||||
|
||||
use hashbrown::HashMap;
|
||||
use hashbrown::hash_map::Entry;
|
||||
|
||||
use crate::api;
|
||||
use crate::expr::Expr;
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct ExprStore(RefCell<HashMap<api::ExprTicket, (u32, Expr)>>);
|
||||
impl ExprStore {
|
||||
pub fn give_expr(&self, expr: Expr) {
|
||||
match self.0.borrow_mut().entry(expr.id()) {
|
||||
Entry::Occupied(mut oe) => oe.get_mut().0 += 1,
|
||||
Entry::Vacant(v) => {
|
||||
v.insert((1, expr));
|
||||
},
|
||||
}
|
||||
}
|
||||
pub fn take_expr(&self, ticket: api::ExprTicket) {
|
||||
(self.0.borrow_mut().entry(ticket))
|
||||
.and_replace_entry_with(|_, (rc, rt)| (1 < rc).then_some((rc - 1, rt)));
|
||||
}
|
||||
pub fn get_expr(&self, ticket: api::ExprTicket) -> Option<Expr> {
|
||||
self.0.borrow().get(&ticket).map(|(_, expr)| expr.clone())
|
||||
}
|
||||
}
|
||||
impl fmt::Display for ExprStore {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let r = self.0.borrow();
|
||||
let rc: u32 = r.values().map(|v| v.0).sum();
|
||||
write!(f, "Store holding {rc} refs to {} exprs", r.len())
|
||||
}
|
||||
}
|
||||
@@ -1,111 +1,35 @@
|
||||
use std::collections::VecDeque;
|
||||
use std::num::NonZero;
|
||||
use std::ops::Deref;
|
||||
use std::sync::atomic::{AtomicU16, AtomicU32, AtomicU64, Ordering};
|
||||
use std::sync::mpsc::{SyncSender, sync_channel};
|
||||
use std::sync::{Arc, Mutex, OnceLock, RwLock, Weak};
|
||||
use std::{fmt, io, thread};
|
||||
use std::cell::RefCell;
|
||||
use std::future::Future;
|
||||
use std::io;
|
||||
use std::num::NonZeroU64;
|
||||
use std::rc::{Rc, Weak};
|
||||
|
||||
use async_std::channel::{self, Sender};
|
||||
use async_std::sync::Mutex;
|
||||
use derive_destructure::destructure;
|
||||
use futures::FutureExt;
|
||||
use futures::future::{join, join_all};
|
||||
use futures::task::LocalSpawnExt;
|
||||
use hashbrown::HashMap;
|
||||
use hashbrown::hash_map::Entry;
|
||||
use itertools::Itertools;
|
||||
use lazy_static::lazy_static;
|
||||
use orchid_api::HostMsgSet;
|
||||
use orchid_api_traits::Request;
|
||||
use orchid_base::builtin::{ExtFactory, ExtPort};
|
||||
use orchid_base::char_filter::char_filter_match;
|
||||
use orchid_base::builtin::ExtInit;
|
||||
use orchid_base::clone;
|
||||
use orchid_base::error::{OrcErrv, OrcRes};
|
||||
use orchid_base::interner::{Tok, intern};
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::logging::Logger;
|
||||
use orchid_base::macros::mtreev_from_api;
|
||||
use orchid_base::parse::Comment;
|
||||
use orchid_base::reqnot::{ReqNot, Requester as _};
|
||||
use orchid_base::tree::{AtomRepr, ttv_from_api};
|
||||
use ordered_float::NotNan;
|
||||
use substack::{Stackframe, Substack};
|
||||
use orchid_base::tree::AtomRepr;
|
||||
|
||||
use crate::api;
|
||||
use crate::expr::Expr;
|
||||
use crate::atom::AtomHand;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::expr_store::ExprStore;
|
||||
use crate::macros::{macro_recur, macro_treev_to_api};
|
||||
use crate::tree::{Member, ParsTokTree};
|
||||
use crate::system::SystemCtor;
|
||||
|
||||
#[derive(Debug, destructure)]
|
||||
pub struct AtomData {
|
||||
owner: System,
|
||||
drop: Option<api::AtomId>,
|
||||
data: Vec<u8>,
|
||||
}
|
||||
impl AtomData {
|
||||
fn api(self) -> api::Atom {
|
||||
let (owner, drop, data) = self.destructure();
|
||||
api::Atom { data, drop, owner: owner.id() }
|
||||
}
|
||||
fn api_ref(&self) -> api::Atom {
|
||||
api::Atom { data: self.data.clone(), drop: self.drop, owner: self.owner.id() }
|
||||
}
|
||||
}
|
||||
impl Drop for AtomData {
|
||||
fn drop(&mut self) {
|
||||
if let Some(id) = self.drop {
|
||||
self.owner.reqnot().notify(api::AtomDrop(self.owner.id(), id))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct AtomHand(Arc<AtomData>);
|
||||
impl AtomHand {
|
||||
pub fn from_api(atom: api::Atom) -> Self {
|
||||
fn create_new(api::Atom { data, drop, owner }: api::Atom) -> AtomHand {
|
||||
let owner = System::resolve(owner).expect("Atom owned by non-existing system");
|
||||
AtomHand(Arc::new(AtomData { data, drop, owner }))
|
||||
}
|
||||
if let Some(id) = atom.drop {
|
||||
lazy_static! {
|
||||
static ref OWNED_ATOMS: Mutex<HashMap<(api::SysId, api::AtomId), Weak<AtomData>>> =
|
||||
Mutex::default();
|
||||
}
|
||||
let owner = atom.owner;
|
||||
let mut owned_g = OWNED_ATOMS.lock().unwrap();
|
||||
if let Some(data) = owned_g.get(&(owner, id)) {
|
||||
if let Some(atom) = data.upgrade() {
|
||||
return Self(atom);
|
||||
}
|
||||
}
|
||||
let new = create_new(atom);
|
||||
owned_g.insert((owner, id), Arc::downgrade(&new.0));
|
||||
new
|
||||
} else {
|
||||
create_new(atom)
|
||||
}
|
||||
}
|
||||
pub fn call(self, arg: Expr) -> api::Expression {
|
||||
let owner_sys = self.0.owner.clone();
|
||||
let reqnot = owner_sys.reqnot();
|
||||
let ticket = owner_sys.give_expr(arg.canonicalize(), || arg);
|
||||
match Arc::try_unwrap(self.0) {
|
||||
Ok(data) => reqnot.request(api::FinalCall(data.api(), ticket)),
|
||||
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), ticket)),
|
||||
}
|
||||
}
|
||||
pub fn req(&self, key: api::TStrv, req: Vec<u8>) -> Option<Vec<u8>> {
|
||||
self.0.owner.reqnot().request(api::Fwded(self.0.api_ref(), key, req))
|
||||
}
|
||||
pub fn api_ref(&self) -> api::Atom { self.0.api_ref() }
|
||||
pub fn print(&self) -> String { self.0.owner.reqnot().request(api::AtomPrint(self.0.api_ref())) }
|
||||
}
|
||||
impl AtomRepr for AtomHand {
|
||||
type Ctx = ();
|
||||
fn from_api(atom: &orchid_api::Atom, _: Pos, (): &mut Self::Ctx) -> Self {
|
||||
Self::from_api(atom.clone())
|
||||
}
|
||||
fn to_api(&self) -> orchid_api::Atom { self.api_ref() }
|
||||
}
|
||||
impl fmt::Display for AtomHand {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(&self.print()) }
|
||||
}
|
||||
pub struct ReqPair<R: Request>(R, Sender<R::Response>);
|
||||
|
||||
/// Data held about an Extension. This is refcounted within [Extension]. It's
|
||||
/// important to only ever access parts of this struct through the [Arc] because
|
||||
@@ -113,320 +37,194 @@ impl fmt::Display for AtomHand {
|
||||
/// upgrading fails.
|
||||
#[derive(destructure)]
|
||||
pub struct ExtensionData {
|
||||
port: Mutex<Box<dyn ExtPort>>,
|
||||
// child: Mutex<process::Child>,
|
||||
// child_stdin: Mutex<ChildStdin>,
|
||||
ctx: Ctx,
|
||||
init: ExtInit,
|
||||
reqnot: ReqNot<api::HostMsgSet>,
|
||||
systems: Vec<SystemCtor>,
|
||||
logger: Logger,
|
||||
next_pars: RefCell<NonZeroU64>,
|
||||
exprs: ExprStore,
|
||||
lex_recur: Mutex<HashMap<api::ParsId, channel::Sender<ReqPair<api::SubLex>>>>,
|
||||
}
|
||||
impl Drop for ExtensionData {
|
||||
fn drop(&mut self) { self.reqnot.notify(api::HostExtNotif::Exit); }
|
||||
}
|
||||
|
||||
fn acq_expr(sys: api::SysId, extk: api::ExprTicket) {
|
||||
(System::resolve(sys).expect("Expr acq'd by invalid system"))
|
||||
.give_expr(extk, || Expr::resolve(extk).expect("Invalid expr acq'd"));
|
||||
}
|
||||
|
||||
fn rel_expr(sys: api::SysId, extk: api::ExprTicket) {
|
||||
let sys = System::resolve(sys).unwrap();
|
||||
let mut exprs = sys.0.exprs.write().unwrap();
|
||||
exprs.entry(extk).and_replace_entry_with(|_, (rc, rt)| {
|
||||
(0 < rc.fetch_sub(1, Ordering::Relaxed)).then_some((rc, rt))
|
||||
});
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Extension(Arc<ExtensionData>);
|
||||
pub struct Extension(Rc<ExtensionData>);
|
||||
impl Extension {
|
||||
pub fn new(fac: Box<dyn ExtFactory>, logger: Logger) -> io::Result<Self> {
|
||||
Ok(Self(Arc::new_cyclic(|weak: &Weak<ExtensionData>| {
|
||||
let (eh, port) = fac.run(Box::new(clone!(weak; move |msg| {
|
||||
weak.upgrade().inspect(|xd| xd.reqnot.receive(msg));
|
||||
})));
|
||||
ExtensionData {
|
||||
systems: (eh.systems.iter().cloned())
|
||||
.map(|decl| SystemCtor { decl, ext: weak.clone() })
|
||||
.collect(),
|
||||
logger,
|
||||
port: Mutex::new(port),
|
||||
reqnot: ReqNot::new(
|
||||
clone!(weak; move |sfn, _| {
|
||||
let data = weak.upgrade().unwrap();
|
||||
data.logger.log_buf("Downsending", sfn);
|
||||
data.port.lock().unwrap().send(sfn);
|
||||
}),
|
||||
clone!(weak; move |notif, _| match notif {
|
||||
api::ExtHostNotif::ExprNotif(api::ExprNotif::Acquire(acq)) => acq_expr(acq.0, acq.1),
|
||||
api::ExtHostNotif::ExprNotif(api::ExprNotif::Release(rel)) => rel_expr(rel.0, rel.1),
|
||||
pub fn new(init: ExtInit, logger: Logger, ctx: Ctx) -> io::Result<Self> {
|
||||
Ok(Self(Rc::new_cyclic(|weak: &Weak<ExtensionData>| ExtensionData {
|
||||
exprs: ExprStore::default(),
|
||||
ctx: ctx.clone(),
|
||||
systems: (init.systems.iter().cloned())
|
||||
.map(|decl| SystemCtor { decl, ext: WeakExtension(weak.clone()) })
|
||||
.collect(),
|
||||
logger,
|
||||
init,
|
||||
next_pars: RefCell::new(NonZeroU64::new(1).unwrap()),
|
||||
lex_recur: Mutex::default(),
|
||||
reqnot: ReqNot::new(
|
||||
clone!(weak; move |sfn, _| clone!(weak; async move {
|
||||
let data = weak.upgrade().unwrap();
|
||||
data.logger.log_buf("Downsending", sfn);
|
||||
data.init.send(sfn).await
|
||||
}.boxed_local())),
|
||||
clone!(weak; move |notif, _| clone!(weak; async move {
|
||||
let this = Extension(weak.upgrade().unwrap());
|
||||
match notif {
|
||||
api::ExtHostNotif::ExprNotif(api::ExprNotif::Acquire(acq)) => {
|
||||
let target = this.0.exprs.get_expr(acq.1).expect("Invalid ticket");
|
||||
this.0.exprs.give_expr(target)
|
||||
}
|
||||
api::ExtHostNotif::ExprNotif(api::ExprNotif::Release(rel)) => {
|
||||
this.assert_own_sys(rel.0).await;
|
||||
this.0.exprs.take_expr(rel.1)
|
||||
}
|
||||
api::ExtHostNotif::ExprNotif(api::ExprNotif::Move(mov)) => {
|
||||
acq_expr(mov.inc, mov.expr);
|
||||
rel_expr(mov.dec, mov.expr);
|
||||
this.assert_own_sys(mov.dec).await;
|
||||
let recp = this.ctx().system_inst(mov.inc).await.expect("invallid recipient sys id");
|
||||
let expr = this.0.exprs.get_expr(mov.expr).expect("invalid ticket");
|
||||
recp.ext().0.exprs.give_expr(expr);
|
||||
this.0.exprs.take_expr(mov.expr);
|
||||
},
|
||||
api::ExtHostNotif::Log(api::Log(str)) => weak.upgrade().unwrap().logger.log(str),
|
||||
}),
|
||||
|hand, req| match req {
|
||||
api::ExtHostReq::Ping(ping) => hand.handle(&ping, &()),
|
||||
api::ExtHostReq::IntReq(intreq) => match intreq {
|
||||
api::IntReq::InternStr(s) => hand.handle(&s, &intern(&**s.0).to_api()),
|
||||
api::IntReq::InternStrv(v) => hand.handle(&v, &intern(&*v.0).to_api()),
|
||||
api::IntReq::ExternStr(si) => hand.handle(&si, &Tok::<String>::from_api(si.0).arc()),
|
||||
api::IntReq::ExternStrv(vi) => hand.handle(
|
||||
&vi,
|
||||
&Arc::new(
|
||||
Tok::<Vec<Tok<String>>>::from_api(vi.0).iter().map(|t| t.to_api()).collect_vec(),
|
||||
),
|
||||
),
|
||||
},
|
||||
api::ExtHostReq::Fwd(ref fw @ api::Fwd(ref atom, ref key, ref body)) => {
|
||||
let sys = System::resolve(atom.owner).unwrap();
|
||||
hand.handle(fw, &sys.reqnot().request(api::Fwded(fw.0.clone(), *key, body.clone())))
|
||||
},
|
||||
api::ExtHostReq::SysFwd(ref fw @ api::SysFwd(id, ref body)) => {
|
||||
let sys = System::resolve(id).unwrap();
|
||||
hand.handle(fw, &sys.request(body.clone()))
|
||||
},
|
||||
api::ExtHostReq::SubLex(sl) => {
|
||||
let (rep_in, rep_out) = sync_channel(0);
|
||||
let lex_g = LEX_RECUR.lock().unwrap();
|
||||
let req_in = lex_g.get(&sl.id).expect("Sublex for nonexistent lexid");
|
||||
req_in.send(ReqPair(sl.clone(), rep_in)).unwrap();
|
||||
hand.handle(&sl, &rep_out.recv().unwrap())
|
||||
},
|
||||
api::ExtHostReq::ExprReq(api::ExprReq::Inspect(ins @ api::Inspect { target })) => {
|
||||
let expr = Expr::resolve(target).expect("Invalid ticket");
|
||||
hand.handle(&ins, &api::Inspected {
|
||||
refcount: expr.strong_count() as u32,
|
||||
location: expr.pos().to_api(),
|
||||
kind: expr.to_api(),
|
||||
})
|
||||
},
|
||||
api::ExtHostReq::RunMacros(ref rm @ api::RunMacros { ref run_id, ref query }) => hand
|
||||
.handle(
|
||||
rm,
|
||||
¯o_recur(
|
||||
*run_id,
|
||||
mtreev_from_api(query, &mut |_| panic!("Recursion never contains atoms")),
|
||||
)
|
||||
.map(|x| macro_treev_to_api(*run_id, x)),
|
||||
),
|
||||
},
|
||||
),
|
||||
}
|
||||
api::ExtHostNotif::Log(api::Log(str)) => this.logger().log(str),
|
||||
}
|
||||
}.boxed_local())),
|
||||
{
|
||||
clone!(weak, ctx);
|
||||
move |hand, req| {
|
||||
clone!(weak, ctx);
|
||||
async move {
|
||||
let this = Self(weak.upgrade().unwrap());
|
||||
let i = this.ctx().i.clone();
|
||||
match req {
|
||||
api::ExtHostReq::Ping(ping) => hand.handle(&ping, &()).await,
|
||||
api::ExtHostReq::IntReq(intreq) => match intreq {
|
||||
api::IntReq::InternStr(s) => hand.handle(&s, &i.i(&*s.0).await.to_api()).await,
|
||||
api::IntReq::InternStrv(v) => {
|
||||
let tokens = join_all(v.0.iter().map(|m| i.ex(*m))).await;
|
||||
hand.handle(&v, &i.i(&tokens).await.to_api()).await
|
||||
},
|
||||
api::IntReq::ExternStr(si) =>
|
||||
hand.handle(&si, &Tok::<String>::from_api(si.0, &i).await.rc()).await,
|
||||
api::IntReq::ExternStrv(vi) => {
|
||||
let markerv = (i.ex(vi.0).await.iter()).map(|t| t.to_api()).collect_vec();
|
||||
hand.handle(&vi, &markerv).await
|
||||
},
|
||||
},
|
||||
api::ExtHostReq::Fwd(ref fw @ api::Fwd(ref atom, ref key, ref body)) => {
|
||||
let sys = ctx.system_inst(atom.owner).await.expect("owner of live atom dropped");
|
||||
let reply =
|
||||
sys.reqnot().request(api::Fwded(fw.0.clone(), *key, body.clone())).await;
|
||||
hand.handle(fw, &reply).await
|
||||
},
|
||||
api::ExtHostReq::SysFwd(ref fw @ api::SysFwd(id, ref body)) => {
|
||||
let sys = ctx.system_inst(id).await.unwrap();
|
||||
hand.handle(fw, &sys.request(body.clone()).await).await
|
||||
},
|
||||
api::ExtHostReq::SubLex(sl) => {
|
||||
let (rep_in, rep_out) = channel::bounded(0);
|
||||
let lex_g = this.0.lex_recur.lock().await;
|
||||
let req_in = lex_g.get(&sl.id).expect("Sublex for nonexistent lexid");
|
||||
req_in.send(ReqPair(sl.clone(), rep_in)).await.unwrap();
|
||||
hand.handle(&sl, &rep_out.recv().await.unwrap()).await
|
||||
},
|
||||
api::ExtHostReq::ExprReq(api::ExprReq::Inspect(ins @ api::Inspect { target })) => {
|
||||
let expr = this.exprs().get_expr(target).expect("Invalid ticket");
|
||||
hand
|
||||
.handle(&ins, &api::Inspected {
|
||||
refcount: expr.strong_count() as u32,
|
||||
location: expr.pos().to_api(),
|
||||
kind: expr.to_api().await,
|
||||
})
|
||||
.await
|
||||
},
|
||||
api::ExtHostReq::RunMacros(ref rm @ api::RunMacros { ref run_id, ref query }) => {
|
||||
let mtreev =
|
||||
mtreev_from_api(query, &i, &mut |_| panic!("Atom in macro recur")).await;
|
||||
match macro_recur(*run_id, mtreev).await {
|
||||
Some(x) => hand.handle(rm, &Some(macro_treev_to_api(*run_id, x).await)).await,
|
||||
None => hand.handle(rm, &None).await,
|
||||
}
|
||||
},
|
||||
api::ExtHostReq::ExtAtomPrint(ref eap @ api::ExtAtomPrint(ref atom)) =>
|
||||
hand.handle(eap, &AtomHand::new(atom.clone(), &ctx).await.print().await).await,
|
||||
}
|
||||
}
|
||||
.boxed_local()
|
||||
}
|
||||
},
|
||||
),
|
||||
})))
|
||||
}
|
||||
pub fn systems(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() }
|
||||
}
|
||||
|
||||
pub struct SystemCtor {
|
||||
decl: api::SystemDecl,
|
||||
ext: Weak<ExtensionData>,
|
||||
}
|
||||
impl SystemCtor {
|
||||
pub fn name(&self) -> &str { &self.decl.name }
|
||||
pub fn priority(&self) -> NotNan<f64> { self.decl.priority }
|
||||
pub fn depends(&self) -> impl ExactSizeIterator<Item = &str> {
|
||||
self.decl.depends.iter().map(|s| &**s)
|
||||
pub(crate) fn reqnot(&self) -> &ReqNot<HostMsgSet> { &self.0.reqnot }
|
||||
pub fn ctx(&self) -> &Ctx { &self.0.ctx }
|
||||
pub fn logger(&self) -> &Logger { &self.0.logger }
|
||||
pub fn system_ctors(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() }
|
||||
pub fn exprs(&self) -> &ExprStore { &self.0.exprs }
|
||||
pub async fn is_own_sys(&self, id: api::SysId) -> bool {
|
||||
let sys = self.ctx().system_inst(id).await.expect("invalid sender sys id");
|
||||
Rc::ptr_eq(&self.0, &sys.ext().0)
|
||||
}
|
||||
pub fn run<'a>(&self, depends: impl IntoIterator<Item = &'a System>) -> System {
|
||||
let mut inst_g = SYSTEM_INSTS.write().unwrap();
|
||||
let depends = depends.into_iter().map(|si| si.id()).collect_vec();
|
||||
debug_assert_eq!(depends.len(), self.decl.depends.len(), "Wrong number of deps provided");
|
||||
let ext = self.ext.upgrade().expect("SystemCtor should be freed before Extension");
|
||||
static NEXT_ID: AtomicU16 = AtomicU16::new(1);
|
||||
let id =
|
||||
api::SysId(NonZero::new(NEXT_ID.fetch_add(1, Ordering::Relaxed)).expect("next_id wrapped"));
|
||||
let sys_inst = ext.reqnot.request(api::NewSystem { depends, id, system: self.decl.id });
|
||||
let data = System(Arc::new(SystemInstData {
|
||||
decl_id: self.decl.id,
|
||||
ext: Extension(ext),
|
||||
exprs: RwLock::default(),
|
||||
lex_filter: sys_inst.lex_filter,
|
||||
const_root: OnceLock::new(),
|
||||
line_types: sys_inst.line_types.into_iter().map(Tok::from_api).collect(),
|
||||
id,
|
||||
}));
|
||||
let root = (sys_inst.const_root.into_iter())
|
||||
.map(|(k, v)| {
|
||||
Member::from_api(
|
||||
api::Member { name: k, kind: v },
|
||||
Substack::Bottom.push(Tok::from_api(k)),
|
||||
&data,
|
||||
)
|
||||
})
|
||||
.collect_vec();
|
||||
data.0.const_root.set(root).unwrap();
|
||||
inst_g.insert(id, data.clone());
|
||||
data
|
||||
pub async fn assert_own_sys(&self, id: api::SysId) {
|
||||
assert!(self.is_own_sys(id).await, "Incoming message impersonates separate system");
|
||||
}
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
static ref SYSTEM_INSTS: RwLock<HashMap<api::SysId, System>> = RwLock::default();
|
||||
static ref LEX_RECUR: Mutex<HashMap<api::ParsId, SyncSender<ReqPair<api::SubLex>>>> =
|
||||
Mutex::default();
|
||||
}
|
||||
|
||||
pub struct ReqPair<R: Request>(R, pub SyncSender<R::Response>);
|
||||
|
||||
#[derive(destructure)]
|
||||
pub struct SystemInstData {
|
||||
exprs: RwLock<HashMap<api::ExprTicket, (AtomicU32, Expr)>>,
|
||||
ext: Extension,
|
||||
decl_id: api::SysDeclId,
|
||||
lex_filter: api::CharFilter,
|
||||
id: api::SysId,
|
||||
const_root: OnceLock<Vec<Member>>,
|
||||
line_types: Vec<Tok<String>>,
|
||||
}
|
||||
impl Drop for SystemInstData {
|
||||
fn drop(&mut self) {
|
||||
self.ext.0.reqnot.notify(api::SystemDrop(self.id));
|
||||
if let Ok(mut g) = SYSTEM_INSTS.write() {
|
||||
g.remove(&self.id);
|
||||
}
|
||||
pub fn next_pars(&self) -> NonZeroU64 {
|
||||
let mut next_pars = self.0.next_pars.borrow_mut();
|
||||
*next_pars = next_pars.checked_add(1).unwrap_or(NonZeroU64::new(1).unwrap());
|
||||
*next_pars
|
||||
}
|
||||
}
|
||||
#[derive(Clone)]
|
||||
pub struct System(Arc<SystemInstData>);
|
||||
impl System {
|
||||
pub fn id(&self) -> api::SysId { self.id }
|
||||
fn resolve(id: api::SysId) -> Option<System> { SYSTEM_INSTS.read().unwrap().get(&id).cloned() }
|
||||
fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { &self.0.ext.0.reqnot }
|
||||
fn give_expr(&self, ticket: api::ExprTicket, get_expr: impl FnOnce() -> Expr) -> api::ExprTicket {
|
||||
match self.0.exprs.write().unwrap().entry(ticket) {
|
||||
Entry::Occupied(mut oe) => {
|
||||
oe.get_mut().0.fetch_add(1, Ordering::Relaxed);
|
||||
},
|
||||
Entry::Vacant(v) => {
|
||||
v.insert((AtomicU32::new(1), get_expr()));
|
||||
},
|
||||
}
|
||||
ticket
|
||||
}
|
||||
pub fn get_tree(&self, id: api::TreeId) -> api::MemberKind {
|
||||
self.reqnot().request(api::GetMember(self.0.id, id))
|
||||
}
|
||||
pub fn has_lexer(&self) -> bool { !self.0.lex_filter.0.is_empty() }
|
||||
pub fn can_lex(&self, c: char) -> bool { char_filter_match(&self.0.lex_filter, c) }
|
||||
/// Have this system lex a part of the source. It is assumed that
|
||||
/// [Self::can_lex] was called and returned true.
|
||||
pub fn lex(
|
||||
pub(crate) async fn lex_req<F: Future<Output = Option<api::SubLexed>>>(
|
||||
&self,
|
||||
source: Tok<String>,
|
||||
pos: u32,
|
||||
mut r: impl FnMut(u32) -> Option<api::SubLexed> + Send,
|
||||
sys: api::SysId,
|
||||
mut r: impl FnMut(u32) -> F,
|
||||
) -> api::OrcResult<Option<api::LexedExpr>> {
|
||||
// get unique lex ID
|
||||
static LEX_ID: AtomicU64 = AtomicU64::new(1);
|
||||
let id = api::ParsId(NonZero::new(LEX_ID.fetch_add(1, Ordering::Relaxed)).unwrap());
|
||||
thread::scope(|s| {
|
||||
// create and register channel
|
||||
let (req_in, req_out) = sync_channel(0);
|
||||
LEX_RECUR.lock().unwrap().insert(id, req_in); // LEX_RECUR released
|
||||
// spawn recursion handler which will exit when the sender is collected
|
||||
s.spawn(move || {
|
||||
while let Ok(ReqPair(sublex, rep_in)) = req_out.recv() {
|
||||
rep_in.send(r(sublex.pos)).unwrap()
|
||||
}
|
||||
});
|
||||
// Pass control to extension
|
||||
let ret =
|
||||
self.reqnot().request(api::LexExpr { id, pos, sys: self.id(), text: source.to_api() });
|
||||
// collect sender to unblock recursion handler thread before returning
|
||||
LEX_RECUR.lock().unwrap().remove(&id);
|
||||
ret.transpose()
|
||||
}) // exit recursion handler thread
|
||||
}
|
||||
pub fn can_parse(&self, line_type: Tok<String>) -> bool { self.line_types.contains(&line_type) }
|
||||
pub fn line_types(&self) -> impl Iterator<Item = Tok<String>> + '_ {
|
||||
self.line_types.iter().cloned()
|
||||
}
|
||||
pub fn parse(
|
||||
&self,
|
||||
line: Vec<ParsTokTree>,
|
||||
exported: bool,
|
||||
comments: Vec<Comment>,
|
||||
) -> OrcRes<Vec<ParsTokTree>> {
|
||||
let line = line.iter().map(|t| t.to_api(&mut |n, _| match *n {})).collect_vec();
|
||||
let comments = comments.iter().map(Comment::to_api).collect_vec();
|
||||
let parsed =
|
||||
(self.reqnot().request(api::ParseLine { exported, sys: self.id(), comments, line }))
|
||||
.map_err(|e| OrcErrv::from_api(&e))?;
|
||||
Ok(ttv_from_api(parsed, &mut ()))
|
||||
}
|
||||
pub fn request(&self, req: Vec<u8>) -> Vec<u8> {
|
||||
self.reqnot().request(api::SysFwded(self.id(), req))
|
||||
}
|
||||
}
|
||||
impl fmt::Debug for System {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let ctor = (self.0.ext.0.systems.iter().find(|c| c.decl.id == self.0.decl_id))
|
||||
.expect("System instance with no associated constructor");
|
||||
write!(f, "System({} @ {} #{})", ctor.decl.name, ctor.decl.priority, self.0.id.0)?;
|
||||
match self.0.exprs.read() {
|
||||
Err(_) => write!(f, "expressions unavailable"),
|
||||
Ok(r) => {
|
||||
let rc: u32 = r.values().map(|v| v.0.load(Ordering::Relaxed)).sum();
|
||||
write!(f, "{rc} refs to {} exprs", r.len())
|
||||
let id = api::ParsId(self.next_pars());
|
||||
// create and register channel
|
||||
let (req_in, req_out) = channel::bounded(0);
|
||||
self.0.lex_recur.lock().await.insert(id, req_in); // lex_recur released
|
||||
let (ret, ()) = join(
|
||||
async {
|
||||
let res =
|
||||
(self.reqnot()).request(api::LexExpr { id, pos, sys, text: source.to_api() }).await;
|
||||
// collect sender to unblock recursion handler branch before returning
|
||||
self.0.lex_recur.lock().await.remove(&id);
|
||||
res
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
impl Deref for System {
|
||||
type Target = SystemInstData;
|
||||
fn deref(&self) -> &Self::Target { self.0.as_ref() }
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum SysResolvErr {
|
||||
Loop(Vec<String>),
|
||||
Missing(String),
|
||||
}
|
||||
|
||||
pub fn init_systems(tgts: &[String], exts: &[Extension]) -> Result<Vec<System>, SysResolvErr> {
|
||||
let mut to_load = HashMap::<&str, &SystemCtor>::new();
|
||||
let mut to_find = tgts.iter().map(|s| s.as_str()).collect::<VecDeque<&str>>();
|
||||
while let Some(target) = to_find.pop_front() {
|
||||
if to_load.contains_key(target) {
|
||||
continue;
|
||||
}
|
||||
let ctor = (exts.iter())
|
||||
.flat_map(|e| e.systems().filter(|c| c.decl.name == target))
|
||||
.max_by_key(|c| c.decl.priority)
|
||||
.ok_or_else(|| SysResolvErr::Missing(target.to_string()))?;
|
||||
to_load.insert(target, ctor);
|
||||
to_find.extend(ctor.decl.depends.iter().map(|s| s.as_str()));
|
||||
}
|
||||
let mut to_load_ordered = Vec::new();
|
||||
fn walk_deps<'a>(
|
||||
graph: &mut HashMap<&str, &'a SystemCtor>,
|
||||
list: &mut Vec<&'a SystemCtor>,
|
||||
chain: Stackframe<&str>,
|
||||
) -> Result<(), SysResolvErr> {
|
||||
if let Some(ctor) = graph.remove(chain.item) {
|
||||
// if the above is none, the system is already queued. Missing systems are
|
||||
// detected above
|
||||
for dep in ctor.decl.depends.iter() {
|
||||
if Substack::Frame(chain).iter().any(|c| c == dep) {
|
||||
let mut circle = vec![dep.to_string()];
|
||||
circle.extend(Substack::Frame(chain).iter().map(|s| s.to_string()));
|
||||
return Err(SysResolvErr::Loop(circle));
|
||||
async {
|
||||
while let Ok(ReqPair(sublex, rep_in)) = req_out.recv().await {
|
||||
(rep_in.send(r(sublex.pos).await).await)
|
||||
.expect("Response channel dropped while request pending")
|
||||
}
|
||||
walk_deps(graph, list, Substack::Frame(chain).new_frame(dep))?
|
||||
}
|
||||
list.push(ctor);
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
.await;
|
||||
ret.transpose()
|
||||
}
|
||||
for tgt in tgts {
|
||||
walk_deps(&mut to_load, &mut to_load_ordered, Substack::Bottom.new_frame(tgt))?;
|
||||
pub async fn recv_one(&self) {
|
||||
let reqnot = self.0.reqnot.clone();
|
||||
self
|
||||
.0
|
||||
.init
|
||||
.recv(Box::new(move |msg| async move { reqnot.receive(msg).await }.boxed_local()))
|
||||
.await;
|
||||
}
|
||||
let mut systems = HashMap::<&str, System>::new();
|
||||
for ctor in to_load_ordered.iter() {
|
||||
let sys = ctor.run(ctor.depends().map(|n| &systems[n]));
|
||||
systems.insert(ctor.name(), sys);
|
||||
pub fn system_drop(&self, id: api::SysId) {
|
||||
let rc = self.clone();
|
||||
(self.ctx().spawn.spawn_local(async move {
|
||||
rc.reqnot().notify(api::SystemDrop(id)).await;
|
||||
rc.ctx().systems.write().await.remove(&id);
|
||||
}))
|
||||
.expect("Failed to drop system!");
|
||||
}
|
||||
Ok(systems.into_values().collect_vec())
|
||||
pub fn downgrade(&self) -> WeakExtension { WeakExtension(Rc::downgrade(&self.0)) }
|
||||
}
|
||||
|
||||
pub struct WeakExtension(Weak<ExtensionData>);
|
||||
impl WeakExtension {
|
||||
pub fn upgrade(&self) -> Option<Extension> { self.0.upgrade().map(Extension) }
|
||||
}
|
||||
|
||||
@@ -1,18 +1,22 @@
|
||||
use std::num::NonZeroU64;
|
||||
use std::sync::Arc;
|
||||
|
||||
use async_std::sync::Mutex;
|
||||
use futures::FutureExt;
|
||||
use hashbrown::HashMap;
|
||||
use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
|
||||
use orchid_base::interner::{Tok, intern};
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::match_mapping;
|
||||
use orchid_base::number::{num_to_err, parse_num};
|
||||
use orchid_base::parse::{name_char, name_start, op_char, unrep_space};
|
||||
use orchid_base::tokens::PARENS;
|
||||
use orchid_base::tree::Ph;
|
||||
use orchid_base::{intern, match_mapping};
|
||||
use orchid_base::tree::{AtomRepr, Ph};
|
||||
|
||||
use crate::api;
|
||||
use crate::extension::{AtomHand, System};
|
||||
use crate::atom::AtomHand;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::system::System;
|
||||
use crate::tree::{ParsTok, ParsTokTree};
|
||||
|
||||
pub struct LexCtx<'a> {
|
||||
@@ -20,6 +24,7 @@ pub struct LexCtx<'a> {
|
||||
pub source: &'a Tok<String>,
|
||||
pub tail: &'a str,
|
||||
pub sub_trees: &'a mut HashMap<api::TreeTicket, ParsTokTree>,
|
||||
pub ctx: &'a Ctx,
|
||||
}
|
||||
impl<'a> LexCtx<'a> {
|
||||
pub fn push<'b>(&'b mut self, pos: u32) -> LexCtx<'b>
|
||||
@@ -29,6 +34,7 @@ impl<'a> LexCtx<'a> {
|
||||
tail: &self.source[pos as usize..],
|
||||
systems: self.systems,
|
||||
sub_trees: &mut *self.sub_trees,
|
||||
ctx: &self.ctx,
|
||||
}
|
||||
}
|
||||
pub fn get_pos(&self) -> u32 { self.end_pos() - self.tail.len() as u32 }
|
||||
@@ -70,7 +76,7 @@ impl<'a> LexCtx<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
|
||||
pub async fn lex_once<'a>(ctx: &mut LexCtx<'a>) -> OrcRes<ParsTokTree> {
|
||||
let start = ctx.get_pos();
|
||||
assert!(
|
||||
!ctx.tail.is_empty() && !ctx.tail.starts_with(unrep_space),
|
||||
@@ -82,11 +88,13 @@ pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
|
||||
} else if ctx.strip_prefix("::") {
|
||||
ParsTok::NS
|
||||
} else if ctx.strip_prefix("--[") {
|
||||
let (cmt, tail) = ctx.tail.split_once("]--").ok_or_else(|| {
|
||||
mk_errv(intern!(str: "Unterminated block comment"), "This block comment has no ending ]--", [
|
||||
Pos::Range(start..start + 3).into(),
|
||||
])
|
||||
})?;
|
||||
let Some((cmt, tail)) = ctx.tail.split_once("]--") else {
|
||||
return Err(mk_errv(
|
||||
ctx.ctx.i.i("Unterminated block comment").await,
|
||||
"This block comment has no ending ]--",
|
||||
[Pos::Range(start..start + 3).into()],
|
||||
));
|
||||
};
|
||||
ctx.set_tail(tail);
|
||||
ParsTok::Comment(Arc::new(cmt.to_string()))
|
||||
} else if let Some(tail) = ctx.tail.strip_prefix("--").filter(|t| !t.starts_with(op_char)) {
|
||||
@@ -99,12 +107,12 @@ pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
|
||||
while !ctx.strip_char('.') {
|
||||
if ctx.tail.is_empty() {
|
||||
return Err(mk_errv(
|
||||
intern!(str: "Unclosed lambda"),
|
||||
ctx.ctx.i.i("Unclosed lambda").await,
|
||||
"Lambdae started with \\ should separate arguments from body with .",
|
||||
[Pos::Range(start..start + 1).into()],
|
||||
));
|
||||
}
|
||||
arg.push(lex_once(ctx)?);
|
||||
arg.push(lex_once(ctx).boxed_local().await?);
|
||||
ctx.trim_ws();
|
||||
}
|
||||
ParsTok::LambdaHead(arg)
|
||||
@@ -114,12 +122,12 @@ pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
|
||||
while !ctx.strip_char(*rp) {
|
||||
if ctx.tail.is_empty() {
|
||||
return Err(mk_errv(
|
||||
intern!(str: "unclosed paren"),
|
||||
ctx.ctx.i.i("unclosed paren").await,
|
||||
format!("this {lp} has no matching {rp}"),
|
||||
[Pos::Range(start..start + 1).into()],
|
||||
));
|
||||
}
|
||||
body.push(lex_once(ctx)?);
|
||||
body.push(lex_once(ctx).boxed_local().await?);
|
||||
ctx.trim_ws();
|
||||
}
|
||||
ParsTok::S(*paren, body)
|
||||
@@ -130,8 +138,10 @@ pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
|
||||
if ctx.strip_char('(') {
|
||||
let pos = ctx.get_pos();
|
||||
let numstr = ctx.get_start_matches(|x| x != ')').trim();
|
||||
let num = parse_num(numstr).map_err(|e| num_to_err(e, pos))?;
|
||||
ParsTok::Macro(Some(num.to_f64()))
|
||||
match parse_num(numstr) {
|
||||
Ok(num) => ParsTok::Macro(Some(num.to_f64())),
|
||||
Err(e) => return Err(num_to_err(e, pos, &*ctx.ctx.i).await.into()),
|
||||
}
|
||||
} else {
|
||||
ParsTok::Macro(None)
|
||||
}
|
||||
@@ -139,17 +149,26 @@ pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
|
||||
for sys in ctx.systems {
|
||||
let mut errors = Vec::new();
|
||||
if ctx.tail.starts_with(|c| sys.can_lex(c)) {
|
||||
let lx =
|
||||
sys.lex(ctx.source.clone(), ctx.get_pos(), |pos| match lex_once(&mut ctx.push(pos)) {
|
||||
Ok(t) => Some(api::SubLexed { pos, ticket: ctx.add_subtree(t) }),
|
||||
Err(e) => {
|
||||
errors.push(e);
|
||||
None
|
||||
},
|
||||
});
|
||||
let (source, pos) = (ctx.source.clone(), ctx.get_pos());
|
||||
let ctx_lck = &Mutex::new(&mut *ctx);
|
||||
let errors_lck = &Mutex::new(&mut errors);
|
||||
let lx = sys
|
||||
.lex(source, pos, |pos| async move {
|
||||
match lex_once(&mut ctx_lck.lock().await.push(pos)).boxed_local().await {
|
||||
Ok(t) => Some(api::SubLexed { pos, ticket: ctx_lck.lock().await.add_subtree(t) }),
|
||||
Err(e) => {
|
||||
errors_lck.lock().await.push(e);
|
||||
None
|
||||
},
|
||||
}
|
||||
})
|
||||
.await;
|
||||
match lx {
|
||||
Err(e) => return Err(errors.into_iter().fold(OrcErrv::from_api(&e), |a, b| a + b)),
|
||||
Ok(Some(lexed)) => return Ok(tt_to_owned(&lexed.expr, &mut ctx.push(lexed.pos))),
|
||||
Err(e) =>
|
||||
return Err(
|
||||
errors.into_iter().fold(OrcErrv::from_api(&e, &*ctx.ctx.i).await, |a, b| a + b),
|
||||
),
|
||||
Ok(Some(lexed)) => return Ok(tt_to_owned(&lexed.expr, &mut ctx.push(lexed.pos)).await),
|
||||
Ok(None) => match errors.into_iter().reduce(|a, b| a + b) {
|
||||
Some(errors) => return Err(errors),
|
||||
None => continue,
|
||||
@@ -158,12 +177,12 @@ pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
|
||||
}
|
||||
}
|
||||
if ctx.tail.starts_with(name_start) {
|
||||
ParsTok::Name(intern(ctx.get_start_matches(name_char)))
|
||||
ParsTok::Name(ctx.ctx.i.i(ctx.get_start_matches(name_char)).await)
|
||||
} else if ctx.tail.starts_with(op_char) {
|
||||
ParsTok::Name(intern(ctx.get_start_matches(op_char)))
|
||||
ParsTok::Name(ctx.ctx.i.i(ctx.get_start_matches(op_char)).await)
|
||||
} else {
|
||||
return Err(mk_errv(
|
||||
intern!(str: "Unrecognized character"),
|
||||
ctx.ctx.i.i("Unrecognized character").await,
|
||||
"The following syntax is meaningless.",
|
||||
[Pos::Range(start..start + 1).into()],
|
||||
));
|
||||
@@ -172,16 +191,18 @@ pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
|
||||
Ok(ParsTokTree { tok, range: start..ctx.get_pos() })
|
||||
}
|
||||
|
||||
fn tt_to_owned(api: &api::TokenTree, ctx: &mut LexCtx<'_>) -> ParsTokTree {
|
||||
async fn tt_to_owned(api: &api::TokenTree, ctx: &mut LexCtx<'_>) -> ParsTokTree {
|
||||
let tok = match_mapping!(&api.token, api::Token => ParsTok {
|
||||
Atom(atom => AtomHand::from_api(atom.clone())),
|
||||
Bottom(err => OrcErrv::from_api(err)),
|
||||
LambdaHead(arg => ttv_to_owned(arg, ctx)),
|
||||
Name(name => Tok::from_api(*name)),
|
||||
S(p.clone(), b.iter().map(|t| tt_to_owned(t, ctx)).collect()),
|
||||
Atom(atom =>
|
||||
AtomHand::from_api(atom, Pos::Range(api.range.clone()), &mut ctx.ctx.clone()).await
|
||||
),
|
||||
Bottom(err => OrcErrv::from_api(err, &*ctx.ctx.i).await),
|
||||
LambdaHead(arg => ttv_to_owned(arg, ctx).boxed_local().await),
|
||||
Name(name => Tok::from_api(*name, &*ctx.ctx.i).await),
|
||||
S(p.clone(), b => ttv_to_owned(b, ctx).boxed_local().await),
|
||||
BR, NS,
|
||||
Comment(c.clone()),
|
||||
Ph(ph => Ph::from_api(ph)),
|
||||
Ph(ph => Ph::from_api(ph, &*ctx.ctx.i).await),
|
||||
Macro(*prio),
|
||||
} {
|
||||
api::Token::Slot(id) => return ctx.rm_subtree(*id),
|
||||
@@ -189,20 +210,24 @@ fn tt_to_owned(api: &api::TokenTree, ctx: &mut LexCtx<'_>) -> ParsTokTree {
|
||||
ParsTokTree { range: api.range.clone(), tok }
|
||||
}
|
||||
|
||||
fn ttv_to_owned<'a>(
|
||||
async fn ttv_to_owned<'a>(
|
||||
api: impl IntoIterator<Item = &'a api::TokenTree>,
|
||||
ctx: &mut LexCtx<'_>,
|
||||
) -> Vec<ParsTokTree> {
|
||||
api.into_iter().map(|t| tt_to_owned(t, ctx)).collect()
|
||||
let mut out = Vec::new();
|
||||
for tt in api {
|
||||
out.push(tt_to_owned(&tt, ctx).await)
|
||||
}
|
||||
out
|
||||
}
|
||||
|
||||
pub fn lex(text: Tok<String>, systems: &[System]) -> OrcRes<Vec<ParsTokTree>> {
|
||||
pub async fn lex(text: Tok<String>, systems: &[System], ctx: &Ctx) -> OrcRes<Vec<ParsTokTree>> {
|
||||
let mut sub_trees = HashMap::new();
|
||||
let mut ctx = LexCtx { source: &text, sub_trees: &mut sub_trees, tail: &text[..], systems };
|
||||
let mut ctx = LexCtx { source: &text, sub_trees: &mut sub_trees, tail: &text[..], systems, ctx };
|
||||
let mut tokv = Vec::new();
|
||||
ctx.trim(unrep_space);
|
||||
while !ctx.tail.is_empty() {
|
||||
tokv.push(lex_once(&mut ctx)?);
|
||||
tokv.push(lex_once(&mut ctx).await?);
|
||||
ctx.trim(unrep_space);
|
||||
}
|
||||
Ok(tokv)
|
||||
|
||||
@@ -1,11 +1,14 @@
|
||||
use orchid_api as api;
|
||||
|
||||
pub mod child;
|
||||
pub mod atom;
|
||||
pub mod ctx;
|
||||
pub mod expr;
|
||||
pub mod expr_store;
|
||||
pub mod extension;
|
||||
pub mod lex;
|
||||
pub mod macros;
|
||||
pub mod parse;
|
||||
pub mod rule;
|
||||
pub mod subprocess;
|
||||
pub mod system;
|
||||
pub mod tree;
|
||||
|
||||
@@ -1,14 +1,16 @@
|
||||
use std::sync::{Arc, RwLock};
|
||||
use std::rc::Rc;
|
||||
|
||||
use async_std::sync::RwLock;
|
||||
use futures::FutureExt;
|
||||
use hashbrown::{HashMap, HashSet};
|
||||
use itertools::Itertools;
|
||||
use lazy_static::lazy_static;
|
||||
use orchid_base::interner::Interner;
|
||||
use orchid_base::macros::{MTok, MTree, mtreev_from_api, mtreev_to_api};
|
||||
use orchid_base::name::Sym;
|
||||
use trait_set::trait_set;
|
||||
|
||||
use crate::api;
|
||||
use crate::extension::AtomHand;
|
||||
use crate::atom::AtomHand;
|
||||
use crate::rule::matcher::{NamedMatcher, PriodMatcher};
|
||||
use crate::rule::state::MatchState;
|
||||
use crate::tree::Code;
|
||||
@@ -17,38 +19,41 @@ pub type MacTok = MTok<'static, AtomHand>;
|
||||
pub type MacTree = MTree<'static, AtomHand>;
|
||||
|
||||
trait_set! {
|
||||
trait MacroCB = Fn(Vec<MacTree>) -> Option<Vec<MacTree>> + Send + Sync;
|
||||
trait MacroCB = Fn(Vec<MacTree>) -> Option<Vec<MacTree>>;
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
static ref RECURSION: RwLock<HashMap<api::ParsId, Box<dyn MacroCB>>> = RwLock::default();
|
||||
static ref MACRO_SLOTS: RwLock<HashMap<api::ParsId, HashMap<api::MacroTreeId, Arc<MacTok>>>> =
|
||||
thread_local! {
|
||||
static RECURSION: RwLock<HashMap<api::ParsId, Box<dyn MacroCB>>> = RwLock::default();
|
||||
static MACRO_SLOTS: RwLock<HashMap<api::ParsId, HashMap<api::MacroTreeId, Rc<MacTok>>>> =
|
||||
RwLock::default();
|
||||
}
|
||||
|
||||
pub fn macro_recur(run_id: api::ParsId, input: Vec<MacTree>) -> Option<Vec<MacTree>> {
|
||||
pub async fn macro_recur(run_id: api::ParsId, input: Vec<MacTree>) -> Option<Vec<MacTree>> {
|
||||
(RECURSION.read().unwrap()[&run_id])(input)
|
||||
}
|
||||
|
||||
pub fn macro_treev_to_api(run_id: api::ParsId, mtree: Vec<MacTree>) -> Vec<api::MacroTree> {
|
||||
pub async fn macro_treev_to_api(run_id: api::ParsId, mtree: Vec<MacTree>) -> Vec<api::MacroTree> {
|
||||
let mut g = MACRO_SLOTS.write().unwrap();
|
||||
let run_cache = g.get_mut(&run_id).expect("Parser run not found");
|
||||
mtreev_to_api(&mtree, &mut |a: &AtomHand| {
|
||||
let id = api::MacroTreeId((run_cache.len() as u64 + 1).try_into().unwrap());
|
||||
run_cache.insert(id, Arc::new(MacTok::Atom(a.clone())));
|
||||
run_cache.insert(id, Rc::new(MacTok::Atom(a.clone())));
|
||||
api::MacroToken::Slot(id)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn macro_treev_from_api(api: Vec<api::MacroTree>) -> Vec<MacTree> {
|
||||
mtreev_from_api(&api, &mut |atom| MacTok::Atom(AtomHand::from_api(atom.clone())))
|
||||
pub async fn macro_treev_from_api(api: Vec<api::MacroTree>, i: &Interner) -> Vec<MacTree> {
|
||||
mtreev_from_api(&api, i, &mut |atom| {
|
||||
async { MacTok::Atom(AtomHand::from_api(atom.clone())) }.boxed_local()
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub fn deslot_macro(run_id: api::ParsId, tree: &[MacTree]) -> Option<Vec<MacTree>> {
|
||||
let mut slots = (MACRO_SLOTS.write().unwrap()).remove(&run_id).expect("Run not found");
|
||||
return work(&mut slots, tree);
|
||||
fn work(
|
||||
slots: &mut HashMap<api::MacroTreeId, Arc<MacTok>>,
|
||||
slots: &mut HashMap<api::MacroTreeId, Rc<MacTok>>,
|
||||
tree: &[MacTree],
|
||||
) -> Option<Vec<MacTree>> {
|
||||
let items = (tree.iter())
|
||||
@@ -59,8 +64,8 @@ pub fn deslot_macro(run_id: api::ParsId, tree: &[MacTree]) -> Option<Vec<MacTree
|
||||
MacTok::Ref(_) => panic!("Ref is an extension-local optimization"),
|
||||
MacTok::Done(_) => panic!("Created and removed by matcher"),
|
||||
MacTok::Slot(slot) => slots.get(&slot.id()).expect("Slot not found").clone(),
|
||||
MacTok::S(paren, b) => Arc::new(MacTok::S(*paren, work(slots, b)?)),
|
||||
MacTok::Lambda(a, b) => Arc::new(match (work(slots, a), work(slots, b)) {
|
||||
MacTok::S(paren, b) => Rc::new(MacTok::S(*paren, work(slots, b)?)),
|
||||
MacTok::Lambda(a, b) => Rc::new(match (work(slots, a), work(slots, b)) {
|
||||
(None, None) => return None,
|
||||
(Some(a), None) => MacTok::Lambda(a, b.clone()),
|
||||
(None, Some(b)) => MacTok::Lambda(a.clone(), b),
|
||||
@@ -92,7 +97,7 @@ pub struct MacroRepo {
|
||||
impl MacroRepo {
|
||||
/// TODO: the recursion inside this function needs to be moved into Orchid.
|
||||
/// See the markdown note
|
||||
pub fn process_exprv(&self, target: &[MacTree]) -> Option<Vec<MacTree>> {
|
||||
pub fn process_exprv(&self, target: &[MacTree], i: &Interner) -> Option<Vec<MacTree>> {
|
||||
let mut workcp = target.to_vec();
|
||||
let mut lexicon;
|
||||
|
||||
@@ -100,24 +105,25 @@ impl MacroRepo {
|
||||
lexicon = HashSet::new();
|
||||
target.iter().for_each(|tgt| fill_lexicon(tgt, &mut lexicon));
|
||||
|
||||
for (i, tree) in workcp.iter().enumerate() {
|
||||
for (idx, tree) in workcp.iter().enumerate() {
|
||||
let MacTok::Name(name) = &*tree.tok else { continue };
|
||||
let matches = (self.named.get(name).into_iter().flatten())
|
||||
.filter(|m| m.deps.is_subset(&lexicon))
|
||||
.filter_map(|mac| {
|
||||
mac.cases.iter().find_map(|cas| cas.0.apply(&workcp[i..], |_| false).map(|s| (cas, s)))
|
||||
(mac.cases.iter())
|
||||
.find_map(|cas| cas.0.apply(&workcp[idx..], i, |_| false).map(|s| (cas, s)))
|
||||
})
|
||||
.collect_vec();
|
||||
assert!(
|
||||
matches.len() < 2,
|
||||
"Multiple conflicting matches on {:?}: {:?}",
|
||||
&workcp[i..],
|
||||
&workcp[idx..],
|
||||
matches
|
||||
);
|
||||
let Some((case, (state, tail))) = matches.into_iter().next() else { continue };
|
||||
let inj = (run_body(&case.1, state).into_iter())
|
||||
.map(|MacTree { pos, tok }| MacTree { pos, tok: Arc::new(MacTok::Done(tok)) });
|
||||
workcp.splice(i..(workcp.len() - tail.len()), inj);
|
||||
.map(|MacTree { pos, tok }| MacTree { pos, tok: Rc::new(MacTok::Done(tok)) });
|
||||
workcp.splice(idx..(workcp.len() - tail.len()), inj);
|
||||
continue 'try_named;
|
||||
}
|
||||
break;
|
||||
@@ -133,13 +139,14 @@ impl MacroRepo {
|
||||
|
||||
let results = (workcp.into_iter())
|
||||
.map(|mt| match &*mt.tok {
|
||||
MTok::S(p, body) => self.process_exprv(body).map(|body| MTok::S(*p, body).at(mt.pos)),
|
||||
MTok::Lambda(arg, body) => match (self.process_exprv(arg), self.process_exprv(body)) {
|
||||
(Some(arg), Some(body)) => Some(MTok::Lambda(arg, body).at(mt.pos)),
|
||||
(Some(arg), None) => Some(MTok::Lambda(arg, body.to_vec()).at(mt.pos)),
|
||||
(None, Some(body)) => Some(MTok::Lambda(arg.to_vec(), body).at(mt.pos)),
|
||||
(None, None) => None,
|
||||
},
|
||||
MTok::S(p, body) => self.process_exprv(body, i).map(|body| MTok::S(*p, body).at(mt.pos)),
|
||||
MTok::Lambda(arg, body) =>
|
||||
match (self.process_exprv(arg, i), self.process_exprv(body, i)) {
|
||||
(Some(arg), Some(body)) => Some(MTok::Lambda(arg, body).at(mt.pos)),
|
||||
(Some(arg), None) => Some(MTok::Lambda(arg, body.to_vec()).at(mt.pos)),
|
||||
(None, Some(body)) => Some(MTok::Lambda(arg.to_vec(), body).at(mt.pos)),
|
||||
(None, None) => None,
|
||||
},
|
||||
_ => None,
|
||||
})
|
||||
.collect_vec();
|
||||
@@ -169,6 +176,6 @@ fn run_body(body: &Code, mut state: MatchState<'_>) -> Vec<MacTree> {
|
||||
let inject: Vec<MacTree> = todo!("Call the interpreter with bindings");
|
||||
inject
|
||||
.into_iter()
|
||||
.map(|MTree { pos, tok }| MTree { pos, tok: Arc::new(MTok::Done(tok)) })
|
||||
.map(|MTree { pos, tok }| MTree { pos, tok: Rc::new(MTok::Done(tok)) })
|
||||
.collect_vec()
|
||||
}

@@ -1,10 +1,10 @@
use std::sync::Arc;
use std::{iter, thread};
use std::rc::Rc;

use futures::FutureExt;
use futures::future::join_all;
use itertools::Itertools;
use never::Never;
use orchid_base::error::{OrcErrv, OrcRes, Reporter, mk_err, mk_errv};
use orchid_base::intern;
use orchid_base::interner::Tok;
use orchid_base::location::Pos;
use orchid_base::macros::{MTok, MTree};
@@ -16,8 +16,9 @@ use orchid_base::parse::{
use orchid_base::tree::{Paren, TokTree, Token};
use substack::Substack;

use crate::extension::{AtomHand, System};
use crate::atom::AtomHand;
use crate::macros::MacTree;
use crate::system::System;
use crate::tree::{
  Code, CodeLocator, Item, ItemKind, Member, MemberKind, Module, ParsTokTree, Rule, RuleKind,
};
@@ -29,111 +30,104 @@ pub trait ParseCtx: Send + Sync {
  fn reporter(&self) -> &impl Reporter;
}

pub fn parse_items(
pub async fn parse_items(
  ctx: &impl ParseCtx,
  path: Substack<Tok<String>>,
  items: ParsSnippet,
  path: Substack<'_, Tok<String>>,
  items: ParsSnippet<'_>,
) -> OrcRes<Vec<Item>> {
  let lines = line_items(items);
  let mut ok = iter::from_fn(|| None).take(lines.len()).collect_vec();
  thread::scope(|s| {
    let mut threads = Vec::new();
    for (slot, Parsed { output: cmts, tail }) in ok.iter_mut().zip(lines.into_iter()) {
      let path = &path;
      threads.push(s.spawn(move || {
        *slot = Some(parse_item(ctx, path.clone(), cmts, tail)?);
        Ok::<(), OrcErrv>(())
      }))
    }
    for t in threads {
      t.join().unwrap().err().into_iter().flatten().for_each(|e| ctx.reporter().report(e))
    }
  });
  Ok(ok.into_iter().flatten().flatten().collect_vec())
  let lines = line_items(items).await;
  let line_res =
    join_all(lines.into_iter().map(|p| parse_item(ctx, path.clone(), p.output, p.tail))).await;
  Ok(line_res.into_iter().flat_map(|l| l.ok().into_iter().flatten()).collect())
}
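The hunk above replaces a thread-scoped worker pool with join_all over futures. A small self-contained sketch of that pattern, assuming only the futures crate; the names and the toy parser are illustrative, not the crate's API.

use futures::executor::block_on;
use futures::future::join_all;

// Each "line" is parsed independently; failures are dropped, mirroring how
// parse_items flattens Ok results and discards Err ones after reporting.
async fn parse_line(line: &str) -> Result<usize, String> {
  if line.is_empty() { Err("empty line".to_string()) } else { Ok(line.len()) }
}

fn main() {
  let lines = vec!["const", "", "mod foo"];
  let results = block_on(join_all(lines.iter().copied().map(parse_line)));
  let ok: Vec<usize> = results.into_iter().flat_map(|r| r.ok()).collect();
  assert_eq!(ok, vec![5, 7]);
  println!("{ok:?}");
}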

pub fn parse_item(
pub async fn parse_item(
  ctx: &impl ParseCtx,
  path: Substack<Tok<String>>,
  path: Substack<'_, Tok<String>>,
  comments: Vec<Comment>,
  item: ParsSnippet,
  item: ParsSnippet<'_>,
) -> OrcRes<Vec<Item>> {
  match item.pop_front() {
    Some((TokTree { tok: Token::Name(n), .. }, postdisc)) => match n {
      n if *n == intern!(str: "export") => match try_pop_no_fluff(postdisc)? {
      n if *n == item.i("export").await => match try_pop_no_fluff(postdisc).await? {
        Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } =>
          parse_exportable_item(ctx, path, comments, true, n.clone(), tail),
          parse_exportable_item(ctx, path, comments, true, n.clone(), tail).await,
        Parsed { output: TokTree { tok: Token::NS, .. }, tail } => {
          let Parsed { output: exports, tail } = parse_multiname(ctx.reporter(), tail)?;
          let Parsed { output: exports, tail } = parse_multiname(ctx.reporter(), tail).await?;
          let mut ok = Vec::new();
          exports.into_iter().for_each(|(e, pos)| match (&e.path.as_slice(), e.name) {
            ([], Some(n)) =>
              ok.push(Item { comments: comments.clone(), pos, kind: ItemKind::Export(n) }),
            (_, Some(_)) => ctx.reporter().report(mk_err(
              intern!(str: "Compound export"),
              "Cannot export compound names (names containing the :: separator)",
              [pos.into()],
            )),
            (_, None) => ctx.reporter().report(mk_err(
              intern!(str: "Wildcard export"),
              "Exports cannot contain the globstar *",
              [pos.into()],
            )),
          });
          expect_end(tail)?;
          for (e, pos) in exports {
            match (&e.path.as_slice(), e.name) {
              ([], Some(n)) =>
                ok.push(Item { comments: comments.clone(), pos, kind: ItemKind::Export(n) }),
              (_, Some(_)) => ctx.reporter().report(mk_err(
                tail.i("Compound export").await,
                "Cannot export compound names (names containing the :: separator)",
                [pos.into()],
              )),
              (_, None) => ctx.reporter().report(mk_err(
                tail.i("Wildcard export").await,
                "Exports cannot contain the globstar *",
                [pos.into()],
              )),
            }
          }
          expect_end(tail).await?;
          Ok(ok)
        },
        Parsed { output, .. } => Err(mk_errv(
          intern!(str: "Malformed export"),
        Parsed { output, tail } => Err(mk_errv(
          tail.i("Malformed export").await,
          "`export` can either prefix other lines or list names inside ::( ) or ::[ ]",
          [Pos::Range(output.range.clone()).into()],
        )),
      },
      n if *n == intern!(str: "import") => parse_import(ctx, postdisc).map(|v| {
      n if *n == item.i("import").await => parse_import(ctx, postdisc).await.map(|v| {
        Vec::from_iter(v.into_iter().map(|(t, pos)| Item {
          comments: comments.clone(),
          pos,
          kind: ItemKind::Import(t),
        }))
      }),
      n => parse_exportable_item(ctx, path, comments, false, n.clone(), postdisc),
      n => parse_exportable_item(ctx, path, comments, false, n.clone(), postdisc).await,
    },
    Some(_) =>
      Err(mk_errv(intern!(str: "Expected a line type"), "All lines must begin with a keyword", [
      Err(mk_errv(item.i("Expected a line type").await, "All lines must begin with a keyword", [
        Pos::Range(item.pos()).into(),
      ])),
    None => unreachable!("These lines are filtered and aggregated in earlier stages"),
  }
}

pub fn parse_import(ctx: &impl ParseCtx, tail: ParsSnippet) -> OrcRes<Vec<(Import, Pos)>> {
  let Parsed { output: imports, tail } = parse_multiname(ctx.reporter(), tail)?;
  expect_end(tail)?;
pub async fn parse_import(
  ctx: &impl ParseCtx,
  tail: ParsSnippet<'_>,
) -> OrcRes<Vec<(Import, Pos)>> {
  let Parsed { output: imports, tail } = parse_multiname(ctx.reporter(), tail).await?;
  expect_end(tail).await?;
  Ok(imports)
}

pub fn parse_exportable_item(
pub async fn parse_exportable_item(
  ctx: &impl ParseCtx,
  path: Substack<Tok<String>>,
  path: Substack<'_, Tok<String>>,
  comments: Vec<Comment>,
  exported: bool,
  discr: Tok<String>,
  tail: ParsSnippet,
  tail: ParsSnippet<'_>,
) -> OrcRes<Vec<Item>> {
  let kind = if discr == intern!(str: "mod") {
    let (name, body) = parse_module(ctx, path, tail)?;
  let kind = if discr == tail.i("mod").await {
    let (name, body) = parse_module(ctx, path, tail).await?;
    ItemKind::Member(Member::new(name, MemberKind::Mod(body)))
  } else if discr == intern!(str: "const") {
    let (name, val) = parse_const(tail)?;
    let locator = CodeLocator::to_const(path.push(name.clone()).unreverse());
  } else if discr == tail.i("const").await {
    let (name, val) = parse_const(tail).await?;
    let locator = CodeLocator::to_const(tail.i(&path.push(name.clone()).unreverse()).await);
    ItemKind::Member(Member::new(name, MemberKind::Const(Code::from_code(locator, val))))
  } else if let Some(sys) = ctx.systems().find(|s| s.can_parse(discr.clone())) {
    let line = sys.parse(tail.to_vec(), exported, comments)?;
    return parse_items(ctx, path, Snippet::new(tail.prev(), &line));
    let line = sys.parse(tail.to_vec(), exported, comments).await?;
    return parse_items(ctx, path, Snippet::new(tail.prev(), &line, tail.interner())).await;
  } else {
    let ext_lines = ctx.systems().flat_map(System::line_types).join(", ");
    return Err(mk_errv(
      intern!(str: "Unrecognized line type"),
      tail.i("Unrecognized line type").await,
      format!("Line types are: const, mod, macro, grammar, {ext_lines}"),
      [Pos::Range(tail.prev().range.clone()).into()],
    ));
@@ -141,82 +135,90 @@ pub fn parse_exportable_item(
  Ok(vec![Item { comments, pos: Pos::Range(tail.pos()), kind }])
}

pub fn parse_module(
pub async fn parse_module(
  ctx: &impl ParseCtx,
  path: Substack<Tok<String>>,
  tail: ParsSnippet,
  path: Substack<'_, Tok<String>>,
  tail: ParsSnippet<'_>,
) -> OrcRes<(Tok<String>, Module)> {
  let (name, tail) = match try_pop_no_fluff(tail)? {
  let (name, tail) = match try_pop_no_fluff(tail).await? {
    Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } => (n.clone(), tail),
    Parsed { output, .. } => {
      return Err(mk_errv(
        intern!(str: "Missing module name"),
        format!("A name was expected, {output} was found"),
        tail.i("Missing module name").await,
        format!("A name was expected, {} was found", output.print().await),
        [Pos::Range(output.range.clone()).into()],
      ));
    },
  };
  let Parsed { output, tail: surplus } = try_pop_no_fluff(tail)?;
  expect_end(surplus)?;
  let body = output.as_s(Paren::Round).ok_or_else(|| {
    mk_errv(
      intern!(str: "Expected module body"),
      format!("A ( block ) was expected, {output} was found"),
  let Parsed { output, tail: surplus } = try_pop_no_fluff(tail).await?;
  expect_end(surplus).await?;
  let Some(body) = output.as_s(Paren::Round, tail.interner()) else {
    return Err(mk_errv(
      tail.i("Expected module body").await,
      format!("A ( block ) was expected, {} was found", output.print().await),
      [Pos::Range(output.range.clone()).into()],
    )
  })?;
    ));
  };
  let path = path.push(name.clone());
  Ok((name, Module::new(parse_items(ctx, path, body)?)))
  Ok((name, Module::new(parse_items(ctx, path, body).await?)))
}

pub fn parse_const(tail: ParsSnippet) -> OrcRes<(Tok<String>, Vec<ParsTokTree>)> {
  let Parsed { output, tail } = try_pop_no_fluff(tail)?;
  let name = output.as_name().ok_or_else(|| {
    mk_errv(
      intern!(str: "Missing module name"),
      format!("A name was expected, {output} was found"),
      [Pos::Range(output.range.clone()).into()],
    )
  })?;
  let Parsed { output, tail } = try_pop_no_fluff(tail)?;
  if !output.is_kw(intern!(str: "=")) {
pub async fn parse_const(tail: ParsSnippet<'_>) -> OrcRes<(Tok<String>, Vec<ParsTokTree>)> {
  let Parsed { output, tail } = try_pop_no_fluff(tail).await?;
  let Some(name) = output.as_name() else {
    return Err(mk_errv(
      intern!(str: "Missing walrus := separator"),
      format!("Expected operator := , found {output}"),
      tail.i("Missing module name").await,
      format!("A name was expected, {} was found", output.print().await),
      [Pos::Range(output.range.clone()).into()],
    ));
  };
  let Parsed { output, tail } = try_pop_no_fluff(tail).await?;
  if !output.is_kw(tail.i("=").await) {
    return Err(mk_errv(
      tail.i("Missing walrus := separator").await,
      format!("Expected operator := , found {}", output.print().await),
      [Pos::Range(output.range.clone()).into()],
    ));
  }
  try_pop_no_fluff(tail)?;
  try_pop_no_fluff(tail).await?;
  Ok((name, tail.iter().flat_map(strip_fluff).collect_vec()))
}

pub fn parse_mtree(mut snip: ParsSnippet<'_>) -> OrcRes<Vec<MacTree>> {
pub async fn parse_mtree(mut snip: ParsSnippet<'_>) -> OrcRes<Vec<MacTree>> {
  let mut mtreev = Vec::new();
  while let Some((ttree, tail)) = snip.pop_front() {
    let (range, tok, tail) = match &ttree.tok {
      Token::S(p, b) =>
        (ttree.range.clone(), MTok::S(*p, parse_mtree(Snippet::new(ttree, b))?), tail),
      Token::S(p, b) => (
        ttree.range.clone(),
        MTok::S(*p, parse_mtree(Snippet::new(ttree, b, snip.interner())).boxed_local().await?),
        tail,
      ),
      Token::Name(tok) => {
        let mut segments = vec![tok.clone()];
        let mut end = ttree.range.end;
        while let Some((TokTree { tok: Token::NS, .. }, tail)) = snip.pop_front() {
          let Parsed { output, tail } = try_pop_no_fluff(tail)?;
          segments.push(output.as_name().ok_or_else(|| {
            mk_errv(
              intern!(str: "Namespaced name interrupted"),
          let Parsed { output, tail } = try_pop_no_fluff(tail).await?;
          let Some(seg) = output.as_name() else {
            return Err(mk_errv(
              tail.i("Namespaced name interrupted").await,
              "In expression context, :: must always be followed by a name.\n\
              ::() is permitted only in import and export items",
                ::() is permitted only in import and export items",
              [Pos::Range(output.range.clone()).into()],
            )
          })?);
            ));
          };
          segments.push(seg);
          snip = tail;
          end = output.range.end;
        }
        (ttree.range.start..end, MTok::Name(Sym::new(segments).unwrap()), snip)
        (
          ttree.range.start..end,
          MTok::Name(Sym::new(segments, snip.interner()).await.unwrap()),
          snip,
        )
      },
      Token::NS => {
        return Err(mk_errv(
          intern!(str: "Unexpected :: in macro pattern"),
          tail.i("Unexpected :: in macro pattern").await,
          ":: can only follow a name outside export statements",
          [Pos::Range(ttree.range.clone()).into()],
        ));
@@ -224,8 +226,11 @@ pub fn parse_mtree(mut snip: ParsSnippet<'_>) -> OrcRes<Vec<MacTree>> {
      Token::Ph(ph) => (ttree.range.clone(), MTok::Ph(ph.clone()), tail),
      Token::Atom(_) | Token::Macro(_) => {
        return Err(mk_errv(
          intern!(str: "Unsupported token in macro patterns"),
          format!("Macro patterns can only contain names, braces, and lambda, not {ttree}."),
          tail.i("Unsupported token in macro patterns").await,
          format!(
            "Macro patterns can only contain names, braces, and lambda, not {}.",
            ttree.print().await
          ),
          [Pos::Range(ttree.range.clone()).into()],
        ));
      },
@@ -233,50 +238,57 @@ pub fn parse_mtree(mut snip: ParsSnippet<'_>) -> OrcRes<Vec<MacTree>> {
      Token::Bottom(e) => return Err(e.clone()),
      Token::LambdaHead(arg) => (
        ttree.range.start..snip.pos().end,
        MTok::Lambda(parse_mtree(Snippet::new(ttree, arg))?, parse_mtree(tail)?),
        Snippet::new(ttree, &[]),
        MTok::Lambda(
          parse_mtree(Snippet::new(ttree, arg, snip.interner())).await?,
          parse_mtree(tail).await?,
        ),
        Snippet::new(ttree, &[], snip.interner()),
      ),
      Token::Slot(_) | Token::X(_) => panic!("Did not expect {} in parsed token tree", &ttree.tok),
      Token::Slot(_) | Token::X(_) =>
        panic!("Did not expect {} in parsed token tree", &ttree.tok.print().await),
    };
    mtreev.push(MTree { pos: Pos::Range(range.clone()), tok: Arc::new(tok) });
    mtreev.push(MTree { pos: Pos::Range(range.clone()), tok: Rc::new(tok) });
    snip = tail;
  }
  Ok(mtreev)
}

pub fn parse_macro(
  tail: ParsSnippet,
pub async fn parse_macro(
  tail: ParsSnippet<'_>,
  macro_i: u16,
  path: Substack<Tok<String>>,
  path: Substack<'_, Tok<String>>,
) -> OrcRes<Vec<Rule>> {
  let (surplus, prev, block) = match try_pop_no_fluff(tail)? {
  let (surplus, prev, block) = match try_pop_no_fluff(tail).await? {
    Parsed { tail, output: o @ TokTree { tok: Token::S(Paren::Round, b), .. } } => (tail, o, b),
    Parsed { output, .. } => {
      return Err(mk_errv(
        intern!(str: "m"),
        tail.i("m").await,
        "Macro blocks must either start with a block or a ..$:number",
        [Pos::Range(output.range.clone()).into()],
      ));
    },
  };
  expect_end(surplus)?;
  expect_end(surplus).await?;
  let mut errors = Vec::new();
  let mut rules = Vec::new();
  for (i, item) in line_items(Snippet::new(prev, block)).into_iter().enumerate() {
    let Parsed { tail, output } = try_pop_no_fluff(item.tail)?;
    if !output.is_kw(intern!(str: "rule")) {
  for (i, item) in
    line_items(Snippet::new(&prev, block, tail.interner())).await.into_iter().enumerate()
  {
    let Parsed { tail, output } = try_pop_no_fluff(item.tail).await?;
    if !output.is_kw(tail.i("rule").await) {
      errors.extend(mk_errv(
        intern!(str: "non-rule in macro"),
        format!("Expected `rule`, got {output}"),
        tail.i("non-rule in macro").await,
        format!("Expected `rule`, got {}", output.print().await),
        [Pos::Range(output.range.clone()).into()],
      ));
      continue;
    };
    let (pat, body) = match tail.split_once(|t| t.is_kw(intern!(str: "=>"))) {
    let arrow = tail.i("=>").await;
    let (pat, body) = match tail.split_once(|t| t.is_kw(arrow.clone())) {
      Some((a, b)) => (a, b),
      None => {
        errors.extend(mk_errv(
          intern!(str: "no => in macro rule"),
          tail.i("no => in macro rule").await,
          "The pattern and body of a rule must be separated by a =>",
          [Pos::Range(tail.pos()).into()],
        ));
@@ -286,9 +298,9 @@ pub fn parse_macro(
    rules.push(Rule {
      comments: item.output,
      pos: Pos::Range(tail.pos()),
      pattern: parse_mtree(pat)?,
      pattern: parse_mtree(pat).await?,
      kind: RuleKind::Native(Code::from_code(
        CodeLocator::to_rule(path.unreverse(), macro_i, i as u16),
        CodeLocator::to_rule(tail.i(&path.unreverse()).await, macro_i, i as u16),
        body.to_vec(),
      )),
    })

@@ -107,43 +107,55 @@ fn mk_scalar(pattern: &MacTree) -> ScalMatcher {

#[cfg(test)]
mod test {
  use std::sync::Arc;
  use std::rc::Rc;

  use orchid_api::PhKind;
  use orchid_base::interner::Interner;
  use orchid_base::location::SourceRange;
  use orchid_base::sym;
  use orchid_base::tokens::Paren;
  use orchid_base::tree::Ph;
  use orchid_base::{intern, sym};
  use test_executors::spin_on;

  use super::mk_any;
  use crate::macros::{MacTok, MacTree};

  #[test]
  fn test_scan() {
    let ex = |tok: MacTok| MacTree { tok: Arc::new(tok), pos: SourceRange::mock().pos() };
    let pattern = vec![
      ex(MacTok::Ph(Ph {
        kind: PhKind::Vector { priority: 0, at_least_one: false },
        name: intern!(str: "::prefix"),
      })),
      ex(MacTok::Name(sym!(prelude::do))),
      ex(MacTok::S(Paren::Round, vec![
    spin_on(async {
      let i = Interner::new_master();
      let ex = |tok: MacTok| async {
        MacTree { tok: Rc::new(tok), pos: SourceRange::mock(&i).await.pos() }
      };
      let pattern = vec![
        ex(MacTok::Ph(Ph {
          kind: PhKind::Vector { priority: 0, at_least_one: false },
          name: intern!(str: "expr"),
        })),
        ex(MacTok::Name(sym!(prelude::;))),
          name: i.i("::prefix").await,
        }))
        .await,
        ex(MacTok::Name(sym!(prelude::do; i).await)).await,
        ex(MacTok::S(Paren::Round, vec![
          ex(MacTok::Ph(Ph {
            kind: PhKind::Vector { priority: 0, at_least_one: false },
            name: i.i("expr").await,
          }))
          .await,
          ex(MacTok::Name(sym!(prelude::; ; i).await)).await,
          ex(MacTok::Ph(Ph {
            kind: PhKind::Vector { priority: 1, at_least_one: false },
            name: i.i("rest").await,
          }))
          .await,
        ]))
        .await,
        ex(MacTok::Ph(Ph {
          kind: PhKind::Vector { priority: 1, at_least_one: false },
          name: intern!(str: "rest"),
        })),
      ])),
      ex(MacTok::Ph(Ph {
        kind: PhKind::Vector { priority: 0, at_least_one: false },
        name: intern!(str: "::suffix"),
      })),
    ];
    let matcher = mk_any(&pattern);
    println!("{matcher}");
          kind: PhKind::Vector { priority: 0, at_least_one: false },
          name: i.i("::suffix").await,
        }))
        .await,
      ];
      let matcher = mk_any(&pattern);
      println!("{matcher}");
    })
  }
}

@@ -2,7 +2,7 @@ use std::fmt;

use itertools::Itertools;
use orchid_api::PhKind;
use orchid_base::intern;
use orchid_base::interner::Interner;
use orchid_base::location::Pos;
use orchid_base::name::Sym;
use orchid_base::tree::Ph;
@@ -21,7 +21,7 @@ pub fn last_is_vec(pattern: &[MacTree]) -> bool { vec_attrs(pattern.last().unwra

pub struct NamedMatcher(AnyMatcher);
impl NamedMatcher {
  pub fn new(pattern: &[MacTree]) -> Self {
  pub async fn new(pattern: &[MacTree], i: &Interner) -> Self {
    assert!(
      matches!(pattern.first().map(|tree| &*tree.tok), Some(MacTok::Name(_))),
      "Named matchers must begin with a name"
@@ -31,7 +31,7 @@ impl NamedMatcher {
      true => Self(mk_any(pattern)),
      false => {
        let kind: PhKind = PhKind::Vector { priority: 0, at_least_one: false };
        let suffix = [MacTok::Ph(Ph { name: intern!(str: "::after"), kind }).at(Pos::None)];
        let suffix = [MacTok::Ph(Ph { name: i.i("::after").await, kind }).at(Pos::None)];
        Self(mk_any(&pattern.iter().chain(&suffix).cloned().collect_vec()))
      },
    }
@@ -39,18 +39,18 @@ impl NamedMatcher {
  /// Also returns the tail, if any, which should be matched further
  /// Note that due to how priority works below, the main usable information from
  /// the tail is its length
  pub fn apply<'a>(
  pub async fn apply<'a>(
    &self,
    seq: &'a [MacTree],
    i: &Interner,
    save_loc: impl Fn(Sym) -> bool,
  ) -> Option<(MatchState<'a>, &'a [MacTree])> {
    any_match(&self.0, seq, &save_loc).map(|mut state| {
      match state.remove(intern!(str: "::after")) {
        Some(StateEntry::Scalar(_)) => panic!("::after can never be a scalar entry!"),
        Some(StateEntry::Vec(v)) => (state, v),
        None => (state, &[][..]),
      }
    })
    let mut state = any_match(&self.0, seq, &save_loc)?;
    match state.remove(i.i("::after").await) {
      Some(StateEntry::Scalar(_)) => panic!("::after can never be a scalar entry!"),
      Some(StateEntry::Vec(v)) => Some((state, v)),
      None => Some((state, &[][..])),
    }
  }
}
impl fmt::Display for NamedMatcher {

@@ -20,7 +20,7 @@ pub fn scal_match<'a>(
    (ScalMatcher::Placeh { key }, _) =>
      Some(MatchState::from_ph(key.clone(), StateEntry::Scalar(expr))),
    (ScalMatcher::S(c1, b_mat), MacTok::S(c2, body)) if c1 == c2 =>
      any_match(b_mat, &body[..], save_loc),
      any_match(&b_mat, &body[..], save_loc),
    (ScalMatcher::Lambda(arg_mat, b_mat), MacTok::Lambda(arg, body)) =>
      Some(any_match(arg_mat, arg, save_loc)?.combine(any_match(b_mat, body, save_loc)?)),
    _ => None,

@@ -1,79 +1,96 @@
use std::io::{self, BufRead as _, Write};
use std::cell::RefCell;
use std::path::PathBuf;
use std::sync::mpsc::sync_channel;
use std::{process, thread};
use std::pin::Pin;
use std::rc::Rc;
use std::thread;

use async_process::{self, Child, ChildStdin, ChildStdout};
use async_std::io::{self, BufReadExt, BufReader};
use async_std::sync::Mutex;
use futures::FutureExt;
use futures::future::LocalBoxFuture;
use futures::task::LocalSpawnExt;
use orchid_api_traits::{Decode, Encode};
use orchid_base::builtin::{ExtInit, ExtPort};
use orchid_base::logging::Logger;
use orchid_base::msg::{recv_msg, send_msg};

use crate::api;
use crate::ctx::Ctx;

pub struct ExtensionCommand(pub process::Command, pub Logger);
impl ExtFactory for ExtensionCommand {
  fn run(self: Box<Self>, onmessage: OnMessage) -> ExtInit {
    let Self(mut cmd, logger) = *self;
    let prog_pbuf = PathBuf::from(cmd.get_program());
    let prog = prog_pbuf.file_stem().unwrap_or(cmd.get_program()).to_string_lossy().to_string();
    let mut child = cmd
      .stdin(process::Stdio::piped())
      .stdout(process::Stdio::piped())
      .stderr(process::Stdio::piped())
      .spawn()?;
    let mut stdin = child.stdin.take().unwrap();
    api::HostHeader { log_strategy: logger.strat() }.encode(&mut stdin);
    stdin.flush()?;
    let mut stdout = child.stdout.take().unwrap();
    let header = api::ExtensionHeader::decode(&mut stdout);
    let child_stderr = child.stderr.take().unwrap();
    let (set_onmessage, recv_onmessage) = sync_channel(0);
    thread::Builder::new().name(format!("stdout-fwd:{prog}")).spawn(move || {
      let mut onmessage: Box<dyn FnMut(&[u8]) + Send> = recv_onmessage.recv().unwrap();
      drop(recv_onmessage);
      loop {
        match recv_msg(&mut stdout) {
          Ok(msg) => onmessage(&msg[..]),
          Err(e) if e.kind() == io::ErrorKind::BrokenPipe => break,
          Err(e) => panic!("Failed to read from stdout: {}, {e}", e.kind()),
        }
      }
    })?;
    thread::Builder::new().name(format!("stderr-fwd:{prog}")).spawn(move || {
      let mut reader = io::BufReader::new(child_stderr);
pub async fn ext_command(
  cmd: std::process::Command,
  logger: Logger,
  ctx: Ctx,
) -> io::Result<ExtInit> {
  let prog_pbuf = PathBuf::from(cmd.get_program());
  let prog = prog_pbuf.file_stem().unwrap_or(cmd.get_program()).to_string_lossy().to_string();
  let mut child = async_process::Command::from(cmd)
    .stdin(async_process::Stdio::piped())
    .stdout(async_process::Stdio::piped())
    .stderr(async_process::Stdio::piped())
    .spawn()?;
  let mut stdin = child.stdin.take().unwrap();
  api::HostHeader { log_strategy: logger.strat() }.encode(Pin::new(&mut stdin));
  let mut stdout = child.stdout.take().unwrap();
  let header = api::ExtensionHeader::decode(Pin::new(&mut stdout)).await;
  let child_stderr = child.stderr.take().unwrap();
  thread::Builder::new().name(format!("stderr-fwd:{prog}")).spawn(move || {
    async_std::task::block_on(async move {
      let mut reader = BufReader::new(child_stderr);
      loop {
        let mut buf = String::new();
        if 0 == reader.read_line(&mut buf).unwrap() {
        if 0 == reader.read_line(&mut buf).await.unwrap() {
          break;
        }
        logger.log(buf);
      }
    })?;
    Ok(Subprocess { child: Mutex::new(child), stdin: Mutex::new(stdin), set_onmessage, header })
  }
    })
  })?;
  Ok(ExtInit {
    header,
    port: Box::new(Subprocess {
      child: Rc::new(RefCell::new(child)),
      stdin: Mutex::new(Box::pin(stdin)),
      stdout: Mutex::new(Box::pin(stdout)),
      ctx,
    }),
  })
}

pub struct Subprocess {
  child: Mutex<process::Child>,
  stdin: Mutex<process::ChildStdin>,
  stdout: Mutex<process::ChildStdout>,
  header: api::ExtensionHeader,
}
impl Subprocess {
  pub fn new(mut cmd: process::Command, logger: Logger) -> io::Result<Self> {}
  child: Rc<RefCell<Child>>,
  stdin: Mutex<Pin<Box<ChildStdin>>>,
  stdout: Mutex<Pin<Box<ChildStdout>>>,
  ctx: Ctx,
}
impl Drop for Subprocess {
  fn drop(&mut self) { self.child.lock().unwrap().wait().expect("Extension exited with error"); }
  fn drop(&mut self) {
    let child = self.child.clone();
    (self.ctx.spawn.spawn_local(async move {
      let status = child.borrow_mut().status().await.expect("Extension exited with error");
      assert!(status.success(), "Extension exited with error {status}");
    }))
    .expect("Could not spawn process terminating future")
  }
}
impl ExtPort for Subprocess {
  fn send(&self, msg: &[u8]) {
  fn send<'a>(&'a self, msg: &'a [u8]) -> LocalBoxFuture<'a, ()> {
    if msg.starts_with(&[0, 0, 0, 0x1c]) {
      panic!("Received unnecessary prefix");
    }
    send_msg(&mut *self.stdin.lock().unwrap(), msg).unwrap()
    async { send_msg(Pin::new(&mut *self.stdin.lock().await), msg).await.unwrap() }.boxed_local()
  }
  fn recv<'a>(&self, cb: Box<dyn FnOnce(&[u8]) + Send + 'a>) -> futures::future::BoxFuture<()> {
    async {}
  fn recv<'a>(
    &'a self,
    cb: Box<dyn FnOnce(&[u8]) -> LocalBoxFuture<'_, ()> + 'a>,
  ) -> LocalBoxFuture<'a, ()> {
    Box::pin(async {
      match recv_msg(self.stdout.lock().await.as_mut()).await {
        Ok(msg) => cb(&msg).await,
        Err(e) if e.kind() == io::ErrorKind::BrokenPipe => (),
        Err(e) => panic!("Failed to read from stdout: {}, {e}", e.kind()),
      }
    })
  }
}
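Not part of this commit: a std-only sketch of the stderr-forwarding idea used by ext_command above, with illustrative names and a throwaway `sh` child standing in for an extension process (Unix-only as written).

use std::io::{BufRead, BufReader};
use std::process::{Command, Stdio};
use std::thread;

fn main() -> std::io::Result<()> {
  // Spawn a child whose stderr we capture, standing in for an extension process.
  let mut child = Command::new("sh")
    .arg("-c")
    .arg("echo extension says hi >&2")
    .stderr(Stdio::piped())
    .spawn()?;
  let child_stderr = child.stderr.take().unwrap();
  // Forward every stderr line to the host's logger (println! here) until EOF,
  // mirroring the stderr-fwd thread in the diff above.
  let fwd = thread::Builder::new().name("stderr-fwd:sh".to_string()).spawn(move || {
    let mut reader = BufReader::new(child_stderr);
    loop {
      let mut buf = String::new();
      if 0 == reader.read_line(&mut buf).unwrap() {
        break;
      }
      print!("[ext stderr] {buf}");
    }
  })?;
  child.wait()?;
  fwd.join().unwrap();
  Ok(())
}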

213
orchid-host/src/system.rs
Normal file
213
orchid-host/src/system.rs
Normal file
@@ -0,0 +1,213 @@
use std::collections::VecDeque;
use std::fmt;
use std::future::Future;
use std::rc::{Rc, Weak};

use async_stream::stream;
use derive_destructure::destructure;
use futures::StreamExt;
use futures::future::join_all;
use futures::task::LocalSpawnExt;
use hashbrown::HashMap;
use itertools::Itertools;
use orchid_base::async_once_cell::OnceCell;
use orchid_base::char_filter::char_filter_match;
use orchid_base::clone;
use orchid_base::error::{OrcErrv, OrcRes};
use orchid_base::interner::Tok;
use orchid_base::parse::Comment;
use orchid_base::reqnot::{ReqNot, Requester};
use orchid_base::tree::ttv_from_api;
use ordered_float::NotNan;
use substack::{Stackframe, Substack};

use crate::api;
use crate::ctx::Ctx;
use crate::extension::{Extension, WeakExtension};
use crate::tree::{Member, ParsTokTree};

#[derive(destructure)]
struct SystemInstData {
  ctx: Ctx,
  ext: Extension,
  decl_id: api::SysDeclId,
  lex_filter: api::CharFilter,
  id: api::SysId,
  const_root: OnceCell<Vec<Member>>,
  line_types: Vec<Tok<String>>,
}
impl Drop for SystemInstData {
  fn drop(&mut self) { self.ext.system_drop(self.id); }
}
impl fmt::Debug for SystemInstData {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    f.debug_struct("SystemInstData")
      .field("decl_id", &self.decl_id)
      .field("lex_filter", &self.lex_filter)
      .field("id", &self.id)
      .field("const_root", &self.const_root)
      .field("line_types", &self.line_types)
      .finish_non_exhaustive()
  }
}

#[derive(Clone, Debug)]
pub struct System(Rc<SystemInstData>);
impl System {
  pub fn id(&self) -> api::SysId { self.0.id }
  pub fn ext(&self) -> &Extension { &self.0.ext }
  pub fn ctx(&self) -> &Ctx { &self.0.ctx }
  pub(crate) fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { &self.0.ext.reqnot() }
  pub async fn get_tree(&self, id: api::TreeId) -> api::MemberKind {
    self.reqnot().request(api::GetMember(self.0.id, id)).await
  }
  pub fn has_lexer(&self) -> bool { !self.0.lex_filter.0.is_empty() }
  pub fn can_lex(&self, c: char) -> bool { char_filter_match(&self.0.lex_filter, c) }
  /// Have this system lex a part of the source. It is assumed that
  /// [Self::can_lex] was called and returned true.
  pub async fn lex<F: Future<Output = Option<api::SubLexed>>>(
    &self,
    source: Tok<String>,
    pos: u32,
    r: impl FnMut(u32) -> F,
  ) -> api::OrcResult<Option<api::LexedExpr>> {
    self.0.ext.lex_req(source, pos, self.id(), r).await
  }
  pub fn can_parse(&self, ltyp: Tok<String>) -> bool { self.0.line_types.contains(&ltyp) }
  pub fn line_types(&self) -> impl Iterator<Item = &Tok<String>> + '_ { self.0.line_types.iter() }
  pub async fn parse(
    &self,
    line: Vec<ParsTokTree>,
    exported: bool,
    comments: Vec<Comment>,
  ) -> OrcRes<Vec<ParsTokTree>> {
    let line =
      join_all(line.iter().map(|t| async { t.to_api(&mut |n, _| match *n {}).await })).await;
    let comments = comments.iter().map(Comment::to_api).collect_vec();
    match self.reqnot().request(api::ParseLine { exported, sys: self.id(), comments, line }).await {
      Ok(parsed) => Ok(ttv_from_api(parsed, &mut self.ctx().clone(), &self.ctx().i).await),
      Err(e) => Err(OrcErrv::from_api(&e, &self.ctx().i).await),
    }
  }
  pub async fn request(&self, req: Vec<u8>) -> Vec<u8> {
    self.reqnot().request(api::SysFwded(self.id(), req)).await
  }
  pub(crate) fn drop_atom(&self, drop: api::AtomId) {
    let this = self.0.clone();
    (self.0.ctx.spawn.spawn_local(async move {
      this.ctx.owned_atoms.write().await.remove(&drop);
    }))
    .expect("Failed to drop atom");
  }
  pub async fn print(&self) -> String {
    let ctor = (self.0.ext.system_ctors().find(|c| c.id() == self.0.decl_id))
      .expect("System instance with no associated constructor");
    format!("System({} @ {} #{})", ctor.name(), ctor.priority(), self.0.id.0)
  }
  pub fn downgrade(&self) -> WeakSystem { WeakSystem(Rc::downgrade(&self.0)) }
}

pub struct WeakSystem(Weak<SystemInstData>);
impl WeakSystem {
  pub fn upgrade(&self) -> Option<System> { self.0.upgrade().map(System) }
}

pub struct SystemCtor {
  pub(crate) decl: api::SystemDecl,
  pub(crate) ext: WeakExtension,
}
impl SystemCtor {
  pub fn name(&self) -> &str { &self.decl.name }
  pub fn priority(&self) -> NotNan<f64> { self.decl.priority }
  pub fn depends(&self) -> impl ExactSizeIterator<Item = &str> {
    self.decl.depends.iter().map(|s| &**s)
  }
  pub fn id(&self) -> api::SysDeclId { self.decl.id }
  pub async fn run<'a>(&self, depends: impl IntoIterator<Item = &'a System>) -> System {
    let depends = depends.into_iter().map(|si| si.id()).collect_vec();
    debug_assert_eq!(depends.len(), self.decl.depends.len(), "Wrong number of deps provided");
    let ext = self.ext.upgrade().expect("SystemCtor should be freed before Extension");
    let id = ext.ctx().next_sys_id();
    let sys_inst = ext.reqnot().request(api::NewSystem { depends, id, system: self.decl.id }).await;
    let data = System(Rc::new(SystemInstData {
      decl_id: self.decl.id,
      ext: ext.clone(),
      ctx: ext.ctx().clone(),
      lex_filter: sys_inst.lex_filter,
      const_root: OnceCell::new(),
      line_types: join_all(sys_inst.line_types.iter().map(|m| Tok::from_api(*m, &ext.ctx().i)))
        .await,
      id,
    }));
    (data.0.const_root.get_or_init(
      clone!(data, ext; stream! {
        for (k, v) in sys_inst.const_root {
          yield Member::from_api(
            api::Member { name: k, kind: v },
            &mut vec![Tok::from_api(k, &ext.ctx().i).await],
            &data,
          ).await
        }
      })
      .collect(),
    ))
    .await;
    ext.ctx().systems.write().await.insert(id, data.downgrade());
    data
  }
}

#[derive(Debug, Clone)]
pub enum SysResolvErr {
  Loop(Vec<String>),
  Missing(String),
}

pub async fn init_systems(
  tgts: &[String],
  exts: &[Extension],
) -> Result<Vec<System>, SysResolvErr> {
  let mut to_load = HashMap::<&str, &SystemCtor>::new();
  let mut to_find = tgts.iter().map(|s| s.as_str()).collect::<VecDeque<&str>>();
  while let Some(target) = to_find.pop_front() {
    if to_load.contains_key(target) {
      continue;
    }
    let ctor = (exts.iter())
      .flat_map(|e| e.system_ctors().filter(|c| c.name() == target))
      .max_by_key(|c| c.priority())
      .ok_or_else(|| SysResolvErr::Missing(target.to_string()))?;
    to_load.insert(target, ctor);
    to_find.extend(ctor.depends());
  }
  let mut to_load_ordered = Vec::new();
  fn walk_deps<'a>(
    graph: &mut HashMap<&str, &'a SystemCtor>,
    list: &mut Vec<&'a SystemCtor>,
    chain: Stackframe<&str>,
  ) -> Result<(), SysResolvErr> {
    if let Some(ctor) = graph.remove(chain.item) {
      // if the above is none, the system is already queued. Missing systems are
      // detected above
      for dep in ctor.depends() {
        if Substack::Frame(chain).iter().any(|c| *c == dep) {
          let mut circle = vec![dep.to_string()];
          circle.extend(Substack::Frame(chain).iter().map(|s| s.to_string()));
          return Err(SysResolvErr::Loop(circle));
        }
        walk_deps(graph, list, Substack::Frame(chain).new_frame(dep))?
      }
      list.push(ctor);
    }
    Ok(())
  }
  for tgt in tgts {
    walk_deps(&mut to_load, &mut to_load_ordered, Substack::Bottom.new_frame(tgt))?;
  }
  let mut systems = HashMap::<&str, System>::new();
  for ctor in to_load_ordered.iter() {
    let sys = ctor.run(ctor.depends().map(|n| &systems[n])).await;
    systems.insert(ctor.name(), sys);
  }
  Ok(systems.into_values().collect_vec())
}
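Not part of this commit: a minimal std-only sketch of the dependency-ordering idea that init_systems and walk_deps implement above (depth-first, emit each node after its dependencies, report a cycle when a name reappears on the current chain). The names and the tiny graph are purely illustrative.

use std::collections::HashMap;

// Depth-first walk: emit each node after its dependencies, and return the
// offending chain as an error when a dependency cycle is detected.
fn walk<'a>(
  graph: &mut HashMap<&'a str, Vec<&'a str>>,
  order: &mut Vec<&'a str>,
  chain: &mut Vec<&'a str>,
) -> Result<(), Vec<String>> {
  let name = *chain.last().unwrap();
  if let Some(deps) = graph.remove(name) {
    // If remove returns None the node is already queued, as in walk_deps above.
    for dep in deps {
      if chain.contains(&dep) {
        let mut circle = vec![dep.to_string()];
        circle.extend(chain.iter().rev().map(|s| s.to_string()));
        return Err(circle);
      }
      chain.push(dep);
      walk(graph, order, chain)?;
      chain.pop();
    }
    order.push(name);
  }
  Ok(())
}

fn main() {
  // "std" depends on "io", which depends on "core"; load order must be reversed.
  let mut graph: HashMap<&str, Vec<&str>> =
    [("std", vec!["io"]), ("io", vec!["core"]), ("core", vec![])].into_iter().collect();
  let mut order = Vec::new();
  for tgt in ["std"] {
    walk(&mut graph, &mut order, &mut vec![tgt]).unwrap();
  }
  assert_eq!(order, vec!["core", "io", "std"]);
  println!("{order:?}");
}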

@@ -1,22 +1,25 @@
use std::fmt::Debug;
use std::sync::{Mutex, OnceLock};

use async_stream::stream;
use futures::{FutureExt, StreamExt};
use itertools::Itertools;
use never::Never;
use orchid_base::clone;
use orchid_base::error::OrcRes;
use orchid_base::interner::{Tok, intern};
use orchid_base::interner::Tok;
use orchid_base::location::Pos;
use orchid_base::macros::mtreev_from_api;
use orchid_base::name::Sym;
use orchid_base::parse::{Comment, Import};
use orchid_base::tree::{TokTree, Token};
use orchid_base::tree::{AtomRepr, TokTree, Token};
use ordered_float::NotNan;
use substack::{Substack, with_iter_stack};

use crate::api;
use crate::atom::AtomHand;
use crate::expr::Expr;
use crate::extension::{AtomHand, System};
use crate::macros::{MacTok, MacTree};
use crate::system::System;

pub type ParsTokTree = TokTree<'static, AtomHand, Never>;
pub type ParsTok = Token<'static, AtomHand, Never>;
@@ -37,54 +40,79 @@ pub enum ItemKind {
}

impl Item {
  pub fn from_api(tree: api::Item, path: Substack<Tok<String>>, sys: &System) -> Self {
  pub async fn from_api(tree: api::Item, path: &mut Vec<Tok<String>>, sys: &System) -> Self {
    let kind = match tree.kind {
      api::ItemKind::Member(m) => ItemKind::Member(Member::from_api(m, path, sys)),
      api::ItemKind::Import(i) =>
        ItemKind::Import(Import { path: Sym::from_api(i).iter().collect(), name: None }),
      api::ItemKind::Export(e) => ItemKind::Export(Tok::from_api(e)),
      api::ItemKind::Macro(api::MacroBlock { priority, rules }) => ItemKind::Macro(priority, {
        Vec::from_iter(rules.into_iter().map(|api| Rule {
          pos: Pos::from_api(&api.location),
          pattern: mtreev_from_api(&api.pattern, &mut |a| {
            MacTok::Atom(AtomHand::from_api(a.clone()))
          }),
          kind: RuleKind::Remote(sys.clone(), api.id),
          comments: api.comments.iter().map(Comment::from_api).collect_vec(),
        }))
      api::ItemKind::Member(m) => ItemKind::Member(Member::from_api(m, path, sys).await),
      api::ItemKind::Import(name) => ItemKind::Import(Import {
        path: Sym::from_api(name, &sys.ctx().i).await.iter().collect(),
        name: None,
      }),
      api::ItemKind::Export(e) => ItemKind::Export(Tok::from_api(e, &sys.ctx().i).await),
      api::ItemKind::Macro(macro_block) => {
        let mut rules = Vec::new();
        for rule in macro_block.rules {
          let mut comments = Vec::new();
          for comment in rule.comments {
            comments.push(Comment::from_api(&comment, &sys.ctx().i).await);
          }
          let pos = Pos::from_api(&rule.location, &sys.ctx().i).await;
          let pattern = mtreev_from_api(&rule.pattern, &sys.ctx().i, &mut {
            clone!(pos, sys);
            move |a| {
              clone!(pos, sys);
              Box::pin(async move {
                MacTok::Atom(AtomHand::from_api(a, pos.clone(), &mut sys.ctx().clone()).await)
              })
            }
          })
          .await;
          rules.push(Rule { pos, pattern, kind: RuleKind::Remote(sys.clone(), rule.id), comments });
        }
        ItemKind::Macro(macro_block.priority, rules)
      },
    };
    let comments = tree.comments.iter().map(Comment::from_api).collect_vec();
    Self { pos: Pos::from_api(&tree.location), comments, kind }
    let mut comments = Vec::new();
    for comment in tree.comments.iter() {
      comments.push(Comment::from_api(comment, &sys.ctx().i).await)
    }
    Self { pos: Pos::from_api(&tree.location, &sys.ctx().i).await, comments, kind }
  }
}

#[derive(Debug)]
pub struct Member {
  pub name: Tok<String>,
  pub kind: OnceLock<MemberKind>,
  pub lazy: Mutex<Option<LazyMemberHandle>>,
}
impl Member {
  pub fn from_api(api: api::Member, path: Substack<Tok<String>>, sys: &System) -> Self {
    let name = Tok::from_api(api.name);
    let full_path = path.push(name.clone());
  pub async fn from_api(api: api::Member, path: &mut Vec<Tok<String>>, sys: &System) -> Self {
    path.push(Tok::from_api(api.name, &sys.ctx().i).await);
    let kind = match api.kind {
      api::MemberKind::Lazy(id) => {
        return LazyMemberHandle(id, sys.clone(), intern(&full_path.unreverse())).into_member(name);
        let handle = LazyMemberHandle(id, sys.clone(), path.clone());
        return handle.into_member(path.pop().unwrap());
      },
      api::MemberKind::Const(c) => MemberKind::Const(Code::from_expr(
        CodeLocator::to_const(full_path.unreverse()),
        Expr::from_api(&c, &mut ()),
        CodeLocator::to_const(sys.ctx().i.i(&*path).await),
        Expr::from_api(&c, &mut sys.ext().clone()).await,
      )),
      api::MemberKind::Module(m) => MemberKind::Mod(Module::from_api(m, full_path, sys)),
      api::MemberKind::Module(m) => MemberKind::Mod(Module::from_api(m, path, sys).await),
    };
    let name = path.pop().unwrap();
    Member { name, kind: OnceLock::from(kind), lazy: Mutex::default() }
  }
  pub fn new(name: Tok<String>, kind: MemberKind) -> Self {
    Member { name, kind: OnceLock::from(kind), lazy: Mutex::default() }
  }
}
impl Debug for Member {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("Member")
      .field("name", &self.name)
      .field("kind", &self.kind)
      .finish_non_exhaustive()
  }
}

#[derive(Debug)]
pub enum MemberKind {
@@ -109,30 +137,27 @@ impl Module {
      .collect_vec();
    Self { imports: vec![], exports, items }
  }
  pub fn from_api(m: api::Module, path: Substack<Tok<String>>, sys: &System) -> Self {
    let mut output = Vec::new();
    for item in m.items.into_iter() {
      let next = Item::from_api(item, path.clone(), sys);
      output.push(next);
    }
    Self::new(output)
  pub async fn from_api(m: api::Module, path: &mut Vec<Tok<String>>, sys: &System) -> Self {
    Self::new(
      stream! { for item in m.items { yield Item::from_api(item, path, sys).boxed_local().await } }
        .collect::<Vec<_>>()
        .await,
    )
  }
}

#[derive(Debug)]
pub struct LazyMemberHandle(api::TreeId, System, Tok<Vec<Tok<String>>>);
pub struct LazyMemberHandle(api::TreeId, System, Vec<Tok<String>>);
impl LazyMemberHandle {
  pub fn run(self) -> OrcRes<MemberKind> {
    match self.1.get_tree(self.0) {
  pub async fn run(self) -> OrcRes<MemberKind> {
    match self.1.get_tree(self.0).await {
      api::MemberKind::Const(c) => Ok(MemberKind::Const(Code {
        bytecode: Expr::from_api(&c, &mut ()).into(),
        locator: CodeLocator { steps: self.2, rule_loc: None },
        bytecode: Expr::from_api(&c, &mut self.1.ext().clone()).await.into(),
        locator: CodeLocator { steps: self.1.ctx().i.i(&self.2).await, rule_loc: None },
        source: None,
      })),
      api::MemberKind::Module(m) => with_iter_stack(self.2.iter().cloned(), |path| {
        Ok(MemberKind::Mod(Module::from_api(m, path, &self.1)))
      }),
      api::MemberKind::Lazy(id) => Self(id, self.1, self.2).run(),
      api::MemberKind::Module(m) =>
        Ok(MemberKind::Mod(Module::from_api(m, &mut { self.2 }, &self.1).await)),
      api::MemberKind::Lazy(id) => Self(id, self.1, self.2).run().boxed_local().await,
    }
  }
  pub fn into_member(self, name: Tok<String>) -> Member {
@@ -181,10 +206,8 @@ pub struct CodeLocator {
  rule_loc: Option<(u16, u16)>,
}
impl CodeLocator {
  pub fn to_const(path: impl IntoIterator<Item = Tok<String>>) -> Self {
    Self { steps: intern(&path.into_iter().collect_vec()), rule_loc: None }
  }
  pub fn to_rule(path: impl IntoIterator<Item = Tok<String>>, macro_i: u16, rule_i: u16) -> Self {
    Self { steps: intern(&path.into_iter().collect_vec()), rule_loc: Some((macro_i, rule_i)) }
  pub fn to_const(steps: Tok<Vec<Tok<String>>>) -> Self { Self { steps, rule_loc: None } }
  pub fn to_rule(steps: Tok<Vec<Tok<String>>>, macro_i: u16, rule_i: u16) -> Self {
    Self { steps, rule_loc: Some((macro_i, rule_i)) }
  }
}