forked from Orchid/orchid
commit before easter break
@@ -1,8 +1,9 @@
use std::cell::RefCell;
use std::num::{NonZero, NonZeroU16};
use std::rc::Rc;
use std::rc::{Rc, Weak};
use std::{fmt, ops};

use async_once_cell::OnceCell;
use async_std::sync::RwLock;
use hashbrown::HashMap;
use orchid_api::SysId;
@@ -11,8 +12,9 @@ use orchid_base::interner::Interner;

use crate::api;
use crate::atom::WeakAtomHand;
use crate::expr_store::ExprStore;
use crate::parsed::Root;
use crate::system::{System, WeakSystem};
use crate::tree::Module;

pub struct CtxData {
pub i: Rc<Interner>,
@@ -20,7 +22,8 @@ pub struct CtxData {
pub systems: RwLock<HashMap<api::SysId, WeakSystem>>,
pub system_id: RefCell<NonZeroU16>,
pub owned_atoms: RwLock<HashMap<api::AtomId, WeakAtomHand>>,
// pub root: RwLock<Module>,
pub common_exprs: ExprStore,
pub root: OnceCell<Weak<Root>>,
}
#[derive(Clone)]
pub struct Ctx(Rc<CtxData>);
@@ -36,7 +39,8 @@ impl Ctx {
systems: RwLock::default(),
system_id: RefCell::new(NonZero::new(1).unwrap()),
owned_atoms: RwLock::default(),
// root: RwLock::new(Module::default()),
common_exprs: ExprStore::default(),
root: OnceCell::default(),
}))
}
pub(crate) async fn system_inst(&self, id: api::SysId) -> Option<System> {
@@ -47,6 +51,10 @@ impl Ctx {
*g = g.checked_add(1).unwrap_or(NonZeroU16::new(1).unwrap());
SysId(*g)
}
pub async fn set_root(&self, root: Weak<Root>) {
assert!(self.root.get().is_none(), "Root already assigned");
self.root.get_or_init(async { root }).await;
}
}
impl fmt::Debug for Ctx {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {

@@ -8,9 +8,10 @@ use orchid_base::format::{FmtCtxImpl, Format, take_first};
use orchid_base::interner::{Interner, Tok};
use orchid_base::location::Pos;
use orchid_base::name::{NameLike, Sym, VName};
use substack::Substack;

use crate::macros::{MacTok, MacTree};
use crate::tree::{ItemKind, MemberKind, Module, RuleKind, WalkErrorKind};
use crate::expr::Expr;
use crate::parsed::{ItemKind, ParsedMemberKind, ParsedModule, WalkErrorKind};

/// Errors produced by absolute_path
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
@@ -73,28 +74,36 @@ pub fn absolute_path(
.map_err(|_| AbsPathError::RootPath)
}

pub struct DealiasCtx<'a> {
pub i: &'a Interner,
pub rep: &'a Reporter,
pub consts: &'a mut HashMap<Sym, Expr>,
}

pub async fn resolv_glob(
cwd: &[Tok<String>],
root: &Module,
root: &ParsedModule,
abs_path: &[Tok<String>],
pos: Pos,
i: &Interner,
r: &impl Reporter,
ctx: &mut DealiasCtx<'_>,
) -> Vec<Tok<String>> {
let coprefix_len = cwd.iter().zip(abs_path).take_while(|(a, b)| a == b).count();
let (co_prefix, diff_path) = abs_path.split_at(coprefix_len);
let co_parent = root.walk(false, co_prefix.iter().cloned()).await.expect("Invalid step in cwd");
let target_module = match co_parent.walk(true, diff_path.iter().cloned()).await {
let co_parent =
root.walk(false, co_prefix.iter().cloned(), ctx.consts).await.expect("Invalid step in cwd");
let target_module = match co_parent.walk(true, diff_path.iter().cloned(), ctx.consts).await {
Ok(t) => t,
Err(e) => {
let path = abs_path[..=coprefix_len + e.pos].iter().join("::");
let (tk, msg) = match e.kind {
WalkErrorKind::Constant =>
(i.i("Invalid import path").await, format!("{path} is a constant")),
WalkErrorKind::Missing => (i.i("Invalid import path").await, format!("{path} not found")),
WalkErrorKind::Private => (i.i("Import inaccessible").await, format!("{path} is private")),
(ctx.i.i("Invalid import path").await, format!("{path} is a constant")),
WalkErrorKind::Missing =>
(ctx.i.i("Invalid import path").await, format!("{path} not found")),
WalkErrorKind::Private =>
(ctx.i.i("Import inaccessible").await, format!("{path} is private")),
};
r.report(mk_err(tk, msg, [pos.into()]));
(&ctx.rep).report(mk_err(tk, msg, [pos.into()]));
return vec![];
},
};
@@ -104,36 +113,36 @@ pub async fn resolv_glob(
/// Read import statements and convert them into aliases, raising any import
/// errors in the process
pub async fn imports_to_aliases(
module: &Module,
module: &ParsedModule,
cwd: &mut Vec<Tok<String>>,
root: &Module,
root: &ParsedModule,
alias_map: &mut HashMap<Sym, Sym>,
alias_rev_map: &mut HashMap<Sym, HashSet<Sym>>,
i: &Interner,
rep: &impl Reporter,
ctx: &mut DealiasCtx<'_>,
) {
let mut import_locs = HashMap::<Sym, Vec<Pos>>::new();
for item in &module.items {
match &item.kind {
ItemKind::Import(imp) => match absolute_path(cwd, &imp.path) {
Err(e) => rep.report(e.err_obj(i, item.pos.clone(), &imp.path.iter().join("::")).await),
Err(e) =>
ctx.rep.report(e.err_obj(ctx.i, item.pos.clone(), &imp.path.iter().join("::")).await),
Ok(abs_path) => {
let names = match imp.name.as_ref() {
Some(n) => Either::Right([n.clone()].into_iter()),
None => Either::Left(
resolv_glob(cwd, root, &abs_path, item.pos.clone(), i, rep).await.into_iter(),
resolv_glob(cwd, root, &abs_path, item.pos.clone(), ctx).await.into_iter(),
),
};
for name in names {
let mut tgt = abs_path.clone().suffix([name.clone()]).to_sym(i).await;
let src = Sym::new(cwd.iter().cloned().chain([name]), i).await.unwrap();
let mut tgt = abs_path.clone().suffix([name.clone()]).to_sym(ctx.i).await;
let src = Sym::new(cwd.iter().cloned().chain([name]), ctx.i).await.unwrap();
import_locs.entry(src.clone()).or_insert(vec![]).push(item.pos.clone());
if let Some(tgt2) = alias_map.get(&tgt) {
tgt = tgt2.clone();
}
if src == tgt {
rep.report(mk_err(
i.i("Circular references").await,
ctx.rep.report(mk_err(
ctx.i.i("Circular references").await,
format!("{src} circularly refers to itself"),
[item.pos.clone().into()],
));
@@ -142,8 +151,8 @@ pub async fn imports_to_aliases(
if let Some(fst_val) = alias_map.get(&src) {
let locations = (import_locs.get(&src))
.expect("The same name could only have appeared in the same module");
rep.report(mk_err(
i.i("Conflicting imports").await,
ctx.rep.report(mk_err(
ctx.i.i("Conflicting imports").await,
if fst_val == &src {
format!("{src} is imported multiple times")
} else {
@@ -163,78 +172,35 @@ pub async fn imports_to_aliases(
}
},
},
ItemKind::Member(mem) => match mem.kind().await {
MemberKind::Const(_) => (),
MemberKind::Mod(m) => {
ItemKind::Member(mem) => match mem.kind(ctx.consts).await {
ParsedMemberKind::Const => (),
ParsedMemberKind::Mod(m) => {
cwd.push(mem.name());
imports_to_aliases(m, cwd, root, alias_map, alias_rev_map, i, rep).boxed_local().await;
imports_to_aliases(m, cwd, root, alias_map, alias_rev_map, ctx).boxed_local().await;
cwd.pop();
},
},
ItemKind::Export(_) | ItemKind::Macro(..) => (),
ItemKind::Export(_) => (),
}
}
}

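imports_to_aliases resolves every import to an absolute target and records it as an alias from the local name, collapsing one level of indirection through alias_map so chains of re-exports point straight at their final target. A minimal standalone sketch of that collapsing insert (plain Strings instead of interned Syms; names invented for the example, not the crate's API):

use std::collections::HashMap;

// Record `src` as an alias for `tgt`, but if `tgt` is itself an alias,
// point `src` straight at its target instead, mirroring the alias_map
// lookup in imports_to_aliases above.
fn add_alias(alias_map: &mut HashMap<String, String>, src: String, mut tgt: String) {
    if let Some(tgt2) = alias_map.get(&tgt) {
        tgt = tgt2.clone();
    }
    if src == tgt {
        // The real code reports a "Circular references" error here.
        return;
    }
    alias_map.insert(src, tgt);
}

fn main() {
    let mut aliases = HashMap::new();
    add_alias(&mut aliases, "b::x".into(), "a::x".into());
    // c::x imports b::x, which is already an alias for a::x.
    add_alias(&mut aliases, "c::x".into(), "b::x".into());
    assert_eq!(aliases.get("c::x").map(String::as_str), Some("a::x"));
}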
pub async fn dealias(module: &mut Module, alias_map: &HashMap<Sym, Sym>, i: &Interner) {
pub async fn dealias(
path: Substack<'_, Tok<String>>,
module: &mut ParsedModule,
alias_map: &HashMap<Sym, Sym>,
ctx: &mut DealiasCtx<'_>,
) {
for item in &mut module.items {
match &mut item.kind {
ItemKind::Export(_) | ItemKind::Import(_) => (),
ItemKind::Member(mem) => match mem.kind_mut().await {
MemberKind::Const(c) => {
let Some(source) = c.source() else { continue };
let Some(new_source) = dealias_mactreev(source, alias_map, i).await else { continue };
c.set_source(new_source);
},
MemberKind::Mod(m) => dealias(m, alias_map, i).boxed_local().await,
ItemKind::Member(mem) => {
let path = path.push(mem.name());
match mem.kind_mut(ctx.consts).await {
ParsedMemberKind::Const => (),
ParsedMemberKind::Mod(m) => dealias(path, m, alias_map, ctx).boxed_local().await,
}
},
ItemKind::Macro(_, rules) =>
for rule in rules.iter_mut() {
let RuleKind::Native(c) = &mut rule.kind else { continue };
let Some(source) = c.source() else { continue };
let Some(new_source) = dealias_mactreev(source, alias_map, i).await else { continue };
c.set_source(new_source);
},
}
}
}

async fn dealias_mactree(
mtree: &MacTree,
aliases: &HashMap<Sym, Sym>,
i: &Interner,
) -> Option<MacTree> {
let new_tok = match &*mtree.tok {
MacTok::Atom(_) | MacTok::Ph(_) => return None,
tok @ (MacTok::Done(_) | MacTok::Ref(_) | MacTok::Slot(_)) => panic!(
"{} should not appear in retained pre-macro source",
take_first(&tok.print(&FmtCtxImpl { i }).await, true)
),
MacTok::Name(n) => MacTok::Name(aliases.get(n).unwrap_or(n).clone()),
MacTok::Lambda(arg, body) => {
match (dealias_mactreev(arg, aliases, i).await, dealias_mactreev(body, aliases, i).await) {
(None, None) => return None,
(Some(arg), None) => MacTok::Lambda(arg, body.clone()),
(None, Some(body)) => MacTok::Lambda(arg.clone(), body),
(Some(arg), Some(body)) => MacTok::Lambda(arg, body),
}
},
MacTok::S(p, b) => MacTok::S(*p, dealias_mactreev(b, aliases, i).await?),
};
Some(MacTree { pos: mtree.pos.clone(), tok: Rc::new(new_tok) })
}

async fn dealias_mactreev(
mtreev: &[MacTree],
aliases: &HashMap<Sym, Sym>,
i: &Interner,
) -> Option<Vec<MacTree>> {
let mut results = Vec::with_capacity(mtreev.len());
let mut any_some = false;
for item in mtreev {
let out = dealias_mactree(item, aliases, i).boxed_local().await;
any_some |= out.is_some();
results.push(out.unwrap_or(item.clone()));
}
any_some.then_some(results)
}

@@ -3,15 +3,13 @@ use std::mem;
use async_std::sync::RwLockWriteGuard;
use bound::Bound;
use futures::FutureExt;
use orchid_base::error::{OrcErrv, mk_errv};
use orchid_base::error::OrcErrv;
use orchid_base::format::{FmtCtxImpl, Format, take_first};
use orchid_base::location::Pos;
use orchid_base::logging::Logger;
use orchid_base::name::NameLike;

use crate::ctx::Ctx;
use crate::expr::{Expr, ExprKind, PathSet, Step};
use crate::tree::{ItemKind, MemberKind, Module, Root};
use crate::expr::{Expr, ExprKind, ExprParseCtx, PathSet, PathSetBuilder, Step};

type ExprGuard = Bound<RwLockWriteGuard<'static, ExprKind>, Expr>;

@@ -38,13 +36,12 @@ pub struct ExecCtx {
cur_pos: Pos,
did_pop: bool,
logger: Logger,
root: Root,
}
impl ExecCtx {
pub async fn new(ctx: Ctx, logger: Logger, root: Root, init: Expr) -> Self {
pub async fn new(ctx: Ctx, logger: Logger, init: Expr) -> Self {
let cur_pos = init.pos();
let cur = Bound::async_new(init, |init| init.kind().write()).await;
Self { ctx, gas: None, stack: vec![], cur, cur_pos, did_pop: false, root, logger }
Self { ctx, gas: None, stack: vec![], cur, cur_pos, did_pop: false, logger }
}
pub fn remaining_gas(&self) -> u64 { self.gas.expect("queried remaining_gas but no gas was set") }
pub fn set_gas(&mut self, gas: Option<u64>) { self.gas = gas }
@@ -92,20 +89,25 @@ impl ExecCtx {
},
ExprKind::Seq(a, b) if !self.did_pop => (ExprKind::Seq(a.clone(), b), StackOp::Push(a)),
ExprKind::Seq(_, b) => (ExprKind::Identity(b), StackOp::Nop),
ExprKind::Const(name) =>
match self.root.get_const_value(name, self.cur_pos.clone(), self.ctx.clone()).await {
ExprKind::Const(name) => {
let root = (self.ctx.root.get().and_then(|v| v.upgrade()))
.expect("Root not assigned before execute call");
match root.get_const_value(name, self.cur_pos.clone(), self.ctx.clone()).await {
Err(e) => (ExprKind::Bottom(e), StackOp::Pop),
Ok(v) => (ExprKind::Identity(v), StackOp::Nop),
},
}
},
ExprKind::Arg => panic!("This should not appear outside function bodies"),
ek @ ExprKind::Atom(_) => (ek, StackOp::Pop),
ExprKind::Bottom(bot) => (ExprKind::Bottom(bot.clone()), StackOp::Unwind(bot)),
ExprKind::Call(f, x) if !self.did_pop => (ExprKind::Call(f.clone(), x), StackOp::Push(f)),
ExprKind::Call(f, x) => match f.try_into_owned_atom().await {
Ok(atom) => {
let mut ext = atom.sys().ext().clone();
let ext = atom.sys().ext().clone();
let x_norm = self.unpack_ident(&x).await;
let val = Expr::from_api(&atom.call(x_norm).await, &mut ext).await;
let mut parse_ctx = ExprParseCtx { ctx: self.ctx.clone(), exprs: ext.exprs().clone() };
let val =
Expr::from_api(&atom.call(x_norm).await, PathSetBuilder::new(), &mut parse_ctx).await;
(ExprKind::Identity(val.clone()), StackOp::Swap(val))
},
Err(f) => match &*f.kind().read().await {
@@ -113,9 +115,16 @@ impl ExecCtx {
panic!("This should not appear outside function bodies"),
ExprKind::Missing => panic!("Should have been replaced"),
ExprKind::Atom(a) => {
let mut ext = a.sys().ext().clone();
let ext = a.sys().ext().clone();
let x_norm = self.unpack_ident(&x).await;
let val = Expr::from_api(&a.clone().call(x_norm).await, &mut ext).await;
let mut parse_ctx =
ExprParseCtx { ctx: ext.ctx().clone(), exprs: ext.exprs().clone() };
let val = Expr::from_api(
&a.clone().call(x_norm).await,
PathSetBuilder::new(),
&mut parse_ctx,
)
.await;
(ExprKind::Identity(val.clone()), StackOp::Swap(val))
},
ExprKind::Bottom(exprv) => (ExprKind::Bottom(exprv.clone()), StackOp::Pop),

@@ -8,23 +8,24 @@ use async_std::sync::RwLock;
|
||||
use futures::FutureExt;
|
||||
use hashbrown::HashSet;
|
||||
use itertools::Itertools;
|
||||
use orchid_base::error::{OrcErrv, mk_errv};
|
||||
use orchid_base::format::{FmtCtx, FmtCtxImpl, FmtUnit, Format, Variants, take_first};
|
||||
use orchid_base::error::OrcErrv;
|
||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::macros::mtreev_fmt;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::tokens::Paren;
|
||||
use orchid_base::tl_cache;
|
||||
use orchid_base::tree::{AtomRepr, indent};
|
||||
use orchid_base::{match_mapping, tl_cache};
|
||||
use substack::Substack;
|
||||
|
||||
use crate::api;
|
||||
use crate::atom::AtomHand;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::extension::Extension;
|
||||
use crate::macros::{MacTok, MacTree};
|
||||
use crate::expr_store::ExprStore;
|
||||
|
||||
pub type ExprParseCtx = Extension;
|
||||
#[derive(Clone)]
|
||||
pub struct ExprParseCtx {
|
||||
pub ctx: Ctx,
|
||||
pub exprs: ExprStore,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ExprData {
|
||||
@@ -55,13 +56,44 @@ impl Expr {
|
||||
.expect("this is a ref, it cannot be null"),
|
||||
)
|
||||
}
|
||||
pub async fn from_api(api: &api::Expression, ctx: &mut ExprParseCtx) -> Self {
|
||||
if let api::ExpressionKind::Slot(tk) = &api.kind {
|
||||
return ctx.exprs().get_expr(*tk).expect("Invalid slot");
|
||||
}
|
||||
let pos = Pos::from_api(&api.location, &ctx.ctx().i).await;
|
||||
let kind = RwLock::new(ExprKind::from_api(&api.kind, pos.clone(), ctx).boxed_local().await);
|
||||
Self(Rc::new(ExprData { pos, kind }))
|
||||
pub async fn from_api(
|
||||
api: &api::Expression,
|
||||
psb: PathSetBuilder<'_, u64>,
|
||||
ctx: &mut ExprParseCtx,
|
||||
) -> Self {
|
||||
let pos = Pos::from_api(&api.location, &ctx.ctx.i).await;
|
||||
let kind = match &api.kind {
|
||||
api::ExpressionKind::Arg(n) => {
|
||||
assert!(psb.register_arg(&n), "Arguments must be enclosed in a matching lambda");
|
||||
ExprKind::Arg
|
||||
},
|
||||
api::ExpressionKind::Bottom(bot) =>
|
||||
ExprKind::Bottom(OrcErrv::from_api(bot, &ctx.ctx.i).await),
|
||||
api::ExpressionKind::Call(f, x) => {
|
||||
let (lpsb, rpsb) = psb.split();
|
||||
ExprKind::Call(
|
||||
Expr::from_api(&f, lpsb, ctx).boxed_local().await,
|
||||
Expr::from_api(&x, rpsb, ctx).boxed_local().await,
|
||||
)
|
||||
},
|
||||
api::ExpressionKind::Const(name) => ExprKind::Const(Sym::from_api(*name, &ctx.ctx.i).await),
|
||||
api::ExpressionKind::Lambda(x, body) => {
|
||||
let lbuilder = psb.lambda(&x);
|
||||
let body = Expr::from_api(&body, lbuilder.stack(), ctx).boxed_local().await;
|
||||
ExprKind::Lambda(lbuilder.collect(), body)
|
||||
},
|
||||
api::ExpressionKind::NewAtom(a) =>
|
||||
ExprKind::Atom(AtomHand::from_api(a, pos.clone(), &mut ctx.ctx.clone()).await),
|
||||
api::ExpressionKind::Slot(tk) => return ctx.exprs.get_expr(*tk).expect("Invalid slot"),
|
||||
api::ExpressionKind::Seq(a, b) => {
|
||||
let (apsb, bpsb) = psb.split();
|
||||
ExprKind::Seq(
|
||||
Expr::from_api(&a, apsb, ctx).boxed_local().await,
|
||||
Expr::from_api(&b, bpsb, ctx).boxed_local().await,
|
||||
)
|
||||
},
|
||||
};
|
||||
Self(Rc::new(ExprData { pos, kind: RwLock::new(kind) }))
|
||||
}
|
||||
pub async fn to_api(&self) -> api::InspectedKind {
|
||||
use api::InspectedKind as K;
|
||||
@@ -107,23 +139,6 @@ pub enum ExprKind {
|
||||
Missing,
|
||||
}
|
||||
impl ExprKind {
|
||||
pub async fn from_api(api: &api::ExpressionKind, pos: Pos, ctx: &mut ExprParseCtx) -> Self {
|
||||
match_mapping!(api, api::ExpressionKind => ExprKind {
|
||||
Lambda(id => PathSet::from_api(*id, api), b => Expr::from_api(b, ctx).await),
|
||||
Bottom(b => OrcErrv::from_api(b, &ctx.ctx().i).await),
|
||||
Call(f => Expr::from_api(f, ctx).await, x => Expr::from_api(x, ctx).await),
|
||||
Const(c => Sym::from_api(*c, &ctx.ctx().i).await),
|
||||
Seq(a => Expr::from_api(a, ctx).await, b => Expr::from_api(b, ctx).await),
|
||||
} {
|
||||
api::ExpressionKind::Arg(_) => ExprKind::Arg,
|
||||
api::ExpressionKind::NewAtom(a) => ExprKind::Atom(AtomHand::from_api(
|
||||
a,
|
||||
pos,
|
||||
&mut ctx.ctx().clone()
|
||||
).await),
|
||||
api::ExpressionKind::Slot(_) => panic!("Handled in Expr"),
|
||||
})
|
||||
}
|
||||
pub fn at(self, pos: Pos) -> Expr { Expr(Rc::new(ExprData { pos, kind: RwLock::new(self) })) }
|
||||
}
|
||||
impl Format for ExprKind {
|
||||
@@ -174,6 +189,93 @@ pub enum Step {
Right,
}

#[derive(Clone)]
pub enum PathSetFrame<'a, T: PartialEq> {
Lambda(&'a T, &'a RefCell<Option<PathSet>>),
Step(Step),
}

#[derive(Clone)]
pub struct PathSetBuilder<'a, T: PartialEq>(Substack<'a, PathSetFrame<'a, T>>);
impl<'a, T: PartialEq> PathSetBuilder<'a, T> {
pub fn new() -> Self { Self(Substack::Bottom) }
pub fn split(&'a self) -> (Self, Self) {
(
Self(self.0.push(PathSetFrame::Step(Step::Left))),
Self(self.0.push(PathSetFrame::Step(Step::Right))),
)
}
pub fn lambda<'b>(self, arg: &'b T) -> LambdaBuilder<'b, T>
where 'a: 'b {
LambdaBuilder { arg, path: RefCell::default(), stack: self }
}
/// Register an argument with the corresponding lambda and return true if one
/// was found. (if false is returned, the name is unbound and may refer to a
/// global)
pub fn register_arg(self, t: &T) -> bool {
let mut steps = VecDeque::new();
for step in self.0.iter() {
match step {
PathSetFrame::Step(step) => steps.push_front(*step),
PathSetFrame::Lambda(name, _) if **name != *t => (),
PathSetFrame::Lambda(_, cell) => {
let mut ps_opt = cell.borrow_mut();
match &mut *ps_opt {
val @ None => *val = Some(PathSet { steps: steps.into(), next: None }),
Some(val) => {
let mut swap = PathSet { steps: Vec::new(), next: None };
mem::swap(&mut swap, val);
*val = merge(swap, &Vec::from(steps));
},
}
return true;
},
};
}
return false;
fn merge(ps: PathSet, steps: &[Step]) -> PathSet {
let diff_idx = ps.steps.iter().zip(steps).take_while(|(l, r)| l == r).count();
if diff_idx == ps.steps.len() {
if diff_idx == steps.len() {
match ps.next {
Some(_) => panic!("New path ends where old path forks"),
None => panic!("New path same as old path"),
}
}
let Some((left, right)) = ps.next else { panic!("Old path ends where new path continues") };
let next = match steps[diff_idx] {
Step::Left => Some((Box::new(merge(*left, &steps[diff_idx + 1..])), right)),
Step::Right => Some((left, Box::new(merge(*right, &steps[diff_idx + 1..])))),
};
PathSet { steps: ps.steps, next }
} else {
let shared_steps = ps.steps.iter().take(diff_idx).cloned().collect();
let main_steps = ps.steps.iter().skip(diff_idx + 1).cloned().collect();
let new_branch = steps[diff_idx + 1..].to_vec();
let main_side = PathSet { steps: main_steps, next: ps.next };
let new_side = PathSet { steps: new_branch, next: None };
let (left, right) = match steps[diff_idx] {
Step::Left => (new_side, main_side),
Step::Right => (main_side, new_side),
};
PathSet { steps: shared_steps, next: Some((Box::new(left), Box::new(right))) }
}
}
}
}

pub struct LambdaBuilder<'a, T: PartialEq> {
arg: &'a T,
path: RefCell<Option<PathSet>>,
stack: PathSetBuilder<'a, T>,
}
impl<'a, T: PartialEq> LambdaBuilder<'a, T> {
pub fn stack(&'a self) -> PathSetBuilder<'a, T> {
PathSetBuilder(self.stack.0.push(PathSetFrame::Lambda(self.arg, &self.path)))
}
pub fn collect(self) -> Option<PathSet> { self.path.into_inner() }
}

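PathSetBuilder and LambdaBuilder above record the Left/Right route from a lambda down to every occurrence of its argument, and register_arg merges those routes into a PathSet that keeps the shared prefix and forks where occurrences diverge. A standalone sketch of that merge on the body of \x. f x x (simplified, with invented helper names; it mirrors, but is not, the crate's implementation):

#[derive(Clone, Copy, Debug, PartialEq)]
enum Step { Left, Right }

#[derive(Debug)]
struct PathSet {
    steps: Vec<Step>,
    next: Option<(Box<PathSet>, Box<PathSet>)>,
}

// A path set with a single occurrence at the end of `steps`.
fn single(steps: &[Step]) -> PathSet {
    PathSet { steps: steps.to_vec(), next: None }
}

// Merge a new occurrence path into an existing set: keep the common prefix,
// then either descend into the existing fork or create a new fork at the
// first divergent step (same shape as the merge above).
fn merge(ps: PathSet, steps: &[Step]) -> PathSet {
    let diff_idx = ps.steps.iter().zip(steps).take_while(|(l, r)| l == r).count();
    if diff_idx == ps.steps.len() {
        // Old prefix exhausted: continue into the existing fork.
        let (left, right) = ps.next.expect("old path ends where new path continues");
        let next = match steps[diff_idx] {
            Step::Left => Some((Box::new(merge(*left, &steps[diff_idx + 1..])), right)),
            Step::Right => Some((left, Box::new(merge(*right, &steps[diff_idx + 1..])))),
        };
        PathSet { steps: ps.steps, next }
    } else {
        // Paths diverge inside the old prefix: split it and fork there.
        let shared = ps.steps[..diff_idx].to_vec();
        let main_side = PathSet { steps: ps.steps[diff_idx + 1..].to_vec(), next: ps.next };
        let new_side = single(&steps[diff_idx + 1..]);
        let (left, right) = match steps[diff_idx] {
            Step::Left => (new_side, main_side),
            Step::Right => (main_side, new_side),
        };
        PathSet { steps: shared, next: Some((Box::new(left), Box::new(right))) }
    }
}

fn main() {
    // Occurrences of `x` in the body of `\x. f x x`, i.e. Call(Call(f, x), x):
    // the first is reached via [Left, Right], the second via [Right].
    let ps = merge(single(&[Step::Left, Step::Right]), &[Step::Right]);
    // They share no prefix, so the set forks right at the root application.
    assert!(ps.steps.is_empty());
    let (left, right) = ps.next.expect("fork at the root");
    assert_eq!(left.steps, vec![Step::Right]);
    assert!(right.steps.is_empty());
    println!("merged: {left:?} | {right:?}");
}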
#[derive(Clone, Debug)]
|
||||
pub struct PathSet {
|
||||
/// The single steps through [super::nort::Clause::Apply]
|
||||
@@ -186,32 +288,6 @@ impl PathSet {
|
||||
pub fn next(&self) -> Option<(&PathSet, &PathSet)> {
|
||||
self.next.as_ref().map(|(l, r)| (&**l, &**r))
|
||||
}
|
||||
pub fn from_api(id: u64, api: &api::ExpressionKind) -> Option<Self> {
|
||||
use api::ExpressionKind as K;
|
||||
struct Suffix(VecDeque<Step>, Option<(Box<PathSet>, Box<PathSet>)>);
|
||||
fn seal(Suffix(steps, next): Suffix) -> PathSet { PathSet { steps: steps.into(), next } }
|
||||
fn after(step: Step, mut suf: Suffix) -> Suffix {
|
||||
suf.0.push_front(step);
|
||||
suf
|
||||
}
|
||||
return from_api_inner(id, api).map(seal);
|
||||
fn from_api_inner(id: u64, api: &api::ExpressionKind) -> Option<Suffix> {
|
||||
match &api {
|
||||
K::Arg(id2) => (id == *id2).then_some(Suffix(VecDeque::new(), None)),
|
||||
K::Bottom(_) | K::Const(_) | K::NewAtom(_) | K::Slot(_) => None,
|
||||
K::Lambda(_, b) => from_api_inner(id, &b.kind),
|
||||
K::Call(l, r) | K::Seq(l, r) => {
|
||||
match (from_api_inner(id, &l.kind), from_api_inner(id, &r.kind)) {
|
||||
(Some(a), Some(b)) =>
|
||||
Some(Suffix(VecDeque::new(), Some((Box::new(seal(a)), Box::new(seal(b)))))),
|
||||
(Some(l), None) => Some(after(Step::Left, l)),
|
||||
(None, Some(r)) => Some(after(Step::Right, r)),
|
||||
(None, None) => None,
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
impl fmt::Display for PathSet {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
@@ -239,121 +315,3 @@ pub struct WeakExpr(Weak<ExprData>);
|
||||
impl WeakExpr {
|
||||
pub fn upgrade(&self) -> Option<Expr> { self.0.upgrade().map(Expr) }
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum SrcToExprStep<'a> {
|
||||
Left,
|
||||
Right,
|
||||
Lambda(Sym, &'a RefCell<Option<PathSet>>),
|
||||
}
|
||||
|
||||
pub async fn mtreev_to_expr(
|
||||
src: &[MacTree],
|
||||
stack: Substack<'_, SrcToExprStep<'_>>,
|
||||
ctx: &Ctx,
|
||||
) -> ExprKind {
|
||||
let Some((x, f)) = src.split_last() else { panic!("Empty expression cannot be evaluated") };
|
||||
let x_stack = if f.is_empty() { stack.clone() } else { stack.push(SrcToExprStep::Right) };
|
||||
let x_kind = match &*x.tok {
|
||||
MacTok::Atom(a) => ExprKind::Atom(a.clone()),
|
||||
MacTok::Name(n) => 'name: {
|
||||
let mut steps = VecDeque::new();
|
||||
for step in x_stack.iter() {
|
||||
match step {
|
||||
SrcToExprStep::Left => steps.push_front(Step::Left),
|
||||
SrcToExprStep::Right => steps.push_front(Step::Right),
|
||||
SrcToExprStep::Lambda(name, _) if name != n => continue,
|
||||
SrcToExprStep::Lambda(_, cell) => {
|
||||
let mut ps = cell.borrow_mut();
|
||||
match &mut *ps {
|
||||
val @ None => *val = Some(PathSet { steps: steps.into(), next: None }),
|
||||
Some(val) => {
|
||||
let mut swap = PathSet { steps: Vec::new(), next: None };
|
||||
mem::swap(&mut swap, val);
|
||||
*val = merge(swap, &Vec::from(steps));
|
||||
fn merge(ps: PathSet, steps: &[Step]) -> PathSet {
|
||||
let diff_idx = ps.steps.iter().zip(steps).take_while(|(l, r)| l == r).count();
|
||||
if diff_idx == ps.steps.len() {
|
||||
if diff_idx == steps.len() {
|
||||
match ps.next {
|
||||
Some(_) => panic!("New path ends where old path forks"),
|
||||
None => panic!("New path same as old path"),
|
||||
}
|
||||
}
|
||||
let Some((left, right)) = ps.next else {
|
||||
panic!("Old path ends where new path continues")
|
||||
};
|
||||
let next = match steps[diff_idx] {
|
||||
Step::Left => Some((Box::new(merge(*left, &steps[diff_idx + 1..])), right)),
|
||||
Step::Right => Some((left, Box::new(merge(*right, &steps[diff_idx + 1..])))),
|
||||
};
|
||||
PathSet { steps: ps.steps, next }
|
||||
} else {
|
||||
let shared_steps = ps.steps.iter().take(diff_idx).cloned().collect();
|
||||
let main_steps = ps.steps.iter().skip(diff_idx + 1).cloned().collect();
|
||||
let new_branch = steps[diff_idx + 1..].to_vec();
|
||||
let main_side = PathSet { steps: main_steps, next: ps.next };
|
||||
let new_side = PathSet { steps: new_branch, next: None };
|
||||
let (left, right) = match steps[diff_idx] {
|
||||
Step::Left => (new_side, main_side),
|
||||
Step::Right => (main_side, new_side),
|
||||
};
|
||||
PathSet { steps: shared_steps, next: Some((Box::new(left), Box::new(right))) }
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
break 'name ExprKind::Arg;
|
||||
},
|
||||
}
|
||||
}
|
||||
ExprKind::Const(n.clone())
|
||||
},
|
||||
MacTok::Ph(_) | MacTok::Done(_) | MacTok::Ref(_) | MacTok::Slot(_) =>
|
||||
ExprKind::Bottom(mk_errv(
|
||||
ctx.i.i("placeholder in value").await,
|
||||
"Placeholders cannot appear anywhere outside macro patterns",
|
||||
[x.pos.clone().into()],
|
||||
)),
|
||||
MacTok::S(Paren::Round, b) if b.is_empty() =>
|
||||
return ExprKind::Bottom(mk_errv(
|
||||
ctx.i.i("Empty expression").await,
|
||||
"Empty parens () are illegal",
|
||||
[x.pos.clone().into()],
|
||||
)),
|
||||
MacTok::S(Paren::Round, b) => mtreev_to_expr(b, x_stack, ctx).boxed_local().await,
|
||||
MacTok::S(..) => ExprKind::Bottom(mk_errv(
|
||||
ctx.i.i("non-round parentheses after macros").await,
|
||||
"[] or {} block was not consumed by macros; expressions may only contain ()",
|
||||
[x.pos.clone().into()],
|
||||
)),
|
||||
MacTok::Lambda(_, b) if b.is_empty() =>
|
||||
return ExprKind::Bottom(mk_errv(
|
||||
ctx.i.i("Empty lambda").await,
|
||||
"Lambdas must have a body",
|
||||
[x.pos.clone().into()],
|
||||
)),
|
||||
MacTok::Lambda(arg, b) => 'lambda_converter: {
|
||||
if let [MacTree { tok, .. }] = &**arg {
|
||||
if let MacTok::Name(n) = &**tok {
|
||||
let path = RefCell::new(None);
|
||||
let b = mtreev_to_expr(b, x_stack.push(SrcToExprStep::Lambda(n.clone(), &path)), ctx)
|
||||
.boxed_local()
|
||||
.await;
|
||||
break 'lambda_converter ExprKind::Lambda(path.into_inner(), b.at(x.pos.clone()));
|
||||
}
|
||||
}
|
||||
let argstr = take_first(&mtreev_fmt(arg, &FmtCtxImpl { i: &ctx.i }).await, true);
|
||||
ExprKind::Bottom(mk_errv(
|
||||
ctx.i.i("Malformeed lambda").await,
|
||||
format!("Lambda argument should be single name, found {argstr}"),
|
||||
[x.pos.clone().into()],
|
||||
))
|
||||
},
|
||||
};
|
||||
if f.is_empty() {
|
||||
return x_kind;
|
||||
}
|
||||
let f = mtreev_to_expr(f, stack.push(SrcToExprStep::Left), ctx).boxed_local().await;
|
||||
ExprKind::Call(f.at(Pos::None), x_kind.at(x.pos.clone()))
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
use std::cell::RefCell;
use std::fmt;
use std::rc::Rc;

use hashbrown::HashMap;
use hashbrown::hash_map::Entry;
@@ -8,10 +9,18 @@ use crate::api;
use crate::expr::Expr;

#[derive(Default)]
pub struct ExprStore(RefCell<HashMap<api::ExprTicket, (u32, Expr)>>);
pub struct ExprStoreData {
exprs: RefCell<HashMap<api::ExprTicket, (u32, Expr)>>,
parent: Option<ExprStore>,
}
#[derive(Clone, Default)]
pub struct ExprStore(Rc<ExprStoreData>);
impl ExprStore {
pub fn derive(&self) -> Self {
Self(Rc::new(ExprStoreData { exprs: RefCell::default(), parent: Some(self.clone()) }))
}
pub fn give_expr(&self, expr: Expr) {
match self.0.borrow_mut().entry(expr.id()) {
match self.0.exprs.borrow_mut().entry(expr.id()) {
Entry::Occupied(mut oe) => oe.get_mut().0 += 1,
Entry::Vacant(v) => {
v.insert((1, expr));
@@ -19,16 +28,17 @@ impl ExprStore {
}
}
pub fn take_expr(&self, ticket: api::ExprTicket) {
(self.0.borrow_mut().entry(ticket))
(self.0.exprs.borrow_mut().entry(ticket))
.and_replace_entry_with(|_, (rc, rt)| (1 < rc).then_some((rc - 1, rt)));
}
pub fn get_expr(&self, ticket: api::ExprTicket) -> Option<Expr> {
self.0.borrow().get(&ticket).map(|(_, expr)| expr.clone())
(self.0.exprs.borrow().get(&ticket).map(|(_, expr)| expr.clone()))
.or_else(|| self.0.parent.as_ref()?.get_expr(ticket))
}
}
impl fmt::Display for ExprStore {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let r = self.0.borrow();
let r = self.0.exprs.borrow();
let rc: u32 = r.values().map(|v| v.0).sum();
write!(f, "Store holding {rc} refs to {} exprs", r.len())
}

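ExprStore::derive above layers a child store over the shared one: the child keeps its own reference-counted entries and get_expr falls back to the parent chain, so tickets issued against the common store stay resolvable from extension-local stores. A minimal standalone sketch of that lookup chain (invented names; u64 tickets and String payloads stand in for the real types):

use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;

#[derive(Default)]
struct StoreData {
    entries: RefCell<HashMap<u64, (u32, String)>>,
    parent: Option<Store>,
}

#[derive(Clone, Default)]
struct Store(Rc<StoreData>);

impl Store {
    // A child store that shares nothing but falls back to `self` on lookup.
    fn derive(&self) -> Store {
        Store(Rc::new(StoreData { entries: RefCell::default(), parent: Some(self.clone()) }))
    }
    // Insert or bump the refcount of a ticket, like give_expr.
    fn give(&self, ticket: u64, value: String) {
        let mut map = self.0.entries.borrow_mut();
        map.entry(ticket).and_modify(|e| e.0 += 1).or_insert((1, value));
    }
    // Local lookup first, then walk up the parent chain, like get_expr.
    fn get(&self, ticket: u64) -> Option<String> {
        (self.0.entries.borrow().get(&ticket).map(|(_, v)| v.clone()))
            .or_else(|| self.0.parent.as_ref()?.get(ticket))
    }
}

fn main() {
    let common = Store::default();
    common.give(1, "shared expr".to_string());
    let ext = common.derive();
    ext.give(2, "extension-local expr".to_string());
    // The derived store sees both its own entries and the parent's.
    assert_eq!(ext.get(1).as_deref(), Some("shared expr"));
    assert_eq!(ext.get(2).as_deref(), Some("extension-local expr"));
    // The parent does not see entries added only to the child.
    assert!(common.get(2).is_none());
}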
@@ -13,7 +13,7 @@ use futures::future::{join, join_all};
|
||||
use futures::{FutureExt, StreamExt, stream, stream_select};
|
||||
use hashbrown::HashMap;
|
||||
use itertools::Itertools;
|
||||
use orchid_api::HostMsgSet;
|
||||
use orchid_api::{HostMsgSet, LsModule};
|
||||
use orchid_api_traits::Request;
|
||||
use orchid_base::builtin::ExtInit;
|
||||
use orchid_base::clone;
|
||||
@@ -45,7 +45,6 @@ pub struct ExtensionData {
|
||||
exprs: ExprStore,
|
||||
exiting_snd: Sender<()>,
|
||||
lex_recur: Mutex<HashMap<api::ParsId, channel::Sender<ReqPair<api::SubLex>>>>,
|
||||
mac_recur: Mutex<HashMap<api::ParsId, channel::Sender<ReqPair<api::RunMacros>>>>,
|
||||
}
|
||||
impl Drop for ExtensionData {
|
||||
fn drop(&mut self) {
|
||||
@@ -79,7 +78,7 @@ impl Extension {
|
||||
})));
|
||||
ExtensionData {
|
||||
exiting_snd,
|
||||
exprs: ExprStore::default(),
|
||||
exprs: ctx.common_exprs.derive(),
|
||||
ctx: ctx.clone(),
|
||||
systems: (init.systems.iter().cloned())
|
||||
.map(|decl| SystemCtor { decl, ext: WeakExtension(weak.clone()) })
|
||||
@@ -88,7 +87,6 @@ impl Extension {
|
||||
init: init.clone(),
|
||||
next_pars: RefCell::new(NonZeroU64::new(1).unwrap()),
|
||||
lex_recur: Mutex::default(),
|
||||
mac_recur: Mutex::default(),
|
||||
reqnot: ReqNot::new(
|
||||
msg_logger,
|
||||
move |sfn, _| clone!(init; Box::pin(async move { init.send(sfn).await })),
|
||||
@@ -169,12 +167,8 @@ impl Extension {
|
||||
})
|
||||
.await
|
||||
},
|
||||
api::ExtHostReq::RunMacros(rm) => {
|
||||
let (rep_in, rep_out) = channel::bounded(1);
|
||||
let lex_g = this.0.mac_recur.lock().await;
|
||||
let req_in = lex_g.get(&rm.run_id).expect("Sublex for nonexistent lexid");
|
||||
req_in.send(ReqPair(rm.clone(), rep_in)).await.unwrap();
|
||||
hand.handle(&rm, &rep_out.recv().await.unwrap()).await
|
||||
api::ExtHostReq::LsModule(ref ls @ LsModule(ref sys, ref path)) => {
|
||||
todo!() // TODO
|
||||
},
|
||||
api::ExtHostReq::ExtAtomPrint(ref eap @ api::ExtAtomPrint(ref atom)) => {
|
||||
let atom = AtomHand::new(atom.clone(), &ctx).await;
|
||||
|
||||
@@ -1,30 +1,25 @@
|
||||
use std::num::NonZeroU64;
|
||||
use std::sync::Arc;
|
||||
|
||||
use async_std::sync::Mutex;
|
||||
use futures::FutureExt;
|
||||
use hashbrown::HashMap;
|
||||
use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::match_mapping;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::number::{num_to_err, parse_num};
|
||||
use orchid_base::parse::{name_char, name_start, op_char, unrep_space};
|
||||
use orchid_base::tokens::PARENS;
|
||||
use orchid_base::tree::{AtomRepr, Ph};
|
||||
use orchid_base::tree::recur;
|
||||
|
||||
use crate::api;
|
||||
use crate::atom::AtomHand;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::expr::{Expr, ExprParseCtx};
|
||||
use crate::parsed::{ParsTok, ParsTokTree};
|
||||
use crate::system::System;
|
||||
use crate::tree::{ParsTok, ParsTokTree};
|
||||
|
||||
pub struct LexCtx<'a> {
|
||||
pub systems: &'a [System],
|
||||
pub source: &'a Tok<String>,
|
||||
pub tail: &'a str,
|
||||
pub sub_trees: &'a mut HashMap<api::TreeTicket, ParsTokTree>,
|
||||
pub sub_trees: &'a mut Vec<Expr>,
|
||||
pub ctx: &'a Ctx,
|
||||
}
|
||||
impl<'a> LexCtx<'a> {
|
||||
@@ -50,13 +45,24 @@ impl<'a> LexCtx<'a> {
|
||||
}
|
||||
false
|
||||
}
|
||||
pub fn add_subtree(&mut self, subtree: ParsTokTree) -> api::TreeTicket {
|
||||
let next_idx = api::TreeTicket(NonZeroU64::new(self.sub_trees.len() as u64 + 1).unwrap());
|
||||
self.sub_trees.insert(next_idx, subtree);
|
||||
next_idx
|
||||
pub async fn ser_subtree(&mut self, subtree: ParsTokTree) -> api::TokenTree {
|
||||
let mut exprs = self.ctx.common_exprs.clone();
|
||||
let foo = recur(subtree, &|tt, r| {
|
||||
if let ParsTok::NewExpr(expr) = tt.tok {
|
||||
return ParsTok::Handle(expr).at(tt.range);
|
||||
}
|
||||
r(tt)
|
||||
});
|
||||
foo.into_api(&mut exprs, &mut ()).await
|
||||
}
|
||||
pub fn rm_subtree(&mut self, ticket: api::TreeTicket) -> ParsTokTree {
|
||||
self.sub_trees.remove(&ticket).unwrap()
|
||||
pub async fn des_subtree(&mut self, tree: &api::TokenTree) -> ParsTokTree {
|
||||
ParsTokTree::from_api(
|
||||
&tree,
|
||||
&mut self.ctx.common_exprs.clone(),
|
||||
&mut ExprParseCtx { ctx: self.ctx.clone(), exprs: self.ctx.common_exprs.clone() },
|
||||
&self.ctx.i,
|
||||
)
|
||||
.await
|
||||
}
|
||||
pub fn strip_char(&mut self, tgt: char) -> bool {
|
||||
if let Some(src) = self.tail.strip_prefix(tgt) {
|
||||
@@ -86,8 +92,12 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
||||
);
|
||||
let tok = if ctx.strip_prefix("\r\n") || ctx.strip_prefix("\r") || ctx.strip_prefix("\n") {
|
||||
ParsTok::BR
|
||||
} else if ctx.strip_prefix("::") {
|
||||
ParsTok::NS
|
||||
} else if let Some(tail) = (ctx.tail.starts_with(name_start).then_some(ctx.tail))
|
||||
.and_then(|t| t.trim_start_matches(name_char).strip_prefix("::"))
|
||||
{
|
||||
let name = &ctx.tail[..ctx.tail.len() - tail.len() - "::".len()];
|
||||
let body = lex_once(ctx).boxed_local().await?;
|
||||
ParsTok::NS(ctx.ctx.i.i(name).await, Box::new(body))
|
||||
} else if ctx.strip_prefix("--[") {
|
||||
let Some((cmt, tail)) = ctx.tail.split_once("]--") else {
|
||||
return Err(mk_errv(
|
||||
@@ -132,20 +142,6 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
||||
ctx.trim_ws();
|
||||
}
|
||||
ParsTok::S(*paren, body)
|
||||
} else if ctx.strip_prefix("macro")
|
||||
&& !ctx.tail.chars().next().is_some_and(|x| x.is_ascii_alphabetic())
|
||||
{
|
||||
ctx.strip_prefix("macro");
|
||||
if ctx.strip_char('(') {
|
||||
let pos = ctx.get_pos();
|
||||
let numstr = ctx.get_start_matches(|x| x != ')').trim();
|
||||
match parse_num(numstr) {
|
||||
Ok(num) => ParsTok::Macro(Some(num.to_f64())),
|
||||
Err(e) => return Err(num_to_err(e, pos, &ctx.ctx.i).await.into()),
|
||||
}
|
||||
} else {
|
||||
ParsTok::Macro(None)
|
||||
}
|
||||
} else {
|
||||
for sys in ctx.systems {
|
||||
let mut errors = Vec::new();
|
||||
@@ -157,7 +153,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
||||
.lex(source, pos, |pos| async move {
|
||||
let mut ctx_g = ctx_lck.lock().await;
|
||||
match lex_once(&mut ctx_g.push(pos)).boxed_local().await {
|
||||
Ok(t) => Some(api::SubLexed { pos: t.range.end, ticket: ctx_g.add_subtree(t) }),
|
||||
Ok(t) => Some(api::SubLexed { pos: t.range.end, tree: ctx_g.ser_subtree(t).await }),
|
||||
Err(e) => {
|
||||
errors_lck.lock().await.push(e);
|
||||
None
|
||||
@@ -172,7 +168,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
||||
),
|
||||
Ok(Some(lexed)) => {
|
||||
ctx.set_pos(lexed.pos);
|
||||
return Ok(tt_to_owned(&lexed.expr, ctx).await);
|
||||
return Ok(ctx.des_subtree(&lexed.expr).await);
|
||||
},
|
||||
Ok(None) => match errors.into_iter().reduce(|a, b| a + b) {
|
||||
Some(errors) => return Err(errors),
|
||||
@@ -196,39 +192,8 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
||||
Ok(ParsTokTree { tok, range: start..ctx.get_pos() })
|
||||
}
|
||||
|
||||
async fn tt_to_owned(api: &api::TokenTree, ctx: &mut LexCtx<'_>) -> ParsTokTree {
|
||||
let tok = match_mapping!(&api.token, api::Token => ParsTok {
|
||||
Atom(atom =>
|
||||
AtomHand::from_api(atom, Pos::Range(api.range.clone()), &mut ctx.ctx.clone()).await
|
||||
),
|
||||
Bottom(err => OrcErrv::from_api(err, &ctx.ctx.i).await),
|
||||
LambdaHead(arg => ttv_to_owned(arg, ctx).boxed_local().await),
|
||||
Name(name => Tok::from_api(*name, &ctx.ctx.i).await),
|
||||
Reference(tstrv => Sym::from_api(*tstrv, &ctx.ctx.i).await),
|
||||
S(p.clone(), b => ttv_to_owned(b, ctx).boxed_local().await),
|
||||
BR, NS,
|
||||
Comment(c.clone()),
|
||||
Ph(ph => Ph::from_api(ph, &ctx.ctx.i).await),
|
||||
Macro(*prio),
|
||||
} {
|
||||
api::Token::Slot(id) => return ctx.rm_subtree(*id),
|
||||
});
|
||||
ParsTokTree { range: api.range.clone(), tok }
|
||||
}
|
||||
|
||||
async fn ttv_to_owned<'a>(
|
||||
api: impl IntoIterator<Item = &'a api::TokenTree>,
|
||||
ctx: &mut LexCtx<'_>,
|
||||
) -> Vec<ParsTokTree> {
|
||||
let mut out = Vec::new();
|
||||
for tt in api {
|
||||
out.push(tt_to_owned(tt, ctx).await)
|
||||
}
|
||||
out
|
||||
}
|
||||
|
||||
pub async fn lex(text: Tok<String>, systems: &[System], ctx: &Ctx) -> OrcRes<Vec<ParsTokTree>> {
|
||||
let mut sub_trees = HashMap::new();
|
||||
let mut sub_trees = Vec::new();
|
||||
let mut ctx = LexCtx { source: &text, sub_trees: &mut sub_trees, tail: &text[..], systems, ctx };
|
||||
let mut tokv = Vec::new();
|
||||
ctx.trim(unrep_space);
|
||||
|
||||
@@ -8,9 +8,8 @@ pub mod expr;
|
||||
pub mod expr_store;
|
||||
pub mod extension;
|
||||
pub mod lex;
|
||||
pub mod macros;
|
||||
pub mod parse;
|
||||
pub mod rule;
|
||||
pub mod parsed;
|
||||
pub mod subprocess;
|
||||
pub mod system;
|
||||
pub mod tree;
|
||||
|
||||
@@ -1,102 +0,0 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use futures::FutureExt;
|
||||
use hashbrown::{HashMap, HashSet};
|
||||
use itertools::Itertools;
|
||||
use orchid_base::clone;
|
||||
use orchid_base::macros::{MTok, MTree, mtreev_from_api, mtreev_to_api};
|
||||
use orchid_base::name::Sym;
|
||||
use trait_set::trait_set;
|
||||
|
||||
use crate::api;
|
||||
use crate::atom::AtomHand;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::rule::state::MatchState;
|
||||
use crate::tree::Code;
|
||||
|
||||
pub type MacTok = MTok<'static, AtomHand>;
|
||||
pub type MacTree = MTree<'static, AtomHand>;
|
||||
|
||||
trait_set! {
|
||||
trait MacroCB = Fn(Vec<MacTree>) -> Option<Vec<MacTree>>;
|
||||
}
|
||||
|
||||
type Slots = HashMap<api::MacroTreeId, Rc<MacTok>>;
|
||||
|
||||
pub async fn macro_treev_to_api(mtree: Vec<MacTree>, slots: &mut Slots) -> Vec<api::MacroTree> {
|
||||
mtreev_to_api(&mtree, &mut |a: &AtomHand| {
|
||||
let id = api::MacroTreeId((slots.len() as u64 + 1).try_into().unwrap());
|
||||
slots.insert(id, Rc::new(MacTok::Atom(a.clone())));
|
||||
async move { api::MacroToken::Slot(id) }.boxed_local()
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn macro_treev_from_api(api: Vec<api::MacroTree>, ctx: Ctx) -> Vec<MacTree> {
|
||||
mtreev_from_api(&api, &ctx.clone().i, &mut async move |atom| {
|
||||
MacTok::Atom(AtomHand::new(atom.clone(), &ctx).await)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub fn deslot_macro(tree: &[MacTree], slots: &mut Slots) -> Option<Vec<MacTree>> {
|
||||
return work(slots, tree);
|
||||
fn work(
|
||||
slots: &mut HashMap<api::MacroTreeId, Rc<MacTok>>,
|
||||
tree: &[MacTree],
|
||||
) -> Option<Vec<MacTree>> {
|
||||
let items = (tree.iter())
|
||||
.map(|t| {
|
||||
Some(MacTree {
|
||||
tok: match &*t.tok {
|
||||
MacTok::Atom(_) | MacTok::Name(_) | MacTok::Ph(_) => return None,
|
||||
MacTok::Ref(_) => panic!("Ref is an extension-local optimization"),
|
||||
MacTok::Done(_) => panic!("Created and removed by matcher"),
|
||||
MacTok::Slot(slot) => slots.get(&slot.id()).expect("Slot not found").clone(),
|
||||
MacTok::S(paren, b) => Rc::new(MacTok::S(*paren, work(slots, b)?)),
|
||||
MacTok::Lambda(a, b) => Rc::new(match (work(slots, a), work(slots, b)) {
|
||||
(None, None) => return None,
|
||||
(Some(a), None) => MacTok::Lambda(a, b.clone()),
|
||||
(None, Some(b)) => MacTok::Lambda(a.clone(), b),
|
||||
(Some(a), Some(b)) => MacTok::Lambda(a, b),
|
||||
}),
|
||||
},
|
||||
pos: t.pos.clone(),
|
||||
})
|
||||
})
|
||||
.collect_vec();
|
||||
let any_changed = items.iter().any(Option::is_some);
|
||||
any_changed.then(|| {
|
||||
(items.into_iter().enumerate())
|
||||
.map(|(i, opt)| opt.unwrap_or_else(|| tree[i].clone()))
|
||||
.collect_vec()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Macro<Matcher> {
|
||||
deps: HashSet<Sym>,
|
||||
cases: Vec<(Matcher, Code)>,
|
||||
}
|
||||
|
||||
fn fill_lexicon(tgt: &MacTree, lexicon: &mut HashSet<Sym>) {
|
||||
match &*tgt.tok {
|
||||
MTok::Name(n) => {
|
||||
lexicon.insert(n.clone());
|
||||
},
|
||||
MTok::Lambda(arg, body) => {
|
||||
arg.iter().for_each(|t| fill_lexicon(t, lexicon));
|
||||
body.iter().for_each(|t| fill_lexicon(t, lexicon))
|
||||
},
|
||||
MTok::S(_, body) => body.iter().for_each(|t| fill_lexicon(t, lexicon)),
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
|
||||
fn run_body(body: &Code, mut state: MatchState<'_>) -> Vec<MacTree> {
|
||||
let inject: Vec<MacTree> = todo!("Call the interpreter with bindings");
|
||||
inject
|
||||
.into_iter()
|
||||
.map(|MTree { pos, tok }| MTree { pos, tok: Rc::new(MTok::Done(tok)) })
|
||||
.collect_vec()
|
||||
}
|
||||
@@ -1,103 +1,119 @@
|
||||
use std::rc::Rc;
|
||||
use std::cell::RefCell;
|
||||
|
||||
use futures::FutureExt;
|
||||
use futures::future::join_all;
|
||||
use hashbrown::HashMap;
|
||||
use itertools::Itertools;
|
||||
use never::Never;
|
||||
use orchid_base::error::{OrcErrv, OrcRes, Reporter, ReporterImpl, mk_err, mk_errv};
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::error::{OrcRes, Reporter, mk_err, mk_errv};
|
||||
use orchid_base::format::fmt;
|
||||
use orchid_base::interner::{Interner, Tok};
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::macros::{MTok, MTree};
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::parse::{
|
||||
Comment, Import, Parsed, Snippet, expect_end, line_items, parse_multiname, try_pop_no_fluff,
|
||||
Comment, Import, ParseCtx, Parsed, Snippet, expect_end, line_items, parse_multiname,
|
||||
try_pop_no_fluff,
|
||||
};
|
||||
use orchid_base::tree::{Paren, TokTree, Token};
|
||||
use substack::Substack;
|
||||
|
||||
use crate::atom::AtomHand;
|
||||
use crate::macros::MacTree;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::expr::{Expr, ExprKind, PathSetBuilder};
|
||||
use crate::parsed::{Item, ItemKind, ParsTokTree, ParsedMember, ParsedMemberKind, ParsedModule};
|
||||
use crate::system::System;
|
||||
use crate::tree::{Code, CodeLocator, Item, ItemKind, Member, MemberKind, Module, Rule, RuleKind};
|
||||
|
||||
type ParsSnippet<'a> = Snippet<'a, 'static, AtomHand, Never>;
|
||||
type ParsSnippet<'a> = Snippet<'a, Expr, Expr>;
|
||||
|
||||
pub struct ParseCtxImpl<'a> {
|
||||
pub struct HostParseCtxImpl<'a> {
|
||||
pub ctx: Ctx,
|
||||
pub systems: &'a [System],
|
||||
pub reporter: &'a ReporterImpl,
|
||||
pub reporter: &'a Reporter,
|
||||
pub interner: &'a Interner,
|
||||
pub consts: RefCell<HashMap<Sym, Vec<ParsTokTree>>>,
|
||||
}
|
||||
|
||||
impl ParseCtx for ParseCtxImpl<'_> {
|
||||
fn reporter(&self) -> &(impl Reporter + ?Sized) { self.reporter }
|
||||
impl ParseCtx for HostParseCtxImpl<'_> {
|
||||
fn reporter(&self) -> &Reporter { self.reporter }
|
||||
fn i(&self) -> &Interner { self.interner }
|
||||
}
|
||||
|
||||
impl HostParseCtx for HostParseCtxImpl<'_> {
|
||||
fn ctx(&self) -> &Ctx { &self.ctx }
|
||||
fn systems(&self) -> impl Iterator<Item = &System> { self.systems.iter() }
|
||||
async fn save_const(&self, path: Substack<'_, Tok<String>>, value: Vec<ParsTokTree>) {
|
||||
let name = Sym::new(path.unreverse(), self.interner).await.unwrap();
|
||||
self.consts.borrow_mut().insert(name, value);
|
||||
}
|
||||
}
|
||||
|
||||
pub trait ParseCtx {
|
||||
pub trait HostParseCtx: ParseCtx {
|
||||
fn ctx(&self) -> &Ctx;
|
||||
fn systems(&self) -> impl Iterator<Item = &System>;
|
||||
fn reporter(&self) -> &(impl Reporter + ?Sized);
|
||||
async fn save_const(&self, path: Substack<'_, Tok<String>>, value: Vec<ParsTokTree>);
|
||||
}
|
||||
|
||||
pub async fn parse_items(
|
||||
ctx: &impl ParseCtx,
|
||||
ctx: &impl HostParseCtx,
|
||||
path: Substack<'_, Tok<String>>,
|
||||
items: ParsSnippet<'_>,
|
||||
) -> OrcRes<Vec<Item>> {
|
||||
let lines = line_items(items).await;
|
||||
let lines = line_items(ctx, items).await;
|
||||
let line_res =
|
||||
join_all(lines.into_iter().map(|p| parse_item(ctx, path.clone(), p.output, p.tail))).await;
|
||||
Ok(line_res.into_iter().flat_map(|l| l.ok().into_iter().flatten()).collect())
|
||||
}
|
||||
|
||||
pub async fn parse_item(
|
||||
ctx: &impl ParseCtx,
|
||||
ctx: &impl HostParseCtx,
|
||||
path: Substack<'_, Tok<String>>,
|
||||
comments: Vec<Comment>,
|
||||
item: ParsSnippet<'_>,
|
||||
) -> OrcRes<Vec<Item>> {
|
||||
match item.pop_front() {
|
||||
Some((TokTree { tok: Token::Name(n), .. }, postdisc)) => match n {
|
||||
n if *n == item.i().i("export").await => match try_pop_no_fluff(postdisc).await? {
|
||||
n if *n == ctx.i().i("export").await => match try_pop_no_fluff(ctx, postdisc).await? {
|
||||
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } =>
|
||||
parse_exportable_item(ctx, path, comments, true, n.clone(), tail).await,
|
||||
Parsed { output: TokTree { tok: Token::NS, .. }, tail } => {
|
||||
let Parsed { output: exports, tail } = parse_multiname(ctx.reporter(), tail).await?;
|
||||
Parsed { output: TokTree { tok: Token::S(Paren::Round, body), .. }, tail } => {
|
||||
expect_end(ctx, tail).await?;
|
||||
let mut ok = Vec::new();
|
||||
for (e, pos) in exports {
|
||||
match (&e.path[..], e.name) {
|
||||
([], Some(n)) =>
|
||||
ok.push(Item { comments: comments.clone(), pos, kind: ItemKind::Export(n) }),
|
||||
(_, Some(_)) => ctx.reporter().report(mk_err(
|
||||
tail.i().i("Compound export").await,
|
||||
for tt in body {
|
||||
let pos = Pos::Range(tt.range.clone());
|
||||
match &tt.tok {
|
||||
Token::Name(n) =>
|
||||
ok.push(Item { comments: comments.clone(), pos, kind: ItemKind::Export(n.clone()) }),
|
||||
Token::NS(..) => ctx.reporter().report(mk_err(
|
||||
ctx.i().i("Compound export").await,
|
||||
"Cannot export compound names (names containing the :: separator)",
|
||||
[pos.into()],
|
||||
)),
|
||||
(_, None) => ctx.reporter().report(mk_err(
|
||||
tail.i().i("Wildcard export").await,
|
||||
"Exports cannot contain the globstar *",
|
||||
t => ctx.reporter().report(mk_err(
|
||||
ctx.i().i("Invalid export").await,
|
||||
format!("Invalid export target {}", fmt(t, ctx.i()).await),
|
||||
[pos.into()],
|
||||
)),
|
||||
}
|
||||
}
|
||||
expect_end(tail).await?;
|
||||
expect_end(ctx, tail).await?;
|
||||
Ok(ok)
|
||||
},
|
||||
Parsed { output, tail } => Err(mk_errv(
|
||||
tail.i().i("Malformed export").await,
|
||||
"`export` can either prefix other lines or list names inside ::( ) or ::[ ]",
|
||||
Parsed { output, tail: _ } => Err(mk_errv(
|
||||
ctx.i().i("Malformed export").await,
|
||||
"`export` can either prefix other lines or list names inside ( )",
|
||||
[Pos::Range(output.range.clone()).into()],
|
||||
)),
|
||||
},
|
||||
n if *n == item.i().i("import").await => parse_import(ctx, postdisc).await.map(|v| {
|
||||
Vec::from_iter(v.into_iter().map(|(t, pos)| Item {
|
||||
n if *n == ctx.i().i("import").await => {
|
||||
let imports = parse_import(ctx, postdisc).await?;
|
||||
Ok(Vec::from_iter(imports.into_iter().map(|(t, pos)| Item {
|
||||
comments: comments.clone(),
|
||||
pos,
|
||||
kind: ItemKind::Import(t),
|
||||
}))
|
||||
}),
|
||||
})))
|
||||
},
|
||||
n => parse_exportable_item(ctx, path, comments, false, n.clone(), postdisc).await,
|
||||
},
|
||||
Some(_) => Err(mk_errv(
|
||||
item.i().i("Expected a line type").await,
|
||||
ctx.i().i("Expected a line type").await,
|
||||
"All lines must begin with a keyword",
|
||||
[Pos::Range(item.pos()).into()],
|
||||
)),
|
||||
@@ -105,37 +121,37 @@ pub async fn parse_item(
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn parse_import(
|
||||
ctx: &impl ParseCtx,
|
||||
tail: ParsSnippet<'_>,
|
||||
pub async fn parse_import<'a>(
|
||||
ctx: &impl HostParseCtx,
|
||||
tail: ParsSnippet<'a>,
|
||||
) -> OrcRes<Vec<(Import, Pos)>> {
|
||||
let Parsed { output: imports, tail } = parse_multiname(ctx.reporter(), tail).await?;
|
||||
expect_end(tail).await?;
|
||||
let Parsed { output: imports, tail } = parse_multiname(ctx, tail).await?;
|
||||
expect_end(ctx, tail).await?;
|
||||
Ok(imports)
|
||||
}
|
||||
|
||||
pub async fn parse_exportable_item(
|
||||
ctx: &impl ParseCtx,
|
||||
pub async fn parse_exportable_item<'a>(
|
||||
ctx: &impl HostParseCtx,
|
||||
path: Substack<'_, Tok<String>>,
|
||||
comments: Vec<Comment>,
|
||||
exported: bool,
|
||||
discr: Tok<String>,
|
||||
tail: ParsSnippet<'_>,
|
||||
tail: ParsSnippet<'a>,
|
||||
) -> OrcRes<Vec<Item>> {
|
||||
let kind = if discr == tail.i().i("mod").await {
|
||||
let path_sym = Sym::new(path.unreverse(), ctx.i()).await.expect("Files should have a namespace");
|
||||
let kind = if discr == ctx.i().i("mod").await {
|
||||
let (name, body) = parse_module(ctx, path, tail).await?;
|
||||
ItemKind::Member(Member::new(name, MemberKind::Mod(body)))
|
||||
} else if discr == tail.i().i("const").await {
|
||||
let (name, val) = parse_const(tail, path.clone()).await?;
|
||||
let locator = CodeLocator::to_const(tail.i().i(&path.push(name.clone()).unreverse()).await);
|
||||
ItemKind::Member(Member::new(name, MemberKind::Const(Code::from_code(locator, val))))
|
||||
ItemKind::Member(ParsedMember::new(name, path_sym, ParsedMemberKind::Mod(body)))
|
||||
} else if discr == ctx.i().i("const").await {
|
||||
let name = parse_const(ctx, tail, path.clone()).await?;
|
||||
ItemKind::Member(ParsedMember::new(name, path_sym, ParsedMemberKind::Const))
|
||||
} else if let Some(sys) = ctx.systems().find(|s| s.can_parse(discr.clone())) {
|
||||
let line = sys.parse(tail.to_vec(), exported, comments).await?;
|
||||
return parse_items(ctx, path, Snippet::new(tail.prev(), &line, tail.i())).await;
|
||||
let line = sys.parse(path_sym, tail.to_vec(), exported, comments).await?;
|
||||
return parse_items(ctx, path, Snippet::new(tail.prev(), &line)).await;
|
||||
} else {
|
||||
let ext_lines = ctx.systems().flat_map(System::line_types).join(", ");
|
||||
return Err(mk_errv(
|
||||
tail.i().i("Unrecognized line type").await,
|
||||
ctx.i().i("Unrecognized line type").await,
|
||||
format!("Line types are: const, mod, macro, grammar, {ext_lines}"),
|
||||
[Pos::Range(tail.prev().range.clone()).into()],
|
||||
));
|
||||
@@ -143,175 +159,115 @@ pub async fn parse_exportable_item(
|
||||
Ok(vec![Item { comments, pos: Pos::Range(tail.pos()), kind }])
|
||||
}
|
||||
|
||||
pub async fn parse_module(
|
||||
ctx: &impl ParseCtx,
|
||||
pub async fn parse_module<'a>(
|
||||
ctx: &impl HostParseCtx,
|
||||
path: Substack<'_, Tok<String>>,
|
||||
tail: ParsSnippet<'_>,
|
||||
) -> OrcRes<(Tok<String>, Module)> {
|
||||
let (name, tail) = match try_pop_no_fluff(tail).await? {
|
||||
tail: ParsSnippet<'a>,
|
||||
) -> OrcRes<(Tok<String>, ParsedModule)> {
|
||||
let (name, tail) = match try_pop_no_fluff(ctx, tail).await? {
|
||||
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } => (n.clone(), tail),
|
||||
Parsed { output, .. } => {
|
||||
return Err(mk_errv(
|
||||
tail.i().i("Missing module name").await,
|
||||
format!("A name was expected, {} was found", tail.fmt(output).await),
|
||||
ctx.i().i("Missing module name").await,
|
||||
format!("A name was expected, {} was found", fmt(output, ctx.i()).await),
|
||||
[Pos::Range(output.range.clone()).into()],
|
||||
));
|
||||
},
|
||||
};
|
||||
let Parsed { output, tail: surplus } = try_pop_no_fluff(tail).await?;
|
||||
expect_end(surplus).await?;
|
||||
let Some(body) = output.as_s(Paren::Round, tail.i()) else {
|
||||
let Parsed { output, tail: surplus } = try_pop_no_fluff(ctx, tail).await?;
|
||||
expect_end(ctx, surplus).await?;
|
||||
let Some(body) = output.as_s(Paren::Round) else {
|
||||
return Err(mk_errv(
|
||||
tail.i().i("Expected module body").await,
|
||||
format!("A ( block ) was expected, {} was found", tail.fmt(output).await),
|
||||
ctx.i().i("Expected module body").await,
|
||||
format!("A ( block ) was expected, {} was found", fmt(output, ctx.i()).await),
|
||||
[Pos::Range(output.range.clone()).into()],
|
||||
));
|
||||
};
|
||||
let path = path.push(name.clone());
|
||||
Ok((name, Module::new(parse_items(ctx, path, body).await?)))
|
||||
Ok((name, ParsedModule::new(parse_items(ctx, path, body).await?)))
|
||||
}
|
||||
|
||||
pub async fn parse_const(
|
||||
tail: ParsSnippet<'_>,
|
||||
pub async fn parse_const<'a>(
|
||||
ctx: &impl HostParseCtx,
|
||||
tail: ParsSnippet<'a>,
|
||||
path: Substack<'_, Tok<String>>,
|
||||
) -> OrcRes<(Tok<String>, Vec<MacTree>)> {
|
||||
let Parsed { output, tail } = try_pop_no_fluff(tail).await?;
|
||||
) -> OrcRes<Tok<String>> {
|
||||
let Parsed { output, tail } = try_pop_no_fluff(ctx, tail).await?;
|
||||
let Some(name) = output.as_name() else {
|
||||
return Err(mk_errv(
|
||||
tail.i().i("Missing module name").await,
|
||||
format!("A name was expected, {} was found", tail.fmt(output).await),
|
||||
ctx.i().i("Missing module name").await,
|
||||
format!("A name was expected, {} was found", fmt(output, ctx.i()).await),
|
||||
[Pos::Range(output.range.clone()).into()],
|
||||
));
|
||||
};
|
||||
let Parsed { output, tail } = try_pop_no_fluff(tail).await?;
|
||||
if !output.is_kw(tail.i().i("=").await) {
|
||||
let Parsed { output, tail } = try_pop_no_fluff(ctx, tail).await?;
|
||||
if !output.is_kw(ctx.i().i("=").await) {
|
||||
return Err(mk_errv(
|
||||
tail.i().i("Missing = separator").await,
|
||||
format!("Expected = , found {}", tail.fmt(output).await),
|
||||
ctx.i().i("Missing = separator").await,
|
||||
format!("Expected = , found {}", fmt(output, ctx.i()).await),
|
||||
[Pos::Range(output.range.clone()).into()],
|
||||
));
|
||||
}
|
||||
try_pop_no_fluff(tail).await?;
|
||||
Ok((name, parse_mtree(tail, path).await?))
|
||||
try_pop_no_fluff(ctx, tail).await?;
|
||||
ctx.save_const(path, tail[..].to_vec()).await;
|
||||
Ok(name)
|
||||
}
|
||||
|
||||
pub async fn parse_mtree(
|
||||
mut snip: ParsSnippet<'_>,
|
||||
path: Substack<'_, Tok<String>>,
|
||||
) -> OrcRes<Vec<MacTree>> {
|
||||
let mut mtreev = Vec::new();
|
||||
while let Some((ttree, tail)) = snip.pop_front() {
|
||||
snip = tail;
|
||||
let (range, tok, tail) = match &ttree.tok {
|
||||
Token::S(p, b) => {
|
||||
let b = parse_mtree(Snippet::new(ttree, b, snip.i()), path.clone()).boxed_local().await?;
|
||||
(ttree.range.clone(), MTok::S(*p, b), tail)
|
||||
},
|
||||
Token::Reference(name) => (ttree.range.clone(), MTok::Name(name.clone()), tail),
|
||||
Token::Name(tok) => {
|
||||
let mut segments = path.unreverse();
|
||||
segments.push(tok.clone());
|
||||
let mut end = ttree.range.end;
|
||||
while let Some((TokTree { tok: Token::NS, .. }, tail)) = snip.pop_front() {
|
||||
let Parsed { output, tail } = try_pop_no_fluff(tail).await?;
|
||||
let Some(seg) = output.as_name() else {
|
||||
return Err(mk_errv(
|
||||
tail.i().i("Namespaced name interrupted").await,
|
||||
"In expression context, :: must always be followed by a name.\n\
|
||||
::() is permitted only in import and export items",
|
||||
[Pos::Range(output.range.clone()).into()],
|
||||
));
|
||||
};
|
||||
segments.push(seg);
|
||||
snip = tail;
|
||||
end = output.range.end;
|
||||
}
|
||||
(ttree.range.start..end, MTok::Name(Sym::new(segments, snip.i()).await.unwrap()), snip)
|
||||
},
|
||||
Token::NS => {
|
||||
return Err(mk_errv(
|
||||
tail.i().i("Unexpected :: in expression").await,
|
||||
":: can only follow a name",
|
||||
[Pos::Range(ttree.range.clone()).into()],
|
||||
));
|
||||
},
|
||||
Token::Ph(ph) => (ttree.range.clone(), MTok::Ph(ph.clone()), tail),
|
||||
Token::Macro(_) => {
|
||||
return Err(mk_errv(
|
||||
tail.i().i("Invalid keyword in expression").await,
|
||||
"Expressions cannot use `macro` as a name.",
|
||||
[Pos::Range(ttree.range.clone()).into()],
|
||||
));
|
||||
},
|
||||
Token::Atom(a) => (ttree.range.clone(), MTok::Atom(a.clone()), tail),
|
||||
Token::BR | Token::Comment(_) => continue,
|
||||
Token::Bottom(e) => return Err(e.clone()),
|
||||
Token::LambdaHead(arg) => (
|
||||
ttree.range.start..snip.pos().end,
|
||||
MTok::Lambda(
|
||||
parse_mtree(Snippet::new(ttree, arg, snip.i()), path.clone()).boxed_local().await?,
|
||||
parse_mtree(tail, path.clone()).boxed_local().await?,
|
||||
),
|
||||
Snippet::new(ttree, &[], snip.i()),
|
||||
),
|
||||
Token::Slot(_) | Token::X(_) =>
|
||||
panic!("Did not expect {} in parsed token tree", tail.fmt(ttree).await),
|
||||
};
|
||||
mtreev.push(MTree { pos: Pos::Range(range.clone()), tok: Rc::new(tok) });
|
||||
snip = tail;
|
||||
}
|
||||
Ok(mtreev)
|
||||
}
|
||||
|
||||
pub async fn parse_macro(
|
||||
pub async fn parse_expr(
|
||||
ctx: &impl HostParseCtx,
|
||||
path: Sym,
|
||||
psb: PathSetBuilder<'_, Tok<String>>,
|
||||
tail: ParsSnippet<'_>,
|
||||
macro_i: u16,
|
||||
path: Substack<'_, Tok<String>>,
|
||||
) -> OrcRes<Vec<Rule>> {
|
||||
let (surplus, prev, block) = match try_pop_no_fluff(tail).await? {
|
||||
Parsed { tail, output: o @ TokTree { tok: Token::S(Paren::Round, b), .. } } => (tail, o, b),
|
||||
Parsed { output, .. } => {
|
||||
return Err(mk_errv(
|
||||
tail.i().i("m").await,
|
||||
"Macro blocks must either start with a block or a ..$:number",
|
||||
[Pos::Range(output.range.clone()).into()],
|
||||
));
|
||||
},
|
||||
) -> OrcRes<Expr> {
|
||||
let Some((last_idx, _)) = (tail.iter().enumerate().find(|(_, tt)| tt.as_lambda().is_some()))
|
||||
.or_else(|| tail.iter().enumerate().rev().find(|(_, tt)| !tt.is_fluff()))
|
||||
else {
|
||||
return Err(mk_errv(ctx.i().i("Empty expression").await, "Expression ends abruptly here", [
|
||||
Pos::Range(tail.pos()).into(),
|
||||
]));
|
||||
};
|
||||
expect_end(surplus).await?;
|
||||
let mut errors = Vec::new();
|
||||
let mut rules = Vec::new();
|
||||
for (i, item) in line_items(Snippet::new(prev, block, tail.i())).await.into_iter().enumerate() {
|
||||
let Parsed { tail, output } = try_pop_no_fluff(item.tail).await?;
|
||||
if !output.is_kw(tail.i().i("rule").await) {
|
||||
errors.extend(mk_errv(
|
||||
tail.i().i("non-rule in macro").await,
|
||||
format!("Expected `rule`, got {}", tail.fmt(output).await),
|
||||
[Pos::Range(output.range.clone()).into()],
|
||||
));
|
||||
continue;
|
||||
};
|
||||
let arrow = tail.i().i("=>").await;
|
||||
let (pat, body) = match tail.split_once(|t| t.is_kw(arrow.clone())) {
|
||||
Some((a, b)) => (a, b),
|
||||
None => {
|
||||
errors.extend(mk_errv(
|
||||
tail.i().i("no => in macro rule").await,
|
||||
"The pattern and body of a rule must be separated by a =>",
|
||||
[Pos::Range(tail.pos()).into()],
|
||||
));
|
||||
continue;
|
||||
},
|
||||
};
|
||||
rules.push(Rule {
|
||||
comments: item.output,
|
||||
pos: Pos::Range(tail.pos()),
|
||||
pattern: parse_mtree(pat, path.clone()).await?,
|
||||
kind: RuleKind::Native(Code::from_code(
|
||||
CodeLocator::to_rule(tail.i().i(&path.unreverse()).await, macro_i, i as u16),
|
||||
parse_mtree(body, path.clone()).await?,
|
||||
)),
|
||||
})
|
||||
let (function, value) = tail.split_at(last_idx as u32);
|
||||
let pos = Pos::Range(tail.pos());
|
||||
if !function.iter().all(TokTree::is_fluff) {
|
||||
let (f_psb, x_psb) = psb.split();
|
||||
let x_expr = parse_expr(ctx, path.clone(), x_psb, value).boxed_local().await?;
|
||||
let f_expr = parse_expr(ctx, path, f_psb, function).boxed_local().await?;
|
||||
return Ok(ExprKind::Call(f_expr, x_expr).at(pos));
|
||||
}
|
||||
let Parsed { output: head, tail } = try_pop_no_fluff(ctx, value).await?;
|
||||
match &head.tok {
|
||||
Token::BR | Token::Comment(_) => panic!("Fluff skipped"),
|
||||
Token::Bottom(b) => Ok(ExprKind::Bottom(b.clone()).at(pos.clone())),
|
||||
Token::Handle(expr) => Ok(expr.clone()),
|
||||
Token::NS(n, nametail) => {
|
||||
let mut nametail = nametail;
|
||||
let mut segments = path.iter().chain([n]).cloned().collect_vec();
|
||||
while let Token::NS(n, newtail) = &nametail.tok {
|
||||
segments.push(n.clone());
|
||||
nametail = newtail;
|
||||
}
|
||||
let Token::Name(n) = &nametail.tok else {
|
||||
return Err(mk_errv(
|
||||
ctx.i().i("Loose namespace prefix in constant").await,
|
||||
"Namespace prefixes in constants must be followed by names",
|
||||
[pos.into()],
|
||||
));
|
||||
};
|
||||
segments.push(n.clone());
|
||||
Ok(ExprKind::Const(Sym::new(segments, ctx.i()).await.unwrap()).at(pos.clone()))
|
||||
},
|
||||
Token::LambdaHead(h) => {
|
||||
let [TokTree { tok: Token::Name(arg), .. }] = &h[..] else {
|
||||
return Err(mk_errv(
|
||||
ctx.i().i("Complex lambda binding in constant").await,
|
||||
"Lambda args in constants must be identified by a single name",
|
||||
[pos.into()],
|
||||
));
|
||||
};
|
||||
let lambda_builder = psb.lambda(arg);
|
||||
let body = parse_expr(ctx, path.clone(), lambda_builder.stack(), tail).boxed_local().await?;
|
||||
Ok(ExprKind::Lambda(lambda_builder.collect(), body).at(pos.clone()))
|
||||
},
|
||||
_ => todo!("AAAAAA"), // TODO: todo
|
||||
}
|
||||
if let Ok(e) = OrcErrv::new(errors) { Err(e) } else { Ok(rules) }
|
||||
}
|
||||
|
||||
383 orchid-host/src/parsed.rs (new file)
@@ -0,0 +1,383 @@
|
||||
use std::cell::RefCell;
|
||||
use std::fmt::Debug;
|
||||
use std::rc::Rc;
|
||||
|
||||
use async_once_cell::OnceCell;
|
||||
use async_std::sync::{Mutex, RwLock};
|
||||
use async_stream::stream;
|
||||
use futures::future::join_all;
|
||||
use futures::{FutureExt, StreamExt};
|
||||
use hashbrown::HashMap;
|
||||
use itertools::Itertools;
|
||||
use orchid_base::error::{OrcRes, mk_errv};
|
||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::name::{NameLike, Sym};
|
||||
use orchid_base::parse::{Comment, Import};
|
||||
use orchid_base::tl_cache;
|
||||
use orchid_base::tree::{TokTree, Token, TokenVariant};
|
||||
|
||||
use crate::api;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::expr::{Expr, ExprParseCtx, PathSetBuilder};
|
||||
use crate::expr_store::ExprStore;
|
||||
use crate::system::System;
|
||||
|
||||
pub type ParsTokTree = TokTree<Expr, Expr>;
|
||||
pub type ParsTok = Token<Expr, Expr>;
|
||||
|
||||
impl TokenVariant<api::ExprTicket> for Expr {
|
||||
type ToApiCtx<'a> = ExprStore;
|
||||
async fn into_api(self, ctx: &mut Self::ToApiCtx<'_>) -> api::ExprTicket {
|
||||
ctx.give_expr(self.clone());
|
||||
self.id()
|
||||
}
|
||||
type FromApiCtx<'a> = ExprStore;
|
||||
async fn from_api(
|
||||
api: &api::ExprTicket,
|
||||
ctx: &mut Self::FromApiCtx<'_>,
|
||||
_: Pos,
|
||||
_: &orchid_base::interner::Interner,
|
||||
) -> Self {
|
||||
let expr = ctx.get_expr(*api).expect("Dangling expr");
|
||||
ctx.take_expr(*api);
|
||||
expr
|
||||
}
|
||||
}
|
||||
|
||||
impl TokenVariant<api::Expression> for Expr {
|
||||
type FromApiCtx<'a> = ExprParseCtx;
|
||||
async fn from_api(
|
||||
api: &api::Expression,
|
||||
ctx: &mut Self::FromApiCtx<'_>,
|
||||
_: Pos,
|
||||
_: &orchid_base::interner::Interner,
|
||||
) -> Self {
|
||||
Expr::from_api(api, PathSetBuilder::new(), ctx).await
|
||||
}
|
||||
type ToApiCtx<'a> = ();
|
||||
async fn into_api(self, (): &mut Self::ToApiCtx<'_>) -> api::Expression {
|
||||
panic!("Failed to replace NewExpr before returning sublexer value")
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ParsedFromApiCx<'a> {
|
||||
pub sys: &'a System,
|
||||
pub consts: &'a mut HashMap<Sym, Expr>,
|
||||
pub path: Tok<Vec<Tok<String>>>,
|
||||
}
|
||||
impl<'a> ParsedFromApiCx<'a> {
|
||||
pub async fn push<'c>(&'c mut self, name: Tok<String>) -> ParsedFromApiCx<'c> {
|
||||
let path = self.sys.ctx().i.i(&self.path.iter().cloned().chain([name]).collect_vec()).await;
|
||||
ParsedFromApiCx { path, consts: &mut *self.consts, sys: self.sys }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Item {
|
||||
pub pos: Pos,
|
||||
pub comments: Vec<Comment>,
|
||||
pub kind: ItemKind,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum ItemKind {
|
||||
Member(ParsedMember),
|
||||
Export(Tok<String>),
|
||||
Import(Import),
|
||||
}
|
||||
impl ItemKind {
|
||||
pub fn at(self, pos: Pos) -> Item { Item { comments: vec![], pos, kind: self } }
|
||||
}
|
||||
|
||||
impl Item {
|
||||
pub async fn from_api<'a>(tree: api::Item, ctx: &mut ParsedFromApiCx<'a>) -> Self {
|
||||
let kind = match tree.kind {
|
||||
api::ItemKind::Member(m) => ItemKind::Member(ParsedMember::from_api(m, ctx).await),
|
||||
api::ItemKind::Import(name) => ItemKind::Import(Import {
|
||||
path: Sym::from_api(name, &ctx.sys.ctx().i).await.iter().collect(),
|
||||
name: None,
|
||||
}),
|
||||
api::ItemKind::Export(e) => ItemKind::Export(Tok::from_api(e, &ctx.sys.ctx().i).await),
|
||||
};
|
||||
let mut comments = Vec::new();
|
||||
for comment in tree.comments.iter() {
|
||||
comments.push(Comment::from_api(comment, &ctx.sys.ctx().i).await)
|
||||
}
|
||||
Self { pos: Pos::from_api(&tree.location, &ctx.sys.ctx().i).await, comments, kind }
|
||||
}
|
||||
}
|
||||
impl Format for Item {
|
||||
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||
let comment_text = self.comments.iter().join("\n");
|
||||
let item_text = match &self.kind {
|
||||
ItemKind::Import(i) => format!("import {i}").into(),
|
||||
ItemKind::Export(e) => format!("export {e}").into(),
|
||||
ItemKind::Member(mem) => match mem.kind.get() {
|
||||
None => format!("lazy {}", mem.name).into(),
|
||||
Some(ParsedMemberKind::Const) =>
|
||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("const {0}")))
|
||||
.units([mem.name.rc().into()]),
|
||||
Some(ParsedMemberKind::Mod(module)) =>
|
||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("module {0} {{\n\t{1}\n}}")))
|
||||
.units([mem.name.rc().into(), module.print(c).boxed_local().await]),
|
||||
},
|
||||
};
|
||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{0}\n{1}")))
|
||||
.units([comment_text.into(), item_text])
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ParsedMember {
|
||||
name: Tok<String>,
|
||||
full_name: Sym,
|
||||
kind: OnceCell<ParsedMemberKind>,
|
||||
lazy: Mutex<Option<LazyMemberHandle>>,
|
||||
}
|
||||
// TODO: this one should own but not execute the lazy handle.
|
||||
// Lazy handles should run
|
||||
// - in the tree converter function as needed to resolve imports
|
||||
// - in the tree itself when a constant is loaded
|
||||
// - when a different lazy subtree references them in a wildcard import and
|
||||
// forces the enumeration.
|
||||
//
|
||||
// do we actually need to allow wildcard imports in lazy trees? maybe a
|
||||
// different kind of import is sufficient. Source code never becomes a lazy
|
||||
// tree. What does?
|
||||
// - Systems subtrees rarely reference each other at all. They can't use macros
|
||||
// and they usually point to constants with an embedded expr.
|
||||
// - Compiled libraries, in the long run. The code as written may reference
|
||||
// constants by indirect path. But this is actually the same as the above,
|
||||
// they also wouldn't use regular imports because they are distributed as
|
||||
// exprs.
|
||||
//
|
||||
// Everything is distributed either as source code or as exprs. Line parsers
|
||||
// also operate on tokens.
|
||||
//
|
||||
// TODO: The trees produced by systems can be safely changed
|
||||
// to the new kind of tree. This datastructure does not need to support the lazy
|
||||
// handle.
|
||||
impl ParsedMember {
|
||||
pub fn name(&self) -> Tok<String> { self.name.clone() }
|
||||
pub async fn kind(&self, consts: &mut HashMap<Sym, Expr>) -> &ParsedMemberKind {
|
||||
(self.kind.get_or_init(async {
|
||||
let handle = self.lazy.lock().await.take().expect("Neither known nor lazy");
|
||||
handle.run(consts).await
|
||||
}))
|
||||
.await
|
||||
}
|
||||
pub async fn kind_mut(&mut self, consts: &mut HashMap<Sym, Expr>) -> &mut ParsedMemberKind {
|
||||
self.kind(consts).await;
|
||||
self.kind.get_mut().expect("kind() already filled the cell")
|
||||
}
|
||||
pub async fn from_api<'a>(api: api::Member, ctx: &'_ mut ParsedFromApiCx<'a>) -> Self {
|
||||
let name = Tok::from_api(api.name, &ctx.sys.ctx().i).await;
|
||||
let mut ctx: ParsedFromApiCx<'_> = (&mut *ctx).push(name.clone()).await;
|
||||
let path_sym = Sym::from_tok(ctx.path.clone()).expect("We just pushed on to this");
|
||||
let kind = match api.kind {
|
||||
api::MemberKind::Lazy(id) => {
|
||||
let handle = LazyMemberHandle { id, sys: ctx.sys.clone(), path: path_sym.clone() };
|
||||
return handle.into_member(name.clone()).await;
|
||||
},
|
||||
api::MemberKind::Const(c) => {
|
||||
let mut pctx =
|
||||
ExprParseCtx { ctx: ctx.sys.ctx().clone(), exprs: ctx.sys.ext().exprs().clone() };
|
||||
let expr = Expr::from_api(&c, PathSetBuilder::new(), &mut pctx).await;
|
||||
ctx.consts.insert(path_sym.clone(), expr);
|
||||
ParsedMemberKind::Const
|
||||
},
|
||||
api::MemberKind::Module(m) =>
|
||||
ParsedMemberKind::Mod(ParsedModule::from_api(m, &mut ctx).await),
|
||||
};
|
||||
ParsedMember { name, full_name: path_sym, kind: OnceCell::from(kind), lazy: Mutex::default() }
|
||||
}
|
||||
pub fn new(name: Tok<String>, full_name: Sym, kind: ParsedMemberKind) -> Self {
|
||||
ParsedMember { name, full_name, kind: OnceCell::from(kind), lazy: Mutex::default() }
|
||||
}
|
||||
}
|
||||
impl Debug for ParsedMember {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("Member")
|
||||
.field("name", &self.name)
|
||||
.field("kind", &self.kind)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum ParsedMemberKind {
|
||||
Const,
|
||||
Mod(ParsedModule),
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct ParsedModule {
|
||||
pub imports: Vec<Sym>,
|
||||
pub exports: Vec<Tok<String>>,
|
||||
pub items: Vec<Item>,
|
||||
}
|
||||
impl ParsedModule {
|
||||
pub fn new(items: impl IntoIterator<Item = Item>) -> Self {
|
||||
let items = items.into_iter().collect_vec();
|
||||
let exports = (items.iter())
|
||||
.filter_map(|i| match &i.kind {
|
||||
ItemKind::Export(e) => Some(e.clone()),
|
||||
_ => None,
|
||||
})
|
||||
.collect_vec();
|
||||
Self { imports: vec![], exports, items }
|
||||
}
|
||||
pub fn merge(&mut self, other: ParsedModule) {
|
||||
let mut swap = ParsedModule::default();
|
||||
std::mem::swap(self, &mut swap);
|
||||
*self = ParsedModule::new(swap.items.into_iter().chain(other.items))
|
||||
}
|
||||
pub async fn from_api<'a>(m: api::Module, ctx: &mut ParsedFromApiCx<'a>) -> Self {
|
||||
Self::new(
|
||||
stream! { for item in m.items { yield Item::from_api(item, ctx).boxed_local().await } }
|
||||
.collect::<Vec<_>>()
|
||||
.await,
|
||||
)
|
||||
}
|
||||
pub async fn walk<'a>(
|
||||
&self,
|
||||
allow_private: bool,
|
||||
path: impl IntoIterator<Item = Tok<String>>,
|
||||
consts: &mut HashMap<Sym, Expr>,
|
||||
) -> Result<&ParsedModule, WalkError> {
|
||||
let mut cur = self;
|
||||
for (pos, step) in path.into_iter().enumerate() {
|
||||
let Some(member) = (cur.items.iter())
|
||||
.filter_map(|it| if let ItemKind::Member(m) = &it.kind { Some(m) } else { None })
|
||||
.find(|m| m.name == step)
|
||||
else {
|
||||
return Err(WalkError { pos, kind: WalkErrorKind::Missing });
|
||||
};
|
||||
if !allow_private && !cur.exports.contains(&step) {
|
||||
return Err(WalkError { pos, kind: WalkErrorKind::Private });
|
||||
}
|
||||
match member.kind(consts).await {
|
||||
ParsedMemberKind::Const => return Err(WalkError { pos, kind: WalkErrorKind::Constant }),
|
||||
ParsedMemberKind::Mod(m) => cur = m,
|
||||
}
|
||||
}
|
||||
Ok(cur)
|
||||
}
|
||||
}
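// Illustrative usage sketch, not part of this commit: walking a parsed tree to
// a nested module. The `i: Interner`, `consts` map, and `root: ParsedModule`
// bindings are assumed to exist in the caller; only `walk`, `WalkError`, and
// `WalkErrorKind` come from this file.
//
//     let path = [i.i("std").await, i.i("number").await];
//     match root.walk(true, path, &mut consts).await {
//       Ok(module) => println!("{} items", module.items.len()),
//       Err(WalkError { pos, kind: WalkErrorKind::Private }) =>
//         eprintln!("step {pos} of the path is not exported"),
//       Err(e) => eprintln!("walk failed: {e:?}"),
//     }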
|
||||
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
||||
pub enum WalkErrorKind {
|
||||
Missing,
|
||||
Private,
|
||||
Constant,
|
||||
}
|
||||
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
||||
pub struct WalkError {
|
||||
pub pos: usize,
|
||||
pub kind: WalkErrorKind,
|
||||
}
|
||||
impl Format for ParsedModule {
|
||||
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||
let import_str = self.imports.iter().map(|i| format!("import {i}")).join("\n");
|
||||
let head_str = format!("{import_str}\nexport ::({})\n", self.exports.iter().join(", "));
|
||||
Variants::sequence(self.items.len() + 1, "\n", None).units(
|
||||
[head_str.into()].into_iter().chain(join_all(self.items.iter().map(|i| i.print(c))).await),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct LazyMemberHandle {
|
||||
id: api::TreeId,
|
||||
sys: System,
|
||||
path: Sym,
|
||||
}
|
||||
impl LazyMemberHandle {
|
||||
pub async fn run(self, consts: &mut HashMap<Sym, Expr>) -> ParsedMemberKind {
|
||||
match self.sys.get_tree(self.id).await {
|
||||
api::MemberKind::Const(c) => {
|
||||
let mut pctx =
|
||||
ExprParseCtx { ctx: self.sys.ctx().clone(), exprs: self.sys.ext().exprs().clone() };
|
||||
consts.insert(self.path, Expr::from_api(&c, PathSetBuilder::new(), &mut pctx).await);
|
||||
ParsedMemberKind::Const
|
||||
},
|
||||
api::MemberKind::Module(m) => ParsedMemberKind::Mod(
|
||||
ParsedModule::from_api(m, &mut ParsedFromApiCx {
|
||||
sys: &self.sys,
|
||||
consts,
|
||||
path: self.path.tok(),
|
||||
})
|
||||
.await,
|
||||
),
|
||||
api::MemberKind::Lazy(id) => Self { id, ..self }.run(consts).boxed_local().await,
|
||||
}
|
||||
}
|
||||
pub async fn into_member(self, name: Tok<String>) -> ParsedMember {
|
||||
ParsedMember {
|
||||
name,
|
||||
full_name: self.path.clone(),
|
||||
kind: OnceCell::new(),
|
||||
lazy: Mutex::new(Some(self)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// TODO:
|
||||
///
|
||||
/// Idea: does the host need an IR here, or can we find a way to transcribe
/// these directly? Should we spin off a new stage for value parsing so that
/// ParsTokTree doesn't appear in the interpreter's ingress?
|
||||
pub struct Const {
|
||||
pub source: Option<Vec<ParsTokTree>>,
|
||||
}
|
||||
|
||||
/// Selects a code element
|
||||
///
|
||||
/// Either the steps point to a constant and rule_loc is None, or the steps
|
||||
/// point to a module and rule_loc selects a macro rule within that module
|
||||
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
||||
pub struct ConstPath {
|
||||
steps: Tok<Vec<Tok<String>>>,
|
||||
}
|
||||
impl ConstPath {
|
||||
pub fn to_const(steps: Tok<Vec<Tok<String>>>) -> Self { Self { steps } }
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Root {
|
||||
tree: Rc<ParsedModule>,
|
||||
consts: Rc<RwLock<HashMap<Sym, Expr>>>,
|
||||
}
|
||||
impl Root {
|
||||
pub fn new(module: ParsedModule, consts: HashMap<Sym, Expr>) -> Self {
|
||||
Self { tree: Rc::new(module), consts: Rc::new(RwLock::new(consts)) }
|
||||
}
|
||||
pub async fn get_const_value(&self, name: Sym, pos: Pos, ctx: Ctx) -> OrcRes<Expr> {
|
||||
if let Some(val) = self.consts.read().await.get(&name) {
|
||||
return Ok(val.clone());
|
||||
}
|
||||
let (cn, mp) = name.split_last();
|
||||
let consts_mut = &mut *self.consts.write().await;
|
||||
let module = self.tree.walk(true, mp.iter().cloned(), consts_mut).await.unwrap();
|
||||
let member = (module.items.iter())
|
||||
.filter_map(|it| if let ItemKind::Member(m) = &it.kind { Some(m) } else { None })
|
||||
.find(|m| m.name() == cn);
|
||||
match member {
|
||||
None => Err(mk_errv(
|
||||
ctx.i.i("Constant does not exist").await,
|
||||
format!("{name} does not refer to a constant"),
|
||||
[pos.clone().into()],
|
||||
)),
|
||||
Some(mem) => match mem.kind(consts_mut).await {
|
||||
ParsedMemberKind::Mod(_) => Err(mk_errv(
|
||||
ctx.i.i("module used as constant").await,
|
||||
format!("{name} is a module, not a constant"),
|
||||
[pos.clone().into()],
|
||||
)),
|
||||
ParsedMemberKind::Const => Ok(
|
||||
(consts_mut.get(&name).cloned())
|
||||
.expect("Tree says the path is correct but no value was found"),
|
||||
),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
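// Illustrative usage sketch, not part of this commit: resolving a constant
// through the root. The `ctx: Ctx` and `root: Root` bindings are assumed to
// exist in the caller; only `Root::get_const_value` comes from this file.
//
//     let name = Sym::new([ctx.i.i("std").await, ctx.i.i("pi").await], &ctx.i).await.unwrap();
//     match root.get_const_value(name, Pos::None, ctx.clone()).await {
//       Ok(expr) => { /* hand the expression to the interpreter */ },
//       Err(errv) => { /* missing constant, or a module used as a constant */ },
//     }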
|
||||
@@ -1,30 +0,0 @@
|
||||
use orchid_base::name::Sym;
|
||||
|
||||
use super::scal_match::scalv_match;
|
||||
use super::shared::AnyMatcher;
|
||||
use super::vec_match::vec_match;
|
||||
use crate::macros::MacTree;
|
||||
use crate::rule::state::MatchState;
|
||||
|
||||
#[must_use]
|
||||
pub fn any_match<'a>(
|
||||
matcher: &AnyMatcher,
|
||||
seq: &'a [MacTree],
|
||||
save_loc: &impl Fn(Sym) -> bool,
|
||||
) -> Option<MatchState<'a>> {
|
||||
match matcher {
|
||||
AnyMatcher::Scalar(scalv) => scalv_match(scalv, seq, save_loc),
|
||||
AnyMatcher::Vec { left, mid, right } => {
|
||||
if seq.len() < left.len() + right.len() {
|
||||
return None;
|
||||
};
|
||||
let left_split = left.len();
|
||||
let right_split = seq.len() - right.len();
|
||||
Some(
|
||||
scalv_match(left, &seq[..left_split], save_loc)?
|
||||
.combine(scalv_match(right, &seq[right_split..], save_loc)?)
|
||||
.combine(vec_match(mid, &seq[left_split..right_split], save_loc)?),
|
||||
)
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -1,161 +0,0 @@
|
||||
use itertools::Itertools;
|
||||
use orchid_api::PhKind;
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::side::Side;
|
||||
use orchid_base::tree::Ph;
|
||||
|
||||
use super::shared::{AnyMatcher, ScalMatcher, VecMatcher};
|
||||
use crate::macros::{MacTok, MacTree};
|
||||
use crate::rule::vec_attrs::vec_attrs;
|
||||
|
||||
pub type MaxVecSplit<'a> = (&'a [MacTree], (Tok<String>, u8, bool), &'a [MacTree]);
|
||||
|
||||
/// Derive the details of the central vectorial and the two sides from a
|
||||
/// slice of Expr's
|
||||
#[must_use]
|
||||
fn split_at_max_vec(pattern: &[MacTree]) -> Option<MaxVecSplit> {
|
||||
let rngidx = pattern
|
||||
.iter()
|
||||
.position_max_by_key(|expr| vec_attrs(expr).map(|attrs| attrs.1 as i64).unwrap_or(-1))?;
|
||||
let (left, not_left) = pattern.split_at(rngidx);
|
||||
let (placeh, right) =
|
||||
not_left.split_first().expect("The index of the greatest element must be less than the length");
|
||||
vec_attrs(placeh).map(|attrs| (left, attrs, right))
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
fn scal_cnt<'a>(iter: impl Iterator<Item = &'a MacTree>) -> usize {
|
||||
iter.take_while(|expr| vec_attrs(expr).is_none()).count()
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn mk_any(pattern: &[MacTree]) -> AnyMatcher {
|
||||
let left_split = scal_cnt(pattern.iter());
|
||||
if pattern.len() <= left_split {
|
||||
return AnyMatcher::Scalar(mk_scalv(pattern));
|
||||
}
|
||||
let (left, not_left) = pattern.split_at(left_split);
|
||||
let right_split = not_left.len() - scal_cnt(pattern.iter().rev());
|
||||
let (mid, right) = not_left.split_at(right_split);
|
||||
AnyMatcher::Vec { left: mk_scalv(left), mid: mk_vec(mid), right: mk_scalv(right) }
|
||||
}
|
||||
|
||||
/// Pattern MUST NOT contain vectorial placeholders
|
||||
#[must_use]
|
||||
fn mk_scalv(pattern: &[MacTree]) -> Vec<ScalMatcher> { pattern.iter().map(mk_scalar).collect() }
|
||||
|
||||
/// Pattern MUST start and end with a vectorial placeholder
|
||||
#[must_use]
|
||||
pub fn mk_vec(pattern: &[MacTree]) -> VecMatcher {
|
||||
debug_assert!(!pattern.is_empty(), "pattern cannot be empty");
|
||||
debug_assert!(pattern.first().map(vec_attrs).is_some(), "pattern must start with a vectorial");
|
||||
debug_assert!(pattern.last().map(vec_attrs).is_some(), "pattern must end with a vectorial");
|
||||
let (left, (key, _, nonzero), right) = split_at_max_vec(pattern)
|
||||
.expect("pattern must have vectorial placeholders at least at either end");
|
||||
let r_sep_size = scal_cnt(right.iter());
|
||||
let (r_sep, r_side) = right.split_at(r_sep_size);
|
||||
let l_sep_size = scal_cnt(left.iter().rev());
|
||||
let (l_side, l_sep) = left.split_at(left.len() - l_sep_size);
|
||||
let main = VecMatcher::Placeh { key: key.clone(), nonzero };
|
||||
match (left, right) {
|
||||
(&[], &[]) => VecMatcher::Placeh { key, nonzero },
|
||||
(&[], _) => VecMatcher::Scan {
|
||||
direction: Side::Left,
|
||||
left: Box::new(main),
|
||||
sep: mk_scalv(r_sep),
|
||||
right: Box::new(mk_vec(r_side)),
|
||||
},
|
||||
(_, &[]) => VecMatcher::Scan {
|
||||
direction: Side::Right,
|
||||
left: Box::new(mk_vec(l_side)),
|
||||
sep: mk_scalv(l_sep),
|
||||
right: Box::new(main),
|
||||
},
|
||||
(..) => {
|
||||
let mut key_order =
|
||||
l_side.iter().chain(r_side.iter()).filter_map(vec_attrs).collect::<Vec<_>>();
|
||||
key_order.sort_by_key(|(_, prio, _)| -(*prio as i64));
|
||||
VecMatcher::Middle {
|
||||
left: Box::new(mk_vec(l_side)),
|
||||
left_sep: mk_scalv(l_sep),
|
||||
mid: Box::new(main),
|
||||
right_sep: mk_scalv(r_sep),
|
||||
right: Box::new(mk_vec(r_side)),
|
||||
key_order: key_order.into_iter().map(|(n, ..)| n).collect(),
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Pattern MUST NOT be a vectorial placeholder
|
||||
#[must_use]
|
||||
fn mk_scalar(pattern: &MacTree) -> ScalMatcher {
|
||||
match &*pattern.tok {
|
||||
MacTok::Atom(_) | MacTok::Done(_) => panic!("Atoms and Done aren't supported in matchers"),
|
||||
MacTok::Name(n) => ScalMatcher::Name(n.clone()),
|
||||
MacTok::Ph(Ph { name, kind }) => match kind {
|
||||
PhKind::Vector { .. } => {
|
||||
panic!("Scalar matcher cannot be built from vector pattern")
|
||||
},
|
||||
PhKind::Scalar => ScalMatcher::Placeh { key: name.clone() },
|
||||
},
|
||||
MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(body))),
|
||||
MacTok::Lambda(arg, body) => ScalMatcher::Lambda(Box::new(mk_any(arg)), Box::new(mk_any(body))),
|
||||
MacTok::Ref(_) | MacTok::Slot(_) => panic!("Extension-only variants"),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use std::rc::Rc;
|
||||
|
||||
use orchid_api::PhKind;
|
||||
use orchid_base::interner::Interner;
|
||||
use orchid_base::location::SourceRange;
|
||||
use orchid_base::sym;
|
||||
use orchid_base::tokens::Paren;
|
||||
use orchid_base::tree::Ph;
|
||||
use test_executors::spin_on;
|
||||
|
||||
use super::mk_any;
|
||||
use crate::macros::{MacTok, MacTree};
|
||||
|
||||
#[test]
|
||||
fn test_scan() {
|
||||
spin_on(async {
|
||||
let i = Interner::new_master();
|
||||
let ex = |tok: MacTok| async {
|
||||
MacTree { tok: Rc::new(tok), pos: SourceRange::mock(&i).await.pos() }
|
||||
};
|
||||
let pattern = vec![
|
||||
ex(MacTok::Ph(Ph {
|
||||
kind: PhKind::Vector { priority: 0, at_least_one: false },
|
||||
name: i.i("::prefix").await,
|
||||
}))
|
||||
.await,
|
||||
ex(MacTok::Name(sym!(prelude::do; i).await)).await,
|
||||
ex(MacTok::S(Paren::Round, vec![
|
||||
ex(MacTok::Ph(Ph {
|
||||
kind: PhKind::Vector { priority: 0, at_least_one: false },
|
||||
name: i.i("expr").await,
|
||||
}))
|
||||
.await,
|
||||
ex(MacTok::Name(sym!(prelude::; ; i).await)).await,
|
||||
ex(MacTok::Ph(Ph {
|
||||
kind: PhKind::Vector { priority: 1, at_least_one: false },
|
||||
name: i.i("rest").await,
|
||||
}))
|
||||
.await,
|
||||
]))
|
||||
.await,
|
||||
ex(MacTok::Ph(Ph {
|
||||
kind: PhKind::Vector { priority: 0, at_least_one: false },
|
||||
name: i.i("::suffix").await,
|
||||
}))
|
||||
.await,
|
||||
];
|
||||
let matcher = mk_any(&pattern);
|
||||
println!("{matcher}");
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,86 +0,0 @@
|
||||
use std::fmt;
|
||||
|
||||
use itertools::Itertools;
|
||||
use orchid_api::PhKind;
|
||||
use orchid_base::interner::Interner;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::tree::Ph;
|
||||
|
||||
use super::any_match::any_match;
|
||||
use super::build::mk_any;
|
||||
use super::shared::{AnyMatcher, VecMatcher};
|
||||
use super::state::{MatchState, StateEntry};
|
||||
use super::vec_attrs::vec_attrs;
|
||||
use super::vec_match::vec_match;
|
||||
use crate::macros::{MacTok, MacTree};
|
||||
use crate::rule::build::mk_vec;
|
||||
|
||||
pub fn first_is_vec(pattern: &[MacTree]) -> bool { vec_attrs(pattern.first().unwrap()).is_some() }
|
||||
pub fn last_is_vec(pattern: &[MacTree]) -> bool { vec_attrs(pattern.last().unwrap()).is_some() }
|
||||
|
||||
pub struct NamedMatcher(AnyMatcher);
|
||||
impl NamedMatcher {
|
||||
pub async fn new(pattern: &[MacTree], i: &Interner) -> Self {
|
||||
assert!(
|
||||
matches!(pattern.first().map(|tree| &*tree.tok), Some(MacTok::Name(_))),
|
||||
"Named matchers must begin with a name"
|
||||
);
|
||||
|
||||
match last_is_vec(pattern) {
|
||||
true => Self(mk_any(pattern)),
|
||||
false => {
|
||||
let kind: PhKind = PhKind::Vector { priority: 0, at_least_one: false };
|
||||
let suffix = [MacTok::Ph(Ph { name: i.i("::after").await, kind }).at(Pos::None)];
|
||||
Self(mk_any(&pattern.iter().chain(&suffix).cloned().collect_vec()))
|
||||
},
|
||||
}
|
||||
}
|
||||
/// Also returns the tail, if any, which should be matched further.
/// Note that due to how `PriodMatcher` (below) works, the main usable
/// information in the tail is its length.
|
||||
pub async fn apply<'a>(
|
||||
&self,
|
||||
seq: &'a [MacTree],
|
||||
i: &Interner,
|
||||
save_loc: impl Fn(Sym) -> bool,
|
||||
) -> Option<(MatchState<'a>, &'a [MacTree])> {
|
||||
let mut state = any_match(&self.0, seq, &save_loc)?;
|
||||
match state.remove(i.i("::after").await) {
|
||||
Some(StateEntry::Scalar(_)) => panic!("::after can never be a scalar entry!"),
|
||||
Some(StateEntry::Vec(v)) => Some((state, v)),
|
||||
None => Some((state, &[][..])),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl fmt::Display for NamedMatcher {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) }
|
||||
}
|
||||
impl fmt::Debug for NamedMatcher {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "NamedMatcher({self})") }
|
||||
}
|
||||
|
||||
pub struct PriodMatcher(VecMatcher);
|
||||
impl PriodMatcher {
|
||||
pub fn new(pattern: &[MacTree]) -> Self {
|
||||
assert!(
|
||||
pattern.first().and_then(vec_attrs).is_some() && pattern.last().and_then(vec_attrs).is_some(),
|
||||
"Prioritized matchers must start and end with a vectorial",
|
||||
);
|
||||
Self(mk_vec(pattern))
|
||||
}
|
||||
/// tokens before the offset always match the prefix
|
||||
pub fn apply<'a>(
|
||||
&self,
|
||||
seq: &'a [MacTree],
|
||||
save_loc: impl Fn(Sym) -> bool,
|
||||
) -> Option<MatchState<'a>> {
|
||||
vec_match(&self.0, seq, &save_loc)
|
||||
}
|
||||
}
|
||||
impl fmt::Display for PriodMatcher {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) }
|
||||
}
|
||||
impl fmt::Debug for PriodMatcher {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "PriodMatcher({self})") }
|
||||
}
|
||||
@@ -1,24 +0,0 @@
|
||||
//! Optimized form of macro pattern that can be quickly tested against the AST.
|
||||
//!
|
||||
//! # Construction
|
||||
//!
|
||||
//! convert pattern into hierarchy of plain, scan, middle
|
||||
//! - plain: accept any sequence or any non-empty sequence
|
||||
//! - scan: a single scalar pattern moves LTR or RTL, submatchers on either side
|
||||
//! - middle: two scalar patterns walk over all permutations of matches while
|
||||
//! getting progressively closer to each other
|
||||
//!
|
||||
//! # Application
|
||||
//!
|
||||
//! walk over the current matcher's valid options and poll the submatchers
|
||||
//! for each of them
|
||||
|
||||
mod any_match;
|
||||
mod build;
|
||||
pub mod matcher;
|
||||
mod scal_match;
|
||||
pub mod shared;
|
||||
pub mod state;
|
||||
mod vec_attrs;
|
||||
mod vec_match;
|
||||
// pub mod matcher;
|
||||
@@ -1,44 +0,0 @@
|
||||
use orchid_base::name::Sym;
|
||||
|
||||
use super::any_match::any_match;
|
||||
use super::shared::ScalMatcher;
|
||||
use crate::macros::{MacTok, MacTree};
|
||||
use crate::rule::state::{MatchState, StateEntry};
|
||||
|
||||
#[must_use]
|
||||
pub fn scal_match<'a>(
|
||||
matcher: &ScalMatcher,
|
||||
expr: &'a MacTree,
|
||||
save_loc: &impl Fn(Sym) -> bool,
|
||||
) -> Option<MatchState<'a>> {
|
||||
match (matcher, &*expr.tok) {
|
||||
(ScalMatcher::Name(n1), MacTok::Name(n2)) if n1 == n2 => Some(match save_loc(n1.clone()) {
|
||||
true => MatchState::from_name(n1.clone(), expr.pos.clone()),
|
||||
false => MatchState::default(),
|
||||
}),
|
||||
(ScalMatcher::Placeh { .. }, MacTok::Done(_)) => None,
|
||||
(ScalMatcher::Placeh { key }, _) =>
|
||||
Some(MatchState::from_ph(key.clone(), StateEntry::Scalar(expr))),
|
||||
(ScalMatcher::S(c1, b_mat), MacTok::S(c2, body)) if c1 == c2 =>
|
||||
any_match(b_mat, &body[..], save_loc),
|
||||
(ScalMatcher::Lambda(arg_mat, b_mat), MacTok::Lambda(arg, body)) =>
|
||||
Some(any_match(arg_mat, arg, save_loc)?.combine(any_match(b_mat, body, save_loc)?)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn scalv_match<'a>(
|
||||
matchers: &[ScalMatcher],
|
||||
seq: &'a [MacTree],
|
||||
save_loc: &impl Fn(Sym) -> bool,
|
||||
) -> Option<MatchState<'a>> {
|
||||
if seq.len() != matchers.len() {
|
||||
return None;
|
||||
}
|
||||
let mut state = MatchState::default();
|
||||
for (matcher, expr) in matchers.iter().zip(seq.iter()) {
|
||||
state = state.combine(scal_match(matcher, expr, save_loc)?);
|
||||
}
|
||||
Some(state)
|
||||
}
|
||||
@@ -1,101 +0,0 @@
|
||||
//! Datastructures for cached pattern
|
||||
|
||||
use std::fmt;
|
||||
|
||||
use itertools::Itertools;
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::side::Side;
|
||||
use orchid_base::tokens::{PARENS, Paren};
|
||||
|
||||
pub enum ScalMatcher {
|
||||
Name(Sym),
|
||||
S(Paren, Box<AnyMatcher>),
|
||||
Lambda(Box<AnyMatcher>, Box<AnyMatcher>),
|
||||
Placeh { key: Tok<String> },
|
||||
}
|
||||
|
||||
pub enum VecMatcher {
|
||||
Placeh {
|
||||
key: Tok<String>,
|
||||
nonzero: bool,
|
||||
},
|
||||
Scan {
|
||||
left: Box<VecMatcher>,
|
||||
sep: Vec<ScalMatcher>,
|
||||
right: Box<VecMatcher>,
|
||||
/// The separator traverses the sequence towards this side
|
||||
direction: Side,
|
||||
},
|
||||
Middle {
|
||||
/// Matches the left outer region
|
||||
left: Box<VecMatcher>,
|
||||
/// Matches the left separator
|
||||
left_sep: Vec<ScalMatcher>,
|
||||
/// Matches the middle - can only ever be a plain placeholder
|
||||
mid: Box<VecMatcher>,
|
||||
/// Matches the right separator
|
||||
right_sep: Vec<ScalMatcher>,
|
||||
/// Matches the right outer region
|
||||
right: Box<VecMatcher>,
|
||||
/// Order of significance for sorting equally good candidates based on
/// the length of matches on either side.
|
||||
///
|
||||
/// Vectorial keys that appear on either side, in priority order
|
||||
key_order: Vec<Tok<String>>,
|
||||
},
|
||||
}
|
||||
|
||||
pub enum AnyMatcher {
|
||||
Scalar(Vec<ScalMatcher>),
|
||||
Vec { left: Vec<ScalMatcher>, mid: VecMatcher, right: Vec<ScalMatcher> },
|
||||
}
|
||||
|
||||
// ################ Display ################
|
||||
|
||||
impl fmt::Display for ScalMatcher {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::Placeh { key } => write!(f, "${key}"),
|
||||
Self::Name(n) => write!(f, "{n}"),
|
||||
Self::S(t, body) => {
|
||||
let (l, r, _) = PARENS.iter().find(|r| r.2 == *t).unwrap();
|
||||
write!(f, "{l}{body}{r}")
|
||||
},
|
||||
Self::Lambda(arg, body) => write!(f, "\\{arg}.{body}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for VecMatcher {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::Placeh { key, nonzero: true } => write!(f, "...${key}"),
|
||||
Self::Placeh { key, nonzero: false } => write!(f, "..${key}"),
|
||||
Self::Scan { left, sep, right, direction } => {
|
||||
let arrow = if direction == &Side::Left { "<==" } else { "==>" };
|
||||
write!(f, "Scan{{{left} {arrow} {} {arrow} {right}}}", sep.iter().join(" "))
|
||||
},
|
||||
Self::Middle { left, left_sep, mid, right_sep, right, .. } => {
|
||||
let left_sep_s = left_sep.iter().join(" ");
|
||||
let right_sep_s = right_sep.iter().join(" ");
|
||||
write!(f, "Middle{{{left}|{left_sep_s}|{mid}|{right_sep_s}|{right}}}")
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for AnyMatcher {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::Scalar(s) => {
|
||||
write!(f, "({})", s.iter().join(" "))
|
||||
},
|
||||
Self::Vec { left, mid, right } => {
|
||||
let lefts = left.iter().join(" ");
|
||||
let rights = right.iter().join(" ");
|
||||
write!(f, "[{lefts}|{mid}|{rights}]")
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,91 +0,0 @@
|
||||
#![allow(unused)]
|
||||
use std::any::Any;
|
||||
|
||||
use hashbrown::HashMap;
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::join::join_maps;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::match_mapping;
|
||||
use orchid_base::name::Sym;
|
||||
|
||||
use crate::macros::MacTree;
|
||||
|
||||
enum StackAction {
|
||||
Return(Box<dyn Any>),
|
||||
Call {
|
||||
target: Box<dyn FnOnce(Box<dyn Any>) -> StackAction>,
|
||||
param: Box<dyn Any>,
|
||||
tail: Box<dyn FnOnce(Box<dyn Any>) -> StackAction>,
|
||||
},
|
||||
}
|
||||
|
||||
struct Trampoline {
|
||||
stack: Vec<Box<dyn FnOnce(Box<dyn Any>) -> StackAction>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub enum StateEntry<'a> {
|
||||
Vec(&'a [MacTree]),
|
||||
Scalar(&'a MacTree),
|
||||
}
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct MatchState<'a> {
|
||||
placeholders: HashMap<Tok<String>, StateEntry<'a>>,
|
||||
name_posv: HashMap<Sym, Vec<Pos>>,
|
||||
}
|
||||
impl<'a> MatchState<'a> {
|
||||
pub fn from_ph(key: Tok<String>, entry: StateEntry<'a>) -> Self {
|
||||
Self { placeholders: HashMap::from([(key, entry)]), name_posv: HashMap::new() }
|
||||
}
|
||||
pub fn combine(self, s: Self) -> Self {
|
||||
Self {
|
||||
placeholders: self.placeholders.into_iter().chain(s.placeholders).collect(),
|
||||
name_posv: join_maps(self.name_posv, s.name_posv, |_, l, r| l.into_iter().chain(r).collect()),
|
||||
}
|
||||
}
|
||||
pub fn ph_len(&self, key: &Tok<String>) -> Option<usize> {
|
||||
match self.placeholders.get(key)? {
|
||||
StateEntry::Vec(slc) => Some(slc.len()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
pub fn from_name(name: Sym, location: Pos) -> Self {
|
||||
Self { name_posv: HashMap::from([(name, vec![location])]), placeholders: HashMap::new() }
|
||||
}
|
||||
pub fn remove(&mut self, name: Tok<String>) -> Option<StateEntry<'a>> {
|
||||
self.placeholders.remove(&name)
|
||||
}
|
||||
pub fn mk_owned(self) -> OwnedState {
|
||||
OwnedState {
|
||||
placeholders: (self.placeholders.into_iter())
|
||||
.map(|(k, v)| {
|
||||
(
|
||||
k.clone(),
|
||||
match_mapping!(v, StateEntry => OwnedEntry {
|
||||
Scalar(tree.clone()),
|
||||
Vec(v.to_vec()),
|
||||
}),
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
name_posv: self.name_posv,
|
||||
}
|
||||
}
|
||||
}
|
||||
impl Default for MatchState<'static> {
|
||||
fn default() -> Self { Self { name_posv: HashMap::new(), placeholders: HashMap::new() } }
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum OwnedEntry {
|
||||
Vec(Vec<MacTree>),
|
||||
Scalar(MacTree),
|
||||
}
|
||||
pub struct OwnedState {
|
||||
placeholders: HashMap<Tok<String>, OwnedEntry>,
|
||||
name_posv: HashMap<Sym, Vec<Pos>>,
|
||||
}
|
||||
impl OwnedState {
|
||||
pub fn get(&self, key: &Tok<String>) -> Option<&OwnedEntry> { self.placeholders.get(key) }
|
||||
pub fn positions(&self, name: &Sym) -> &[Pos] { self.name_posv.get(name).map_or(&[], |v| &v[..]) }
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
use orchid_api::PhKind;
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::tree::Ph;
|
||||
|
||||
use crate::macros::{MacTok, MacTree};
|
||||
|
||||
/// Returns the name, priority and at_least_one of the expression if it is
|
||||
/// a vectorial placeholder
|
||||
#[must_use]
|
||||
pub fn vec_attrs(expr: &MacTree) -> Option<(Tok<String>, u8, bool)> {
|
||||
match (*expr.tok).clone() {
|
||||
MacTok::Ph(Ph { kind: PhKind::Vector { priority, at_least_one }, name }) =>
|
||||
Some((name, priority, at_least_one)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
@@ -1,95 +0,0 @@
|
||||
use std::cmp::Ordering;
|
||||
|
||||
use itertools::Itertools;
|
||||
use orchid_base::name::Sym;
|
||||
|
||||
use super::scal_match::scalv_match;
|
||||
use super::shared::VecMatcher;
|
||||
use crate::macros::MacTree;
|
||||
use crate::rule::state::{MatchState, StateEntry};
|
||||
|
||||
#[must_use]
|
||||
pub fn vec_match<'a>(
|
||||
matcher: &VecMatcher,
|
||||
seq: &'a [MacTree],
|
||||
save_loc: &impl Fn(Sym) -> bool,
|
||||
) -> Option<MatchState<'a>> {
|
||||
match matcher {
|
||||
VecMatcher::Placeh { key, nonzero } => {
|
||||
if *nonzero && seq.is_empty() {
|
||||
return None;
|
||||
}
|
||||
Some(MatchState::from_ph(key.clone(), StateEntry::Vec(seq)))
|
||||
},
|
||||
VecMatcher::Scan { left, sep, right, direction } => {
|
||||
if seq.len() < sep.len() {
|
||||
return None;
|
||||
}
|
||||
for lpos in direction.walk(0..=seq.len() - sep.len()) {
|
||||
let rpos = lpos + sep.len();
|
||||
let state = vec_match(left, &seq[..lpos], save_loc)
|
||||
.and_then(|s| Some(s.combine(scalv_match(sep, &seq[lpos..rpos], save_loc)?)))
|
||||
.and_then(|s| Some(s.combine(vec_match(right, &seq[rpos..], save_loc)?)));
|
||||
if let Some(s) = state {
|
||||
return Some(s);
|
||||
}
|
||||
}
|
||||
None
|
||||
},
|
||||
// XXX predict heap space usage and allocation count
|
||||
VecMatcher::Middle { left, left_sep, mid, right_sep, right, key_order } => {
|
||||
if seq.len() < left_sep.len() + right_sep.len() {
|
||||
return None;
|
||||
}
|
||||
// Valid locations for the left separator
|
||||
let lposv = seq[..seq.len() - right_sep.len()]
|
||||
.windows(left_sep.len())
|
||||
.enumerate()
|
||||
.filter_map(|(i, window)| scalv_match(left_sep, window, save_loc).map(|s| (i, s)))
|
||||
.collect::<Vec<_>>();
|
||||
// Valid locations for the right separator
|
||||
let rposv = seq[left_sep.len()..]
|
||||
.windows(right_sep.len())
|
||||
.enumerate()
|
||||
.filter_map(|(i, window)| scalv_match(right_sep, window, save_loc).map(|s| (i, s)))
|
||||
.collect::<Vec<_>>();
|
||||
// Valid combinations of locations for the separators
|
||||
let mut pos_pairs = lposv
|
||||
.into_iter()
|
||||
.cartesian_product(rposv)
|
||||
.filter(|((lpos, _), (rpos, _))| lpos + left_sep.len() <= *rpos)
|
||||
.map(|((lpos, lstate), (rpos, rstate))| (lpos, rpos, lstate.combine(rstate)))
|
||||
.collect::<Vec<_>>();
|
||||
// In descending order of size
|
||||
pos_pairs.sort_by_key(|(l, r, _)| -((r - l) as i64));
|
||||
let eql_clusters = pos_pairs.into_iter().chunk_by(|(al, ar, _)| ar - al);
|
||||
for (_gap_size, cluster) in eql_clusters.into_iter() {
|
||||
let best_candidate = cluster
|
||||
.into_iter()
|
||||
.filter_map(|(lpos, rpos, state)| {
|
||||
Some(
|
||||
state
|
||||
.combine(vec_match(left, &seq[..lpos], save_loc)?)
|
||||
.combine(vec_match(mid, &seq[lpos + left_sep.len()..rpos], save_loc)?)
|
||||
.combine(vec_match(right, &seq[rpos + right_sep.len()..], save_loc)?),
|
||||
)
|
||||
})
|
||||
.max_by(|a, b| {
|
||||
for key in key_order {
|
||||
let alen = a.ph_len(key).expect("key_order references scalar or missing");
|
||||
let blen = b.ph_len(key).expect("key_order references scalar or missing");
|
||||
match alen.cmp(&blen) {
|
||||
Ordering::Equal => (),
|
||||
any => return any,
|
||||
}
|
||||
}
|
||||
Ordering::Equal
|
||||
});
|
||||
if let Some(state) = best_candidate {
|
||||
return Some(state);
|
||||
}
|
||||
}
|
||||
None
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
use std::collections::VecDeque;
|
||||
use std::fmt;
|
||||
use std::future::Future;
|
||||
use std::rc::{Rc, Weak};
|
||||
use std::{fmt, mem};
|
||||
|
||||
use async_stream::stream;
|
||||
use derive_destructure::destructure;
|
||||
@@ -13,8 +13,9 @@ use orchid_base::char_filter::char_filter_match;
|
||||
use orchid_base::clone;
|
||||
use orchid_base::error::{OrcErrv, OrcRes};
|
||||
use orchid_base::format::{FmtCtx, FmtUnit, Format};
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::interner::{Interner, Tok};
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::parse::Comment;
|
||||
use orchid_base::reqnot::{ReqNot, Requester};
|
||||
use orchid_base::tree::ttv_from_api;
|
||||
@@ -23,8 +24,9 @@ use substack::{Stackframe, Substack};
|
||||
|
||||
use crate::api;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::expr::{Expr, ExprParseCtx};
|
||||
use crate::extension::{Extension, WeakExtension};
|
||||
use crate::tree::{ItemKind, Member, Module, ParsTokTree, Root};
|
||||
use crate::parsed::{ItemKind, ParsedMember, ParsedModule, ParsTokTree, ParsedFromApiCx, Root};
|
||||
|
||||
#[derive(destructure)]
|
||||
struct SystemInstData {
|
||||
@@ -55,6 +57,7 @@ impl System {
|
||||
pub fn id(&self) -> api::SysId { self.0.id }
|
||||
pub fn ext(&self) -> &Extension { &self.0.ext }
|
||||
pub fn ctx(&self) -> &Ctx { &self.0.ctx }
|
||||
pub fn i(&self) -> &Interner { &self.0.ctx.i }
|
||||
pub(crate) fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { self.0.ext.reqnot() }
|
||||
pub async fn get_tree(&self, id: api::TreeId) -> api::MemberKind {
|
||||
self.reqnot().request(api::GetMember(self.0.id, id)).await
|
||||
@@ -75,15 +78,23 @@ impl System {
|
||||
pub fn line_types(&self) -> impl Iterator<Item = &Tok<String>> + '_ { self.0.line_types.iter() }
|
||||
pub async fn parse(
|
||||
&self,
|
||||
module: Sym,
|
||||
line: Vec<ParsTokTree>,
|
||||
exported: bool,
|
||||
comments: Vec<Comment>,
|
||||
) -> OrcRes<Vec<ParsTokTree>> {
|
||||
let line =
|
||||
join_all(line.iter().map(|t| async { t.to_api(&mut async |n, _| match *n {}).await })).await;
|
||||
let line = join_all(line.into_iter().map(|t| async {
|
||||
let mut expr_store = self.0.ext.exprs().clone();
|
||||
t.into_api(&mut expr_store, &mut ()).await
|
||||
}))
|
||||
.await;
|
||||
let comments = comments.iter().map(Comment::to_api).collect_vec();
|
||||
match self.reqnot().request(api::ParseLine { exported, sys: self.id(), comments, line }).await {
|
||||
Ok(parsed) => Ok(ttv_from_api(parsed, &mut self.ctx().clone(), &self.ctx().i).await),
|
||||
let req = api::ParseLine { module: module.to_api(), exported, sys: self.id(), comments, line };
|
||||
match self.reqnot().request(req).await {
|
||||
Ok(parsed) => {
|
||||
let mut pctx = ExprParseCtx { ctx: self.ctx().clone(), exprs: self.ext().exprs().clone() };
|
||||
Ok(ttv_from_api(parsed, &mut self.ext().exprs().clone(), &mut pctx, self.i()).await)
|
||||
},
|
||||
Err(e) => Err(OrcErrv::from_api(&e, &self.ctx().i).await),
|
||||
}
|
||||
}
|
||||
@@ -122,7 +133,11 @@ impl SystemCtor {
|
||||
self.decl.depends.iter().map(|s| &**s)
|
||||
}
|
||||
pub fn id(&self) -> api::SysDeclId { self.decl.id }
|
||||
pub async fn run<'a>(&self, depends: impl IntoIterator<Item = &'a System>) -> (Module, System) {
|
||||
pub async fn run<'a>(
|
||||
&self,
|
||||
depends: impl IntoIterator<Item = &'a System>,
|
||||
consts: &mut HashMap<Sym, Expr>,
|
||||
) -> (ParsedModule, System) {
|
||||
let depends = depends.into_iter().map(|si| si.id()).collect_vec();
|
||||
debug_assert_eq!(depends.len(), self.decl.depends.len(), "Wrong number of deps provided");
|
||||
let ext = self.ext.upgrade().expect("SystemCtor should be freed before Extension");
|
||||
@@ -139,10 +154,13 @@ impl SystemCtor {
|
||||
}));
|
||||
let const_root = clone!(data, ext; stream! {
|
||||
for (k, v) in sys_inst.const_root {
|
||||
yield Member::from_api(
|
||||
yield ParsedMember::from_api(
|
||||
api::Member { name: k, kind: v },
|
||||
&mut vec![Tok::from_api(k, &ext.ctx().i).await],
|
||||
&data,
|
||||
&mut ParsedFromApiCx {
|
||||
consts,
|
||||
path: ext.ctx().i.i(&[]).await,
|
||||
sys: &data,
|
||||
}
|
||||
).await;
|
||||
}
|
||||
})
|
||||
@@ -150,7 +168,7 @@ impl SystemCtor {
|
||||
.collect::<Vec<_>>()
|
||||
.await;
|
||||
ext.ctx().systems.write().await.insert(id, data.downgrade());
|
||||
let root = Module::new(const_root);
|
||||
let root = ParsedModule::new(const_root);
|
||||
(root, data)
|
||||
}
|
||||
}
|
||||
@@ -182,6 +200,7 @@ pub async fn init_systems(
|
||||
fn walk_deps<'a>(
|
||||
graph: &mut HashMap<&str, &'a SystemCtor>,
|
||||
list: &mut Vec<&'a SystemCtor>,
|
||||
consts: &mut HashMap<Sym, Expr>,
|
||||
chain: Stackframe<&str>,
|
||||
) -> Result<(), SysResolvErr> {
|
||||
if let Some(ctor) = graph.remove(chain.item) {
|
||||
@@ -193,21 +212,22 @@ pub async fn init_systems(
|
||||
circle.extend(Substack::Frame(chain).iter().map(|s| s.to_string()));
|
||||
return Err(SysResolvErr::Loop(circle));
|
||||
}
|
||||
walk_deps(graph, list, Substack::Frame(chain).new_frame(dep))?
|
||||
walk_deps(graph, list, consts, Substack::Frame(chain).new_frame(dep))?
|
||||
}
|
||||
list.push(ctor);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
let mut consts = HashMap::new();
|
||||
for tgt in tgts {
|
||||
walk_deps(&mut to_load, &mut to_load_ordered, Substack::Bottom.new_frame(tgt))?;
|
||||
walk_deps(&mut to_load, &mut to_load_ordered, &mut consts, Substack::Bottom.new_frame(tgt))?;
|
||||
}
|
||||
let mut systems = HashMap::<&str, System>::new();
|
||||
let mut root = Module::default();
|
||||
let mut root_mod = ParsedModule::default();
|
||||
for ctor in to_load_ordered.iter() {
|
||||
let (sys_root, sys) = ctor.run(ctor.depends().map(|n| &systems[n])).await;
|
||||
let (sys_root, sys) = ctor.run(ctor.depends().map(|n| &systems[n]), &mut consts).await;
|
||||
systems.insert(ctor.name(), sys);
|
||||
root.merge(sys_root);
|
||||
root_mod.merge(sys_root);
|
||||
}
|
||||
Ok((Root::new(root), systems.into_values().collect_vec()))
|
||||
Ok((Root::new(root_mod, consts), systems.into_values().collect_vec()))
|
||||
}
|
||||
|
||||
@@ -1,374 +1,59 @@
|
||||
use std::fmt::Debug;
|
||||
use std::rc::Rc;
|
||||
use std::cell::RefCell;
|
||||
use std::rc::{Rc, Weak};
|
||||
|
||||
use async_once_cell::OnceCell;
|
||||
use async_std::sync::{Mutex, RwLock};
|
||||
use async_stream::stream;
|
||||
use futures::future::join_all;
|
||||
use futures::{FutureExt, StreamExt};
|
||||
use itertools::Itertools;
|
||||
use never::Never;
|
||||
use orchid_base::error::{OrcRes, mk_errv};
|
||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
||||
use hashbrown::HashMap;
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::macros::{mtreev_fmt, mtreev_from_api};
|
||||
use orchid_base::name::{NameLike, Sym};
|
||||
use orchid_base::parse::{Comment, Import};
|
||||
use orchid_base::tree::{AtomRepr, TokTree, Token};
|
||||
use orchid_base::{clone, tl_cache};
|
||||
use ordered_float::NotNan;
|
||||
use substack::Substack;
|
||||
use orchid_base::name::Sym;
|
||||
|
||||
use crate::api;
|
||||
use crate::atom::AtomHand;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::expr::{Expr, mtreev_to_expr};
|
||||
use crate::macros::{MacTok, MacTree};
|
||||
use crate::system::System;
|
||||
use crate::expr::Expr;
|
||||
use crate::parsed::{LazyMemberHandle, ParsedMemberKind, ParsedModule};
|
||||
|
||||
pub type ParsTokTree = TokTree<'static, AtomHand, Never>;
|
||||
pub type ParsTok = Token<'static, AtomHand, Never>;
|
||||
pub struct Tree(Rc<Module>);
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Item {
|
||||
pub pos: Pos,
|
||||
pub comments: Vec<Comment>,
|
||||
pub kind: ItemKind,
|
||||
pub struct WeakTree(Weak<Module>);
|
||||
|
||||
pub struct Module {
|
||||
pub members: HashMap<Tok<String>, Rc<Member>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum ItemKind {
|
||||
Member(Member),
|
||||
Export(Tok<String>),
|
||||
Import(Import),
|
||||
Macro(Option<NotNan<f64>>, Vec<Rule>),
|
||||
}
|
||||
impl ItemKind {
|
||||
pub fn at(self, pos: Pos) -> Item { Item { comments: vec![], pos, kind: self } }
|
||||
}
|
||||
|
||||
impl Item {
|
||||
pub async fn from_api(tree: api::Item, path: &mut Vec<Tok<String>>, sys: &System) -> Self {
|
||||
let kind = match tree.kind {
|
||||
api::ItemKind::Member(m) => ItemKind::Member(Member::from_api(m, path, sys).await),
|
||||
api::ItemKind::Import(name) => ItemKind::Import(Import {
|
||||
path: Sym::from_api(name, &sys.ctx().i).await.iter().collect(),
|
||||
name: None,
|
||||
}),
|
||||
api::ItemKind::Export(e) => ItemKind::Export(Tok::from_api(e, &sys.ctx().i).await),
|
||||
api::ItemKind::Macro(macro_block) => {
|
||||
let mut rules = Vec::new();
|
||||
for rule in macro_block.rules {
|
||||
let mut comments = Vec::new();
|
||||
for comment in rule.comments {
|
||||
comments.push(Comment::from_api(&comment, &sys.ctx().i).await);
|
||||
}
|
||||
let pos = Pos::from_api(&rule.location, &sys.ctx().i).await;
|
||||
let pattern = mtreev_from_api(&rule.pattern, &sys.ctx().i, &mut {
|
||||
clone!(pos, sys);
|
||||
async move |a| {
|
||||
MacTok::Atom(AtomHand::from_api(a, pos.clone(), &mut sys.ctx().clone()).await)
|
||||
}
|
||||
})
|
||||
.await;
|
||||
rules.push(Rule { pos, pattern, kind: RuleKind::Remote(sys.clone(), rule.id), comments });
|
||||
}
|
||||
ItemKind::Macro(macro_block.priority, rules)
|
||||
},
|
||||
};
|
||||
let mut comments = Vec::new();
|
||||
for comment in tree.comments.iter() {
|
||||
comments.push(Comment::from_api(comment, &sys.ctx().i).await)
|
||||
}
|
||||
Self { pos: Pos::from_api(&tree.location, &sys.ctx().i).await, comments, kind }
|
||||
}
|
||||
}
|
||||
impl Format for Item {
|
||||
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||
let comment_text = self.comments.iter().join("\n");
|
||||
let item_text = match &self.kind {
|
||||
ItemKind::Import(i) => format!("import {i}").into(),
|
||||
ItemKind::Export(e) => format!("export {e}").into(),
|
||||
ItemKind::Macro(None, rules) =>
|
||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("macro {{\n\t{0}\n}}")))
|
||||
.units([Variants::sequence(rules.len(), "\n", None)
|
||||
.units(join_all(rules.iter().map(|r| r.print(c))).await)]),
|
||||
ItemKind::Member(mem) => match mem.kind.get() {
|
||||
None => format!("lazy {}", mem.name).into(),
|
||||
Some(MemberKind::Const(val)) =>
|
||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("const {0} = {1}")))
|
||||
.units([mem.name.rc().into(), val.print(c).await]),
|
||||
Some(MemberKind::Mod(module)) =>
|
||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("module {0} {{\n\t{1}\n}}")))
|
||||
.units([mem.name.rc().into(), module.print(c).boxed_local().await]),
|
||||
},
|
||||
_ => panic!(),
|
||||
};
|
||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{0}\n{1}")))
|
||||
.units([comment_text.into(), item_text])
|
||||
impl Module {
|
||||
async fn from_parsed(parsed: &ParsedModule, root: &ParsedModule) -> Self {
|
||||
let imports =
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Member {
  name: Tok<String>,
  kind: OnceCell<MemberKind>,
  lazy: Mutex<Option<LazyMemberHandle>>,
  pub public: bool,
  pub root: WeakTree,
  pub canonical_path: Sym,
  pub lazy: RefCell<Option<(LazyMemberHandle, Rc<ParsedModule>)>>,
  pub kind: OnceCell<MemberKind>,
}
impl Member {
  pub fn name(&self) -> Tok<String> { self.name.clone() }
  pub async fn kind(&self) -> &MemberKind {
  pub async fn kind_mut(&mut self, consts: &mut HashMap<Sym, Expr>) -> &mut MemberKind {
    self.kind(consts).await;
    self.kind.get_mut().expect("The above line should have initialized it")
  }
  pub async fn kind(&self, consts: &mut HashMap<Sym, Expr>) -> &MemberKind {
    (self.kind.get_or_init(async {
      let handle = self.lazy.lock().await.take().expect("Neither known nor lazy");
      handle.run().await
      let (handle, root) =
        self.lazy.borrow_mut().take().expect("If kind is uninit, lazy must be Some");
      let parsed = handle.run(consts).await;
      MemberKind::from_parsed(&parsed, &root).await
    }))
    .await
  }
  pub async fn kind_mut(&mut self) -> &mut MemberKind {
    self.kind().await;
    self.kind.get_mut().expect("kind() already filled the cell")
  }
  pub async fn from_api(api: api::Member, path: &mut Vec<Tok<String>>, sys: &System) -> Self {
    path.push(Tok::from_api(api.name, &sys.ctx().i).await);
    let kind = match api.kind {
      api::MemberKind::Lazy(id) => {
        let handle = LazyMemberHandle(id, sys.clone(), path.clone());
        return handle.into_member(path.pop().unwrap());
      },
      api::MemberKind::Const(c) => MemberKind::Const(Code::from_expr(
        CodeLocator::to_const(sys.ctx().i.i(&*path).await),
        Expr::from_api(&c, &mut sys.ext().clone()).await,
      )),
      api::MemberKind::Module(m) => MemberKind::Mod(Module::from_api(m, path, sys).await),
    };
    let name = path.pop().unwrap();
    Member { name, kind: OnceCell::from(kind), lazy: Mutex::default() }
  }
  pub fn new(name: Tok<String>, kind: MemberKind) -> Self {
    Member { name, kind: OnceCell::from(kind), lazy: Mutex::default() }
  }
}
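// The kind()/kind_mut() pair above follows a lazy materialization pattern: the
// member's kind is cached in a OnceCell, and while the cell is empty a one-shot
// handle stored beside it is taken out and run to produce the value. The test
// module below is an illustrative, self-contained sketch of that pattern using
// synchronous std types; `LazyValue` and its members are hypothetical names,
// not part of this crate.
#[cfg(test)]
mod lazy_member_sketch {
  use std::cell::{OnceCell, RefCell};

  struct LazyValue {
    cache: OnceCell<String>,
    // Some(handle) while unevaluated, None once the cache has been filled.
    handle: RefCell<Option<Box<dyn FnOnce() -> String>>>,
  }

  impl LazyValue {
    fn new(handle: impl FnOnce() -> String + 'static) -> Self {
      Self { cache: OnceCell::new(), handle: RefCell::new(Some(Box::new(handle))) }
    }
    fn get(&self) -> &String {
      self.cache.get_or_init(|| {
        // An empty cache implies the handle is still present, mirroring the
        // expect() in Member::kind above.
        let handle = self.handle.borrow_mut().take().expect("neither known nor lazy");
        handle()
      })
    }
  }

  #[test]
  fn materializes_once() {
    let lazy = LazyValue::new(|| "materialized".to_string());
    assert_eq!(lazy.get(), "materialized");
    // The handle has been consumed; this read hits the cache.
    assert_eq!(lazy.get(), "materialized");
  }
}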
impl Debug for Member {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("Member")
      .field("name", &self.name)
      .field("kind", &self.kind)
      .finish_non_exhaustive()
  }
}

#[derive(Debug)]
pub enum MemberKind {
  Const(Code),
  Mod(Module),
  Const,
  Module(Module),
}

#[derive(Debug, Default)]
pub struct Module {
  pub imports: Vec<Sym>,
  pub exports: Vec<Tok<String>>,
  pub items: Vec<Item>,
}
impl Module {
  pub fn new(items: impl IntoIterator<Item = Item>) -> Self {
    let items = items.into_iter().collect_vec();
    let exports = (items.iter())
      .filter_map(|i| match &i.kind {
        ItemKind::Export(e) => Some(e.clone()),
        _ => None,
      })
      .collect_vec();
    Self { imports: vec![], exports, items }
  }
  pub fn merge(&mut self, other: Module) {
    let mut swap = Module::default();
    std::mem::swap(self, &mut swap);
    *self = Module::new(swap.items.into_iter().chain(other.items))
  }
  pub async fn from_api(m: api::Module, path: &mut Vec<Tok<String>>, sys: &System) -> Self {
    Self::new(
      stream! { for item in m.items { yield Item::from_api(item, path, sys).boxed_local().await } }
        .collect::<Vec<_>>()
        .await,
    )
  }
  pub async fn walk(
    &self,
    allow_private: bool,
    path: impl IntoIterator<Item = Tok<String>>,
  ) -> Result<&Module, WalkError> {
    let mut cur = self;
    for (pos, step) in path.into_iter().enumerate() {
      let Some(member) = (cur.items.iter())
        .filter_map(|it| if let ItemKind::Member(m) = &it.kind { Some(m) } else { None })
        .find(|m| m.name == step)
      else {
        return Err(WalkError { pos, kind: WalkErrorKind::Missing });
      };
      if !allow_private && !cur.exports.contains(&step) {
        return Err(WalkError { pos, kind: WalkErrorKind::Private });
      }
      match member.kind().await {
        MemberKind::Const(_) => return Err(WalkError { pos, kind: WalkErrorKind::Constant }),
        MemberKind::Mod(m) => cur = m,
      }
    }
    Ok(cur)
  }
}
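// walk() above resolves a path one step at a time, reporting how far it got
// (pos) and why it stopped. The test module below is an illustrative,
// self-contained sketch of the same traversal over a plain nested map;
// `Node`, `walk_map` and the omitted private-member check are hypothetical
// simplifications, not this crate's API.
#[cfg(test)]
mod walk_sketch {
  use std::collections::HashMap;

  enum Node {
    Constant,
    Module(HashMap<String, Node>),
  }

  #[derive(Debug)]
  enum FailureKind {
    Missing,
    Constant,
  }

  #[derive(Debug)]
  struct WalkFailure {
    pos: usize,
    kind: FailureKind,
  }

  fn walk_map<'a>(
    mut cur: &'a HashMap<String, Node>,
    path: &[&str],
  ) -> Result<&'a HashMap<String, Node>, WalkFailure> {
    for (pos, step) in path.iter().enumerate() {
      match cur.get(*step) {
        // The step does not exist at this level.
        None => return Err(WalkFailure { pos, kind: FailureKind::Missing }),
        // The step exists but is a constant, so it cannot be entered.
        Some(Node::Constant) => return Err(WalkFailure { pos, kind: FailureKind::Constant }),
        Some(Node::Module(m)) => cur = m,
      }
    }
    Ok(cur)
  }

  #[test]
  fn reports_position_of_failure() {
    let root = HashMap::from([(
      "std".to_string(),
      Node::Module(HashMap::from([("pi".to_string(), Node::Constant)])),
    )]);
    assert!(walk_map(&root, &["std"]).is_ok());
    // "pi" is a constant, so descending past it fails at position 1.
    assert!(matches!(
      walk_map(&root, &["std", "pi", "x"]),
      Err(WalkFailure { pos: 1, kind: FailureKind::Constant })
    ));
  }
}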
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub enum WalkErrorKind {
  Missing,
  Private,
  Constant,
}
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct WalkError {
  pub pos: usize,
  pub kind: WalkErrorKind,
}
impl Format for Module {
  async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
    let import_str = self.imports.iter().map(|i| format!("import {i}")).join("\n");
    let head_str = format!("{import_str}\nexport ::({})\n", self.exports.iter().join(", "));
    Variants::sequence(self.items.len() + 1, "\n", None).units(
      [head_str.into()].into_iter().chain(join_all(self.items.iter().map(|i| i.print(c))).await),
    )
  }
}

pub struct LazyMemberHandle(api::TreeId, System, Vec<Tok<String>>);
impl LazyMemberHandle {
  pub async fn run(self) -> MemberKind {
    match self.1.get_tree(self.0).await {
      api::MemberKind::Const(c) => MemberKind::Const(Code {
        bytecode: Expr::from_api(&c, &mut self.1.ext().clone()).await.into(),
        locator: CodeLocator { steps: self.1.ctx().i.i(&self.2).await, rule_loc: None },
        source: None,
      }),
      api::MemberKind::Module(m) =>
        MemberKind::Mod(Module::from_api(m, &mut { self.2 }, &self.1).await),
      api::MemberKind::Lazy(id) => Self(id, self.1, self.2).run().boxed_local().await,
    }
  }
  pub fn into_member(self, name: Tok<String>) -> Member {
    Member { name, kind: OnceCell::new(), lazy: Mutex::new(Some(self)) }
  }
}

#[derive(Debug)]
pub struct Rule {
  pub pos: Pos,
  pub comments: Vec<Comment>,
  pub pattern: Vec<MacTree>,
  pub kind: RuleKind,
}
impl Format for Rule {
  async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
    FmtUnit::new(
      tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{0b}\n{1} => {2b}"))),
      [
        self.comments.iter().join("\n").into(),
        mtreev_fmt(&self.pattern, c).await,
        match &self.kind {
          RuleKind::Native(code) => code.print(c).await,
          RuleKind::Remote(sys, id) => FmtUnit::new(
            tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{0} #{1}"))),
            [sys.print(c).await, format!("{id:?}").into()],
          ),
        },
      ],
    )
  }
}

#[derive(Debug)]
pub enum RuleKind {
  Remote(System, api::MacroId),
  Native(Code),
}

#[derive(Debug)]
pub struct Code {
  locator: CodeLocator,
  source: Option<Vec<MacTree>>,
  bytecode: OnceCell<Expr>,
}
impl Code {
  pub fn from_expr(locator: CodeLocator, expr: Expr) -> Self {
    Self { locator, source: None, bytecode: expr.into() }
  }
  pub fn from_code(locator: CodeLocator, code: Vec<MacTree>) -> Self {
    Self { locator, source: Some(code), bytecode: OnceCell::new() }
  }
  pub fn source(&self) -> Option<&Vec<MacTree>> { self.source.as_ref() }
  pub fn set_source(&mut self, source: Vec<MacTree>) {
    self.source = Some(source);
    self.bytecode = OnceCell::new();
  }
  pub async fn get_bytecode(&self, ctx: &Ctx) -> &Expr {
    (self.bytecode.get_or_init(async {
      let src = self.source.as_ref().expect("no bytecode or source");
      mtreev_to_expr(src, Substack::Bottom, ctx).await.at(Pos::None)
    }))
    .await
  }
}
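// get_bytecode() above compiles the stored source at most once and caches the
// result, while set_source() clears the cache so the next request recompiles.
// The test module below is an illustrative, self-contained sketch of that
// cache-and-invalidate pattern; `CachedCode` and its closure "compiler" are
// hypothetical stand-ins, not this crate's API.
#[cfg(test)]
mod code_cache_sketch {
  use std::cell::OnceCell;

  struct CachedCode {
    source: String,
    compiled: OnceCell<String>,
  }

  impl CachedCode {
    fn new(source: String) -> Self { Self { source, compiled: OnceCell::new() } }
    fn set_source(&mut self, source: String) {
      self.source = source;
      // Dropping the cell invalidates the cached compilation.
      self.compiled = OnceCell::new();
    }
    fn get_compiled(&self) -> &String {
      // The stand-in "compiler" runs only on the first call per source.
      self.compiled.get_or_init(|| format!("compiled({})", self.source))
    }
  }

  #[test]
  fn recompiles_after_source_change() {
    let mut code = CachedCode::new("a + b".to_string());
    assert_eq!(code.get_compiled(), "compiled(a + b)");
    code.set_source("a * b".to_string());
    assert_eq!(code.get_compiled(), "compiled(a * b)");
  }
}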
impl Format for Code {
  async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
    if let Some(bc) = self.bytecode.get() {
      return bc.print(c).await;
    }
    if let Some(src) = &self.source {
      return mtreev_fmt(src, c).await;
    }
    panic!("Code must be initialized with at least one state")
  }
}

/// Selects a code element
///
/// Either the steps point to a constant and rule_loc is None, or the steps
/// point to a module and rule_loc selects a macro rule within that module
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct CodeLocator {
  steps: Tok<Vec<Tok<String>>>,
  /// Index of a macro block in the module demarcated by the steps, and a rule
  /// in that macro
  rule_loc: Option<(u16, u16)>,
}
impl CodeLocator {
  pub fn to_const(steps: Tok<Vec<Tok<String>>>) -> Self { Self { steps, rule_loc: None } }
  pub fn to_rule(steps: Tok<Vec<Tok<String>>>, macro_i: u16, rule_i: u16) -> Self {
    Self { steps, rule_loc: Some((macro_i, rule_i)) }
  }
}
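// The doc comment above describes the two addressing modes of CodeLocator:
// either the path names a constant, or it names a module and the index pair
// picks a macro block and a rule inside it. The test module below is an
// illustrative, self-contained sketch of that shape with a plain Vec<String>
// path instead of interned tokens; `Locator` is a hypothetical stand-in.
#[cfg(test)]
mod locator_sketch {
  #[derive(Clone, Debug, PartialEq, Eq)]
  struct Locator {
    steps: Vec<String>,
    // None: the steps name a constant. Some((macro_i, rule_i)): the steps name
    // a module and the pair selects a macro block and a rule within it.
    rule_loc: Option<(u16, u16)>,
  }

  impl Locator {
    fn to_const(steps: Vec<String>) -> Self { Self { steps, rule_loc: None } }
    fn to_rule(steps: Vec<String>, macro_i: u16, rule_i: u16) -> Self {
      Self { steps, rule_loc: Some((macro_i, rule_i)) }
    }
  }

  #[test]
  fn distinguishes_constants_from_rules() {
    let constant = Locator::to_const(vec!["std".into(), "pi".into()]);
    let rule = Locator::to_rule(vec!["std".into(), "ops".into()], 0, 2);
    assert!(constant.rule_loc.is_none());
    assert_eq!(rule.rule_loc, Some((0, 2)));
  }
}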
#[derive(Clone)]
pub struct Root(Rc<RwLock<Module>>);
impl Root {
  pub fn new(module: Module) -> Self { Self(Rc::new(RwLock::new(module))) }
  pub async fn get_const_value(&self, name: impl NameLike, pos: Pos, ctx: Ctx) -> OrcRes<Expr> {
    let (cn, mp) = name.split_last();
    let root_lock = self.0.read().await;
    let module = root_lock.walk(true, mp.iter().cloned()).await.unwrap();
    let member = (module.items.iter())
      .filter_map(|it| if let ItemKind::Member(m) = &it.kind { Some(m) } else { None })
      .find(|m| m.name() == cn);
    match member {
      None => Err(mk_errv(
        ctx.i.i("Constant does not exist").await,
        format!("{name} does not refer to a constant"),
        [pos.clone().into()],
      )),
      Some(mem) => match mem.kind().await {
        MemberKind::Mod(_) => Err(mk_errv(
          ctx.i.i("module used as constant").await,
          format!("{name} is a module, not a constant"),
          [pos.clone().into()],
        )),
        MemberKind::Const(c) => Ok((c.get_bytecode(&ctx).await).clone()),
      },
impl MemberKind {
  async fn from_parsed(parsed: &ParsedMemberKind, root: &ParsedModule) -> Self {
    match parsed {
      ParsedMemberKind::Const => MemberKind::Const,
      ParsedMemberKind::Mod(m) => MemberKind::Module(Module::from_parsed(m, root).await),
    }
  }
}