Began implementing fully isomorphic macros
Like Rust's proc macros. Now we have preprocessor recursion to worry about. I also made a cool macro for enums.
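A rough sketch of what "isomorphic" is aiming at, based on the Rule/RuleKind types added to orchid-host/src/tree.rs further down in this diff. The dispatch function itself is hypothetical and only illustrates the intent: host-defined (Native) and extension-defined (Remote) rules share the same MTree pattern/body representation, so the expander can treat both uniformly.

fn apply_rule(rule: &Rule, _input: Vec<MTree<'static>>) -> Option<Vec<MTree<'static>>> {
  match &rule.kind {
    RuleKind::Native(_code) => {
      // Host-side rule: match the input against rule.pattern and build the
      // replacement tree in-process.
      todo!()
    },
    RuleKind::Remote(_sys, _id) => {
      // Extension-side rule: ship the input to the owning system; while it
      // expands, the extension may send api::RunMacros back, which re-enters
      // the host expander through macros::macro_recur -- that is the
      // "preprocessor recursion" mentioned above.
      todo!()
    },
  }
}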
@@ -11,9 +11,11 @@ hashbrown = "0.14.5"
itertools = "0.13.0"
lazy_static = "1.4.0"
never = "0.1.0"
num-traits = "0.2.19"
orchid-api = { version = "0.1.0", path = "../orchid-api" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-base = { version = "0.1.0", path = "../orchid-base" }
ordered-float = "4.2.0"
paste = "1.0.15"
substack = "1.1.0"
substack = "1.1.1"
trait-set = "0.3.0"

@@ -1,24 +1,34 @@
|
||||
use std::collections::VecDeque;
|
||||
use std::num::NonZeroU64;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::{Arc, RwLock};
|
||||
|
||||
use hashbrown::HashMap;
|
||||
use lazy_static::lazy_static;
|
||||
use orchid_base::error::OrcErrv;
|
||||
use orchid_base::interner::deintern;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::tree::AtomTok;
|
||||
|
||||
use crate::api;
|
||||
use crate::extension::{AtomHand, System};
|
||||
use crate::extension::AtomHand;
|
||||
|
||||
pub type ExprParseCtx = ();
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct RtExpr {
|
||||
pub struct Expr {
|
||||
is_canonical: Arc<AtomicBool>,
|
||||
data: Arc<()>,
|
||||
pos: Pos,
|
||||
kind: Arc<RwLock<ExprKind>>,
|
||||
}
|
||||
impl RtExpr {
|
||||
impl Expr {
|
||||
pub fn pos(&self) -> Pos { self.pos.clone() }
|
||||
pub fn as_atom(&self) -> Option<AtomHand> { todo!() }
|
||||
pub fn strong_count(&self) -> usize { todo!() }
|
||||
pub fn id(&self) -> api::ExprTicket {
|
||||
api::ExprTicket(
|
||||
NonZeroU64::new(self.data.as_ref() as *const () as usize as u64)
|
||||
NonZeroU64::new(self.kind.as_ref() as *const RwLock<_> as usize as u64)
|
||||
.expect("this is a ref, it cannot be null"),
|
||||
)
|
||||
}
|
||||
@@ -31,14 +41,29 @@ impl RtExpr {
|
||||
pub fn resolve(tk: api::ExprTicket) -> Option<Self> {
|
||||
KNOWN_EXPRS.read().unwrap().get(&tk).cloned()
|
||||
}
|
||||
pub fn from_api(api: api::Expr, sys: &System) -> Self {
|
||||
Self { data: Arc::default(), is_canonical: Arc::default() }
|
||||
pub fn from_api(api: api::Expression, ctx: &mut ExprParseCtx) -> Self {
|
||||
if let api::ExpressionKind::Slot(tk) = &api.kind {
|
||||
return Self::resolve(*tk).expect("Invalid slot");
|
||||
}
|
||||
Self {
|
||||
kind: Arc::new(RwLock::new(ExprKind::from_api(api.kind, ctx))),
|
||||
is_canonical: Arc::default(),
|
||||
pos: Pos::from_api(&api.location),
|
||||
}
|
||||
}
|
||||
pub fn to_api(&self) -> api::InspectedKind {
|
||||
use api::InspectedKind as K;
|
||||
match &*self.kind.read().unwrap() {
|
||||
ExprKind::Atom(a) => K::Atom(a.to_api()),
|
||||
ExprKind::Bottom(b) => K::Bottom(b.to_api()),
|
||||
_ => K::Opaque,
|
||||
}
|
||||
}
|
||||
}
|
||||
impl Drop for RtExpr {
|
||||
impl Drop for Expr {
|
||||
fn drop(&mut self) {
|
||||
// If the only two references left are this and known, remove from known
|
||||
if Arc::strong_count(&self.data) == 2 && self.is_canonical.load(Ordering::Relaxed) {
|
||||
if Arc::strong_count(&self.kind) == 2 && self.is_canonical.load(Ordering::Relaxed) {
|
||||
// if known is poisoned, a leak is preferable to a panicking destructor
|
||||
if let Ok(mut w) = KNOWN_EXPRS.write() {
|
||||
w.remove(&self.id());
|
||||
@@ -48,5 +73,67 @@ impl Drop for RtExpr {
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
static ref KNOWN_EXPRS: RwLock<HashMap<api::ExprTicket, RtExpr>> = RwLock::default();
|
||||
static ref KNOWN_EXPRS: RwLock<HashMap<api::ExprTicket, Expr>> = RwLock::default();
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum ExprKind {
|
||||
Seq(Expr, Expr),
|
||||
Call(Expr, Expr),
|
||||
Atom(AtomHand),
|
||||
Argument,
|
||||
Lambda(Option<PathSet>, Expr),
|
||||
Bottom(OrcErrv),
|
||||
Const(Sym),
|
||||
}
|
||||
impl ExprKind {
|
||||
pub fn from_api(api: api::ExpressionKind, ctx: &mut ExprParseCtx) -> Self {
|
||||
use api::ExpressionKind as K;
|
||||
match api {
|
||||
K::Slot(_) => panic!("Handled in Expr"),
|
||||
K::Lambda(id, b) => ExprKind::Lambda(PathSet::from_api(id, &b), Expr::from_api(*b, ctx)),
|
||||
K::Arg(_) => ExprKind::Argument,
|
||||
K::Bottom(b) => ExprKind::Bottom(OrcErrv::from_api(&b)),
|
||||
K::Call(f, x) => ExprKind::Call(Expr::from_api(*f, ctx), Expr::from_api(*x, ctx)),
|
||||
K::Const(c) => ExprKind::Const(Sym::from_tok(deintern(c)).unwrap()),
|
||||
K::NewAtom(a) => ExprKind::Atom(AtomHand::from_api(a)),
|
||||
K::Seq(a, b) => ExprKind::Seq(Expr::from_api(*a, ctx), Expr::from_api(*b, ctx)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
|
||||
pub enum Step {
|
||||
Left,
|
||||
Right,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct PathSet {
|
||||
/// The single steps through [super::nort::Clause::Apply]
|
||||
pub steps: VecDeque<Step>,
|
||||
/// if Some, it splits at a [super::nort::Clause::Apply]. If None, it ends in
|
||||
/// a [super::nort::Clause::LambdaArg]
|
||||
pub next: Option<(Box<PathSet>, Box<PathSet>)>,
|
||||
}
|
||||
impl PathSet {
|
||||
pub fn after(mut self, step: Step) -> Self {
|
||||
self.steps.push_front(step);
|
||||
self
|
||||
}
|
||||
pub fn from_api(id: u64, b: &api::Expression) -> Option<Self> {
|
||||
use api::ExpressionKind as K;
|
||||
match &b.kind {
|
||||
K::Arg(id2) => (id == *id2).then(|| Self { steps: VecDeque::new(), next: None }),
|
||||
K::Bottom(_) | K::Const(_) | K::NewAtom(_) | K::Slot(_) => None,
|
||||
K::Lambda(_, b) => Self::from_api(id, b),
|
||||
K::Call(l, r) | K::Seq(l, r) => match (Self::from_api(id, l), Self::from_api(id, r)) {
|
||||
(Some(a), Some(b)) =>
|
||||
Some(Self { steps: VecDeque::new(), next: Some((Box::new(a), Box::new(b))) }),
|
||||
(Some(l), None) => Some(l.after(Step::Left)),
|
||||
(None, Some(r)) => Some(r.after(Step::Right)),
|
||||
(None, None) => None,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
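A worked example of the PathSet encoding above (illustrative, reusing the types from this file): for a lambda whose body is (f x) x, that is Call(Call(Const(f), Arg), Arg), from_api splits at the outer Apply; the left branch must still step Right past f to reach the argument, while the right branch is the argument itself.

let left = PathSet { steps: VecDeque::from([Step::Right]), next: None };
let right = PathSet { steps: VecDeque::new(), next: None };
// `whole` is what PathSet::from_api(id, body) would return for this body
let whole = PathSet { steps: VecDeque::new(), next: Some((Box::new(left), Box::new(right))) };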
@@ -11,19 +11,23 @@ use hashbrown::hash_map::Entry;
|
||||
use hashbrown::HashMap;
|
||||
use itertools::Itertools;
|
||||
use lazy_static::lazy_static;
|
||||
use orchid_api_traits::{enc_vec, Decode, Request};
|
||||
use orchid_api::TStrv;
|
||||
use orchid_api_traits::Request;
|
||||
use orchid_base::char_filter::char_filter_match;
|
||||
use orchid_base::error::{errv_from_apiv, mk_err, OrcRes};
|
||||
use orchid_base::error::{OrcErrv, OrcRes};
|
||||
use orchid_base::interner::{deintern, intern, Tok};
|
||||
use orchid_base::logging::Logger;
|
||||
use orchid_base::macros::{mtreev_from_api, mtreev_to_api};
|
||||
use orchid_base::parse::Comment;
|
||||
use orchid_base::reqnot::{ReqNot, Requester as _};
|
||||
use orchid_base::tree::{ttv_from_api, AtomInTok};
|
||||
use orchid_base::{clone, intern};
|
||||
use orchid_base::tree::{ttv_from_api, AtomTok};
|
||||
use orchid_base::clone;
|
||||
use ordered_float::NotNan;
|
||||
use substack::{Stackframe, Substack};
|
||||
|
||||
use crate::api;
|
||||
use crate::expr::RtExpr;
|
||||
use crate::expr::Expr;
|
||||
use crate::macros::macro_recur;
|
||||
use crate::tree::{Member, ParsTokTree};
|
||||
|
||||
#[derive(Debug, destructure)]
|
||||
@@ -76,7 +80,7 @@ impl AtomHand {
|
||||
Self::create_new(atom)
|
||||
}
|
||||
}
|
||||
pub fn call(self, arg: RtExpr) -> api::Expr {
|
||||
pub fn call(self, arg: Expr) -> api::Expression {
|
||||
let owner_sys = self.0.owner.clone();
|
||||
let reqnot = owner_sys.reqnot();
|
||||
let ticket = owner_sys.give_expr(arg.canonicalize(), || arg);
|
||||
@@ -85,20 +89,13 @@ impl AtomHand {
|
||||
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), ticket)),
|
||||
}
|
||||
}
|
||||
pub fn same(&self, other: &AtomHand) -> bool {
|
||||
let owner = self.0.owner.id();
|
||||
if other.0.owner.id() != owner {
|
||||
return false;
|
||||
}
|
||||
self.0.owner.reqnot().request(api::AtomSame(self.0.api_ref(), other.0.api_ref()))
|
||||
}
|
||||
pub fn req(&self, req: Vec<u8>) -> Vec<u8> {
|
||||
self.0.owner.reqnot().request(api::Fwded(self.0.api_ref(), req))
|
||||
pub fn req(&self, key: TStrv, req: Vec<u8>) -> Option<Vec<u8>> {
|
||||
self.0.owner.reqnot().request(api::Fwded(self.0.api_ref(), key, req))
|
||||
}
|
||||
pub fn api_ref(&self) -> api::Atom { self.0.api_ref() }
|
||||
pub fn print(&self) -> String { self.0.owner.reqnot().request(api::AtomPrint(self.0.api_ref())) }
|
||||
}
|
||||
impl AtomInTok for AtomHand {
|
||||
impl AtomTok for AtomHand {
|
||||
type Context = ();
|
||||
fn from_api(atom: &orchid_api::Atom, _: Range<u32>, (): &mut Self::Context) -> Self {
|
||||
Self::from_api(atom.clone())
|
||||
@@ -118,6 +115,7 @@ impl fmt::Display for AtomHand {
|
||||
pub trait ExtensionPort: Send + Sync {
|
||||
fn send(&self, msg: &[u8]);
|
||||
fn receive(&self) -> Option<Vec<u8>>;
|
||||
fn header(&self) -> &api::ExtensionHeader;
|
||||
}
|
||||
|
||||
/// Data held about an Extension. This is refcounted within [Extension]. It's
|
||||
@@ -139,7 +137,7 @@ impl Drop for ExtensionData {
|
||||
|
||||
fn acq_expr(sys: api::SysId, extk: api::ExprTicket) {
|
||||
(System::resolve(sys).expect("Expr acq'd by invalid system"))
|
||||
.give_expr(extk, || RtExpr::resolve(extk).expect("Invalid expr acq'd"));
|
||||
.give_expr(extk, || Expr::resolve(extk).expect("Invalid expr acq'd"));
|
||||
}
|
||||
|
||||
fn rel_expr(sys: api::SysId, extk: api::ExprTicket) {
|
||||
@@ -154,10 +152,11 @@ fn rel_expr(sys: api::SysId, extk: api::ExprTicket) {
|
||||
pub struct Extension(Arc<ExtensionData>);
|
||||
impl Extension {
|
||||
pub fn new_process(port: Arc<dyn ExtensionPort>, logger: Logger) -> io::Result<Self> {
|
||||
port.send(&enc_vec(&api::HostHeader { log_strategy: logger.strat() }));
|
||||
let header_reply = port.receive().expect("Extension exited immediately");
|
||||
let eh = api::ExtensionHeader::decode(&mut &header_reply[..]);
|
||||
let eh = port.header();
|
||||
let ret = Arc::new_cyclic(|weak: &Weak<ExtensionData>| ExtensionData {
|
||||
systems: (eh.systems.iter().cloned())
|
||||
.map(|decl| SystemCtor { decl, ext: weak.clone() })
|
||||
.collect(),
|
||||
logger,
|
||||
port: port.clone(),
|
||||
reqnot: ReqNot::new(
|
||||
@@ -175,46 +174,43 @@ impl Extension {
|
||||
},
|
||||
api::ExtHostNotif::Log(api::Log(str)) => weak.upgrade().unwrap().logger.log(str),
|
||||
}),
|
||||
|req| match req.req() {
|
||||
api::ExtHostReq::Ping(ping) => req.handle(ping, &()),
|
||||
|hand, req| match req {
|
||||
api::ExtHostReq::Ping(ping) => hand.handle(&ping, &()),
|
||||
api::ExtHostReq::IntReq(intreq) => match intreq {
|
||||
api::IntReq::InternStr(s) => req.handle(s, &intern(&**s.0).marker()),
|
||||
api::IntReq::InternStrv(v) => req.handle(v, &intern(&*v.0).marker()),
|
||||
api::IntReq::ExternStr(si) => req.handle(si, &deintern(si.0).arc()),
|
||||
api::IntReq::InternStr(s) => hand.handle(&s, &intern(&**s.0).marker()),
|
||||
api::IntReq::InternStrv(v) => hand.handle(&v, &intern(&*v.0).marker()),
|
||||
api::IntReq::ExternStr(si) => hand.handle(&si, &deintern(si.0).arc()),
|
||||
api::IntReq::ExternStrv(vi) =>
|
||||
req.handle(vi, &Arc::new(deintern(vi.0).iter().map(|t| t.marker()).collect_vec())),
|
||||
hand.handle(&vi, &Arc::new(deintern(vi.0).iter().map(|t| t.marker()).collect_vec())),
|
||||
},
|
||||
api::ExtHostReq::Fwd(fw @ api::Fwd(atom, _body)) => {
|
||||
api::ExtHostReq::Fwd(ref fw @ api::Fwd(ref atom, ref key, ref body)) => {
|
||||
let sys = System::resolve(atom.owner).unwrap();
|
||||
req.handle(fw, &sys.reqnot().request(api::Fwded(fw.0.clone(), fw.1.clone())))
|
||||
hand.handle(fw, &sys.reqnot().request(api::Fwded(fw.0.clone(), *key, body.clone())))
|
||||
},
|
||||
api::ExtHostReq::SysFwd(ref fw @ api::SysFwd(id, ref body)) => {
|
||||
let sys = System::resolve(id).unwrap();
|
||||
hand.handle(fw, &sys.request(body.clone()))
|
||||
},
|
||||
api::ExtHostReq::SubLex(sl) => {
|
||||
let (rep_in, rep_out) = sync_channel(0);
|
||||
let lex_g = LEX_RECUR.lock().unwrap();
|
||||
let req_in = lex_g.get(&sl.id).expect("Sublex for nonexistent lexid");
|
||||
req_in.send(ReqPair(sl.clone(), rep_in)).unwrap();
|
||||
req.handle(sl, &rep_out.recv().unwrap())
|
||||
hand.handle(&sl, &rep_out.recv().unwrap())
|
||||
},
|
||||
api::ExtHostReq::ExprReq(api::ExprReq::Inspect(ins @ api::Inspect(tk))) => {
|
||||
let expr = RtExpr::resolve(*tk);
|
||||
req.handle(ins, &api::Details {
|
||||
refcount: 1,
|
||||
expr: api::Expr {
|
||||
location: api::Location::None,
|
||||
clause: api::Clause::Bottom(vec![
|
||||
mk_err(
|
||||
intern!(str: "Unsupported"),
|
||||
"Inspecting clauses is unsupported at the moment",
|
||||
[],
|
||||
)
|
||||
.to_api(),
|
||||
]),
|
||||
},
|
||||
api::ExtHostReq::ExprReq(api::ExprReq::Inspect(ins @ api::Inspect { target })) => {
|
||||
let expr = Expr::resolve(target).expect("Invalid ticket");
|
||||
hand.handle(&ins, &api::Inspected {
|
||||
refcount: expr.strong_count() as u32,
|
||||
location: expr.pos().to_api(),
|
||||
kind: expr.to_api(),
|
||||
})
|
||||
},
|
||||
api::ExtHostReq::RunMacros(ref rm @ api::RunMacros{ ref run_id, ref query }) => {
|
||||
hand.handle(rm, ¯o_recur(*run_id, mtreev_from_api(query)).map(|x| mtreev_to_api(&x)))
|
||||
}
|
||||
},
|
||||
),
|
||||
systems: eh.systems.into_iter().map(|decl| SystemCtor { decl, ext: weak.clone() }).collect(),
|
||||
});
|
||||
let weak = Arc::downgrade(&ret);
|
||||
thread::Builder::new()
|
||||
@@ -263,7 +259,11 @@ impl SystemCtor {
|
||||
id,
|
||||
}));
|
||||
let root = (sys_inst.const_root.into_iter())
|
||||
.map(|(k, v)| Member::from_api(api::Member { exported: true, name: k, kind: v }, &data))
|
||||
.map(|(k, v)| Member::from_api(
|
||||
api::Member { name: k, kind: v },
|
||||
Substack::Bottom.push(deintern(k)),
|
||||
&data
|
||||
))
|
||||
.collect_vec();
|
||||
data.0.const_root.set(root).unwrap();
|
||||
inst_g.insert(id, data.clone());
|
||||
@@ -281,7 +281,7 @@ pub struct ReqPair<R: Request>(R, pub SyncSender<R::Response>);
|
||||
|
||||
#[derive(destructure)]
|
||||
pub struct SystemInstData {
|
||||
exprs: RwLock<HashMap<api::ExprTicket, (AtomicU32, RtExpr)>>,
|
||||
exprs: RwLock<HashMap<api::ExprTicket, (AtomicU32, Expr)>>,
|
||||
ext: Extension,
|
||||
decl_id: api::SysDeclId,
|
||||
lex_filter: api::CharFilter,
|
||||
@@ -303,11 +303,7 @@ impl System {
|
||||
pub fn id(&self) -> api::SysId { self.id }
|
||||
fn resolve(id: api::SysId) -> Option<System> { SYSTEM_INSTS.read().unwrap().get(&id).cloned() }
|
||||
fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { &self.0.ext.0.reqnot }
|
||||
fn give_expr(
|
||||
&self,
|
||||
ticket: api::ExprTicket,
|
||||
get_expr: impl FnOnce() -> RtExpr,
|
||||
) -> api::ExprTicket {
|
||||
fn give_expr(&self, ticket: api::ExprTicket, get_expr: impl FnOnce() -> Expr) -> api::ExprTicket {
|
||||
match self.0.exprs.write().unwrap().entry(ticket) {
|
||||
Entry::Occupied(mut oe) => {
|
||||
oe.get_mut().0.fetch_add(1, Ordering::Relaxed);
|
||||
@@ -356,12 +352,22 @@ impl System {
|
||||
pub fn line_types(&self) -> impl Iterator<Item = Tok<String>> + '_ {
|
||||
self.line_types.iter().cloned()
|
||||
}
|
||||
pub fn parse(&self, line: Vec<ParsTokTree>) -> OrcRes<Vec<ParsTokTree>> {
|
||||
pub fn parse(
|
||||
&self,
|
||||
line: Vec<ParsTokTree>,
|
||||
exported: bool,
|
||||
comments: Vec<Comment>,
|
||||
) -> OrcRes<Vec<ParsTokTree>> {
|
||||
let line = line.iter().map(|t| t.to_api(&mut |n, _| match *n {})).collect_vec();
|
||||
let parsed = (self.reqnot().request(api::ParseLine { sys: self.id(), line }))
|
||||
.map_err(|e| errv_from_apiv(e.iter()))?;
|
||||
let comments = comments.iter().map(Comment::to_api).collect_vec();
|
||||
let parsed =
|
||||
(self.reqnot().request(api::ParseLine { exported, sys: self.id(), comments, line }))
|
||||
.map_err(|e| OrcErrv::from_api(&e))?;
|
||||
Ok(ttv_from_api(parsed, &mut ()))
|
||||
}
|
||||
pub fn request(&self, req: Vec<u8>) -> Vec<u8> {
|
||||
self.reqnot().request(api::SysFwded(self.id(), req))
|
||||
}
|
||||
}
|
||||
impl fmt::Debug for System {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
|
||||
@@ -2,12 +2,14 @@ use std::num::NonZeroU64;
|
||||
use std::sync::Arc;
|
||||
|
||||
use hashbrown::HashMap;
|
||||
use orchid_base::error::{mk_err, OrcErr, OrcRes};
|
||||
use orchid_base::error::{mk_errv, OrcErrv, OrcRes};
|
||||
use orchid_base::intern;
|
||||
use orchid_base::interner::{deintern, intern, Tok};
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::number::{num_to_err, parse_num};
|
||||
use orchid_base::parse::{name_char, name_start, op_char, unrep_space};
|
||||
use orchid_base::tokens::PARENS;
|
||||
use orchid_base::tree::Ph;
|
||||
|
||||
use crate::api;
|
||||
use crate::extension::{AtomHand, System};
|
||||
@@ -81,11 +83,9 @@ pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
|
||||
ParsTok::NS
|
||||
} else if ctx.strip_prefix("--[") {
|
||||
let (cmt, tail) = ctx.tail.split_once("]--").ok_or_else(|| {
|
||||
vec![mk_err(
|
||||
intern!(str: "Unterminated block comment"),
|
||||
"This block comment has no ending ]--",
|
||||
[Pos::Range(start..start + 3).into()],
|
||||
)]
|
||||
mk_errv(intern!(str: "Unterminated block comment"), "This block comment has no ending ]--", [
|
||||
Pos::Range(start..start + 3).into(),
|
||||
])
|
||||
})?;
|
||||
ctx.set_tail(tail);
|
||||
ParsTok::Comment(Arc::new(cmt.to_string()))
|
||||
@@ -98,11 +98,11 @@ pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
|
||||
ctx.trim_ws();
|
||||
while !ctx.strip_char('.') {
|
||||
if ctx.tail.is_empty() {
|
||||
return Err(vec![mk_err(
|
||||
return Err(mk_errv(
|
||||
intern!(str: "Unclosed lambda"),
|
||||
"Lambdae started with \\ should separate arguments from body with .",
|
||||
[Pos::Range(start..start + 1).into()],
|
||||
)]);
|
||||
));
|
||||
}
|
||||
arg.push(lex_once(ctx)?);
|
||||
ctx.trim_ws();
|
||||
@@ -113,33 +113,46 @@ pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
|
||||
ctx.trim_ws();
|
||||
while !ctx.strip_char(*rp) {
|
||||
if ctx.tail.is_empty() {
|
||||
return Err(vec![mk_err(
|
||||
return Err(mk_errv(
|
||||
intern!(str: "unclosed paren"),
|
||||
format!("this {lp} has no matching {rp}"),
|
||||
[Pos::Range(start..start + 1).into()],
|
||||
)]);
|
||||
));
|
||||
}
|
||||
body.push(lex_once(ctx)?);
|
||||
ctx.trim_ws();
|
||||
}
|
||||
ParsTok::S(paren.clone(), body)
|
||||
} else if ctx.strip_prefix("macro") &&
|
||||
!ctx.tail.chars().next().is_some_and(|x| x.is_ascii_alphabetic())
|
||||
{
|
||||
ctx.strip_prefix("macro");
|
||||
if ctx.strip_char('(') {
|
||||
let pos = ctx.get_pos();
|
||||
let numstr = ctx.get_start_matches(|x| x != ')').trim();
|
||||
let num = parse_num(numstr).map_err(|e| num_to_err(e, pos))?;
|
||||
ParsTok::Macro(Some(num.to_f64()))
|
||||
} else {
|
||||
ParsTok::Macro(None)
|
||||
}
|
||||
} else {
|
||||
for sys in ctx.systems {
|
||||
let mut errors = Vec::new();
|
||||
if ctx.tail.starts_with(|c| sys.can_lex(c)) {
|
||||
let lexed = sys.lex(ctx.source.clone(), ctx.get_pos(), |pos| {
|
||||
let mut sub_ctx = ctx.push(pos);
|
||||
let ott =
|
||||
lex_once(&mut sub_ctx).inspect_err(|e| errors.extend(e.iter().cloned())).ok()?;
|
||||
Some(api::SubLexed { pos: sub_ctx.get_pos(), ticket: sub_ctx.add_subtree(ott) })
|
||||
});
|
||||
match lexed {
|
||||
Ok(None) if errors.is_empty() => continue,
|
||||
Ok(None) => return Err(errors),
|
||||
Err(e) => return Err(e.into_iter().map(|e| OrcErr::from_api(&e)).collect()),
|
||||
Ok(Some(lexed)) => {
|
||||
ctx.set_pos(lexed.pos);
|
||||
return Ok(tt_to_owned(&lexed.expr, ctx));
|
||||
let lx =
|
||||
sys.lex(ctx.source.clone(), ctx.get_pos(), |pos| match lex_once(&mut ctx.push(pos)) {
|
||||
Ok(t) => Some(api::SubLexed { pos, ticket: ctx.add_subtree(t) }),
|
||||
Err(e) => {
|
||||
errors.push(e);
|
||||
None
|
||||
},
|
||||
});
|
||||
match lx {
|
||||
Err(e) => return Err(errors.into_iter().fold(OrcErrv::from_api(&e), |a, b| a + b)),
|
||||
Ok(Some(lexed)) => return Ok(tt_to_owned(&lexed.expr, &mut ctx.push(lexed.pos))),
|
||||
Ok(None) => match errors.into_iter().reduce(|a, b| a + b) {
|
||||
Some(errors) => return Err(errors),
|
||||
None => continue,
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -149,11 +162,11 @@ pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
|
||||
} else if ctx.tail.starts_with(op_char) {
|
||||
ParsTok::Name(intern(ctx.get_start_matches(op_char)))
|
||||
} else {
|
||||
return Err(vec![mk_err(
|
||||
return Err(mk_errv(
|
||||
intern!(str: "Unrecognized character"),
|
||||
"The following syntax is meaningless.",
|
||||
[Pos::Range(start..start + 1).into()],
|
||||
)]);
|
||||
));
|
||||
}
|
||||
};
|
||||
Ok(ParsTokTree { tok, range: start..ctx.get_pos() })
|
||||
@@ -162,19 +175,28 @@ pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
|
||||
fn tt_to_owned(api: &api::TokenTree, ctx: &mut LexCtx<'_>) -> ParsTokTree {
|
||||
let tok = match &api.token {
|
||||
api::Token::Atom(atom) => ParsTok::Atom(AtomHand::from_api(atom.clone())),
|
||||
api::Token::Bottom(err) => ParsTok::Bottom(err.iter().map(OrcErr::from_api).collect()),
|
||||
api::Token::Lambda(arg) =>
|
||||
ParsTok::LambdaHead(arg.iter().map(|t| tt_to_owned(t, ctx)).collect()),
|
||||
api::Token::Bottom(err) => ParsTok::Bottom(OrcErrv::from_api(err)),
|
||||
api::Token::LambdaHead(arg) => ParsTok::LambdaHead(ttv_to_owned(arg, ctx)),
|
||||
api::Token::Lambda(arg, b) => ParsTok::Lambda(ttv_to_owned(arg, ctx), ttv_to_owned(b, ctx)),
|
||||
api::Token::Name(name) => ParsTok::Name(deintern(*name)),
|
||||
api::Token::S(p, b) => ParsTok::S(p.clone(), b.iter().map(|t| tt_to_owned(t, ctx)).collect()),
|
||||
api::Token::Slot(id) => return ctx.rm_subtree(*id),
|
||||
api::Token::BR => ParsTok::BR,
|
||||
api::Token::NS => ParsTok::NS,
|
||||
api::Token::Comment(c) => ParsTok::Comment(c.clone()),
|
||||
api::Token::Ph(ph) => ParsTok::Ph(Ph::from_api(ph)),
|
||||
api::Token::Macro(prio) => ParsTok::Macro(*prio)
|
||||
};
|
||||
ParsTokTree { range: api.range.clone(), tok }
|
||||
}
|
||||
|
||||
fn ttv_to_owned<'a>(
|
||||
api: impl IntoIterator<Item = &'a api::TokenTree>,
|
||||
ctx: &mut LexCtx<'_>
|
||||
) -> Vec<ParsTokTree> {
|
||||
api.into_iter().map(|t| tt_to_owned(t, ctx)).collect()
|
||||
}
|
||||
|
||||
pub fn lex(text: Tok<String>, systems: &[System]) -> OrcRes<Vec<ParsTokTree>> {
|
||||
let mut sub_trees = HashMap::new();
|
||||
let mut ctx = LexCtx { source: &text, sub_trees: &mut sub_trees, tail: &text[..], systems };
|
||||
|
||||
@@ -7,3 +7,4 @@ pub mod lex;
pub mod parse;
pub mod subprocess;
pub mod tree;
pub mod macros;

orchid-host/src/macros.rs (new file, 20 lines added)
@@ -0,0 +1,20 @@
use std::sync::RwLock;

use hashbrown::HashMap;
use lazy_static::lazy_static;
use orchid_base::macros::MTree;
use trait_set::trait_set;
use crate::api::ParsId;

trait_set!{
  trait MacroCB = Fn(Vec<MTree>) -> Option<Vec<MTree>> + Send + Sync;
}

lazy_static!{
  static ref RECURSION: RwLock<HashMap<ParsId, Box<dyn MacroCB>>> = RwLock::default();
}

pub fn macro_recur(run_id: ParsId, input: Vec<MTree>) -> Option<Vec<MTree>> {
  (RECURSION.read().unwrap()[&run_id])(input)
}

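Note that macro_recur indexes RECURSION directly, so it panics if nothing has registered a callback for run_id. Registration is not part of this commit; a plausible shape for it, shown only as a hypothetical sketch that would live next to macro_recur, is:

pub fn with_recursion<T>(
  run_id: ParsId,
  cb: impl Fn(Vec<MTree>) -> Option<Vec<MTree>> + Send + Sync + 'static,
  body: impl FnOnce() -> T,
) -> T {
  // register the callback for the duration of one macro run
  RECURSION.write().unwrap().insert(run_id, Box::new(cb));
  let ret = body();
  // drop it again so stale ParsIds cannot be recursed into
  RECURSION.write().unwrap().remove(&run_id);
  ret
}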
@@ -2,18 +2,21 @@ use std::{iter, thread};
|
||||
|
||||
use itertools::Itertools;
|
||||
use never::Never;
|
||||
use orchid_base::error::{mk_err, OrcErr, OrcRes, Reporter};
|
||||
use orchid_base::error::{mk_err, mk_errv, OrcErrv, OrcRes, Reporter};
|
||||
use orchid_base::intern;
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::macros::{MTok, MTree};
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::parse::{
|
||||
expect_end, line_items, parse_multiname, strip_fluff, try_pop_no_fluff, Comment, CompName,
|
||||
Snippet,
|
||||
expect_end, line_items, parse_multiname, strip_fluff, try_pop_no_fluff, Comment, Import,
|
||||
Parsed, Snippet,
|
||||
};
|
||||
use orchid_base::tree::{Paren, TokTree, Token};
|
||||
use substack::Substack;
|
||||
|
||||
use crate::extension::{AtomHand, System};
|
||||
use crate::tree::{Item, ItemKind, Member, MemberKind, Module, ParsTokTree};
|
||||
use crate::tree::{Code, CodeLocator, Item, ItemKind, Member, MemberKind, Module, ParsTokTree, Rule, RuleKind};
|
||||
|
||||
type ParsSnippet<'a> = Snippet<'a, 'static, AtomHand, Never>;
|
||||
|
||||
@@ -22,15 +25,20 @@ pub trait ParseCtx: Send + Sync {
|
||||
fn reporter(&self) -> &impl Reporter;
|
||||
}
|
||||
|
||||
pub fn parse_items(ctx: &impl ParseCtx, items: ParsSnippet) -> OrcRes<Vec<Item>> {
|
||||
pub fn parse_items(
|
||||
ctx: &impl ParseCtx,
|
||||
path: Substack<Tok<String>>,
|
||||
items: ParsSnippet
|
||||
) -> OrcRes<Vec<Item>> {
|
||||
let lines = line_items(items);
|
||||
let mut ok = iter::from_fn(|| None).take(lines.len()).collect_vec();
|
||||
thread::scope(|s| {
|
||||
let mut threads = Vec::new();
|
||||
for (slot, (cmts, item)) in ok.iter_mut().zip(lines.into_iter()) {
|
||||
for (slot, Parsed { output: cmts, tail }) in ok.iter_mut().zip(lines.into_iter()) {
|
||||
let path = &path;
|
||||
threads.push(s.spawn(move || {
|
||||
*slot = Some(parse_item(ctx, cmts, item)?);
|
||||
Ok::<(), Vec<OrcErr>>(())
|
||||
*slot = Some(parse_item(ctx, path.clone(), cmts, tail)?);
|
||||
Ok::<(), OrcErrv>(())
|
||||
}))
|
||||
}
|
||||
for t in threads {
|
||||
@@ -42,136 +50,239 @@ pub fn parse_items(ctx: &impl ParseCtx, items: ParsSnippet) -> OrcRes<Vec<Item>>
|
||||
|
||||
pub fn parse_item(
|
||||
ctx: &impl ParseCtx,
|
||||
path: Substack<Tok<String>>,
|
||||
comments: Vec<Comment>,
|
||||
item: ParsSnippet,
|
||||
) -> OrcRes<Vec<Item>> {
|
||||
match item.pop_front() {
|
||||
Some((TokTree { tok: Token::Name(n), .. }, postdisc)) => match n {
|
||||
n if *n == intern!(str: "export") => match try_pop_no_fluff(postdisc)? {
|
||||
(TokTree { tok: Token::Name(n), .. }, postdisc) =>
|
||||
parse_item_2(ctx, comments, true, n.clone(), postdisc),
|
||||
(TokTree { tok: Token::NS, .. }, postdisc) => {
|
||||
let (exports, surplus) = parse_multiname(ctx.reporter(), postdisc)?;
|
||||
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } =>
|
||||
parse_exportable_item(ctx, path, comments, true, n.clone(), tail),
|
||||
Parsed { output: TokTree { tok: Token::NS, .. }, tail } => {
|
||||
let Parsed { output: exports, tail } = parse_multiname(ctx.reporter(), tail)?;
|
||||
let mut ok = Vec::new();
|
||||
exports.into_iter().for_each(|e| match (&e.path.as_slice(), e.name) {
|
||||
([], Some(n)) => ok.push(Item {
|
||||
comments: comments.clone(),
|
||||
pos: e.pos.clone(),
|
||||
kind: ItemKind::Export(n),
|
||||
}),
|
||||
exports.into_iter().for_each(|(e, pos)| match (&e.path.as_slice(), e.name) {
|
||||
([], Some(n)) =>
|
||||
ok.push(Item { comments: comments.clone(), pos, kind: ItemKind::Export(n) }),
|
||||
(_, Some(_)) => ctx.reporter().report(mk_err(
|
||||
intern!(str: "Compound export"),
|
||||
"Cannot export compound names (names containing the :: separator)",
|
||||
[e.pos.into()],
|
||||
[pos.into()],
|
||||
)),
|
||||
(_, None) => ctx.reporter().report(mk_err(
|
||||
intern!(str: "Wildcard export"),
|
||||
"Exports cannot contain the globstar *",
|
||||
[e.pos.into()],
|
||||
[pos.into()],
|
||||
)),
|
||||
});
|
||||
expect_end(surplus)?;
|
||||
expect_end(tail)?;
|
||||
Ok(ok)
|
||||
},
|
||||
(bogus, _) => Err(vec![mk_err(
|
||||
Parsed { output, .. } => Err(mk_errv(
|
||||
intern!(str: "Malformed export"),
|
||||
"`export` can either prefix other lines or list names inside ::( ) or ::[ ]",
|
||||
[Pos::Range(bogus.range.clone()).into()],
|
||||
)]),
|
||||
[Pos::Range(output.range.clone()).into()],
|
||||
)),
|
||||
},
|
||||
n if *n == intern!(str: "import") => parse_import(ctx, postdisc).map(|v| {
|
||||
Vec::from_iter(v.into_iter().map(|t| Item {
|
||||
Vec::from_iter(v.into_iter().map(|(t, pos)| Item {
|
||||
comments: comments.clone(),
|
||||
pos: Pos::Range(postdisc.pos()),
|
||||
pos,
|
||||
kind: ItemKind::Import(t),
|
||||
}))
|
||||
}),
|
||||
n => parse_item_2(ctx, comments, false, n.clone(), postdisc),
|
||||
n => parse_exportable_item(ctx, path, comments, false, n.clone(), postdisc),
|
||||
},
|
||||
Some(_) => Err(vec![mk_err(
|
||||
intern!(str: "Expected a line type"),
|
||||
"All lines must begin with a keyword",
|
||||
[Pos::Range(item.pos()).into()],
|
||||
)]),
|
||||
Some(_) =>
|
||||
Err(mk_errv(intern!(str: "Expected a line type"), "All lines must begin with a keyword", [
|
||||
Pos::Range(item.pos()).into(),
|
||||
])),
|
||||
None => unreachable!("These lines are filtered and aggregated in earlier stages"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_import(ctx: &impl ParseCtx, tail: ParsSnippet) -> OrcRes<Vec<CompName>> {
|
||||
let (imports, surplus) = parse_multiname(ctx.reporter(), tail)?;
|
||||
expect_end(surplus)?;
|
||||
pub fn parse_import(ctx: &impl ParseCtx, tail: ParsSnippet) -> OrcRes<Vec<(Import, Pos)>> {
|
||||
let Parsed { output: imports, tail } = parse_multiname(ctx.reporter(), tail)?;
|
||||
expect_end(tail)?;
|
||||
Ok(imports)
|
||||
}
|
||||
|
||||
pub fn parse_item_2(
|
||||
pub fn parse_exportable_item(
|
||||
ctx: &impl ParseCtx,
|
||||
path: Substack<Tok<String>>,
|
||||
comments: Vec<Comment>,
|
||||
exported: bool,
|
||||
discr: Tok<String>,
|
||||
tail: ParsSnippet,
|
||||
) -> OrcRes<Vec<Item>> {
|
||||
let kind = if discr == intern!(str: "mod") {
|
||||
let (name, body) = parse_module(ctx, tail)?;
|
||||
ItemKind::Member(Member::new(exported, name, MemberKind::Mod(body)))
|
||||
let (name, body) = parse_module(ctx, path, tail)?;
|
||||
ItemKind::Member(Member::new(name, MemberKind::Mod(body)))
|
||||
} else if discr == intern!(str: "const") {
|
||||
let (name, val) = parse_const(tail)?;
|
||||
ItemKind::Member(Member::new(exported, name, MemberKind::Const(val)))
|
||||
let locator = CodeLocator::to_const(path.push(name.clone()).unreverse());
|
||||
ItemKind::Member(Member::new(name, MemberKind::Const(Code::from_code(locator, val))))
|
||||
} else if let Some(sys) = ctx.systems().find(|s| s.can_parse(discr.clone())) {
|
||||
let line = sys.parse(tail.to_vec())?;
|
||||
return parse_items(ctx, Snippet::new(tail.prev(), &line));
|
||||
let line = sys.parse(tail.to_vec(), exported, comments)?;
|
||||
return parse_items(ctx, path, Snippet::new(tail.prev(), &line));
|
||||
} else {
|
||||
let ext_lines = ctx.systems().flat_map(System::line_types).join(", ");
|
||||
return Err(vec![mk_err(
|
||||
return Err(mk_errv(
|
||||
intern!(str: "Unrecognized line type"),
|
||||
format!("Line types are: const, mod, macro, grammar, {ext_lines}"),
|
||||
[Pos::Range(tail.prev().range.clone()).into()],
|
||||
)]);
|
||||
));
|
||||
};
|
||||
Ok(vec![Item { comments, pos: Pos::Range(tail.pos()), kind }])
|
||||
}
|
||||
|
||||
pub fn parse_module(ctx: &impl ParseCtx, tail: ParsSnippet) -> OrcRes<(Tok<String>, Module)> {
|
||||
pub fn parse_module(
|
||||
ctx: &impl ParseCtx,
|
||||
path: Substack<Tok<String>>,
|
||||
tail: ParsSnippet
|
||||
) -> OrcRes<(Tok<String>, Module)> {
|
||||
let (name, tail) = match try_pop_no_fluff(tail)? {
|
||||
(TokTree { tok: Token::Name(n), .. }, tail) => (n.clone(), tail),
|
||||
(tt, _) =>
|
||||
return Err(vec![mk_err(
|
||||
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } => (n.clone(), tail),
|
||||
Parsed { output, .. } =>
|
||||
return Err(mk_errv(
|
||||
intern!(str: "Missing module name"),
|
||||
format!("A name was expected, {tt} was found"),
|
||||
[Pos::Range(tt.range.clone()).into()],
|
||||
)]),
|
||||
format!("A name was expected, {output} was found"),
|
||||
[Pos::Range(output.range.clone()).into()],
|
||||
)),
|
||||
};
|
||||
let (body, surplus) = match try_pop_no_fluff(tail)? {
|
||||
(TokTree { tok: Token::S(Paren::Round, b), .. }, tail) => (b, tail),
|
||||
(tt, _) =>
|
||||
return Err(vec![mk_err(
|
||||
intern!(str: "Expected module body"),
|
||||
format!("A ( block ) was expected, {tt} was found"),
|
||||
[Pos::Range(tt.range.clone()).into()],
|
||||
)]),
|
||||
};
|
||||
let items = parse_items(ctx, ParsSnippet::new(surplus.prev(), body))?;
|
||||
Ok((name, Module { imports: vec![], items }))
|
||||
let Parsed { output, tail: surplus } = try_pop_no_fluff(tail)?;
|
||||
expect_end(surplus)?;
|
||||
let body = output.as_s(Paren::Round).ok_or_else(|| mk_errv(
|
||||
intern!(str: "Expected module body"),
|
||||
format!("A ( block ) was expected, {output} was found"),
|
||||
[Pos::Range(output.range.clone()).into()],
|
||||
))?;
|
||||
let path = path.push(name.clone());
|
||||
Ok((name, Module::new(parse_items(ctx, path, body)?)))
|
||||
}
|
||||
|
||||
pub fn parse_const(tail: ParsSnippet) -> OrcRes<(Tok<String>, Vec<ParsTokTree>)> {
|
||||
let (name, tail) = match try_pop_no_fluff(tail)? {
|
||||
(TokTree { tok: Token::Name(n), .. }, tail) => (n.clone(), tail),
|
||||
(tt, _) =>
|
||||
return Err(vec![mk_err(
|
||||
intern!(str: "Missing module name"),
|
||||
format!("A name was expected, {tt} was found"),
|
||||
[Pos::Range(tt.range.clone()).into()],
|
||||
)]),
|
||||
};
|
||||
let tail = match try_pop_no_fluff(tail)? {
|
||||
(TokTree { tok: Token::Name(n), .. }, tail) if *n == intern!(str: ":=") => tail,
|
||||
(tt, _) =>
|
||||
return Err(vec![mk_err(
|
||||
intern!(str: "Missing walrus := separator"),
|
||||
format!("Expected operator := , found {tt}"),
|
||||
[Pos::Range(tt.range.clone()).into()],
|
||||
)]),
|
||||
};
|
||||
let Parsed { output, tail } = try_pop_no_fluff(tail)?;
|
||||
let name = output.as_name().ok_or_else(|| mk_errv(
|
||||
intern!(str: "Missing module name"),
|
||||
format!("A name was expected, {output} was found"),
|
||||
[Pos::Range(output.range.clone()).into()],
|
||||
))?;
|
||||
let Parsed { output, tail } = try_pop_no_fluff(tail)?;
|
||||
if !output.is_kw(intern!(str: "=")) {
|
||||
return Err(mk_errv(
|
||||
intern!(str: "Missing walrus := separator"),
|
||||
format!("Expected operator := , found {output}"),
|
||||
[Pos::Range(output.range.clone()).into()],
|
||||
))
|
||||
}
|
||||
try_pop_no_fluff(tail)?;
|
||||
Ok((name, tail.iter().flat_map(strip_fluff).collect_vec()))
|
||||
}
|
||||
|
||||
pub fn parse_mtree<'a>(
|
||||
mut snip: ParsSnippet<'a>
|
||||
) -> OrcRes<Vec<MTree<'static>>> {
|
||||
let mut mtreev = Vec::new();
|
||||
while let Some((ttree, tail)) = snip.pop_front() {
|
||||
let (range, tok, tail) = match &ttree.tok {
|
||||
Token::S(p, b) => (
|
||||
ttree.range.clone(),
|
||||
MTok::S(*p, parse_mtree(Snippet::new(ttree, b))?),
|
||||
tail,
|
||||
),
|
||||
Token::Name(tok) => {
|
||||
let mut segments = vec![tok.clone()];
|
||||
let mut end = ttree.range.end;
|
||||
while let Some((TokTree { tok: Token::NS, .. }, tail)) = snip.pop_front() {
|
||||
let Parsed { output, tail } = try_pop_no_fluff(tail)?;
|
||||
segments.push(output.as_name().ok_or_else(|| mk_errv(
|
||||
intern!(str: "Namespaced name interrupted"),
|
||||
"In expression context, :: must always be followed by a name.\n\
|
||||
::() is permitted only in import and export items",
|
||||
[Pos::Range(output.range.clone()).into()]
|
||||
))?);
|
||||
snip = tail;
|
||||
end = output.range.end;
|
||||
}
|
||||
(ttree.range.start..end, MTok::Name(Sym::new(segments).unwrap()), snip)
|
||||
},
|
||||
Token::NS => return Err(mk_errv(
|
||||
intern!(str: "Unexpected :: in macro pattern"),
|
||||
":: can only follow a name outside export statements",
|
||||
[Pos::Range(ttree.range.clone()).into()]
|
||||
)),
|
||||
Token::Ph(ph) => (ttree.range.clone(), MTok::Ph(ph.clone()), tail),
|
||||
Token::Atom(_) | Token::Macro(_) => return Err(mk_errv(
|
||||
intern!(str: "Unsupported token in macro patterns"),
|
||||
format!("Macro patterns can only contain names, braces, and lambda, not {ttree}."),
|
||||
[Pos::Range(ttree.range.clone()).into()]
|
||||
)),
|
||||
Token::BR | Token::Comment(_) => continue,
|
||||
Token::Bottom(e) => return Err(e.clone()),
|
||||
Token::Lambda(arg, body) => {
|
||||
let tok = MTok::Lambda(
|
||||
parse_mtree(Snippet::new(&ttree, &arg))?,
|
||||
parse_mtree(Snippet::new(&ttree, &body))?,
|
||||
);
|
||||
(ttree.range.clone(), tok, tail)
|
||||
},
|
||||
Token::LambdaHead(arg) => (
|
||||
ttree.range.start..snip.pos().end,
|
||||
MTok::Lambda(parse_mtree(Snippet::new(&ttree, &arg))?, parse_mtree(tail)?),
|
||||
Snippet::new(ttree, &[]),
|
||||
),
|
||||
Token::Slot(_) | Token::X(_) => panic!("Did not expect {} in parsed token tree", &ttree.tok),
|
||||
};
|
||||
mtreev.push(MTree { pos: Pos::Range(range.clone()), tok });
|
||||
snip = tail;
|
||||
}
|
||||
Ok(mtreev)
|
||||
}
|
||||
|
||||
pub fn parse_macro(tail: ParsSnippet, macro_i: u16, path: Substack<Tok<String>>) -> OrcRes<Vec<Rule>> {
|
||||
let (surplus, prev, block) = match try_pop_no_fluff(tail)? {
|
||||
Parsed { tail, output: o@TokTree { tok: Token::S(Paren::Round, b), .. } } => (tail, o, b),
|
||||
Parsed { output, .. } => return Err(mk_errv(
|
||||
intern!(str: "m"),
|
||||
format!("Macro blocks must either start with a block or a ..$:number"),
|
||||
[Pos::Range(output.range.clone()).into()]
|
||||
)),
|
||||
};
|
||||
expect_end(surplus)?;
|
||||
let mut errors = Vec::new();
|
||||
let mut rules = Vec::new();
|
||||
for (i, item) in line_items(Snippet::new(prev, &block)).into_iter().enumerate() {
|
||||
let Parsed { tail, output } = try_pop_no_fluff(item.tail)?;
|
||||
if !output.is_kw(intern!(str: "rule")) {
|
||||
errors.extend(mk_errv(
|
||||
intern!(str: "non-rule in macro"),
|
||||
format!("Expected `rule`, got {output}"),
|
||||
[Pos::Range(output.range.clone()).into()]
|
||||
));
|
||||
continue
|
||||
};
|
||||
let (pat, body) = match tail.split_once(|t| t.is_kw(intern!(str: "=>"))) {
|
||||
Some((a, b)) => (a, b),
|
||||
None => {
|
||||
errors.extend(mk_errv(
|
||||
intern!(str: "no => in macro rule"),
|
||||
"The pattern and body of a rule must be separated by a =>",
|
||||
[Pos::Range(tail.pos()).into()],
|
||||
));
|
||||
continue
|
||||
}
|
||||
};
|
||||
rules.push(Rule {
|
||||
comments: item.output,
|
||||
pos: Pos::Range(tail.pos()),
|
||||
pattern: parse_mtree(pat)?,
|
||||
kind: RuleKind::Native(Code::from_code(
|
||||
CodeLocator::to_rule(path.unreverse(), macro_i, i as u16),
|
||||
body.to_vec(),
|
||||
))
|
||||
})
|
||||
}
|
||||
if let Ok(e) = OrcErrv::new(errors) { Err(e) } else { Ok(rules) }
|
||||
}
|
||||
|
||||
@@ -1,17 +1,21 @@
|
||||
use std::io::{self, BufRead as _};
|
||||
use std::io::{self, BufRead as _, Write};
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Mutex;
|
||||
use std::{process, thread};
|
||||
|
||||
use orchid_api::ExtensionHeader;
|
||||
use orchid_api_traits::{Decode, Encode};
|
||||
use orchid_base::logging::Logger;
|
||||
use orchid_base::msg::{recv_msg, send_msg};
|
||||
|
||||
use crate::api;
|
||||
use crate::extension::ExtensionPort;
|
||||
|
||||
pub struct Subprocess {
|
||||
child: Mutex<process::Child>,
|
||||
stdin: Mutex<process::ChildStdin>,
|
||||
stdout: Mutex<process::ChildStdout>,
|
||||
header: ExtensionHeader,
|
||||
}
|
||||
impl Subprocess {
|
||||
pub fn new(mut cmd: process::Command, logger: Logger) -> io::Result<Self> {
|
||||
@@ -22,8 +26,11 @@ impl Subprocess {
|
||||
.stdout(process::Stdio::piped())
|
||||
.stderr(process::Stdio::piped())
|
||||
.spawn()?;
|
||||
let stdin = child.stdin.take().unwrap();
|
||||
let stdout = child.stdout.take().unwrap();
|
||||
let mut stdin = child.stdin.take().unwrap();
|
||||
api::HostHeader { log_strategy: logger.strat() }.encode(&mut stdin);
|
||||
stdin.flush()?;
|
||||
let mut stdout = child.stdout.take().unwrap();
|
||||
let header = ExtensionHeader::decode(&mut stdout);
|
||||
let child_stderr = child.stderr.take().unwrap();
|
||||
thread::Builder::new().name(format!("stderr-fwd:{prog}")).spawn(move || {
|
||||
let mut reader = io::BufReader::new(child_stderr);
|
||||
@@ -35,14 +42,25 @@ impl Subprocess {
|
||||
logger.log(buf);
|
||||
}
|
||||
})?;
|
||||
Ok(Self { child: Mutex::new(child), stdin: Mutex::new(stdin), stdout: Mutex::new(stdout) })
|
||||
Ok(Self {
|
||||
child: Mutex::new(child),
|
||||
stdin: Mutex::new(stdin),
|
||||
stdout: Mutex::new(stdout),
|
||||
header,
|
||||
})
|
||||
}
|
||||
}
|
||||
impl Drop for Subprocess {
|
||||
fn drop(&mut self) { self.child.lock().unwrap().wait().expect("Extension exited with error"); }
|
||||
}
|
||||
impl ExtensionPort for Subprocess {
|
||||
fn send(&self, msg: &[u8]) { send_msg(&mut *self.stdin.lock().unwrap(), msg).unwrap() }
|
||||
fn header(&self) -> &orchid_api::ExtensionHeader { &self.header }
|
||||
fn send(&self, msg: &[u8]) {
|
||||
if msg.starts_with(&[0, 0, 0, 0x1c]) {
|
||||
panic!("Received unnecessary prefix");
|
||||
}
|
||||
send_msg(&mut *self.stdin.lock().unwrap(), msg).unwrap()
|
||||
}
|
||||
fn receive(&self) -> Option<Vec<u8>> {
|
||||
match recv_msg(&mut *self.stdout.lock().unwrap()) {
|
||||
Ok(msg) => Some(msg),
|
||||
|
||||
@@ -1,16 +1,20 @@
|
||||
use std::fmt::Debug;
|
||||
use std::sync::{Mutex, OnceLock};
|
||||
|
||||
use itertools::Itertools;
|
||||
use never::Never;
|
||||
use orchid_base::error::OrcRes;
|
||||
use orchid_base::interner::{deintern, Tok};
|
||||
use orchid_base::interner::{deintern, intern, Tok};
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::macros::{mtreev_from_api, MTree};
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::parse::{Comment, CompName};
|
||||
use orchid_base::tree::{ttv_from_api, TokTree, Token};
|
||||
use orchid_base::parse::{Comment, Import};
|
||||
use orchid_base::tree::{TokTree, Token};
|
||||
use ordered_float::NotNan;
|
||||
use substack::{with_iter_stack, Substack};
|
||||
|
||||
use crate::api;
|
||||
use crate::expr::RtExpr;
|
||||
use crate::expr::Expr;
|
||||
use crate::extension::{AtomHand, System};
|
||||
|
||||
pub type ParsTokTree = TokTree<'static, AtomHand, Never>;
|
||||
@@ -25,81 +29,165 @@ pub struct Item {
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum ItemKind {
|
||||
Raw(Vec<ParsTokTree>),
|
||||
Member(Member),
|
||||
Export(Tok<String>),
|
||||
Import(CompName),
|
||||
Import(Import),
|
||||
Macro(Option<NotNan<f64>>, Vec<Rule>)
|
||||
}
|
||||
|
||||
impl Item {
|
||||
pub fn from_api(tree: api::Item, sys: &System) -> Self {
|
||||
pub fn from_api<'a>(
|
||||
tree: api::Item,
|
||||
path: Substack<Tok<String>>,
|
||||
sys: &System
|
||||
) -> Self {
|
||||
let kind = match tree.kind {
|
||||
api::ItemKind::Raw(tokv) => ItemKind::Raw(ttv_from_api(tokv, &mut ())),
|
||||
api::ItemKind::Member(m) => ItemKind::Member(Member::from_api(m, sys)),
|
||||
api::ItemKind::Import(i) => ItemKind::Import(CompName::from_api(i)),
|
||||
api::ItemKind::Member(m) => ItemKind::Member(Member::from_api(m, path, sys)),
|
||||
api::ItemKind::Import(i) =>
|
||||
ItemKind::Import(Import{ path: Sym::deintern(i).iter().collect(), name: None }),
|
||||
api::ItemKind::Export(e) => ItemKind::Export(deintern(e)),
|
||||
api::ItemKind::Macro(api::MacroBlock { priority, rules }) => ItemKind::Macro(priority, {
|
||||
Vec::from_iter(rules.into_iter().map(|api| Rule {
|
||||
pos: Pos::from_api(&api.location),
|
||||
pattern: mtreev_from_api(&api.pattern),
|
||||
kind: RuleKind::Remote(sys.clone(), api.id),
|
||||
comments: api.comments.iter().map(Comment::from_api).collect_vec()
|
||||
}))
|
||||
})
|
||||
};
|
||||
let comments = tree
|
||||
.comments
|
||||
.into_iter()
|
||||
.map(|(text, l)| Comment { text, pos: Pos::from_api(&l) })
|
||||
.collect_vec();
|
||||
let comments = tree.comments.iter().map(Comment::from_api).collect_vec();
|
||||
Self { pos: Pos::from_api(&tree.location), comments, kind }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Member {
|
||||
pub exported: bool,
|
||||
pub name: Tok<String>,
|
||||
pub kind: OnceLock<MemberKind>,
|
||||
pub lazy: Mutex<Option<LazyMemberHandle>>,
|
||||
}
|
||||
impl Member {
|
||||
pub fn from_api(api::Member { exported: public, name, kind }: api::Member, sys: &System) -> Self {
|
||||
let (kind, lazy) = match kind {
|
||||
api::MemberKind::Const(c) =>
|
||||
(OnceLock::from(MemberKind::PreCnst(RtExpr::from_api(c, sys))), None),
|
||||
api::MemberKind::Module(m) =>
|
||||
(OnceLock::from(MemberKind::Mod(Module::from_api(m, sys))), None),
|
||||
api::MemberKind::Lazy(id) => (OnceLock::new(), Some(LazyMemberHandle(id, sys.clone()))),
|
||||
pub fn from_api<'a>(
|
||||
api: api::Member,
|
||||
path: Substack<Tok<String>>,
|
||||
sys: &System,
|
||||
) -> Self {
|
||||
let name = deintern(api.name);
|
||||
let full_path = path.push(name.clone());
|
||||
let kind = match api.kind {
|
||||
api::MemberKind::Lazy(id) =>
|
||||
return LazyMemberHandle(id, sys.clone(), intern(&full_path.unreverse())).to_member(name),
|
||||
api::MemberKind::Const(c) => MemberKind::Const(Code::from_expr(
|
||||
CodeLocator::to_const(full_path.unreverse()),
|
||||
Expr::from_api(c, &mut ())
|
||||
)),
|
||||
api::MemberKind::Module(m) => MemberKind::Mod(Module::from_api(m, full_path, sys)),
|
||||
};
|
||||
Member { exported: public, name: deintern(name), kind, lazy: Mutex::new(lazy) }
|
||||
Member { name, kind: OnceLock::from(kind), lazy: Mutex::default() }
|
||||
}
|
||||
pub fn new(public: bool, name: Tok<String>, kind: MemberKind) -> Self {
|
||||
Member { exported: public, name, kind: OnceLock::from(kind), lazy: Mutex::default() }
|
||||
pub fn new(name: Tok<String>, kind: MemberKind) -> Self {
|
||||
Member { name, kind: OnceLock::from(kind), lazy: Mutex::default() }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum MemberKind {
|
||||
Const(Vec<ParsTokTree>),
|
||||
PreCnst(RtExpr),
|
||||
Const(Code),
|
||||
Mod(Module),
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Module {
|
||||
pub imports: Vec<Sym>,
|
||||
pub exports: Vec<Tok<String>>,
|
||||
pub items: Vec<Item>,
|
||||
}
|
||||
impl Module {
|
||||
pub fn from_api(m: api::Module, sys: &System) -> Self {
|
||||
Self {
|
||||
imports: m.imports.into_iter().map(|m| Sym::from_tok(deintern(m)).unwrap()).collect_vec(),
|
||||
items: m.items.into_iter().map(|i| Item::from_api(i, sys)).collect_vec(),
|
||||
pub fn new(items: impl IntoIterator<Item = Item>) -> Self {
|
||||
let items = items.into_iter().collect_vec();
|
||||
let exports = (items.iter())
|
||||
.filter_map(|i| match &i.kind {
|
||||
ItemKind::Export(e) => Some(e.clone()),
|
||||
_ => None,
|
||||
})
|
||||
.collect_vec();
|
||||
Self { imports: vec![], exports, items }
|
||||
}
|
||||
pub fn from_api(m: api::Module, path: Substack<Tok<String>>, sys: &System) -> Self {
|
||||
let mut output = Vec::new();
|
||||
for item in m.items.into_iter() {
|
||||
let next = Item::from_api(item, path.clone(), sys);
|
||||
output.push(next);
|
||||
}
|
||||
Self::new(output)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct LazyMemberHandle(api::TreeId, System);
|
||||
pub struct LazyMemberHandle(api::TreeId, System, Tok<Vec<Tok<String>>>);
|
||||
impl LazyMemberHandle {
|
||||
pub fn run(self) -> OrcRes<MemberKind> {
|
||||
match self.1.get_tree(self.0) {
|
||||
api::MemberKind::Const(c) => Ok(MemberKind::PreCnst(RtExpr::from_api(c, &self.1))),
|
||||
api::MemberKind::Module(m) => Ok(MemberKind::Mod(Module::from_api(m, &self.1))),
|
||||
api::MemberKind::Lazy(id) => Self(id, self.1).run(),
|
||||
api::MemberKind::Const(c) => Ok(MemberKind::Const(Code {
|
||||
bytecode: Expr::from_api(c, &mut ()).into(),
|
||||
locator: CodeLocator { steps: self.2, rule_loc: None },
|
||||
source: None,
|
||||
})),
|
||||
api::MemberKind::Module(m) => with_iter_stack(self.2.iter().cloned(), |path| {
|
||||
Ok(MemberKind::Mod(Module::from_api(m, path, &self.1)))
|
||||
}),
|
||||
api::MemberKind::Lazy(id) => Self(id, self.1, self.2).run(),
|
||||
}
|
||||
}
|
||||
pub fn to_member(self, name: Tok<String>) -> Member {
|
||||
Member { name, kind: OnceLock::new(), lazy: Mutex::new(Some(self)) }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Rule {
|
||||
pub pos: Pos,
|
||||
pub comments: Vec<Comment>,
|
||||
pub pattern: Vec<MTree<'static>>,
|
||||
pub kind: RuleKind,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum RuleKind {
|
||||
Remote(System, api::MacroId),
|
||||
Native(Code),
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Code {
|
||||
locator: CodeLocator,
|
||||
source: Option<Vec<ParsTokTree>>,
|
||||
bytecode: OnceLock<Expr>,
|
||||
}
|
||||
impl Code {
|
||||
pub fn from_expr(locator: CodeLocator, expr: Expr) -> Self {
|
||||
Self { locator, source: None, bytecode: expr.into() }
|
||||
}
|
||||
pub fn from_code(locator: CodeLocator, code: Vec<ParsTokTree>) -> Self {
|
||||
Self { locator, source: Some(code), bytecode: OnceLock::new() }
|
||||
}
|
||||
}
|
||||
|
||||
/// Selects a code element
|
||||
///
|
||||
/// Either the steps point to a constant and rule_loc is None, or the steps point to a module and
|
||||
/// rule_loc selects a macro rule within that module
|
||||
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
||||
pub struct CodeLocator {
|
||||
steps: Tok<Vec<Tok<String>>>,
|
||||
/// Index of a macro block in the module demarked by the steps, and a rule in that macro
|
||||
rule_loc: Option<(u16, u16)>,
|
||||
}
|
||||
impl CodeLocator {
|
||||
pub fn to_const(path: impl IntoIterator<Item = Tok<String>>) -> Self {
|
||||
Self { steps: intern(&path.into_iter().collect_vec()), rule_loc: None }
|
||||
}
|
||||
pub fn to_rule(path: impl IntoIterator<Item = Tok<String>>, macro_i: u16, rule_i: u16) -> Self {
|
||||
Self { steps: intern(&path.into_iter().collect_vec()), rule_loc: Some((macro_i, rule_i)) }
|
||||
}
|
||||
}
|
||||
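Usage sketch for CodeLocator (illustrative only; assumes intern accepts a &str for Tok<String>, as it appears to elsewhere in this diff): a constant foo::bar is addressed by its path alone, while a macro rule is addressed by the path of the module holding the macro block plus the block and rule indices.

let const_loc = CodeLocator::to_const([intern("foo"), intern("bar")]);
let rule_loc = CodeLocator::to_rule([intern("foo")], 0, 2); // 1st macro block, 3rd rule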