First steps for the macro system

2025-08-01 18:32:55 +02:00
parent f87185ef88
commit 051b5e666f
18 changed files with 356 additions and 166 deletions

View File

@@ -1,6 +1,8 @@
Since the macro AST is built as a custom tokenizer inside the system, it needs access to the import set. On the other hand, import sets aren't available until after parsing. We need a way to impose this ordering on a lexer without restricting the lexer's expression value. Decide whether we need patterns at runtime; maybe macros shouldn't be obligated to return MacTree, so destructuring can be done in a safer and easier way?
The daft option of accepting import-resolution queries at runtime is available, but consider better options. Double-check the type and templating logic in the note; it's a bit fishy.
Consider whether all macros need to be loaded, or whether the const references could be used to pre-filter them for a given let line.
## alternate extension mechanism

View File

@@ -158,6 +158,40 @@ impl fmt::Display for OrcErrv {
pub type OrcRes<T> = Result<T, OrcErrv>;
pub fn join_ok<T, U>(left: OrcRes<T>, right: OrcRes<U>) -> OrcRes<(T, U)> {
match (left, right) {
(Ok(t), Ok(u)) => Ok((t, u)),
(Err(e), Ok(_)) | (Ok(_), Err(e)) => Err(e),
(Err(e1), Err(e2)) => Err(e1 + e2),
}
}
#[macro_export]
macro_rules! join_ok {
($($names:ident $(: $tys:ty)? = $vals:expr;)*) => {
let $crate::join_ok!(@NAMES $($names $(: $tys)? = $vals;)*)
:
$crate::join_ok!(@TYPES $($names $(: $tys)? = $vals;)*)
=
$crate::join_ok!(@VALUES $($names $(: $tys)? = $vals;)*)?;
};
(@NAMES $name:ident $(: $ty:ty)? = $val:expr ; $($names:ident $(: $tys:ty)? = $vals:expr;)*) => {
($name, $crate::join_ok!(@NAMES $($names $(: $tys)? = $vals;)*))
};
(@NAMES) => { _ };
(@TYPES $name:ident : $ty:ty = $val:expr ; $($names:ident $(: $tys:ty)? = $vals:expr;)*) => {
($ty, $crate::join_ok!(@TYPES $($names $(: $tys)? = $vals;)*))
};
(@TYPES $name:ident = $val:expr ; $($names:ident $(: $tys:ty)? = $vals:expr;)*) => {
(_, $crate::join_ok!(@TYPES $($names $(: $tys)? = $vals;)*))
};
(@TYPES) => { () };
(@VALUES $name:ident $(: $ty:ty)? = $val:expr ; $($names:ident $(: $tys:ty)? = $vals:expr;)*) => {
$crate::error::join_ok($val, $crate::join_ok!(@VALUES $($names $(: $tys)? = $vals;)*))
};
(@VALUES) => { Ok(()) };
}
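For orientation: this is roughly how the new join_ok! macro is used later in this commit (see the matcher changes below, which is where the left/mid/right bindings and the interner i come from). The expansion shown in the comment is an approximation, not part of the diff:

join_ok! {
  left = mk_scalv(left, i).await;
  mid = mk_vec(mid, i).await;
  right = mk_scalv(right, i).await;
}
// Expands to roughly:
// let (left, (mid, (right, _))): (_, (_, (_, ()))) = join_ok(
//   mk_scalv(left, i).await,
//   join_ok(mk_vec(mid, i).await, join_ok(mk_scalv(right, i).await, Ok(()))),
// )?;
// so errors from every failing result are merged (via OrcErrv addition) instead of
// short-circuiting on the first Err.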
pub fn mk_err(
description: Tok<String>,
message: impl AsRef<str>,

View File

@@ -108,6 +108,10 @@ impl SrcRange {
pub fn to_api(&self) -> api::SourceRange {
api::SourceRange { path: self.path.to_api(), range: self.range.clone() }
}
pub fn to(&self, rhs: &Self) -> Self {
assert_eq!(self.path, rhs.path, "Range continues across files");
Self { path: self.path(), range: self.start().min(rhs.start())..self.end().max(rhs.end()) }
}
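In words: the new to method merges two ranges from the same file into one that spans from the earlier start to the later end, and the assert guards against accidentally bridging files. A minimal sketch with hypothetical SrcRange values a and b from the same source file:

let whole = a.to(&b); // whole.range == a.start().min(b.start())..a.end().max(b.end())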
}
impl From<SrcRange> for ErrPos {
fn from(val: SrcRange) -> Self { val.pos().into() }

View File

@@ -236,8 +236,8 @@ impl Sym {
Self::from_tok(Tok::from_api(marker, i).await).expect("Empty sequence found for serialized Sym")
}
pub fn to_api(&self) -> api::TStrv { self.tok().to_api() }
- pub async fn push(&self, tok: Tok<String>, i: &Interner) -> Sym {
- Self::new(self.0.iter().cloned().chain([tok]), i).await.unwrap()
+ pub async fn suffix(&self, tokv: impl IntoIterator<Item = Tok<String>>, i: &Interner) -> Sym {
+ Self::new(self.0.iter().cloned().chain(tokv), i).await.unwrap()
}
}
impl fmt::Debug for Sym {

View File

@@ -11,14 +11,14 @@ use crate::error::{OrcErrv, OrcRes, Reporter, mk_err, mk_errv};
use crate::format::{FmtCtx, FmtUnit, Format, fmt};
use crate::interner::{Interner, Tok};
use crate::location::SrcRange;
- use crate::name::{Sym, VName, VPath};
+ use crate::name::{NameLike, Sym, VName, VPath};
use crate::tree::{ExprRepr, ExtraTok, Paren, TokTree, Token, ttv_fmt, ttv_range};
pub trait ParseCtx {
#[must_use]
fn i(&self) -> &Interner;
#[must_use]
- fn reporter(&self) -> &Reporter;
+ fn rep(&self) -> &Reporter;
}
pub struct ParseCtxImpl<'a> {
pub i: &'a Interner,
@@ -26,7 +26,7 @@ pub struct ParseCtxImpl<'a> {
}
impl ParseCtx for ParseCtxImpl<'_> {
fn i(&self) -> &Interner { self.i }
- fn reporter(&self) -> &Reporter { self.r }
+ fn rep(&self) -> &Reporter { self.r }
}
pub fn name_start(c: char) -> bool { c.is_alphabetic() || c == '_' }
@@ -237,7 +237,7 @@ pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
match &tt.tok {
Token::NS(ns, body) => {
if !ns.starts_with(name_start) {
- ctx.reporter().report(mk_err(
+ ctx.rep().report(mk_err(
ctx.i().i("Unexpected name prefix").await,
"Only names can precede ::",
[ttpos.into()],
@@ -257,7 +257,7 @@ pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
while let Some((output, tail)) = body.pop_front() {
match rec(output, ctx).boxed_local().await {
Ok(names) => o.extend(names),
- Err(e) => ctx.reporter().report(e),
+ Err(e) => ctx.rep().report(e),
}
body = tail;
}
@@ -296,6 +296,10 @@ impl Import {
None => self.path.into_name().expect("Import cannot be empty"),
}
}
pub fn new(sr: SrcRange, path: VPath, name: Tok<String>) -> Self {
Import { path, name: Some(name), sr }
}
pub fn new_glob(sr: SrcRange, path: VPath) -> Self { Import { path, name: None, sr } }
}
impl Display for Import {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {

View File

@@ -16,7 +16,7 @@ use crate::error::OrcErrv;
use crate::format::{FmtCtx, FmtUnit, Format, Variants};
use crate::interner::{Interner, Tok};
use crate::location::{Pos, SrcRange};
- use crate::name::Sym;
+ use crate::name::{Sym, VName, VPath};
use crate::parse::Snippet;
use crate::{api, match_mapping, tl_cache};
@@ -149,6 +149,20 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
pub fn as_name(&self) -> Option<Tok<String>> {
if let Token::Name(n) = &self.tok { Some(n.clone()) } else { None }
}
pub fn as_multiname(&self) -> Result<VName, &TokTree<H, X>> {
let mut segs = VPath::new([]);
let mut cur = self;
loop {
match &cur.tok {
Token::Name(last) => return Ok(segs.name_with_suffix(last.clone())),
Token::NS(seg, inner) => {
segs = segs.suffix([seg.clone()]);
cur = inner;
},
_ => return Err(cur),
}
}
}
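A usage sketch for the new as_multiname helper (variable names hypothetical); it flattens a chain of Token::NS segments such as foo::bar::baz into a single VName, or hands back the offending subtree when the chain ends in something other than a plain name:

match tree.as_multiname() {
  Ok(mn) => { /* use the full path, e.g. MacTok::Name(mn.to_sym(ctx.i()).await) as parse_tok does below */ },
  Err(nested) => { /* report the malformed segment and fall back to parsing `nested` on its own */ },
}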
pub fn as_s(&self, par: Paren) -> Option<Snippet<'_, H, X>> {
self.tok.as_s(par).map(|slc| Snippet::new(self, slc))
}

View File

@@ -32,10 +32,10 @@ use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId};
use crate::atom_owned::take_atom;
use crate::expr::{Expr, ExprHandle};
use crate::lexer::{LexContext, err_cascade, err_not_applicable};
- use crate::parser::{ParsCtx, get_const, linev_into_api};
+ use crate::parser::{PTok, PTokTree, ParsCtx, get_const, linev_into_api};
use crate::system::{SysCtx, atom_by_idx};
use crate::system_ctor::{CtedObj, DynSystemCtor};
- use crate::tree::{GenTok, GenTokTree, LazyMemberFactory, TreeIntoApiCtxImpl};
+ use crate::tree::{LazyMemberFactory, TreeIntoApiCtxImpl};
pub type ExtReq<'a> = RequestHandle<'a, api::ExtMsgSet>;
pub type ExtReqNot = ReqNot<api::ExtMsgSet>;
@@ -259,10 +259,10 @@ pub fn extension_init(
let src = Sym::from_api(*src, ctx.i()).await;
let comments =
join_all(comments.iter().map(|c| Comment::from_api(c, src.clone(), &i))).await;
- let line: Vec<GenTokTree> = ttv_from_api(line, &mut ctx, &mut (), &src, &i).await;
+ let line: Vec<PTokTree> = ttv_from_api(line, &mut ctx, &mut (), &src, &i).await;
let snip = Snippet::new(line.first().expect("Empty line"), &line);
let (head, tail) = snip.pop_front().unwrap();
- let name = if let GenTok::Name(n) = &head.tok { n } else { panic!("No line head") };
+ let name = if let PTok::Name(n) = &head.tok { n } else { panic!("No line head") };
let parser =
parsers.iter().find(|p| p.line_head() == **name).expect("No parser candidate");
let module = Sym::from_api(*module, ctx.i()).await;

View File

@@ -4,6 +4,7 @@ use async_stream::stream;
use futures::future::{LocalBoxFuture, join_all};
use futures::{FutureExt, Stream, StreamExt, pin_mut};
use itertools::Itertools;
use never::Never;
use orchid_api::ResolveNames;
use orchid_base::error::{OrcRes, Reporter};
use orchid_base::id_store::IdStore;
@@ -12,7 +13,7 @@ use orchid_base::location::SrcRange;
use orchid_base::name::Sym;
use orchid_base::parse::{Comment, ParseCtx, Snippet};
use orchid_base::reqnot::{ReqHandlish, Requester};
- use orchid_base::tree::ttv_into_api;
+ use orchid_base::tree::{TokTree, Token, ttv_into_api};
use crate::api;
use crate::expr::Expr;
@@ -20,7 +21,9 @@ use crate::gen_expr::GExpr;
use crate::system::{SysCtx, SysCtxEntry};
use crate::tree::GenTokTree;
- pub type GenSnippet<'a> = Snippet<'a, Expr, GExpr>;
+ pub type PTok = Token<Expr, Never>;
+ pub type PTokTree = TokTree<Expr, Never>;
+ pub type PSnippet<'a> = Snippet<'a, Expr, Never>;
pub trait Parser: Send + Sync + Sized + Default + 'static {
const LINE_HEAD: &'static str;
@@ -28,7 +31,7 @@ pub trait Parser: Send + Sync + Sized + Default + 'static {
ctx: ParsCtx<'a>,
exported: bool,
comments: Vec<Comment>,
- line: GenSnippet<'a>,
+ line: PSnippet<'a>,
) -> impl Future<Output = OrcRes<Vec<ParsedLine>>> + 'a;
}
@@ -39,7 +42,7 @@ pub trait DynParser: Send + Sync + 'static {
ctx: ParsCtx<'a>,
exported: bool,
comments: Vec<Comment>,
- line: GenSnippet<'a>,
+ line: PSnippet<'a>,
) -> LocalBoxFuture<'a, OrcRes<Vec<ParsedLine>>>;
}
@@ -50,7 +53,7 @@ impl<T: Parser> DynParser for T {
ctx: ParsCtx<'a>,
exported: bool,
comments: Vec<Comment>,
- line: GenSnippet<'a>,
+ line: PSnippet<'a>,
) -> LocalBoxFuture<'a, OrcRes<Vec<ParsedLine>>> {
Box::pin(async move { Self::parse(ctx, exported, comments, line).await })
}
@@ -73,7 +76,7 @@ impl<'a> ParsCtx<'a> {
}
impl ParseCtx for ParsCtx<'_> {
fn i(&self) -> &Interner { self.ctx.i() }
- fn reporter(&self) -> &Reporter { self.reporter }
+ fn rep(&self) -> &Reporter { self.reporter }
}
type BoxConstCallback = Box<dyn FnOnce(ConstCtx) -> LocalBoxFuture<'static, GExpr>>;
@@ -174,26 +177,25 @@ pub struct ConstCtx {
impl ConstCtx {
pub fn names<'a>(
&'a self,
- names: impl IntoIterator<Item = &'a Sym> + Clone + 'a,
- ) -> impl Stream<Item = (Sym, Option<Sym>)> + 'a {
+ names: impl IntoIterator<Item = &'a Sym> + 'a,
+ ) -> impl Stream<Item = Option<Sym>> + 'a {
let resolve_names = ResolveNames {
constid: self.constid,
sys: self.ctx.sys_id(),
- names: names.clone().into_iter().map(|n| n.to_api()).collect_vec(),
+ names: names.into_iter().map(|n| n.to_api()).collect_vec(),
};
stream! {
- let new_names = self.ctx.reqnot().request(resolve_names).await;
- for (name, name_opt) in names.into_iter().zip(new_names) {
- yield (name.clone(), match name_opt {
+ for name_opt in self.ctx.reqnot().request(resolve_names).await {
+ yield match name_opt {
None => None,
Some(name) => Some(Sym::from_api(name, self.ctx.i()).await)
- })
+ }
}
}
}
pub async fn names_n<const N: usize>(&self, names: [&Sym; N]) -> [Option<Sym>; N] {
let mut results = [const { None }; N];
- let names = self.names(names).enumerate().filter_map(|(i, n)| async move { Some((i, n.1?)) });
+ let names = self.names(names).enumerate().filter_map(|(i, n)| async move { Some((i, n?)) });
pin_mut!(names);
while let Some((i, name)) = names.next().await {
results[i] = Some(name);
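A usage sketch for names_n (the symbols here are hypothetical); it drives the stream above for a fixed-size batch and returns the resolutions in input order:

let [math_opt, io_opt] = const_ctx.names_n([&math_sym, &io_sym]).await;
// each element is Some(resolved Sym) if the host could resolve that name, None otherwise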

View File

@@ -26,7 +26,7 @@ pub struct HostParseCtxImpl<'a> {
}
impl ParseCtx for HostParseCtxImpl<'_> {
- fn reporter(&self) -> &Reporter { self.rep }
+ fn rep(&self) -> &Reporter { self.rep }
fn i(&self) -> &Interner { &self.ctx.i }
}

View File

@@ -1,9 +1,7 @@
use std::borrow::Cow;
use std::pin::Pin;
- use std::rc::Rc;
use futures::AsyncWrite;
- use itertools::Itertools;
use never::Never;
use orchid_extension::atom::{Atomic, TypAtom};
use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant, get_own_instance};

View File

@@ -1,20 +1,17 @@
- use futures::future::LocalBoxFuture;
+ use async_std::stream;
use async_stream::stream;
use futures::{FutureExt, StreamExt};
use hashbrown::HashMap;
use itertools::Itertools;
- use orchid_base::error::{OrcRes, mk_errv};
- use orchid_base::interner::Tok;
- use orchid_base::name::Sym;
- use orchid_base::parse::{Comment, ParseCtx, Parsed, expect_tok, token_errv, try_pop_no_fluff};
+ use orchid_base::error::OrcRes;
+ use orchid_base::parse::{
+ Comment, ParseCtx, Parsed, Snippet, expect_tok, token_errv, try_pop_no_fluff,
+ };
- use orchid_extension::gen_expr::GExpr;
- use orchid_extension::parser::{
- ConstCtx, GenSnippet, ParsCtx, ParsedLine, ParsedLineKind, ParsedMem, ParsedMemKind, Parser,
+ use orchid_extension::parser::{
+ PSnippet, PTok, PTokTree, ParsCtx, ParsedLine, ParsedLineKind, ParsedMem, ParsedMemKind, Parser,
};
use substack::Substack;
- use crate::macros::mactree::{MacTok, MacTree, map_mactree, map_mactree_v};
+ use crate::macros::mactree::{MacTok, MacTree, map_mactree_v};
- type ExprGenerator =
- Box<dyn for<'a> FnOnce(ConstCtx, Substack<'a, Sym>) -> LocalBoxFuture<'a, GExpr>>;
#[derive(Default)]
pub struct LetLine;
@@ -24,7 +21,7 @@ impl Parser for LetLine {
ctx: ParsCtx<'a>,
exported: bool,
comments: Vec<Comment>,
- line: GenSnippet<'a>,
+ line: PSnippet<'a>,
) -> OrcRes<Vec<ParsedLine>> {
let Parsed { output: name_tok, tail } = try_pop_no_fluff(&ctx, line).await?;
let Some(name) = name_tok.as_name() else {
@@ -34,19 +31,11 @@ impl Parser for LetLine {
return Err(err.await);
};
let Parsed { tail, .. } = expect_tok(&ctx, tail, ctx.i().i("=").await).await?;
fn parse_tokv(line: GenSnippet<'_>) -> Vec<MacTree> {
let first: MacTree = if let Some((idx, arg)) =
line.iter().enumerate().find_map(|(i, x)| Some((i, x.as_lambda()?)))
{
} else {
};
todo!()
}
let mut names = HashMap::new();
- let aliased = parse_tokv(tail);
+ let aliased = parse_tokv(tail, &ctx).await;
map_mactree_v(&aliased, &mut false, &mut |tpl| {
if let MacTok::Name(n) = &*tpl.tok {
- names.insert(n.clone(), n);
+ names.insert(n.clone(), n.clone());
}
None
});
@@ -56,14 +45,76 @@ impl Parser for LetLine {
kind: ParsedLineKind::Mem(ParsedMem {
exported,
name,
- kind: ParsedMemKind::cnst(async |ctx| {
- let mut names_str = ctx.names(names.keys());
- while let Some(()) = names_str.next().await {
- names[]
- }
- todo!()
+ kind: ParsedMemKind::cnst(async move |ctx| {
+ let keys = names.keys().cloned().collect_vec();
+ let names_mut = &mut names;
+ stream! {
+ for await (canonical, local) in ctx.names(&keys).zip(stream::from_iter(&keys)) {
+ if let Some(name) = canonical {
+ *names_mut.get_mut(local).expect("Queried specifically the keys of this map") = name
+ }
+ }
+ }
+ .collect::<()>()
+ .await;
+ let macro_input = map_mactree_v(&aliased, &mut false, &mut |tree| match &*tree.tok {
+ MacTok::Name(n) => names.get(n).map(|new_n| MacTok::Name(new_n.clone()).at(tree.pos())),
+ _ => None,
+ });
+ todo!("Run macros then convert this into an expr")
}),
}),
}])
}
}
async fn parse_tokv(line: PSnippet<'_>, ctx: &ParsCtx<'_>) -> Vec<MacTree> {
if let Some((idx, arg)) = line.iter().enumerate().find_map(|(i, x)| Some((i, x.as_lambda()?))) {
let (head, lambda) = line.split_at(idx as u32);
let (_, body) = lambda.pop_front().unwrap();
let body = parse_tokv(body, ctx).boxed_local().await;
let mut all = parse_tokv_no_lambdas(&head, ctx).await;
match parse_tok(arg, ctx).await {
Some(arg) => all.push(MacTok::Lambda(arg, body).at(lambda.sr().pos())),
None => ctx.rep().report(
token_errv(ctx, arg, "Lambda argument fluff", |arg| {
format!("Lambda arguments must be a valid token, found meaningless fragment {arg}")
})
.await,
),
};
all
} else {
parse_tokv_no_lambdas(&line, ctx).await
}
}
async fn parse_tokv_no_lambdas(line: &[PTokTree], ctx: &ParsCtx<'_>) -> Vec<MacTree> {
stream::from_iter(line).filter_map(|tt| parse_tok(tt, ctx)).collect().await
}
async fn parse_tok(tree: &PTokTree, ctx: &ParsCtx<'_>) -> Option<MacTree> {
let tok = match &tree.tok {
PTok::Bottom(errv) => MacTok::Bottom(errv.clone()),
PTok::BR | PTok::Comment(_) => return None,
PTok::Name(n) => MacTok::Name(ctx.module().suffix([n.clone()], ctx.i()).await),
PTok::NS(..) => match tree.as_multiname() {
Ok(mn) => MacTok::Name(mn.to_sym(ctx.i()).await),
Err(nested) => {
ctx.rep().report(
token_errv(ctx, tree, ":: can only be followed by a name in an expression", |tok| {
format!("Expected name, found {tok}")
})
.await,
);
return parse_tok(nested, ctx).boxed_local().await;
},
},
PTok::Handle(expr) => MacTok::Value(expr.clone()),
PTok::NewExpr(never) => match *never {},
PTok::LambdaHead(_) => panic!("Lambda-head handled in the sequence parser"),
PTok::S(p, body) =>
MacTok::S(*p, parse_tokv(Snippet::new(tree, body), ctx).boxed_local().await),
};
Some(tok.at(tree.sr().pos()))
}

View File

@@ -2,14 +2,17 @@ use std::borrow::Cow;
use std::fmt::Display;
use std::rc::Rc;
use futures::FutureExt;
use futures::future::join_all;
use itertools::Itertools;
use orchid_api::Paren;
use orchid_base::error::OrcErrv;
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
use orchid_base::interner::Tok;
use orchid_base::location::Pos;
use orchid_base::name::Sym;
use orchid_base::tl_cache;
use orchid_base::tree::indent;
use orchid_extension::atom::Atomic;
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
use orchid_extension::expr::Expr;
@@ -20,7 +23,8 @@ pub struct MacTree {
pub tok: Rc<MacTok>,
}
impl MacTree {
- fn tok(&self) -> &MacTok { &*self.tok }
+ pub fn tok(&self) -> &MacTok { &*self.tok }
+ pub fn pos(&self) -> Pos { self.pos.clone() }
}
impl Atomic for MacTree {
type Data = ();
@@ -46,6 +50,12 @@ pub enum MacTok {
/// Only permitted in "pattern" values produced by macro blocks, which are
/// never accessed as variables by usercode
Ph(Ph),
Bottom(OrcErrv),
}
impl MacTok {
pub fn at(self, pos: impl Into<Pos>) -> MacTree {
MacTree { pos: pos.into(), tok: Rc::new(self) }
}
}
impl Format for MacTok {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
@@ -55,7 +65,7 @@ impl Format for MacTok {
tl_cache!(Rc<Variants>: Rc::new(Variants::default()
.unbounded("\\{0b}.{1l}")
.bounded("(\\{0b}.{1b})"))),
- [arg.print(c).await, mtreev_fmt(b, c).await],
+ [arg.print(c).boxed_local().await, mtreev_fmt(b, c).await],
),
Self::Name(n) => format!("{n}").into(),
Self::Ph(ph) => format!("{ph}").into(),
@@ -68,6 +78,8 @@ impl Format for MacTok {
[mtreev_fmt(body, c).await],
),
Self::Slot => "SLOT".into(),
Self::Bottom(err) if err.len() == 1 => format!("Bottom({}) ", err.one().unwrap()).into(),
Self::Bottom(err) => format!("Bottom(\n{}) ", indent(&err.to_string())).into(),
}
}
}
@@ -117,7 +129,8 @@ pub fn map_mactree<F: FnMut(MacTree) -> Option<MacTree>>(
ro(changed, |changed| map_mactree(arg, changed, map)),
map_mactree_v(body, changed, map),
),
- MacTok::Name(_) | MacTok::Value(_) | MacTok::Slot | MacTok::Ph(_) => return src.clone(),
+ MacTok::Name(_) | MacTok::Value(_) | MacTok::Slot | MacTok::Ph(_) | MacTok::Bottom(_) =>
+ return src.clone(),
MacTok::S(p, body) => MacTok::S(*p, map_mactree_v(body, changed, map)),
},
};

View File

@@ -1,5 +1,9 @@
use futures::FutureExt;
use futures::future::join_all;
use itertools::Itertools;
- use orchid_base::interner::Tok;
+ use orchid_base::error::{OrcRes, mk_errv};
+ use orchid_base::interner::{Interner, Tok};
+ use orchid_base::join_ok;
use orchid_base::side::Side;
use super::shared::{AnyMatcher, ScalMatcher, VecMatcher};
@@ -28,24 +32,31 @@ fn scal_cnt<'a>(iter: impl Iterator<Item = &'a MacTree>) -> usize {
}
#[must_use]
- pub fn mk_any(pattern: &[MacTree]) -> AnyMatcher {
+ pub async fn mk_any(pattern: &[MacTree], i: &Interner) -> OrcRes<AnyMatcher> {
let left_split = scal_cnt(pattern.iter());
if pattern.len() <= left_split {
- return AnyMatcher::Scalar(mk_scalv(pattern));
+ return Ok(AnyMatcher::Scalar(mk_scalv(pattern, i).await?));
}
let (left, not_left) = pattern.split_at(left_split);
let right_split = not_left.len() - scal_cnt(pattern.iter().rev());
let (mid, right) = not_left.split_at(right_split);
- AnyMatcher::Vec { left: mk_scalv(left), mid: mk_vec(mid), right: mk_scalv(right) }
+ join_ok! {
+ left = mk_scalv(left, i).await;
+ mid = mk_vec(mid, i).await;
+ right = mk_scalv(right, i).await;
+ }
+ Ok(AnyMatcher::Vec { left, mid, right })
}
/// Pattern MUST NOT contain vectorial placeholders
#[must_use]
- fn mk_scalv(pattern: &[MacTree]) -> Vec<ScalMatcher> { pattern.iter().map(mk_scalar).collect() }
+ async fn mk_scalv(pattern: &[MacTree], i: &Interner) -> OrcRes<Vec<ScalMatcher>> {
+ join_all(pattern.iter().map(|pat| mk_scalar(pat, i))).await.into_iter().collect()
+ }
/// Pattern MUST start and end with a vectorial placeholder
#[must_use]
- pub fn mk_vec(pattern: &[MacTree]) -> VecMatcher {
+ pub async fn mk_vec(pattern: &[MacTree], i: &Interner) -> OrcRes<VecMatcher> {
debug_assert!(!pattern.is_empty(), "pattern cannot be empty");
debug_assert!(pattern.first().map(vec_attrs).is_some(), "pattern must start with a vectorial");
debug_assert!(pattern.last().map(vec_attrs).is_some(), "pattern must end with a vectorial");
@@ -57,39 +68,57 @@ pub fn mk_vec(pattern: &[MacTree]) -> VecMatcher {
let (l_side, l_sep) = left.split_at(left.len() - l_sep_size);
let main = VecMatcher::Placeh { key: key.clone(), nonzero };
match (left, right) {
- (&[], &[]) => VecMatcher::Placeh { key, nonzero },
+ (&[], &[]) => Ok(VecMatcher::Placeh { key, nonzero }),
- (&[], _) => VecMatcher::Scan {
- direction: Side::Left,
- left: Box::new(main),
- sep: mk_scalv(r_sep),
- right: Box::new(mk_vec(r_side)),
+ (&[], _) => {
+ join_ok! {
+ sep = mk_scalv(r_sep, i).await;
+ right = mk_vec(r_side, i).boxed_local().await;
+ }
+ Ok(VecMatcher::Scan {
+ direction: Side::Left,
+ left: Box::new(main),
+ sep,
+ right: Box::new(right),
+ })
},
- (_, &[]) => VecMatcher::Scan {
- direction: Side::Right,
- left: Box::new(mk_vec(l_side)),
- sep: mk_scalv(l_sep),
- right: Box::new(main),
+ (_, &[]) => {
+ join_ok! {
+ left = mk_vec(l_side, i).boxed_local().await;
+ sep = mk_scalv(l_sep, i).await;
+ }
+ Ok(VecMatcher::Scan {
+ direction: Side::Right,
+ left: Box::new(left),
+ sep,
+ right: Box::new(main),
+ })
},
(..) => {
let mut key_order =
l_side.iter().chain(r_side.iter()).filter_map(vec_attrs).collect::<Vec<_>>();
key_order.sort_by_key(|(_, prio, _)| -(*prio as i64));
- VecMatcher::Middle {
- left: Box::new(mk_vec(l_side)),
- left_sep: mk_scalv(l_sep),
- mid: Box::new(main),
- right_sep: mk_scalv(r_sep),
- right: Box::new(mk_vec(r_side)),
- key_order: key_order.into_iter().map(|(n, ..)| n).collect(),
- }
+ join_ok! {
+ left = mk_vec(l_side, i).boxed_local().await;
+ left_sep = mk_scalv(l_sep, i).await;
+ right_sep = mk_scalv(r_sep, i).await;
+ right = mk_vec(r_side, i).boxed_local().await;
+ }
+ Ok(VecMatcher::Middle {
+ left: Box::new(left),
+ left_sep,
+ mid: Box::new(main),
+ right_sep,
+ right: Box::new(right),
+ key_order: key_order.into_iter().map(|(n, ..)| n).collect(),
+ })
},
}
}
/// Pattern MUST NOT be a vectorial placeholder
#[must_use]
- fn mk_scalar(pattern: &MacTree) -> ScalMatcher {
- match &*pattern.tok {
+ async fn mk_scalar(pattern: &MacTree, i: &Interner) -> OrcRes<ScalMatcher> {
+ Ok(match &*pattern.tok {
MacTok::Name(n) => ScalMatcher::Name(n.clone()),
MacTok::Ph(Ph { name, kind }) => match kind {
PhKind::Vector { .. } => {
@@ -97,10 +126,16 @@ fn mk_scalar(pattern: &MacTree) -> ScalMatcher {
},
PhKind::Scalar => ScalMatcher::Placeh { key: name.clone() },
},
- MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(body))),
+ MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(body, i).boxed_local().await?)),
- MacTok::Lambda(arg, body) => ScalMatcher::Lambda(Box::new(mk_any(arg)), Box::new(mk_any(body))),
+ MacTok::Lambda(..) =>
+ return Err(mk_errv(
+ i.i("Lambda in matcher").await,
+ "Lambdas can't be matched for, only generated in templates",
+ [pattern.pos()],
+ )),
MacTok::Value(_) | MacTok::Slot => panic!("Only used for templating"),
- }
+ MacTok::Bottom(errv) => return Err(errv.clone()),
+ })
}
#[cfg(test)]
@@ -150,7 +185,7 @@ mod test {
}))
.await,
];
- let matcher = mk_any(&pattern);
+ let matcher = mk_any(&pattern, &i).await.expect("This matcher isn't broken");
println!("{matcher}");
})
}

View File

@@ -2,6 +2,7 @@ use std::fmt;
use std::rc::Rc;
use itertools::Itertools;
use orchid_base::error::OrcRes;
use orchid_base::interner::Interner;
use orchid_base::location::Pos;
use orchid_base::name::Sym;
@@ -20,21 +21,21 @@ pub fn last_is_vec(pattern: &[MacTree]) -> bool { vec_attrs(pattern.last().unwra
pub struct NamedMatcher(AnyMatcher);
impl NamedMatcher {
- pub async fn new(pattern: &[MacTree], i: &Interner) -> Self {
+ pub async fn new(pattern: &[MacTree], i: &Interner) -> OrcRes<Self> {
assert!(
matches!(pattern.first().map(|tree| &*tree.tok), Some(MacTok::Name(_))),
"Named matchers must begin with a name"
);
- match last_is_vec(pattern) {
- true => Self(mk_any(pattern)),
+ Ok(Self(match last_is_vec(pattern) {
+ true => mk_any(pattern, i).await,
false => {
let kind = PhKind::Vector { priority: 0, at_least_one: false };
let tok = MacTok::Ph(Ph { name: i.i("::after").await, kind });
let suffix = [MacTree { pos: Pos::None, tok: Rc::new(tok) }];
- Self(mk_any(&pattern.iter().chain(&suffix).cloned().collect_vec()))
+ mk_any(&pattern.iter().chain(&suffix).cloned().collect_vec(), i).await
},
- }
+ }?))
}
/// Also returns the tail, if any, which should be matched further
/// Note that due to how priod works below, the main usable information from
@@ -62,12 +63,12 @@ impl fmt::Debug for NamedMatcher {
pub struct PriodMatcher(VecMatcher);
impl PriodMatcher {
- pub fn new(pattern: &[MacTree]) -> Self {
+ pub async fn new(pattern: &[MacTree], i: &Interner) -> OrcRes<Self> {
assert!(
pattern.first().and_then(vec_attrs).is_some() && pattern.last().and_then(vec_attrs).is_some(),
"Prioritized matchers must start and end with a vectorial",
);
- Self(mk_vec(pattern))
+ Ok(Self(mk_vec(pattern, i).await?))
}
/// tokens before the offset always match the prefix
pub fn apply<'a>(

View File

@@ -20,8 +20,6 @@ pub fn scal_match<'a>(
Some(MatchState::from_ph(key.clone(), StateEntry::Scalar(expr))),
(ScalMatcher::S(c1, b_mat), MacTok::S(c2, body)) if c1 == c2 =>
any_match(b_mat, &body[..], save_loc),
- (ScalMatcher::Lambda(arg_mat, b_mat), MacTok::Lambda(arg, body)) =>
- Some(any_match(arg_mat, arg, save_loc)?.combine(any_match(b_mat, body, save_loc)?)),
_ => None,
}
}

View File

@@ -11,7 +11,6 @@ use orchid_base::tokens::{PARENS, Paren};
pub enum ScalMatcher {
Name(Sym),
S(Paren, Box<AnyMatcher>),
- Lambda(Box<AnyMatcher>, Box<AnyMatcher>),
Placeh { key: Tok<String> },
}
@@ -62,7 +61,6 @@ impl fmt::Display for ScalMatcher {
let (l, r, _) = PARENS.iter().find(|r| r.2 == *t).unwrap();
write!(f, "{l}{body}{r}")
},
- Self::Lambda(arg, body) => write!(f, "\\{arg}.{body}"),
}
}
}

View File

@@ -13,22 +13,24 @@ use async_stream::try_stream;
use camino::Utf8PathBuf;
use clap::{Parser, Subcommand};
use futures::{Stream, TryStreamExt, io};
use itertools::Itertools;
use orchid_base::error::Reporter;
use orchid_base::format::{FmtCtxImpl, Format, take_first};
use orchid_base::location::SrcRange;
use orchid_base::logging::{LogStrategy, Logger};
- use orchid_base::parse::Snippet;
+ use orchid_base::name::{NameLike, VPath};
+ use orchid_base::parse::{Import, Snippet};
use orchid_base::sym;
- use orchid_base::tree::ttv_fmt;
+ use orchid_base::tree::{Token, ttv_fmt};
use orchid_host::ctx::Ctx;
use orchid_host::execute::{ExecCtx, ExecResult};
- use orchid_host::expr::PathSetBuilder;
+ use orchid_host::expr::ExprKind;
use orchid_host::extension::Extension;
use orchid_host::lex::lex;
- use orchid_host::parse::{HostParseCtxImpl, parse_expr, parse_items};
+ use orchid_host::parse::{HostParseCtxImpl, parse_item, parse_items};
- use orchid_host::parsed::{Item, ItemKind, ParsedMember, ParsedMemberKind, ParsedModule};
+ use orchid_host::parsed::{Item, ItemKind, ParsTokTree, ParsedMember, ParsedModule};
use orchid_host::subprocess::ext_command;
use orchid_host::system::init_systems;
use orchid_host::tree::Root;
use substack::Substack;
use tokio::task::{LocalSet, spawn_local};
@@ -147,56 +149,90 @@ async fn main() -> io::Result<ExitCode> {
println!("{}", take_first(&item.print(&FmtCtxImpl { i }).await, true)) println!("{}", take_first(&item.print(&FmtCtxImpl { i }).await, true))
} }
}, },
- Commands::Repl => loop {
- let (root, systems) = init_systems(&args.system, &extensions).await.unwrap();
- print!("\\.> ");
- std::io::stdout().flush().unwrap();
- let mut prompt = String::new();
- stdin().read_line(&mut prompt).await.unwrap();
- eprintln!("lexing");
- let lexemes =
- lex(i.i(prompt.trim()).await, sym!(usercode; i).await, &systems, ctx).await.unwrap();
- eprintln!("lexed");
- if args.logs {
- println!("lexed: {}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl { i }).await, true));
- }
- let path = sym!(usercode; i).await;
- let reporter = Reporter::new();
- let parse_ctx = HostParseCtxImpl {
- ctx: ctx.clone(),
- rep: &reporter,
- src: path.clone(),
- systems: &systems[..],
- };
- let parse_res = parse_expr(
- &parse_ctx,
- path.clone(),
- PathSetBuilder::new(),
- Snippet::new(&lexemes[0], &lexemes),
- )
- .await;
- eprintln!("parsed");
- let expr = match reporter.merge(parse_res) {
- Ok(expr) => expr,
- Err(e) => {
- eprintln!("{e}");
- continue;
- },
- };
- let mut xctx = ExecCtx::new(ctx.clone(), logger.clone(), root.clone(), expr).await;
- eprintln!("executed");
- xctx.set_gas(Some(1000));
- xctx.execute().await;
- match xctx.result() {
- ExecResult::Value(val) =>
- println!("{}", take_first(&val.print(&FmtCtxImpl { i }).await, false)),
- ExecResult::Err(e) => println!("error: {e}"),
- ExecResult::Gas(_) => println!("Ran out of gas!"),
- }
- },
+ Commands::Repl => {
+ let mut counter = 0;
+ let mut imports = Vec::new();
+ let usercode_path = sym!(usercode; i).await;
+ loop {
+ counter += 1;
+ let (mut root, systems) = init_systems(&args.system, &extensions).await.unwrap();
+ print!("\\.> ");
+ std::io::stdout().flush().unwrap();
+ let mut prompt = String::new();
+ stdin().read_line(&mut prompt).await.unwrap();
+ let name = i.i(&format!("_{counter}")).await;
+ let path = usercode_path.suffix([name.clone()], i).await;
+ let mut lexemes =
+ lex(i.i(prompt.trim()).await, path.clone(), &systems, ctx).await.unwrap();
+ let Some(discr) = lexemes.first() else { continue };
+ if args.logs {
+ println!("lexed: {}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl { i }).await, true));
+ }
+ let prefix_sr = SrcRange::zw(path.clone(), 0);
+ let process_lexemes = async |lexemes: &[ParsTokTree]| {
+ let snippet = Snippet::new(&lexemes[0], lexemes);
+ let reporter = Reporter::new();
+ let parse_ctx = HostParseCtxImpl {
+ ctx: ctx.clone(),
+ rep: &reporter,
+ src: path.clone(),
+ systems: &systems[..],
+ };
+ let parse_result = parse_item(&parse_ctx, Substack::Bottom, vec![], snippet).await;
+ match reporter.merge(parse_result) {
+ Ok(items) => Some(items),
+ Err(e) => {
+ eprintln!("{e}");
+ None
+ },
+ }
+ };
+ let add_imports = |items: &mut Vec<Item>, imports: &[Import]| {
+ items.extend(imports.iter().map(|import| Item::new(import.sr.clone(), import.clone())));
+ };
+ if discr.is_kw(i.i("import").await) {
+ let Some(import_lines) = process_lexemes(&lexemes).await else { continue };
+ imports.extend(import_lines.into_iter().map(|it| match it.kind {
+ ItemKind::Import(imp) => imp,
+ _ => panic!("Expected imports from import line"),
+ }));
+ continue;
+ }
+ if !discr.is_kw(i.i("let").await) {
+ let prefix = [i.i("export").await, i.i("let").await, name.clone(), i.i("=").await];
+ lexemes.splice(0..0, prefix.map(|n| Token::Name(n).at(prefix_sr.clone())));
+ }
+ let Some(mut new_lines) = process_lexemes(&lexemes).await else { continue };
+ let const_decl = new_lines.iter().exactly_one().expect("Multiple lines from let");
+ let input_sr = const_decl.sr.map_range(|_| 0..0);
+ let const_name = match &const_decl.kind {
+ ItemKind::Member(ParsedMember { name: const_name, .. }) => const_name.clone(),
+ _ => panic!("Expected exactly one constant declaration from let"),
+ };
+ add_imports(&mut new_lines, &imports);
+ imports.push(Import::new(input_sr.clone(), VPath::new(path.segs()), const_name.clone()));
+ let new_module = ParsedModule::new(true, new_lines);
+ let reporter = Reporter::new();
+ root = root.add_parsed(&new_module, path.clone(), &reporter).await;
+ eprintln!("parsed");
+ let entrypoint =
+ ExprKind::Const(path.suffix([const_name.clone()], i).await).at(input_sr.pos());
+ let mut xctx = ExecCtx::new(ctx.clone(), logger.clone(), root.clone(), entrypoint).await;
+ eprintln!("executed");
+ xctx.set_gas(Some(1000));
+ xctx.execute().await;
+ match xctx.result() {
+ ExecResult::Value(val) =>
+ println!("{const_name} = {}", take_first(&val.print(&FmtCtxImpl { i }).await, false)),
+ ExecResult::Err(e) => println!("error: {e}"),
+ ExecResult::Gas(_) => println!("Ran out of gas!"),
+ }
+ }
+ },
Commands::Execute { proj, code } => {
let reporter = Reporter::new();
let path = sym!(usercode::entrypoint; i).await;
let prefix_sr = SrcRange::zw(path.clone(), 0);
let (mut root, systems) = init_systems(&args.system, &extensions).await.unwrap();
if let Some(proj_path) = proj {
let path = PathBuf::from(proj_path.into_std_path_buf());
@@ -209,8 +245,7 @@ async fn main() -> io::Result<ExitCode> {
},
}
}
- let lexemes = lex(i.i(code.trim()).await, path.clone(), &systems, ctx).await.unwrap();
+ let mut lexemes = lex(i.i(code.trim()).await, path.clone(), &systems, ctx).await.unwrap();
- let snippet = Snippet::new(&lexemes[0], &lexemes);
if args.logs {
println!("lexed: {}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl { i }).await, true));
}
@@ -220,21 +255,22 @@ async fn main() -> io::Result<ExitCode> {
src: path.clone(),
systems: &systems[..],
};
- let parse_res = parse_expr(&parse_ctx, path.clone(), PathSetBuilder::new(), snippet).await;
- let expr = match reporter.merge(parse_res) {
- Ok(expr) => expr,
+ let prefix =
+ [i.i("export").await, i.i("let").await, i.i("entrypoint").await, i.i("=").await];
+ lexemes.splice(0..0, prefix.map(|n| Token::Name(n).at(prefix_sr.clone())));
+ let snippet = Snippet::new(&lexemes[0], &lexemes);
+ let parse_res = parse_item(&parse_ctx, Substack::Bottom, vec![], snippet).await;
+ let entrypoint = match reporter.merge(parse_res) {
+ Ok(items) => ParsedModule::new(true, items),
Err(e) => {
eprintln!("{e}");
*exit_code1.borrow_mut() = ExitCode::FAILURE;
return;
},
};
- let parsed_root = ParsedModule::new(true, [Item::new(
- snippet.sr(),
- ParsedMember::new(true, i.i("entrypoint").await, expr.clone()),
- )]);
let reporter = Reporter::new();
- let root = root.add_parsed(&parsed_root, sym!(usercode; i).await, &reporter).await;
+ let root = root.add_parsed(&entrypoint, path.clone(), &reporter).await;
+ let expr = ExprKind::Const(sym!(usercode::entrypoint; i).await).at(prefix_sr.pos());
let mut xctx = ExecCtx::new(ctx.clone(), logger.clone(), root, expr).await;
xctx.set_gas(Some(1000));
xctx.execute().await;

View File

@@ -39,7 +39,7 @@ pub async fn parse_folder(
));
};
let name = ctx.i.i(os_str_to_string(name_os, &ctx.i, [sr]).await?).await;
- let ns = ns.push(name.clone(), &ctx.i).await;
+ let ns = ns.suffix([name.clone()], &ctx.i).await;
let sr = SrcRange::new(0..0, &ns);
let mut items = Vec::new();
let mut stream = match fs::read_dir(path).await {
@@ -67,7 +67,7 @@ pub async fn parse_folder(
} else if path.extension() == Some(OsStr::new("orc")) {
let name_os = path.file_stem().expect("If there is an extension, there must be a stem");
let name = ctx.i.i(os_str_to_string(name_os, &ctx.i, [sr]).await?).await;
- let ns = ns.push(name, &ctx.i).await;
+ let ns = ns.suffix([name], &ctx.i).await;
let sr = SrcRange::new(0..0, &ns);
let mut file = match File::open(path).await {
Err(e) => return Err(async_io_err(e, &ctx.i, [sr]).await),