Commit pending merge

2025-04-30 22:13:09 +02:00
parent c9b349bccf
commit 1a25f52471
18 changed files with 198 additions and 129 deletions

View File

@@ -1,10 +1,7 @@
use std::rc::Rc;
use futures::FutureExt;
use hashbrown::{HashMap, HashSet};
use itertools::{Either, Itertools};
use orchid_base::error::{OrcErr, Reporter, mk_err};
use orchid_base::format::{FmtCtxImpl, Format, take_first};
use orchid_base::interner::{Interner, Tok};
use orchid_base::location::Pos;
use orchid_base::name::{NameLike, Sym, VName};
@@ -125,18 +122,17 @@ pub async fn imports_to_aliases(
match &item.kind {
ItemKind::Import(imp) => match absolute_path(cwd, &imp.path) {
Err(e) =>
ctx.rep.report(e.err_obj(ctx.i, item.pos.clone(), &imp.path.iter().join("::")).await),
ctx.rep.report(e.err_obj(ctx.i, item.sr.pos(), &imp.path.iter().join("::")).await),
Ok(abs_path) => {
let names = match imp.name.as_ref() {
Some(n) => Either::Right([n.clone()].into_iter()),
None => Either::Left(
resolv_glob(cwd, root, &abs_path, item.pos.clone(), ctx).await.into_iter(),
),
None =>
Either::Left(resolv_glob(cwd, root, &abs_path, item.sr.pos(), ctx).await.into_iter()),
};
for name in names {
let mut tgt = abs_path.clone().suffix([name.clone()]).to_sym(ctx.i).await;
let src = Sym::new(cwd.iter().cloned().chain([name]), ctx.i).await.unwrap();
import_locs.entry(src.clone()).or_insert(vec![]).push(item.pos.clone());
import_locs.entry(src.clone()).or_insert(vec![]).push(item.sr.pos());
if let Some(tgt2) = alias_map.get(&tgt) {
tgt = tgt2.clone();
}
@@ -144,7 +140,7 @@ pub async fn imports_to_aliases(
ctx.rep.report(mk_err(
ctx.i.i("Circular references").await,
format!("{src} circularly refers to itself"),
[item.pos.clone().into()],
[item.sr.pos().into()],
));
continue;
}

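The change above is mechanical: every site that cloned a stored item.pos now derives the position on demand via item.sr.pos(). For the import_locs bookkeeping specifically, here is a hedged, self-contained restatement with simplified stand-in types (the real map is keyed by Sym and stores Pos values taken from item.sr.pos()):

use std::collections::HashMap;

// Simplified stand-ins: the real code uses HashMap<Sym, Vec<Pos>> and pushes
// item.sr.pos() rather than a bare offset.
fn record_import(import_locs: &mut HashMap<String, Vec<u32>>, src: String, pos: u32) {
    // every location that imports `src` is remembered, presumably for
    // diagnostics such as the circular-reference error above
    import_locs.entry(src).or_insert(vec![]).push(pos);
}

fn main() {
    let mut import_locs = HashMap::new();
    record_import(&mut import_locs, "foo::bar".to_string(), 10);
    record_import(&mut import_locs, "foo::bar".to_string(), 52);
    assert_eq!(import_locs["foo::bar"], vec![10, 52]);
}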
View File

@@ -20,6 +20,7 @@ use orchid_base::clone;
use orchid_base::format::{FmtCtxImpl, Format};
use orchid_base::interner::Tok;
use orchid_base::logging::Logger;
use orchid_base::name::Sym;
use orchid_base::reqnot::{DynRequester, ReqNot, Requester as _};
use crate::api;
@@ -203,6 +204,7 @@ impl Extension {
pub(crate) async fn lex_req<F: Future<Output = Option<api::SubLexed>>>(
&self,
source: Tok<String>,
src: Sym,
pos: u32,
sys: api::SysId,
mut r: impl FnMut(u32) -> F,
@@ -214,8 +216,9 @@ impl Extension {
self.0.lex_recur.lock().await.insert(id, req_in); // lex_recur released
let (ret, ()) = join(
async {
let res =
(self.reqnot()).request(api::LexExpr { id, pos, sys, text: source.to_api() }).await;
let res = (self.reqnot())
.request(api::LexExpr { id, pos, sys, src: src.to_api(), text: source.to_api() })
.await;
// collect sender to unblock recursion handler branch before returning
self.0.lex_recur.lock().await.remove(&id);
res

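lex_req now also receives the source path as a Sym and forwards it in the request as src: src.to_api(); the surrounding bookkeeping is unchanged. A hedged, self-contained sketch of that register/request/cleanup shape, with synchronous stand-ins for the async ReqNot machinery:

use std::collections::HashMap;
use std::sync::Mutex;

// Stand-in for the extension handle; the real lex_recur map stores a channel
// sender behind an async Mutex.
struct Extension { lex_recur: Mutex<HashMap<u64, &'static str>> }

impl Extension {
    // `request` stands in for reqnot().request(api::LexExpr { id, pos, sys, src, text }).await
    fn lex_req(&self, id: u64, request: impl FnOnce() -> Option<String>) -> Option<String> {
        self.lex_recur.lock().unwrap().insert(id, "recursion channel");
        let res = request();
        // collect the sender so the recursion-handler branch unblocks before returning
        self.lex_recur.lock().unwrap().remove(&id);
        res
    }
}

Keeping the insert and remove adjacent to the single request is what prevents the recursion map from leaking entries when the extension answers without recursing.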
View File

@@ -4,7 +4,8 @@ use async_std::sync::Mutex;
use futures::FutureExt;
use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
use orchid_base::interner::Tok;
use orchid_base::location::Pos;
use orchid_base::location::SrcRange;
use orchid_base::name::Sym;
use orchid_base::parse::{name_char, name_start, op_char, unrep_space};
use orchid_base::tokens::PARENS;
use orchid_base::tree::recur;
@@ -18,6 +19,7 @@ use crate::system::System;
pub struct LexCtx<'a> {
pub systems: &'a [System],
pub source: &'a Tok<String>,
pub path: &'a Sym,
pub tail: &'a str,
pub sub_trees: &'a mut Vec<Expr>,
pub ctx: &'a Ctx,
@@ -27,6 +29,7 @@ impl<'a> LexCtx<'a> {
where 'a: 'b {
LexCtx {
source: self.source,
path: self.path,
tail: &self.source[pos as usize..],
systems: self.systems,
sub_trees: &mut *self.sub_trees,
@@ -49,7 +52,7 @@ impl<'a> LexCtx<'a> {
let mut exprs = self.ctx.common_exprs.clone();
let foo = recur(subtree, &|tt, r| {
if let ParsTok::NewExpr(expr) = tt.tok {
return ParsTok::Handle(expr).at(tt.range);
return ParsTok::Handle(expr).at(tt.sr);
}
r(tt)
});
@@ -60,6 +63,7 @@ impl<'a> LexCtx<'a> {
&tree,
&mut self.ctx.common_exprs.clone(),
&mut ExprParseCtx { ctx: self.ctx.clone(), exprs: self.ctx.common_exprs.clone() },
self.path,
&self.ctx.i,
)
.await
@@ -103,7 +107,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
return Err(mk_errv(
ctx.ctx.i.i("Unterminated block comment").await,
"This block comment has no ending ]--",
[Pos::Range(start..start + 3).into()],
[SrcRange::new(start..start + 3, ctx.path).pos().into()],
));
};
ctx.set_tail(tail);
@@ -120,7 +124,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
return Err(mk_errv(
ctx.ctx.i.i("Unclosed lambda").await,
"Lambdae started with \\ should separate arguments from body with .",
[Pos::Range(start..start + 1).into()],
[SrcRange::new(start..start + 1, ctx.path).pos().into()],
));
}
arg.push(lex_once(ctx).boxed_local().await?);
@@ -135,7 +139,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
return Err(mk_errv(
ctx.ctx.i.i("unclosed paren").await,
format!("this {lp} has no matching {rp}"),
[Pos::Range(start..start + 1).into()],
[SrcRange::new(start..start + 1, ctx.path).pos().into()],
));
}
body.push(lex_once(ctx).boxed_local().await?);
@@ -153,7 +157,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
.lex(source, pos, |pos| async move {
let mut ctx_g = ctx_lck.lock().await;
match lex_once(&mut ctx_g.push(pos)).boxed_local().await {
Ok(t) => Some(api::SubLexed { pos: t.range.end, tree: ctx_g.ser_subtree(t).await }),
Ok(t) => Some(api::SubLexed { pos: t.sr.end(), tree: ctx_g.ser_subtree(t).await }),
Err(e) => {
errors_lck.lock().await.push(e);
None
@@ -185,16 +189,22 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
return Err(mk_errv(
ctx.ctx.i.i("Unrecognized character").await,
"The following syntax is meaningless.",
[Pos::Range(start..start + 1).into()],
[SrcRange::new(start..start + 1, ctx.path).pos().into()],
));
}
};
Ok(ParsTokTree { tok, range: start..ctx.get_pos() })
Ok(ParsTokTree { tok, sr: SrcRange::new(start..ctx.get_pos(), ctx.path) })
}
pub async fn lex(text: Tok<String>, systems: &[System], ctx: &Ctx) -> OrcRes<Vec<ParsTokTree>> {
pub async fn lex(
text: Tok<String>,
path: Sym,
systems: &[System],
ctx: &Ctx,
) -> OrcRes<Vec<ParsTokTree>> {
let mut sub_trees = Vec::new();
let mut ctx = LexCtx { source: &text, sub_trees: &mut sub_trees, tail: &text[..], systems, ctx };
let mut ctx =
LexCtx { source: &text, sub_trees: &mut sub_trees, tail: &text[..], systems, path: &path, ctx };
let mut tokv = Vec::new();
ctx.trim(unrep_space);
while !ctx.tail.is_empty() {

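The lexer now threads the source path through LexCtx, so every range it produces can be tied to a file: Pos::Range(a..b) at error sites becomes SrcRange::new(a..b, ctx.path).pos(), and token trees carry sr: SrcRange instead of a bare range. SrcRange itself is defined in orchid_base::location and is not shown in this diff; the stand-in below only mirrors the three calls used here (new, pos, end) to make the pattern concrete:

use std::ops::Range;

// Hypothetical stand-ins, only so the sketch is self-contained; the real Sym,
// Pos and SrcRange come from orchid_base and may differ in detail.
#[derive(Clone, Debug, PartialEq)]
struct Sym(String);
#[derive(Clone, Debug, PartialEq)]
enum Pos { Range(Range<u32>) }

#[derive(Clone, Debug)]
struct SrcRange { range: Range<u32>, path: Sym }

impl SrcRange {
    fn new(range: Range<u32>, path: &Sym) -> Self { Self { range, path: path.clone() } }
    // the real pos() presumably also records the source path
    fn pos(&self) -> Pos { Pos::Range(self.range.clone()) }
    fn end(&self) -> u32 { self.range.end }
}

fn main() {
    let path = Sym("examples::main".to_string());
    let start = 14;
    // before: [Pos::Range(start..start + 1).into()]
    // after:  [SrcRange::new(start..start + 1, ctx.path).pos().into()]
    let sr = SrcRange::new(start..start + 1, &path);
    assert_eq!(sr.pos(), Pos::Range(14..15));
    assert_eq!(sr.end(), 15);
}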
View File

@@ -7,7 +7,6 @@ use itertools::Itertools;
use orchid_base::error::{OrcRes, Reporter, mk_err, mk_errv};
use orchid_base::format::fmt;
use orchid_base::interner::{Interner, Tok};
use orchid_base::location::Pos;
use orchid_base::name::Sym;
use orchid_base::parse::{
Comment, Import, ParseCtx, Parsed, Snippet, expect_end, line_items, parse_multiname,
@@ -25,6 +24,7 @@ type ParsSnippet<'a> = Snippet<'a, Expr, Expr>;
pub struct HostParseCtxImpl<'a> {
pub ctx: Ctx,
pub src: Sym,
pub systems: &'a [System],
pub reporter: &'a Reporter,
pub interner: &'a Interner,
@@ -77,19 +77,19 @@ pub async fn parse_item(
expect_end(ctx, tail).await?;
let mut ok = Vec::new();
for tt in body {
let pos = Pos::Range(tt.range.clone());
let sr = tt.sr.clone();
match &tt.tok {
Token::Name(n) =>
ok.push(Item { comments: comments.clone(), pos, kind: ItemKind::Export(n.clone()) }),
ok.push(Item { comments: comments.clone(), sr, kind: ItemKind::Export(n.clone()) }),
Token::NS(..) => ctx.reporter().report(mk_err(
ctx.i().i("Compound export").await,
"Cannot export compound names (names containing the :: separator)",
[pos.into()],
[sr.pos().into()],
)),
t => ctx.reporter().report(mk_err(
ctx.i().i("Invalid export").await,
format!("Invalid export target {}", fmt(t, ctx.i()).await),
[pos.into()],
[sr.pos().into()],
)),
}
}
@@ -99,14 +99,14 @@ pub async fn parse_item(
Parsed { output, tail: _ } => Err(mk_errv(
ctx.i().i("Malformed export").await,
"`export` can either prefix other lines or list names inside ( )",
[Pos::Range(output.range.clone()).into()],
[output.sr.pos().into()],
)),
},
n if *n == ctx.i().i("import").await => {
let imports = parse_import(ctx, postdisc).await?;
Ok(Vec::from_iter(imports.into_iter().map(|(t, pos)| Item {
Ok(Vec::from_iter(imports.into_iter().map(|t| Item {
comments: comments.clone(),
pos,
sr: t.sr.clone(),
kind: ItemKind::Import(t),
})))
},
@@ -115,7 +115,7 @@ pub async fn parse_item(
Some(_) => Err(mk_errv(
ctx.i().i("Expected a line type").await,
"All lines must begin with a keyword",
[Pos::Range(item.pos()).into()],
[item.sr().pos().into()],
)),
None => unreachable!("These lines are filtered and aggregated in earlier stages"),
}
@@ -124,7 +124,7 @@ pub async fn parse_item(
pub async fn parse_import<'a>(
ctx: &impl HostParseCtx,
tail: ParsSnippet<'a>,
) -> OrcRes<Vec<(Import, Pos)>> {
) -> OrcRes<Vec<Import>> {
let Parsed { output: imports, tail } = parse_multiname(ctx, tail).await?;
expect_end(ctx, tail).await?;
Ok(imports)
@@ -153,10 +153,10 @@ pub async fn parse_exportable_item<'a>(
return Err(mk_errv(
ctx.i().i("Unrecognized line type").await,
format!("Line types are: const, mod, macro, grammar, {ext_lines}"),
[Pos::Range(tail.prev().range.clone()).into()],
[tail.prev().sr.pos().into()],
));
};
Ok(vec![Item { comments, pos: Pos::Range(tail.pos()), kind }])
Ok(vec![Item { comments, sr: tail.sr(), kind }])
}
pub async fn parse_module<'a>(
@@ -170,7 +170,7 @@ pub async fn parse_module<'a>(
return Err(mk_errv(
ctx.i().i("Missing module name").await,
format!("A name was expected, {} was found", fmt(output, ctx.i()).await),
[Pos::Range(output.range.clone()).into()],
[output.sr.pos().into()],
));
},
};
@@ -180,7 +180,7 @@ pub async fn parse_module<'a>(
return Err(mk_errv(
ctx.i().i("Expected module body").await,
format!("A ( block ) was expected, {} was found", fmt(output, ctx.i()).await),
[Pos::Range(output.range.clone()).into()],
[output.sr.pos().into()],
));
};
let path = path.push(name.clone());
@@ -197,7 +197,7 @@ pub async fn parse_const<'a>(
return Err(mk_errv(
ctx.i().i("Missing module name").await,
format!("A name was expected, {} was found", fmt(output, ctx.i()).await),
[Pos::Range(output.range.clone()).into()],
[output.sr.pos().into()],
));
};
let Parsed { output, tail } = try_pop_no_fluff(ctx, tail).await?;
@@ -205,7 +205,7 @@ pub async fn parse_const<'a>(
return Err(mk_errv(
ctx.i().i("Missing = separator").await,
format!("Expected = , found {}", fmt(output, ctx.i()).await),
[Pos::Range(output.range.clone()).into()],
[output.sr.pos().into()],
));
}
try_pop_no_fluff(ctx, tail).await?;
@@ -223,11 +223,11 @@ pub async fn parse_expr(
.or_else(|| tail.iter().enumerate().rev().find(|(_, tt)| !tt.is_fluff()))
else {
return Err(mk_errv(ctx.i().i("Empty expression").await, "Expression ends abruptly here", [
Pos::Range(tail.pos()).into(),
tail.sr().pos().into(),
]));
};
let (function, value) = tail.split_at(last_idx as u32);
let pos = Pos::Range(tail.pos());
let pos = tail.sr().pos();
if !function.iter().all(TokTree::is_fluff) {
let (f_psb, x_psb) = psb.split();
let x_expr = parse_expr(ctx, path.clone(), x_psb, value).boxed_local().await?;

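parse_import now returns OrcRes<Vec<Import>> instead of (Import, Pos) pairs, because each Import carries its own source range; callers build Items by reading sr straight off the import. A hedged, self-contained restatement of that mapping with simplified stand-in types:

use std::ops::Range;

// Simplified stand-ins for the real SrcRange / Import / Item / ItemKind.
#[derive(Clone, Debug)]
struct SrcRange(Range<u32>);
#[derive(Clone, Debug)]
struct Import { path: Vec<String>, name: Option<String>, sr: SrcRange }
#[derive(Debug)]
enum ItemKind { Import(Import) }
#[derive(Debug)]
struct Item { comments: Vec<String>, sr: SrcRange, kind: ItemKind }

fn imports_to_items(imports: Vec<Import>, comments: Vec<String>) -> Vec<Item> {
    imports
        .into_iter()
        // sr is cloned out of the import before the import itself moves into the kind
        .map(|t| Item { comments: comments.clone(), sr: t.sr.clone(), kind: ItemKind::Import(t) })
        .collect()
}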
View File

@@ -1,4 +1,3 @@
use std::cell::RefCell;
use std::fmt::Debug;
use std::rc::Rc;
@@ -12,7 +11,7 @@ use itertools::Itertools;
use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
use orchid_base::interner::Tok;
use orchid_base::location::Pos;
use orchid_base::location::{Pos, SrcRange};
use orchid_base::name::{NameLike, Sym};
use orchid_base::parse::{Comment, Import};
use orchid_base::tl_cache;
@@ -37,7 +36,7 @@ impl TokenVariant<api::ExprTicket> for Expr {
async fn from_api(
api: &api::ExprTicket,
ctx: &mut Self::FromApiCtx<'_>,
_: Pos,
_: SrcRange,
_: &orchid_base::interner::Interner,
) -> Self {
let expr = ctx.get_expr(*api).expect("Dangling expr");
@@ -51,7 +50,7 @@ impl TokenVariant<api::Expression> for Expr {
async fn from_api(
api: &api::Expression,
ctx: &mut Self::FromApiCtx<'_>,
_: Pos,
_: SrcRange,
_: &orchid_base::interner::Interner,
) -> Self {
Expr::from_api(api, PathSetBuilder::new(), ctx).await
@@ -76,7 +75,7 @@ impl<'a> ParsedFromApiCx<'a> {
#[derive(Debug)]
pub struct Item {
pub pos: Pos,
pub sr: SrcRange,
pub comments: Vec<Comment>,
pub kind: ItemKind,
}
@@ -88,8 +87,9 @@ pub enum ItemKind {
Import(Import),
}
impl ItemKind {
pub fn at(self, pos: Pos) -> Item { Item { comments: vec![], pos, kind: self } }
pub fn at(self, sr: SrcRange) -> Item { Item { comments: vec![], sr, kind: self } }
}
impl Format for Item {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
let comment_text = self.comments.iter().join("\n");
@@ -135,7 +135,6 @@ pub enum ParsedMemberKind {
#[derive(Debug, Default)]
pub struct ParsedModule {
pub imports: Vec<Sym>,
pub exports: Vec<Tok<String>>,
pub items: Vec<Item>,
}
@@ -148,7 +147,7 @@ impl ParsedModule {
_ => None,
})
.collect_vec();
Self { imports: vec![], exports, items }
Self { exports, items }
}
pub fn merge(&mut self, other: ParsedModule) {
let mut swap = ParsedModule::default();
@@ -178,6 +177,10 @@ impl ParsedModule {
}
Ok(cur)
}
pub fn get_imports(&self) -> impl IntoIterator<Item = &Import> {
(self.items.iter())
.filter_map(|it| if let ItemKind::Import(i) = &it.kind { Some(i) } else { None })
}
}
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub enum WalkErrorKind {
@@ -192,8 +195,7 @@ pub struct WalkError {
}
impl Format for ParsedModule {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
let import_str = self.imports.iter().map(|i| format!("import {i}")).join("\n");
let head_str = format!("{import_str}\nexport ::({})\n", self.exports.iter().join(", "));
let head_str = format!("export ::({})\n", self.exports.iter().join(", "));
Variants::sequence(self.items.len() + 1, "\n", None).units(
[head_str.into()].into_iter().chain(join_all(self.items.iter().map(|i| i.print(c))).await),
)

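ParsedModule drops its separate imports: Vec<Sym> field (and the import header in its printed form); imports are now recovered on demand by filtering the item list, which is what the new get_imports does. A self-contained sketch of that accessor over stand-in types:

// Stand-ins for the real Import / Item / ItemKind / ParsedModule.
#[derive(Debug)]
struct Import { name: Option<String> }
#[derive(Debug)]
enum ItemKind { Import(Import), Export(String) }
#[derive(Debug)]
struct Item { kind: ItemKind }
#[derive(Debug, Default)]
struct ParsedModule { items: Vec<Item> }

impl ParsedModule {
    // mirrors get_imports in this diff: no duplicated import list to keep in sync
    fn get_imports(&self) -> impl Iterator<Item = &Import> {
        self.items.iter().filter_map(|it| match &it.kind {
            ItemKind::Import(i) => Some(i),
            _ => None,
        })
    }
}

fn main() {
    let m = ParsedModule {
        items: vec![
            Item { kind: ItemKind::Import(Import { name: Some("map".into()) }) },
            Item { kind: ItemKind::Export("fold".into()) },
        ],
    };
    assert_eq!(m.get_imports().count(), 1);
}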
View File

@@ -10,6 +10,7 @@ use orchid_base::name::Sym;
use crate::api;
use crate::ctx::Ctx;
use crate::dealias::absolute_path;
use crate::expr::Expr;
use crate::parsed::{ParsedMemberKind, ParsedModule};
use crate::system::System;
@@ -37,6 +38,34 @@ impl Module {
}
Self { members }
}
async fn walk(&self, mut path: impl Iterator<Item = Tok<String>>) -> &Self { todo!() }
async fn from_parsed(
parsed: &ParsedModule,
path: Sym,
parsed_root_path: Sym,
parsed_root: &ParsedModule,
root: &Module,
preload: &mut HashMap<Sym, Module>,
) -> Self {
let mut imported_names = Vec::new();
for import in parsed.get_imports() {
if let Some(n) = import.name.clone() {
imported_names.push(n);
continue;
}
// the path in a wildcard import has to be a module
if import.path.is_empty() {
panic!("Imported root")
}
if let Some(subpath) = import.path.strip_prefix(&parsed_root_path) {
let abs = absolute_path(&path, subpath);
// path is in parsed_root
} else {
// path is in root
}
}
todo!()
}
}
pub struct Member {
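from_parsed is still a work in progress (it ends in todo!()), but the intended shape is visible: a named import is collected directly, while a wildcard import is resolved against whichever tree owns its path: the module currently being loaded when the path starts with parsed_root_path, otherwise the already-built root. A hedged sketch of that prefix test over plain string segments (the real code works on Sym paths and hands the stripped suffix to absolute_path):

// Stand-in segments: the real paths are interned Tok<String> segments in a Sym.
enum Owner<'a> { ParsedRoot { subpath: &'a [String] }, Root }

fn owning_tree<'a>(import_path: &'a [String], parsed_root_path: &[String]) -> Owner<'a> {
    assert!(!import_path.is_empty(), "Imported root"); // mirrors the panic above
    match import_path.strip_prefix(parsed_root_path) {
        // path is in parsed_root: resolve the remainder relative to the loading module
        Some(subpath) => Owner::ParsedRoot { subpath },
        // path is in root: resolve against the already-built module tree
        None => Owner::Root,
    }
}

Carrying the stripped suffix forward is presumably what lets absolute_path resolve the remainder relative to the loading module rather than the global root.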