Custom lexers can now terminate operators
Some checks failed
Rust / build (push) Has been cancelled

New constraint: custom lexer output is dropped whenever it is used to terminate an operator nested inside another custom lexer, because the recursive call has to return exactly one lexeme.
This commit is contained in:
2026-01-25 17:52:18 +01:00
parent b9f1bb74d7
commit c461f82de1
17 changed files with 333 additions and 142 deletions

View File

@@ -1,7 +1,5 @@
let user = string::concat "lorinc" (string::concat " " "bethlenfalvy")
let number = 1 + 2
let interpolated = "Hello $user $number"
let foo = 1 + 2
let ffmain = "hello $foo"
let user = r[ "foo" 1, "bar" 3 ]
let ffmain = user . "foo" |> option::expect "missing value"
let main = 1
let user = r[ "foo" 1, "bar" t[3, 4] ]
let main = user."bar".1

View File

@@ -28,7 +28,7 @@ impl Request for LexExpr {
#[derive(Clone, Debug, Coding)]
pub struct LexedExpr {
pub pos: u32,
pub expr: TokenTree,
pub expr: Vec<TokenTree>,
}
#[derive(Clone, Debug, Coding, Hierarchy)]

View File

@@ -28,6 +28,6 @@ pub mod reqnot;
pub mod sequence;
pub mod side;
pub mod stash;
mod tl_cache;
pub mod tl_cache;
pub mod tokens;
pub mod tree;

View File

@@ -198,15 +198,11 @@ impl Sym {
let items = v.into_iter().collect_vec();
Self::from_tok(iv(&items).await)
}
/// Read a `::` separated namespaced name.
/// Read a `::` separated namespaced name. Do not use this for statically
/// known names, use the [sym] macro instead which is cached.
pub async fn parse(s: &str) -> Result<Self, EmptyNameError> {
Ok(Sym(iv(&VName::parse(s).await?.into_vec()).await))
}
/// Read a `::` separated namespaced name from a static string where.
pub async fn literal(s: &'static str) -> Self {
assert!(!s.is_empty(), "Literal cannot be empty");
Self::parse(s).await.unwrap()
}
/// Assert that a token isn't empty, and wrap it in a [Sym]
pub fn from_tok(t: IStrv) -> Result<Self, EmptyNameError> {
if t.is_empty() { Err(EmptyNameError) } else { Ok(Self(t)) }
@@ -290,20 +286,28 @@ impl NameLike for VName {}
/// Create a [Sym] literal.
///
/// Both the name and its components will be cached in a thread-local static so
/// The name and its components will be cached in a thread-local static so
/// that subsequent executions of the expression only incur an Arc-clone for
/// cloning the token.
#[macro_export]
macro_rules! sym {
($seg1:tt $( :: $seg:tt)*) => {
$crate::name::Sym::from_tok(
$crate::interner::iv(&[
$crate::interner::is(stringify!($seg1)).await
$( , $crate::interner::is(stringify!($seg)).await )*
])
.await
).unwrap()
$crate::tl_cache!(async $crate::name::Sym : {
$crate::name::Sym::from_tok(
$crate::interner::iv(&[
$crate::interner::is($crate::sym!(@SEG $seg1)).await
$( , $crate::interner::is($crate::sym!(@SEG $seg)).await )*
])
.await
).unwrap()
})
};
(@SEG [ $($data:tt)* ]) => {
stringify!($($data)*)
};
(@SEG $data:tt) => {
stringify!($data)
};
}
/// Create a [VName] literal.
@@ -312,10 +316,12 @@ macro_rules! sym {
#[macro_export]
macro_rules! vname {
($seg1:tt $( :: $seg:tt)*) => {
$crate::name::VName::new([
$crate::interner::is(stringify!($seg1)).await
$( , $crate::interner::is(stringify!($seg)).await )*
]).unwrap()
$crate::tl_cache!(async $crate::name::VName : {
$crate::name::VName::new([
$crate::interner::is(stringify!($seg1)).await
$( , $crate::interner::is(stringify!($seg)).await )*
]).unwrap()
})
};
}
@@ -325,10 +331,12 @@ macro_rules! vname {
#[macro_export]
macro_rules! vpath {
($seg1:tt $( :: $seg:tt)*) => {
$crate::name::VPath(vec![
$crate::interner::is(stringify!($seg1)).await
$( , $crate::interner::is(stringify!($seg)).await )*
])
$crate::tl_cache!(async $crate::name::VPath : {
$crate::name::VPath(vec![
$crate::interner::is(stringify!($seg1)).await
$( , $crate::interner::is(stringify!($seg)).await )*
])
})
};
() => {
$crate::name::VPath(vec![])
@@ -339,30 +347,43 @@ macro_rules! vpath {
pub mod test {
use std::borrow::Borrow;
use orchid_api_traits::spin_on;
use super::{NameLike, Sym, VName};
use crate::interner::{IStr, is};
use crate::interner::local_interner::local_interner;
use crate::interner::{IStr, is, with_interner};
use crate::name::VPath;
pub async fn recur() {
let myname = vname!(foo::bar);
let _borrowed_slice: &[IStr] = myname.borrow();
let _deref_pathslice: &[IStr] = &myname;
let _as_slice_out: &[IStr] = myname.as_slice();
#[test]
pub fn recur() {
spin_on(with_interner(local_interner(), async {
let myname = vname!(foo::bar);
let _borrowed_slice: &[IStr] = myname.borrow();
let _deref_pathslice: &[IStr] = &myname;
let _as_slice_out: &[IStr] = myname.as_slice();
}))
}
/// Tests that literals are correctly interned as equal
pub async fn literals() {
assert_eq!(
sym!(foo::bar::baz),
Sym::new([is("foo").await, is("bar").await, is("baz").await]).await.unwrap()
);
assert_eq!(
vname!(foo::bar::baz),
VName::new([is("foo").await, is("bar").await, is("baz").await]).unwrap()
);
assert_eq!(
vpath!(foo::bar::baz),
VPath::new([is("foo").await, is("bar").await, is("baz").await])
);
#[test]
pub fn literals() {
spin_on(with_interner(local_interner(), async {
assert_eq!(
sym!(foo::bar::baz),
Sym::new([is("foo").await, is("bar").await, is("baz").await]).await.unwrap()
);
assert_eq!(
sym!(foo::bar::[|>]),
Sym::new([is("foo").await, is("bar").await, is("|>").await]).await.unwrap()
);
assert_eq!(
vname!(foo::bar::baz),
VName::new([is("foo").await, is("bar").await, is("baz").await]).unwrap()
);
assert_eq!(
{ vpath!(foo::bar::baz) },
VPath::new([is("foo").await, is("bar").await, is("baz").await])
);
}))
}
}

View File

@@ -16,7 +16,7 @@ use crate::tree::{ExprRepr, ExtraTok, Paren, TokTree, Token, ttv_fmt, ttv_range}
pub fn name_start(c: char) -> bool { c.is_alphabetic() || c == '_' }
pub fn name_char(c: char) -> bool { name_start(c) || c.is_numeric() }
pub fn op_char(c: char) -> bool { !name_char(c) && !c.is_whitespace() && !"()[]{}\\".contains(c) }
pub fn op_char(c: char) -> bool { !name_char(c) && !unrep_space(c) && !"()[]{}\\".contains(c) }
pub fn unrep_space(c: char) -> bool { c.is_whitespace() && !"\r\n".contains(c) }
/// A cheaply copiable subsection of a document that holds onto context data and

View File

@@ -6,4 +6,18 @@ macro_rules! tl_cache {
}
V.with(|v| v.clone())
}};
(async $ty:ty : $expr:expr) => {{
type CellType = std::cell::OnceCell<$ty>;
thread_local! {
static V: CellType = std::cell::OnceCell::default();
}
match V.with(|cell: &CellType| cell.get().cloned()) {
Some(val) => val as $ty,
None => {
let val = $expr;
let _ = V.with(|cell: &CellType| cell.set(val.clone()));
val as $ty
},
}
}};
}

View File

@@ -277,7 +277,11 @@ impl ExtensionBuilder {
return handle.reply(&lex, &eopt).await;
},
Ok((s, expr)) => {
let expr = expr.into_api(&mut (), &mut ()).await;
let expr = join_all(
(expr.into_iter())
.map(|tok| async { tok.into_api(&mut (), &mut ()).await }),
)
.await;
let pos = (text.len() - s.len()) as u32;
expr_store.dispose().await;
return handle.reply(&lex, &Some(Ok(api::LexedExpr { pos, expr }))).await;

View File

@@ -72,12 +72,22 @@ impl<'a> LexContext<'a> {
}
}
/// Value a [Lexer] may return: either one token tree or a sequence of them.
/// Used via `impl LexedData` in [Lexer::lex]; [DynLexer] normalizes it to a
/// `Vec<GenTokTree>` through [LexedData::into_vec].
pub trait LexedData {
/// Convert the lexer output into a uniform vector of token trees.
fn into_vec(self) -> Vec<GenTokTree>;
}
impl LexedData for GenTokTree {
// A single tree becomes a one-element vector.
fn into_vec(self) -> Vec<GenTokTree> { vec![self] }
}
impl LexedData for Vec<GenTokTree> {
// Already a vector; pass through unchanged.
fn into_vec(self) -> Vec<GenTokTree> { self }
}
pub trait Lexer: Debug + Send + Sync + Sized + Default + 'static {
const CHAR_FILTER: &'static [RangeInclusive<char>];
fn lex<'a>(
tail: &'a str,
lctx: &'a LexContext<'a>,
) -> impl Future<Output = OrcRes<(&'a str, GenTokTree)>>;
) -> impl Future<Output = OrcRes<(&'a str, impl LexedData)>>;
}
pub trait DynLexer: Debug + Send + Sync + 'static {
@@ -86,7 +96,7 @@ pub trait DynLexer: Debug + Send + Sync + 'static {
&self,
tail: &'a str,
ctx: &'a LexContext<'a>,
) -> LocalBoxFuture<'a, OrcRes<(&'a str, GenTokTree)>>;
) -> LocalBoxFuture<'a, OrcRes<(&'a str, Vec<GenTokTree>)>>;
}
impl<T: Lexer> DynLexer for T {
@@ -95,8 +105,8 @@ impl<T: Lexer> DynLexer for T {
&self,
tail: &'a str,
ctx: &'a LexContext<'a>,
) -> LocalBoxFuture<'a, OrcRes<(&'a str, GenTokTree)>> {
T::lex(tail, ctx).boxed_local()
) -> LocalBoxFuture<'a, OrcRes<(&'a str, Vec<GenTokTree>)>> {
async { T::lex(tail, ctx).await.map(|(s, d)| (s, d.into_vec())) }.boxed_local()
}
}

View File

@@ -1,7 +1,10 @@
use std::collections::VecDeque;
use std::ops::Range;
use futures::FutureExt;
use futures::lock::Mutex;
use orchid_base::clone;
use orchid_base::error::{OrcErrv, OrcRes, mk_errv};
use orchid_base::error::{OrcErrv, OrcRes, mk_errv, report};
use orchid_base::interner::{IStr, is};
use orchid_base::location::SrcRange;
use orchid_base::name::Sym;
@@ -23,10 +26,11 @@ pub struct LexCtx<'a> {
pub tail: &'a str,
pub sub_trees: &'a mut Vec<Expr>,
pub ctx: &'a Ctx,
pub produced: &'a mut VecDeque<ParsTokTree>,
}
impl<'a> LexCtx<'a> {
#[must_use]
pub fn push<'b>(&'b mut self, pos: u32) -> LexCtx<'b>
pub fn sub<'b>(&'b mut self, pos: u32, produced: &'b mut VecDeque<ParsTokTree>) -> LexCtx<'b>
where 'a: 'b {
LexCtx {
source: self.source,
@@ -35,6 +39,7 @@ impl<'a> LexCtx<'a> {
systems: self.systems,
sub_trees: &mut *self.sub_trees,
ctx: self.ctx,
produced,
}
}
#[must_use]
@@ -44,6 +49,7 @@ impl<'a> LexCtx<'a> {
pub fn set_pos(&mut self, pos: u32) { self.tail = &self.source[pos as usize..] }
pub fn push_pos(&mut self, delta: u32) { self.set_pos(self.get_pos() + delta) }
pub fn set_tail(&mut self, tail: &'a str) { self.tail = tail }
pub fn pos_from(&self, tail: &'a str) -> u32 { (self.source.len() - tail.len()) as u32 }
#[must_use]
pub fn strip_prefix(&mut self, tgt: &str) -> bool {
if let Some(src) = self.tail.strip_prefix(tgt) {
@@ -79,23 +85,41 @@ impl<'a> LexCtx<'a> {
self.tail = rest;
matches
}
/// Remove and return the first character of the remaining input,
/// advancing `tail` past it. Returns `None` when the input is exhausted.
pub fn pop_char(&mut self) -> Option<char> {
let mut chars = self.tail.chars();
let ret = chars.next()?;
// `as_str` yields the untraversed remainder, so `tail` moves one char forward.
self.tail = chars.as_str();
Some(ret)
}
pub fn sr_to(&self, start: u32) -> SrcRange { self.sr(start..self.get_pos()) }
pub fn sr(&self, range: Range<u32>) -> SrcRange { SrcRange::new(range, self.path) }
}
pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<bool> {
ctx.trim(unrep_space);
if ctx.tail.is_empty() {
return Ok(false);
}
let start = ctx.get_pos();
assert!(
!ctx.tail.is_empty() && !ctx.tail.starts_with(unrep_space),
"Lexing empty string or whitespace to token!\n\
Invocations of lex_tok should check for empty string"
);
let tok = if ctx.strip_prefix("\r\n") || ctx.strip_prefix("\r") || ctx.strip_prefix("\n") {
ParsTok::BR
} else if let Some(tail) = (ctx.tail.starts_with(name_start).then_some(ctx.tail))
.and_then(|t| t.trim_start_matches(name_char).strip_prefix("::"))
{
let name = &ctx.tail[..ctx.tail.len() - tail.len() - "::".len()];
ctx.set_tail(tail);
let body = lex_once(ctx).boxed_local().await?;
let mut produced = VecDeque::new();
let mut sub_cx = ctx.sub(ctx.pos_from(tail), &mut produced);
if !lex_once(&mut sub_cx).boxed_local().await? {
return Err(mk_errv(
is("Unexpected end of source text").await,
":: cannot be the last token",
[SrcRange::new(start..ctx.get_pos(), ctx.path)],
));
}
let pos = sub_cx.get_pos();
ctx.set_pos(pos);
let body = produced.pop_front().expect("lex_once returned true");
ctx.produced.extend(produced.into_iter());
ParsTok::NS(is(name).await, Box::new(body))
} else if ctx.strip_prefix("--[") {
let Some((cmt, tail)) = ctx.tail.split_once("]--") else {
@@ -113,96 +137,169 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
ParsTok::Comment(is(&tail[2..end]).await)
} else if let Some(tail) = ctx.tail.strip_prefix('\\').filter(|t| t.starts_with(name_start)) {
// fanciness like \$placeh in templates is resolved in the macro engine.
ctx.set_tail(tail);
let arg = lex_once(ctx).boxed_local().await?;
let start = ctx.get_pos();
let mut produced = VecDeque::new();
let mut sub_cx = ctx.sub(ctx.pos_from(tail), &mut produced);
if !lex_once(&mut sub_cx).boxed_local().await? {
return Err(mk_errv(
is("Unexpected end of file").await,
"Expected a lambda argument and body",
[SrcRange::new(start..ctx.get_pos(), ctx.path)],
));
}
let pos = sub_cx.get_pos();
ctx.set_pos(pos);
let arg = produced.pop_front().expect("lex_once returned true");
ctx.produced.extend(produced);
ctx.trim_ws();
ParsTok::LambdaHead(Box::new(arg))
} else if let Some((lp, rp, paren)) = PARENS.iter().find(|(lp, ..)| ctx.strip_char(*lp)) {
let mut body = Vec::new();
let mut body = VecDeque::new();
ctx.trim_ws();
while !ctx.strip_char(*rp) {
if ctx.tail.is_empty() {
let mut sub_cx = ctx.sub(ctx.get_pos(), &mut body);
if !lex_once(&mut sub_cx).boxed_local().await? {
return Err(mk_errv(
is("unclosed paren").await,
format!("this {lp} has no matching {rp}"),
[SrcRange::new(start..start + 1, ctx.path)],
));
}
body.push(lex_once(ctx).boxed_local().await?);
let pos = sub_cx.get_pos();
ctx.set_pos(pos);
ctx.trim_ws();
}
ParsTok::S(*paren, body)
ParsTok::S(*paren, body.into_iter().collect())
} else if let Some(res) = sys_lex(ctx).await {
let token = res?;
ctx.produced.extend(token);
return Ok(true);
} else if ctx.tail.starts_with(name_start) {
ParsTok::Name(is(ctx.get_start_matches(name_char)).await)
} else if ctx.tail.starts_with(op_char) {
let whole_tail = ctx.tail;
ctx.pop_char().expect("The above check would have failed");
let mut tail_after_op = ctx.tail;
let mut lookahead = Vec::new();
while !ctx.tail.is_empty() && ctx.tail.starts_with(op_char) {
match sys_lex(ctx).await {
None => {
ctx.pop_char();
tail_after_op = ctx.tail;
},
Some(sys_res) => {
match sys_res {
Err(e) => report(e),
Ok(tokv) => lookahead = tokv,
}
break;
},
}
}
let op_str = &whole_tail[0..whole_tail.len() - tail_after_op.len()];
ctx.produced.push_back(ParsTok::Name(is(op_str).await).at(ctx.sr_to(start)));
ctx.produced.extend(lookahead);
return Ok(true);
} else {
for sys in ctx.systems {
let mut errors = Vec::new();
if ctx.tail.starts_with(|c| sys.can_lex(c)) {
let (source, pos, path) = (ctx.source.clone(), ctx.get_pos(), ctx.path.clone());
let temp_store = ctx.ctx.exprs.derive();
let ctx_lck = &Mutex::new(&mut *ctx);
let errors_lck = &Mutex::new(&mut errors);
let temp_store_cb = temp_store.clone();
let lx = sys
.lex(source, path, pos, |pos| {
clone!(temp_store_cb);
async move {
let mut ctx_g = ctx_lck.lock().await;
match lex_once(&mut ctx_g.push(pos)).boxed_local().await {
Ok(t) => Some(api::SubLexed {
pos: t.sr.end(),
tree: ctx_g.ser_subtree(t, temp_store_cb.clone()).await,
}),
Err(e) => {
errors_lck.lock().await.push(e);
None
},
}
return Err(mk_errv(
is("Unrecognized character").await,
"The following syntax is meaningless.",
[SrcRange::new(start..start + 1, ctx.path)],
));
};
ctx.produced.push_back(ParsTokTree { tok, sr: ctx.sr_to(start) });
Ok(true)
}
/// Parse one token via any of the systems, if we can
///
/// This function never writes lookahead
pub async fn sys_lex(ctx: &mut LexCtx<'_>) -> Option<OrcRes<Vec<ParsTokTree>>> {
for sys in ctx.systems {
let mut errors = Vec::new();
if ctx.tail.starts_with(|c| sys.can_lex(c)) {
let (source, pos, path) = (ctx.source.clone(), ctx.get_pos(), ctx.path.clone());
let temp_store = ctx.ctx.exprs.derive();
let ctx_lck = &Mutex::new(&mut *ctx);
let errors_lck = &Mutex::new(&mut errors);
let temp_store_cb = temp_store.clone();
let lx = sys
.lex(source, path, pos, |pos| {
clone!(temp_store_cb);
async move {
let mut ctx_g = ctx_lck.lock().await;
let mut produced = VecDeque::new();
let mut sub_cx = ctx_g.sub(pos, &mut produced);
let lex_res = lex_once(&mut sub_cx).boxed_local().await;
let pos1 = sub_cx.get_pos();
ctx_g.set_pos(pos1);
match lex_res {
Ok(false) => {
errors_lck.lock().await.push(mk_errv(
is("End of file").await,
"Unexpected end of source text",
[ctx_g.sr_to(pos)],
));
None
},
Ok(true) => {
let tok = produced.pop_front().unwrap();
Some(api::SubLexed {
pos: tok.sr.end(),
tree: ctx_g.ser_subtree(tok, temp_store_cb.clone()).await,
})
},
Err(e) => {
errors_lck.lock().await.push(e);
None
},
}
})
.await;
match lx {
Err(e) => return Err(errors.into_iter().fold(OrcErrv::from_api(&e).await, |a, b| a + b)),
Ok(Some(lexed)) => {
ctx.set_pos(lexed.pos);
let lexed_tree = ctx.des_subtree(&lexed.expr, temp_store).await;
let stable_tree = recur(lexed_tree, &|tt, r| {
}
})
.await;
match lx {
Err(e) =>
return Some(Err(errors.into_iter().fold(OrcErrv::from_api(&e).await, |a, b| a + b))),
Ok(Some(lexed)) => {
ctx.set_pos(lexed.pos);
let mut stable_trees = Vec::new();
for tok in lexed.expr {
stable_trees.push(recur(ctx.des_subtree(&tok, temp_store.clone()).await, &|tt, r| {
if let ParsTok::NewExpr(expr) = tt.tok {
return ParsTok::Handle(expr).at(tt.sr);
}
r(tt)
});
return Ok(stable_tree);
},
Ok(None) => match errors.into_iter().reduce(|a, b| a + b) {
Some(errors) => return Err(errors),
None => continue,
},
}
}));
}
return Some(Ok(stable_trees));
},
Ok(None) => match errors.into_iter().reduce(|a, b| a + b) {
Some(errors) => return Some(Err(errors)),
None => continue,
},
}
}
if ctx.tail.starts_with(name_start) {
ParsTok::Name(is(ctx.get_start_matches(name_char)).await)
} else if ctx.tail.starts_with(op_char) {
ParsTok::Name(is(ctx.get_start_matches(op_char)).await)
} else {
return Err(mk_errv(
is("Unrecognized character").await,
"The following syntax is meaningless.",
[SrcRange::new(start..start + 1, ctx.path)],
));
}
};
Ok(ParsTokTree { tok, sr: SrcRange::new(start..ctx.get_pos(), ctx.path) })
}
None
}
pub async fn lex(text: IStr, path: Sym, systems: &[System], ctx: &Ctx) -> OrcRes<Vec<ParsTokTree>> {
let mut sub_trees = Vec::new();
let mut ctx =
LexCtx { source: &text, sub_trees: &mut sub_trees, tail: &text[..], systems, path: &path, ctx };
let mut tokv = Vec::new();
let mut produced = VecDeque::new();
let mut ctx = LexCtx {
source: &text,
sub_trees: &mut sub_trees,
tail: &text[..],
systems,
path: &path,
ctx,
produced: &mut produced,
};
ctx.trim(unrep_space);
while !ctx.tail.is_empty() {
tokv.push(lex_once(&mut ctx).await?);
while lex_once(&mut ctx).await? {
ctx.trim(unrep_space);
}
Ok(tokv)
Ok(produced.into())
}

View File

@@ -1,3 +1,4 @@
#![allow(refining_impl_trait)]
mod macros;
mod std;

View File

@@ -62,7 +62,7 @@ impl System for MacroSystem {
sym!(pattern::match),
sym!(pattern::ref),
sym!(pattern::=>),
Sym::literal("std::fn::|>").await,
sym!(std::fn::[|>]),
]
}
fn lexers() -> Vec<LexerObj> { vec![&MacTreeLexer, &PhLexer] }

View File

@@ -40,7 +40,7 @@ impl TryFromExpr for Int {
}
impl Supports<GetTagIdMethod> for Int {
async fn handle(&self, _: GetTagIdMethod) -> <GetTagIdMethod as Request>::Response {
Sym::parse("std::number::Int").await.unwrap().to_api()
sym!(std::number::Int).to_api()
}
}
impl Supports<GetImplMethod> for Int {
@@ -90,7 +90,7 @@ impl TryFromExpr for Float {
}
impl Supports<GetTagIdMethod> for Float {
async fn handle(&self, _: GetTagIdMethod) -> <GetTagIdMethod as Request>::Response {
Sym::parse("std::number::Float").await.unwrap().to_api()
sym!(std::number::Float).to_api()
}
}
impl Supports<GetImplMethod> for Float {

View File

@@ -45,7 +45,7 @@ impl OwnedAtom for RecordAtom {
}
impl Supports<GetTagIdMethod> for RecordAtom {
async fn handle(&self, _: GetTagIdMethod) -> <GetTagIdMethod as Request>::Response {
Sym::literal("std::record::Record").await.to_api()
sym!(std::record::Record).to_api()
}
}
impl Supports<GetImplMethod> for RecordAtom {

View File

@@ -1,12 +1,15 @@
use std::rc::Rc;
use hashbrown::HashMap;
use itertools::Itertools;
use orchid_base::error::mk_errv;
use orchid_base::interner::is;
use orchid_extension::atom::TAtom;
use orchid_extension::atom_owned::own;
use orchid_extension::expr::Expr;
use orchid_extension::gen_expr::arg;
use orchid_extension::tree::{GenMember, cnst, fun, prefix};
use crate::std::option::OrcOpt;
use crate::std::record::record_atom::RecordAtom;
use crate::std::string::str_atom::IntStrAtom;
@@ -19,7 +22,15 @@ pub fn gen_record_lib() -> Vec<GenMember> {
RecordAtom(Rc::new(map))
}),
fun(true, "get", async |map: TAtom<RecordAtom>, key: IntStrAtom| {
OrcOpt(own(&map).await.0.get(&key.0).cloned())
let record = own(&map).await;
match record.0.get(&key.0) {
Some(val) => Ok(val.clone()),
None => Err(mk_errv(
is("Key not found in record").await,
format!("{} is not in this record, valid keys are {}", key.0, record.0.keys().join(", ")),
[arg(0).pos.clone(), arg(1).pos.clone()],
)),
}
}),
fun(true, "delete", async |map: TAtom<RecordAtom>, key: IntStrAtom| {
let mut map = own(&map).await.0.as_ref().clone();

View File

@@ -35,7 +35,11 @@ impl Atomic for StrAtom {
type Variant = OwnedVariant;
type Data = ();
fn reg_reqs() -> MethodSetBuilder<Self> {
MethodSetBuilder::new().handle::<StringGetValMethod>().handle::<ToStringMethod>()
MethodSetBuilder::new()
.handle::<StringGetValMethod>()
.handle::<ToStringMethod>()
.handle::<GetTagIdMethod>()
.handle::<GetImplMethod>()
}
}
impl StrAtom {
@@ -70,7 +74,7 @@ impl Supports<ToStringMethod> for StrAtom {
}
impl Supports<GetTagIdMethod> for StrAtom {
async fn handle(&self, _: GetTagIdMethod) -> <GetTagIdMethod as Request>::Response {
Sym::literal("std::string::StrAtom").await.to_api()
sym!(std::string::StrAtom).to_api()
}
}
impl Supports<GetImplMethod> for StrAtom {
@@ -126,7 +130,7 @@ impl Supports<ToStringMethod> for IntStrAtom {
}
impl Supports<GetTagIdMethod> for IntStrAtom {
async fn handle(&self, _: GetTagIdMethod) -> <GetTagIdMethod as Request>::Response {
Sym::literal("std::string::IntStrAtom").await.to_api()
sym!(std::string::IntStrAtom).to_api()
}
}
impl Supports<GetImplMethod> for IntStrAtom {

View File

@@ -11,14 +11,17 @@ use orchid_api_traits::Request;
use orchid_base::error::{OrcRes, mk_errv};
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
use orchid_base::interner::is;
use orchid_extension::atom::{Atomic, TAtom};
use orchid_base::name::Sym;
use orchid_base::sym;
use orchid_extension::atom::{Atomic, MethodSetBuilder, Supports, TAtom};
use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant, own};
use orchid_extension::conv::{ToExpr, TryFromExpr};
use orchid_extension::expr::{Expr, ExprHandle};
use orchid_extension::gen_expr::GExpr;
use orchid_extension::gen_expr::{GExpr, sym_ref};
use orchid_extension::system::dep_req;
use orchid_extension::tree::{GenMember, cnst, fun, prefix};
use crate::std::protocol::types::{GetImplMethod, GetTagIdMethod};
use crate::std::std_system::StdReq;
use crate::{Int, StdSystem, api};
@@ -28,6 +31,9 @@ pub struct Tuple(pub(super) Rc<Vec<Expr>>);
impl Atomic for Tuple {
type Data = Vec<api::ExprTicket>;
type Variant = OwnedVariant;
fn reg_reqs() -> orchid_extension::atom::MethodSetBuilder<Self> {
MethodSetBuilder::new().handle::<GetTagIdMethod>().handle::<GetImplMethod>()
}
}
impl OwnedAtom for Tuple {
@@ -46,6 +52,24 @@ impl OwnedAtom for Tuple {
.units_own(join_all(self.0.iter().map(|x| x.print(c))).await)
}
}
impl Supports<GetTagIdMethod> for Tuple {
async fn handle(&self, _: GetTagIdMethod) -> <GetTagIdMethod as Request>::Response {
sym!(std::tuple).to_api()
}
}
impl Supports<GetImplMethod> for Tuple {
async fn handle(&self, req: GetImplMethod) -> <GetImplMethod as Request>::Response {
let name = Sym::from_api(req.0).await;
let val = if name == sym!(std::ops::get) {
sym_ref(sym!(std::tuple::get))
} else if name == sym!(std::ops::set) {
sym_ref(sym!(std::tuple::set))
} else {
return None;
};
Some(val.create().await.serialize().await)
}
}
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(StdReq)]

View File

@@ -205,8 +205,14 @@ async fn main() -> io::Result<ExitCode> {
let mut file = File::open(file.as_std_path()).unwrap();
let mut buf = String::new();
file.read_to_string(&mut buf).unwrap();
let lexemes = lex(is(&buf).await, sym!(usercode), &systems, ctx).await.unwrap();
println!("{}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl::default()).await, true))
match lex(is(&buf).await, sym!(usercode), &systems, ctx).await {
Ok(lexemes) =>
println!("{}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl::default()).await, true)),
Err(e) => {
eprintln!("{e}");
exit_code1.replace(ExitCode::FAILURE);
},
}
},
Commands::Parse { file } => {
let (_, systems) = init_systems(&args.system, &extensions).await.unwrap();
@@ -472,6 +478,7 @@ async fn main() -> io::Result<ExitCode> {
for (rc, expr) in &exprs {
eprintln!("{rc}x {:?} = {}", expr.id(), fmt(expr).await)
}
std::process::abort()
};
futures::future::select(
pin!(cleanup),