forked from Orchid/orchid
Various progress, doesn't compile
Added prelude, made lambdas a single-token prefix like NS, made progress on implementations, removed const line type
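The lambda change ("made lambdas a single-token prefix like NS") means `Token::LambdaHead` now carries one boxed token instead of a token vector. A rough, self-contained sketch of that shape change — the type names below are stand-ins, not the real generic definitions from the hunks further down:

#[allow(dead_code)]
mod lambda_head_sketch {
  type TStr = String;   // stand-in for the interned string handle
  pub struct TokenTree; // stand-in for the recursive token tree

  // Before: a lambda head collected every token up to the `.` into a vector.
  pub enum TokenBefore {
    LambdaHead(Vec<TokenTree>),
    NS(TStr, Box<TokenTree>),
    Name(TStr),
  }

  // After this commit: `\` prefixes exactly one token, boxed just like `NS`.
  pub enum TokenAfter {
    LambdaHead(Box<TokenTree>),
    NS(TStr, Box<TokenTree>),
    Name(TStr),
  }
}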
Cargo.lock (generated)
@@ -1241,6 +1241,7 @@ dependencies = [
 "orchid-extension",
 "ordered-float",
 "rust_decimal",
+"substack",
 "test_executors",
 "tokio",
 ]
@@ -59,7 +59,7 @@ pub struct ParsedMember {
 #[derive(Clone, Debug, Coding)]
 pub enum ParsedMemberKind {
   Constant(ParsedConstId),
-  Module(Vec<ParsedLine>),
+  Module { lines: Vec<ParsedLine>, use_prelude: bool },
 }

 /// Obtain the value of a parsed constant. This is guaranteed to be called after
@@ -5,7 +5,7 @@ use orchid_api_derive::{Coding, Hierarchy};
 use orchid_api_traits::Request;
 use ordered_float::NotNan;

-use crate::{CharFilter, ExtHostReq, HostExtNotif, HostExtReq, MemberKind, TStr};
+use crate::{CharFilter, ExtHostReq, HostExtNotif, HostExtReq, MemberKind, TStr, TStrv};

 /// ID of a system type
 #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
@@ -63,6 +63,7 @@ pub struct NewSystemResponse {
   pub lex_filter: CharFilter,
   pub line_types: Vec<TStr>,
   pub const_root: HashMap<TStr, MemberKind>,
+  pub prelude: Vec<TStrv>,
 }

 #[derive(Clone, Debug, Coding, Hierarchy)]
@@ -27,7 +27,7 @@ pub struct TokenTree {
 #[derive(Clone, Debug, Coding)]
 pub enum Token {
   /// Lambda function head, from the opening \ until the beginning of the body.
-  LambdaHead(Vec<TokenTree>),
+  LambdaHead(Box<TokenTree>),
   /// A name segment or an operator.
   Name(TStr),
   /// A newly generated expression. The last place this is supposed to happen is
@@ -1,8 +1,8 @@
 #[macro_export]
 macro_rules! clone {
-  ($($n:ident),+; $body:expr) => (
+  ($($n:ident $($mut:ident)?),+; $body:expr) => (
     {
-      $( let $n = $n.clone(); )+
+      $( let $($mut)? $n = $n.clone(); )+
       $body
     }
   );
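The `clone!` hunk above adds an optional `mut` marker after each captured name, expanding to `let mut <name> = <name>.clone();`. A small self-contained usage sketch — the variable names are invented for illustration, and the macro body is copied verbatim from the hunk:

macro_rules! clone {
  ($($n:ident $($mut:ident)?),+; $body:expr) => (
    {
      $( let $($mut)? $n = $n.clone(); )+
      $body
    }
  );
}

fn main() {
  let shared = std::rc::Rc::new(1);
  let buffer: Vec<u8> = Vec::new();

  // Pre-existing form: shadow `shared` with a clone before moving it into the closure.
  let add_one = clone!(shared; move || *shared + 1);

  // New form: `mut` after the name makes the shadowing clone mutable.
  let len_with_one_more = clone!(buffer mut; {
    buffer.push(1);
    buffer.len()
  });

  assert_eq!(add_one(), 2);
  assert_eq!(len_with_one_more, 1);
}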
@@ -15,7 +15,7 @@ trait_set! {
   pub trait GetSrc = FnMut(&Sym) -> Tok<String>;
 }

-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, PartialEq, Eq)]
 pub enum Pos {
   None,
   SlotTarget,
@@ -272,34 +272,34 @@ pub trait NameLike:
   /// Convert into held slice
   fn as_slice(&self) -> &[Tok<String>] { Borrow::<[Tok<String>]>::borrow(self) }
   /// Get iterator over tokens
-  fn iter(&self) -> impl NameIter + '_ { self.as_slice().iter().cloned() }
+  fn segs(&self) -> impl NameIter + '_ { self.as_slice().iter().cloned() }
   /// Get iterator over string segments
   fn str_iter(&self) -> impl Iterator<Item = &'_ str> + '_ {
     self.as_slice().iter().map(|t| t.as_str())
   }
   /// Fully resolve the name for printing
   #[must_use]
-  fn to_strv(&self) -> Vec<String> { self.iter().map(|s| s.to_string()).collect() }
+  fn to_strv(&self) -> Vec<String> { self.segs().map(|s| s.to_string()).collect() }
   /// Format the name as an approximate filename
-  fn as_src_path(&self) -> String { format!("{}.orc", self.iter().join("/")) }
+  fn as_src_path(&self) -> String { format!("{}.orc", self.segs().join("/")) }
   /// Return the number of segments in the name
-  fn len(&self) -> NonZeroUsize {
-    NonZeroUsize::try_from(self.iter().count()).expect("NameLike never empty")
+  fn len_nz(&self) -> NonZeroUsize {
+    NonZeroUsize::try_from(self.segs().count()).expect("NameLike never empty")
   }
   /// Like slice's `split_first` except we know that it always returns Some
-  fn split_first(&self) -> (Tok<String>, &[Tok<String>]) {
+  fn split_first_seg(&self) -> (Tok<String>, &[Tok<String>]) {
     let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty");
     (foot.clone(), torso)
   }
   /// Like slice's `split_last` except we know that it always returns Some
-  fn split_last(&self) -> (Tok<String>, &[Tok<String>]) {
+  fn split_last_seg(&self) -> (Tok<String>, &[Tok<String>]) {
     let (foot, torso) = self.as_slice().split_last().expect("NameLike never empty");
     (foot.clone(), torso)
   }
   /// Get the first element
-  fn first(&self) -> Tok<String> { self.split_first().0 }
+  fn first_seg(&self) -> Tok<String> { self.split_first_seg().0 }
   /// Get the last element
-  fn last(&self) -> Tok<String> { self.split_last().0 }
+  fn last_seg(&self) -> Tok<String> { self.split_last_seg().0 }
 }

 impl NameLike for Sym {}
@@ -7,12 +7,12 @@ use futures::future::join_all;
 use itertools::Itertools;

 use crate::api;
-use crate::error::{OrcRes, Reporter, mk_err, mk_errv};
-use crate::format::fmt;
+use crate::error::{OrcErrv, OrcRes, Reporter, mk_err, mk_errv};
+use crate::format::{FmtCtx, FmtUnit, Format, fmt};
 use crate::interner::{Interner, Tok};
 use crate::location::SrcRange;
 use crate::name::{Sym, VName, VPath};
-use crate::tree::{ExprRepr, ExtraTok, Paren, TokTree, Token, ttv_range};
+use crate::tree::{ExprRepr, ExtraTok, Paren, TokTree, Token, ttv_fmt, ttv_range};

 pub trait ParseCtx {
   #[must_use]
@@ -95,18 +95,10 @@ impl<A: ExprRepr, X: ExtraTok> Deref for Snippet<'_, A, X> {
   type Target = [TokTree<A, X>];
   fn deref(&self) -> &Self::Target { self.cur }
 }

-/// Remove tokens that aren't meaningful in expression context, such as comments
-/// or line breaks
-pub fn strip_fluff<A: ExprRepr, X: ExtraTok>(tt: &TokTree<A, X>) -> Option<TokTree<A, X>> {
-  let tok = match &tt.tok {
-    Token::BR => return None,
-    Token::Comment(_) => return None,
-    Token::LambdaHead(arg) => Token::LambdaHead(arg.iter().filter_map(strip_fluff).collect()),
-    Token::S(p, b) => Token::S(*p, b.iter().filter_map(strip_fluff).collect()),
-    t => t.clone(),
-  };
-  Some(TokTree { tok, sr: tt.sr.clone() })
+impl<A: ExprRepr, X: ExtraTok> Format for Snippet<'_, A, X> {
+  async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
+    ttv_fmt(&**self, c).await
+  }
 }

 #[derive(Clone, Debug)]
@@ -208,6 +200,15 @@ pub async fn expect_tok<'a, A: ExprRepr, X: ExtraTok>(
   }
 }

+pub async fn token_errv<A: ExprRepr, X: ExtraTok>(
+  ctx: &impl ParseCtx,
+  tok: &TokTree<A, X>,
+  description: &'static str,
+  message: impl FnOnce(&str) -> String,
+) -> OrcErrv {
+  mk_errv(ctx.i().i(description).await, message(&fmt(tok, ctx.i()).await), [tok.sr.pos()])
+}
+
 pub struct Parsed<'a, T, H: ExprRepr, X: ExtraTok> {
   pub output: T,
   pub tail: Snippet<'a, H, X>,
@@ -62,8 +62,7 @@ pub fn recur<H: ExprRepr, X: ExtraTok>(
     tok @ (Token::BR | Token::Bottom(_) | Token::Comment(_) | Token::Name(_)) => tok,
     tok @ (Token::Handle(_) | Token::NewExpr(_)) => tok,
     Token::NS(n, b) => Token::NS(n, Box::new(recur(*b, f))),
-    Token::LambdaHead(arg) =>
-      Token::LambdaHead(arg.into_iter().map(|tt| recur(tt, f)).collect_vec()),
+    Token::LambdaHead(arg) => Token::LambdaHead(Box::new(recur(*arg, f))),
     Token::S(p, b) => Token::S(p, b.into_iter().map(|tt| recur(tt, f)).collect_vec()),
   };
   TokTree { sr: range, tok }
@@ -117,7 +116,7 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
       NS(n => Tok::from_api(*n, i).await,
         b => Box::new(Self::from_api(b, hctx, xctx, src, i).boxed_local().await)),
       Bottom(e => OrcErrv::from_api(e, i).await),
-      LambdaHead(arg => ttv_from_api(arg, hctx, xctx, src, i).await),
+      LambdaHead(arg => Box::new(Self::from_api(arg, hctx, xctx, src, i).boxed_local().await)),
      Name(n => Tok::from_api(*n, i).await),
      S(*par, b => ttv_from_api(b, hctx, xctx, src, i).await),
      Comment(c.clone()),
@@ -137,7 +136,7 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
      NS(n.to_api(), b => Box::new(b.into_api(hctx, xctx).boxed_local().await)),
      Bottom(e.to_api()),
      Comment(c.clone()),
-      LambdaHead(arg => ttv_into_api(arg, hctx, xctx).boxed_local().await),
+      LambdaHead(arg => Box::new(arg.into_api(hctx, xctx).boxed_local().await)),
      Name(nn.to_api()),
      S(p, b => ttv_into_api(b, hctx, xctx).boxed_local().await),
      Handle(hand.into_api(hctx).await),
@@ -153,18 +152,18 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
   pub fn as_s(&self, par: Paren) -> Option<Snippet<'_, H, X>> {
     self.tok.as_s(par).map(|slc| Snippet::new(self, slc))
   }
-  pub fn as_lambda(&self) -> Option<Snippet<'_, H, X>> {
+  pub fn as_lambda(&self) -> Option<&Self> {
     match &self.tok {
-      Token::LambdaHead(arg) => Some(Snippet::new(self, arg)),
+      Token::LambdaHead(arg) => Some(&**arg),
       _ => None,
     }
   }
   pub fn is_fluff(&self) -> bool { matches!(self.tok, Token::Comment(_) | Token::BR) }
-  pub fn lambda(arg: Vec<Self>, mut body: Vec<Self>) -> Self {
-    let arg_range = ttv_range(&arg).expect("Lambda with empty arg!");
+  pub fn lambda(arg: Self, mut body: Vec<Self>) -> Self {
+    let arg_range = arg.sr();
     let mut s_range = arg_range.clone();
     s_range.range.end = body.last().expect("Lambda with empty body!").sr.range.end;
-    body.insert(0, Token::LambdaHead(arg).at(arg_range));
+    body.insert(0, Token::LambdaHead(Box::new(arg)).at(arg_range));
     Token::S(Paren::Round, body).at(s_range)
   }
   pub fn sr(&self) -> SrcRange { self.sr.clone() }
@@ -230,7 +229,7 @@ pub enum Token<H: ExprRepr, X: ExtraTok> {
   Comment(Rc<String>),
   /// The part of a lambda between `\` and `.` enclosing the argument. The body
   /// stretches to the end of the enclosing parens or the end of the const line
-  LambdaHead(Vec<TokTree<H, X>>),
+  LambdaHead(Box<TokTree<H, X>>),
   /// A binding, operator, or a segment of a namespaced::name
   Name(Tok<String>),
   /// A namespace prefix, like `my_ns::` followed by a token
@@ -267,7 +266,7 @@ impl<H: ExprRepr, X: ExtraTok> Format for Token<H, X> {
      Self::Comment(c) => format!("--[{c}]--").into(),
      Self::LambdaHead(arg) =>
        tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("\\{0b}.")))
-          .units([ttv_fmt(arg, c).await]),
+          .units([arg.print(c).boxed_local().await]),
      Self::NS(n, b) => tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("{0}::{1l}")))
        .units([n.to_string().into(), b.print(c).boxed_local().await]),
      Self::Name(n) => format!("{n}").into(),
@@ -23,7 +23,7 @@ use orchid_base::name::Sym;
 use crate::api;
 use crate::atom::{
   AtomCard, AtomCtx, AtomDynfo, AtomFactory, Atomic, AtomicFeaturesImpl, AtomicVariant, MethodSet,
-  MethodSetBuilder, err_not_callable, err_not_command, get_info,
+  MethodSetBuilder, TypAtom, err_not_callable, err_not_command, get_info,
 };
 use crate::expr::Expr;
 use crate::gen_expr::{GExpr, bot};
@@ -68,6 +68,7 @@ impl Deref for AtomReadGuard<'_> {
   fn deref(&self) -> &Self::Target { &**self.guard.get(&self.id).unwrap() }
 }

+/// Remove an atom from the store
 pub(crate) async fn take_atom(id: api::AtomId, ctx: &SysCtx) -> Box<dyn DynOwnedAtom> {
   let mut g = ctx.get_or_default::<ObjStore>().objects.write().await;
   g.remove(&id).unwrap_or_else(|| panic!("Received invalid atom ID: {}", id.0))
@@ -313,3 +314,11 @@ struct ObjStore {
   objects: RwLock<MemoMap<api::AtomId, Box<dyn DynOwnedAtom>>>,
 }
 impl SysCtxEntry for ObjStore {}
+
+pub async fn get_own_instance<A: OwnedAtom>(typ: TypAtom<A>) -> A {
+  let ctx = typ.data.ctx();
+  let g = ctx.get_or_default::<ObjStore>().objects.read().await;
+  let dyn_atom = (g.get(&typ.data.atom.drop.expect("Owned atoms always have a drop ID")))
+    .expect("Atom ID invalid; atom type probably not owned by this crate");
+  dyn_atom.as_any_ref().downcast_ref().cloned().expect("The ID should imply a type as well")
+}
@@ -1,5 +1,6 @@
 use std::future::Future;

+use never::Never;
 use orchid_base::error::{OrcErr, OrcRes, mk_err};
 use orchid_base::interner::Interner;
 use orchid_base::location::Pos;
@@ -7,7 +8,7 @@ use orchid_base::location::Pos;
 use crate::atom::{AtomicFeatures, ToAtom, TypAtom};
 use crate::expr::Expr;
 use crate::gen_expr::{GExpr, atom, bot};
-use crate::system::downcast_atom;
+use crate::system::{SysCtx, downcast_atom};

 pub trait TryFromExpr: Sized {
   fn try_from_expr(expr: Expr) -> impl Future<Output = OrcRes<Self>>;
@@ -43,6 +44,10 @@ impl<A: AtomicFeatures> TryFromExpr for TypAtom<A> {
   }
 }

+impl TryFromExpr for SysCtx {
+  async fn try_from_expr(expr: Expr) -> OrcRes<Self> { Ok(expr.ctx()) }
+}
+
 pub trait ToExpr {
   fn to_expr(self) -> GExpr;
 }
@@ -66,3 +71,7 @@ impl<T: ToExpr> ToExpr for OrcRes<T> {
 impl<A: ToAtom> ToExpr for A {
   fn to_expr(self) -> GExpr { atom(self) }
 }
+
+impl ToExpr for Never {
+  fn to_expr(self) -> GExpr { match self {} }
+}
@@ -17,6 +17,7 @@ use orchid_api_traits::{Decode, UnderRoot, enc_vec};
 use orchid_base::builtin::{ExtInit, ExtPort, Spawner};
 use orchid_base::char_filter::{char_filter_match, char_filter_union, mk_char_filter};
 use orchid_base::clone;
+use orchid_base::error::Reporter;
 use orchid_base::interner::{Interner, Tok};
 use orchid_base::logging::Logger;
 use orchid_base::name::Sym;
@@ -189,10 +190,13 @@ pub fn extension_init(
         })
         .collect()
         .await;
+      let prelude =
+        cted.inst().dyn_prelude(&i).await.iter().map(|sym| sym.to_api()).collect();
      let record = SystemRecord { ctx, lazy_members: lazy_mems.into_inner() };
      let systems = systems_weak.upgrade().expect("System constructed during shutdown");
      systems.lock().await.insert(new_sys.id, record);
-      let response = api::NewSystemResponse { lex_filter, const_root, line_types: vec![] };
+      let response =
+        api::NewSystemResponse { lex_filter, const_root, line_types: vec![], prelude };
      hand.handle(&new_sys, &response).await
    },
    api::HostExtReq::GetMember(get_tree @ api::GetMember(sys_id, tree_id)) => {
@@ -262,8 +266,10 @@ pub fn extension_init(
      let parser =
        parsers.iter().find(|p| p.line_head() == **name).expect("No parser candidate");
      let module = Sym::from_api(*module, ctx.i()).await;
-      let pctx = ParsCtx::new(ctx.clone(), module);
-      let o_line = match parser.parse(pctx, *exported, comments, tail).await {
+      let reporter = Reporter::new();
+      let pctx = ParsCtx::new(ctx.clone(), module, &reporter);
+      let parse_res = parser.parse(pctx, *exported, comments, tail).await;
+      let o_line = match reporter.merge(parse_res) {
        Err(e) => Err(e.to_api()),
        Ok(t) => Ok(linev_into_api(t, ctx.clone(), &hand).await),
      };
@@ -4,12 +4,12 @@ use futures::FutureExt;
 use futures::future::{LocalBoxFuture, join_all};
 use itertools::Itertools;
 use orchid_api::ResolveNames;
-use orchid_base::error::OrcRes;
+use orchid_base::error::{OrcRes, Reporter};
 use orchid_base::id_store::IdStore;
-use orchid_base::interner::Tok;
+use orchid_base::interner::{Interner, Tok};
 use orchid_base::location::SrcRange;
 use orchid_base::name::Sym;
-use orchid_base::parse::{Comment, Snippet};
+use orchid_base::parse::{Comment, ParseCtx, Snippet};
 use orchid_base::reqnot::{ReqHandlish, Requester};
 use orchid_base::tree::ttv_into_api;

@@ -61,12 +61,19 @@ pub struct ParsCtx<'a> {
   _parse: PhantomData<&'a mut ()>,
   ctx: SysCtx,
   module: Sym,
+  reporter: &'a Reporter,
 }
-impl ParsCtx<'_> {
-  pub(crate) fn new(ctx: SysCtx, module: Sym) -> Self { Self { _parse: PhantomData, ctx, module } }
+impl<'a> ParsCtx<'a> {
+  pub(crate) fn new(ctx: SysCtx, module: Sym, reporter: &'a Reporter) -> Self {
+    Self { _parse: PhantomData, ctx, module, reporter }
+  }
   pub fn ctx(&self) -> &SysCtx { &self.ctx }
   pub fn module(&self) -> Sym { self.module.clone() }
 }
+impl ParseCtx for ParsCtx<'_> {
+  fn i(&self) -> &Interner { self.ctx.i() }
+  fn reporter(&self) -> &Reporter { self.reporter }
+}

 type BoxConstCallback = Box<dyn FnOnce(ConstCtx) -> LocalBoxFuture<'static, GExpr>>;

@@ -94,8 +101,10 @@ impl ParsedLine {
        ParsedMemKind::Const(cb) => api::ParsedMemberKind::Constant(api::ParsedConstId(
          ctx.get_or_default::<ParsedConstCtxEntry>().consts.add(cb).id(),
        )),
-        ParsedMemKind::Mod(plv) =>
-          api::ParsedMemberKind::Module(linev_into_api(plv, ctx, hand).boxed_local().await),
+        ParsedMemKind::Mod { lines, use_prelude } => api::ParsedMemberKind::Module {
+          lines: linev_into_api(lines, ctx, hand).boxed_local().await,
+          use_prelude,
+        },
      },
    }),
    ParsedLineKind::Rec(tv) =>
@@ -119,20 +128,26 @@ pub enum ParsedLineKind {
 }

 pub struct ParsedMem {
-  name: Tok<String>,
-  exported: bool,
-  kind: ParsedMemKind,
+  pub name: Tok<String>,
+  pub exported: bool,
+  pub kind: ParsedMemKind,
 }

 pub enum ParsedMemKind {
   Const(BoxConstCallback),
-  Mod(Vec<ParsedLine>),
+  Mod { lines: Vec<ParsedLine>, use_prelude: bool },
 }

 impl ParsedMemKind {
   pub fn cnst<F: AsyncFnOnce(ConstCtx) -> GExpr + 'static>(f: F) -> Self {
     Self::Const(Box::new(|ctx| Box::pin(f(ctx))))
   }
+  pub fn module(lines: impl IntoIterator<Item = ParsedLine>) -> Self {
+    Self::Mod { lines: lines.into_iter().collect(), use_prelude: true }
+  }
+  pub fn clean_module(lines: impl IntoIterator<Item = ParsedLine>) -> Self {
+    Self::Mod { lines: lines.into_iter().collect(), use_prelude: false }
+  }
 }

 /* TODO: how the macro runner uses the multi-stage loader
@@ -13,6 +13,7 @@ use orchid_base::boxed_iter::BoxedIter;
 use orchid_base::builtin::Spawner;
 use orchid_base::interner::Interner;
 use orchid_base::logging::Logger;
+use orchid_base::name::Sym;
 use orchid_base::reqnot::{Receipt, ReqNot};

 use crate::api;
@@ -81,6 +82,7 @@ impl<T: SystemCard> DynSystemCard for T {

 /// System as defined by author
 pub trait System: Send + Sync + SystemCard + 'static {
+  fn prelude(i: &Interner) -> impl Future<Output = Vec<Sym>>;
   fn env() -> Vec<GenMember>;
   fn lexers() -> Vec<LexerObj>;
   fn parsers() -> Vec<ParserObj>;
@@ -88,6 +90,7 @@ pub trait System: Send + Sync + SystemCard + 'static {
 }

 pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
+  fn dyn_prelude<'a>(&'a self, i: &'a Interner) -> LocalBoxFuture<'a, Vec<Sym>>;
   fn dyn_env(&self) -> Vec<GenMember>;
   fn dyn_lexers(&self) -> Vec<LexerObj>;
   fn dyn_parsers(&self) -> Vec<ParserObj>;
@@ -96,6 +99,9 @@ pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
 }

 impl<T: System> DynSystem for T {
+  fn dyn_prelude<'a>(&'a self, i: &'a Interner) -> LocalBoxFuture<'a, Vec<Sym>> {
+    Box::pin(Self::prelude(i))
+  }
   fn dyn_env(&self) -> Vec<GenMember> { Self::env() }
   fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() }
   fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() }
@@ -129,22 +135,6 @@ where A: AtomicFeatures {
   Ok(TypAtom { value, data: foreign })
 }

-// #[derive(Clone)]
-// pub struct SysCtx {
-//   pub reqnot: ReqNot<api::ExtMsgSet>,
-//   pub spawner: Spawner,
-//   pub id: api::SysId,
-//   pub cted: CtedObj,
-//   pub logger: Logger,
-//   pub obj_store: ObjStore,
-//   pub i: Rc<Interner>,
-// }
-// impl fmt::Debug for SysCtx {
-//   fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-//     write!(f, "SysCtx({:?})", self.id)
-//   }
-// }
-
 #[derive(Clone)]
 pub struct SysCtx(Rc<MemoMap<TypeId, Box<dyn Any>>>);
 impl SysCtx {
@@ -293,6 +293,7 @@ impl Extension {
   pub fn downgrade(&self) -> WeakExtension { WeakExtension(Rc::downgrade(&self.0)) }
 }

+#[derive(Clone)]
 pub struct WeakExtension(Weak<ExtensionData>);
 impl WeakExtension {
   #[must_use]
@@ -125,21 +125,12 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
     let end = tail.find(['\n', '\r']).map_or(tail.len(), |n| n - 1);
     ctx.push_pos(end as u32);
     ParsTok::Comment(Rc::new(tail[2..end].to_string()))
-  } else if ctx.strip_char('\\') {
-    let mut arg = Vec::new();
+  } else if let Some(tail) = ctx.tail.strip_prefix('\\').filter(|t| t.starts_with(name_start)) {
+    // fanciness like \$placeh in templates is resolved in the macro engine.
+    ctx.set_tail(tail);
+    let arg = lex_once(ctx).boxed_local().await?;
     ctx.trim_ws();
-    while !ctx.strip_char('.') {
-      if ctx.tail.is_empty() {
-        return Err(mk_errv(
-          ctx.ctx.i.i("Unclosed lambda").await,
-          "Lambdae started with \\ should separate arguments from body with .",
-          [SrcRange::new(start..start + 1, ctx.path)],
-        ));
-      }
-      arg.push(lex_once(ctx).boxed_local().await?);
-      ctx.trim_ws();
-    }
-    ParsTok::LambdaHead(arg)
+    ParsTok::LambdaHead(Box::new(arg))
   } else if let Some((lp, rp, paren)) = PARENS.iter().find(|(lp, ..)| ctx.strip_char(*lp)) {
     let mut body = Vec::new();
     ctx.trim_ws();
@@ -1,10 +1,9 @@
-use futures::FutureExt;
 use futures::future::join_all;
 use itertools::Itertools;
 use orchid_base::error::{OrcRes, Reporter, mk_errv};
 use orchid_base::format::fmt;
 use orchid_base::interner::{Interner, Tok};
-use orchid_base::name::{Sym, VPath};
+use orchid_base::name::Sym;
 use orchid_base::parse::{
   Comment, Import, ParseCtx, Parsed, Snippet, expect_end, line_items, parse_multiname,
   try_pop_no_fluff,
@@ -13,7 +12,7 @@ use orchid_base::tree::{Paren, TokTree, Token};
 use substack::Substack;

 use crate::ctx::Ctx;
-use crate::expr::{Expr, ExprKind, PathSetBuilder};
+use crate::expr::Expr;
 use crate::parsed::{Item, ItemKind, ParsedMember, ParsedMemberKind, ParsedModule};
 use crate::system::System;

@@ -110,9 +109,6 @@ pub async fn parse_exportable_item<'a>(
   let kind = if discr == ctx.i().i("mod").await {
     let (name, body) = parse_module(ctx, path, tail).await?;
     ItemKind::Member(ParsedMember { name, exported, kind: ParsedMemberKind::Mod(body) })
-  } else if discr == ctx.i().i("const").await {
-    let (name, expr) = parse_const(ctx, tail, path.clone()).await?;
-    ItemKind::Member(ParsedMember { name, exported, kind: ParsedMemberKind::ParsedConst(expr) })
   } else if let Some(sys) = ctx.systems().find(|s| s.can_parse(discr.clone())) {
     return sys
       .parse(path, tail.to_vec(), exported, comments, &mut async |stack, lines| {
@@ -156,107 +152,5 @@ pub async fn parse_module<'a>(
     ));
   };
   let path = path.push(name.clone());
-  Ok((name, ParsedModule::new(parse_items(ctx, path, body).await?)))
+  Ok((name, ParsedModule::new(true, parse_items(ctx, path, body).await?)))
-}
-
-pub async fn parse_const<'a>(
-  ctx: &impl HostParseCtx,
-  tail: ParsSnippet<'a>,
-  path: Substack<'_, Tok<String>>,
-) -> OrcRes<(Tok<String>, Expr)> {
-  let Parsed { output, tail } = try_pop_no_fluff(ctx, tail).await?;
-  let Some(name) = output.as_name() else {
-    return Err(mk_errv(
-      ctx.i().i("Missing module name").await,
-      format!("A name was expected, {} was found", fmt(output, ctx.i()).await),
-      [output.sr()],
-    ));
-  };
-  let Parsed { output, tail } = try_pop_no_fluff(ctx, tail).await?;
-  if !output.is_kw(ctx.i().i("=").await) {
-    return Err(mk_errv(
-      ctx.i().i("Missing = separator").await,
-      format!("Expected = , found {}", fmt(output, ctx.i()).await),
-      [output.sr()],
-    ));
-  }
-  try_pop_no_fluff(ctx, tail).await?;
-  // ctx.save_const(path, tail[..].to_vec()).await;
-  let final_path =
-    VPath::new(path.unreverse()).name_with_suffix(name.clone()).to_sym(ctx.i()).await;
-  let val = parse_expr(ctx, final_path, PathSetBuilder::new(), tail).await?;
-  Ok((name, val))
-}
-
-pub async fn parse_expr(
-  ctx: &impl HostParseCtx,
-  path: Sym,
-  psb: PathSetBuilder<'_, Tok<String>>,
-  tail: ParsSnippet<'_>,
-) -> OrcRes<Expr> {
-  let Some((last_idx, _)) = (tail.iter().enumerate().find(|(_, tt)| tt.as_lambda().is_some()))
-    .or_else(|| tail.iter().enumerate().rev().find(|(_, tt)| !tt.is_fluff()))
-  else {
-    return Err(mk_errv(ctx.i().i("Empty expression").await, "Expression ends abruptly here", [
-      tail.sr(),
-    ]));
-  };
-  let (function, value) = tail.split_at(last_idx as u32);
-  let pos = tail.sr().pos();
-  if !function.iter().all(TokTree::is_fluff) {
-    let (f_psb, x_psb) = psb.split();
-    let x_expr = parse_expr(ctx, path.clone(), x_psb, value).boxed_local().await?;
-    let f_expr = parse_expr(ctx, path, f_psb, function).boxed_local().await?;
-    return Ok(ExprKind::Call(f_expr, x_expr).at(pos));
-  }
-  let Parsed { output: head, tail } = try_pop_no_fluff(ctx, value).await?;
-  match &head.tok {
-    Token::BR | Token::Comment(_) => panic!("Fluff skipped"),
-    Token::Bottom(b) => Ok(ExprKind::Bottom(b.clone()).at(pos.clone())),
-    Token::Handle(expr) => Ok(expr.clone()),
-    Token::NS(n, nametail) => {
-      let mut nametail = nametail;
-      let mut segments = vec![n.clone()];
-      while let Token::NS(n, newtail) = &nametail.tok {
-        segments.push(n.clone());
-        nametail = newtail;
-      }
-      let Token::Name(n) = &nametail.tok else {
-        return Err(mk_errv(
-          ctx.i().i("Loose namespace prefix in constant").await,
-          "Namespace prefixes in constants must be followed by names",
-          [pos],
-        ));
-      };
-      segments.push(n.clone());
-      Ok(ExprKind::Const(Sym::new(segments, ctx.i()).await.unwrap()).at(pos.clone()))
-    },
-    Token::LambdaHead(h) => {
-      let [TokTree { tok: Token::Name(arg), .. }] = &h[..] else {
-        return Err(mk_errv(
-          ctx.i().i("Complex lambda binding in constant").await,
-          "Lambda args in constants must be identified by a single name",
-          [pos],
-        ));
-      };
-      let lambda_builder = psb.lambda(arg);
-      let body = parse_expr(ctx, path.clone(), lambda_builder.stack(), tail).boxed_local().await?;
-      Ok(ExprKind::Lambda(lambda_builder.collect(), body).at(pos.clone()))
-    },
-    Token::S(Paren::Round, body) =>
-      parse_expr(ctx, path, psb, Snippet::new(head, body)).boxed_local().await,
-    Token::S(..) =>
-      return Err(mk_errv(
-        ctx.i().i("Constants may only contain (), not [] or {}").await,
-        "It seems like you are trying to call a macro. Consider a 'let' line",
-        [pos],
-      )),
-    Token::Name(n) =>
-      if psb.register_arg(n) {
-        Ok(ExprKind::Arg.at(pos))
-      } else {
-        Ok(ExprKind::Const(Sym::new([n.clone()], ctx.i()).await.unwrap()).at(pos))
-      },
-    Token::NewExpr(ex) => Ok(ex.clone()),
-  }
 }
@@ -1,8 +1,8 @@
-use std::fmt::Debug;
+use std::fmt::{self, Debug};
 use std::rc::Rc;

 use futures::FutureExt;
-use futures::future::join_all;
+use futures::future::{LocalBoxFuture, join_all};
 use hashbrown::HashSet;
 use itertools::Itertools;
 use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
@@ -26,6 +26,11 @@ pub struct Item {
   pub comments: Vec<Comment>,
   pub kind: ItemKind,
 }
+impl Item {
+  pub fn new(sr: SrcRange, kind: impl Into<ItemKind>) -> Self {
+    Self { sr, comments: Vec::new(), kind: kind.into() }
+  }
+}

 #[derive(Debug)]
 pub enum ItemKind {
@@ -36,6 +41,12 @@ impl ItemKind {
   #[must_use]
   pub fn at(self, sr: SrcRange) -> Item { Item { comments: vec![], sr, kind: self } }
 }
+impl From<ParsedMember> for ItemKind {
+  fn from(value: ParsedMember) -> Self { Self::Member(value) }
+}
+impl From<Import> for ItemKind {
+  fn from(value: Import) -> Self { Self::Import(value) }
+}

 impl Format for Item {
   async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
@@ -43,9 +54,6 @@ impl Format for Item {
     let item_text = match &self.kind {
       ItemKind::Import(i) => format!("import {i}").into(),
       ItemKind::Member(mem) => match &mem.kind {
-        ParsedMemberKind::ParsedConst(expr) =>
-          tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("const {0} = {1l}")))
-            .units([mem.name.rc().into(), expr.print(c).await]),
        ParsedMemberKind::DeferredConst(_, sys) =>
          tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("const {0} via {1}")))
            .units([mem.name.rc().into(), sys.print(c).await]),
@@ -67,6 +75,9 @@ pub struct ParsedMember {
 impl ParsedMember {
   #[must_use]
   pub fn name(&self) -> Tok<String> { self.name.clone() }
+  pub fn new(exported: bool, name: Tok<String>, kind: impl Into<ParsedMemberKind>) -> Self {
+    Self { exported, name, kind: kind.into() }
+  }
 }
 impl Debug for ParsedMember {
   fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@@ -77,36 +88,53 @@ impl Debug for ParsedMember {
   }
 }

+pub(crate) type ParsedExprCallback =
+  Rc<dyn for<'a> Fn(&'a [Tok<String>]) -> LocalBoxFuture<'a, Expr>>;
+
+pub struct ParsedExpr {
+  pub(crate) debug: String,
+  pub(crate) callback: ParsedExprCallback,
+}
+impl ParsedExpr {
+  pub async fn run(self, imported_names: &[Tok<String>]) -> Expr {
+    (self.callback)(imported_names).await
+  }
+}
+impl fmt::Debug for ParsedExpr {
+  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.debug) }
+}
+
 #[derive(Debug)]
 pub enum ParsedMemberKind {
   DeferredConst(api::ParsedConstId, System),
-  ParsedConst(Expr),
   Mod(ParsedModule),
 }
-// TODO: cannot determine alias origin at this stage; parsed tree is never
-// walkable!
+impl From<ParsedModule> for ParsedMemberKind {
+  fn from(value: ParsedModule) -> Self { Self::Mod(value) }
+}

 #[derive(Debug, Default)]
 pub struct ParsedModule {
   pub exports: Vec<Tok<String>>,
   pub items: Vec<Item>,
+  pub use_prelude: bool,
 }
 impl ParsedModule {
   #[must_use]
-  pub fn new(items: impl IntoIterator<Item = Item>) -> Self {
+  pub fn new(use_prelude: bool, items: impl IntoIterator<Item = Item>) -> Self {
     let items = items.into_iter().collect_vec();
     let exports = (items.iter())
       .filter_map(|i| if let ItemKind::Member(m) = &i.kind { Some(m) } else { None })
       .filter(|m| m.exported)
       .map(|m| m.name.clone())
       .collect_vec();
-    Self { exports, items }
+    Self { exports, items, use_prelude }
   }
   pub fn merge(&mut self, other: ParsedModule) {
     let mut swap = ParsedModule::default();
     std::mem::swap(self, &mut swap);
-    *self = ParsedModule::new(swap.items.into_iter().chain(other.items))
+    assert_eq!(self.use_prelude, other.use_prelude, "merging modules that disagree on prelude");
+    *self = ParsedModule::new(self.use_prelude, swap.items.into_iter().chain(other.items))
   }
   #[must_use]
   pub fn get_imports(&self) -> impl IntoIterator<Item = &Import> {
@@ -134,8 +162,7 @@ impl Tree for ParsedModule {
      .find(|m| m.name == key)
    {
      match &member.kind {
-        ParsedMemberKind::DeferredConst(..) | ParsedMemberKind::ParsedConst(_) =>
-          return ChildResult::Err(ChildErrorKind::Constant),
+        ParsedMemberKind::DeferredConst(..) => return ChildResult::Err(ChildErrorKind::Constant),
        ParsedMemberKind::Mod(m) => return ChildResult::Ok(m),
      }
    }
@@ -39,6 +39,7 @@ struct SystemInstData {
   lex_filter: api::CharFilter,
   id: api::SysId,
   line_types: Vec<Tok<String>>,
+  prelude: Vec<Sym>,
   pub(crate) const_paths: MemoMap<api::ParsedConstId, Sym>,
 }
 impl Drop for SystemInstData {
@@ -69,6 +70,11 @@ impl System {
   #[must_use]
   pub fn deps(&self) -> &[System] { &self.0.deps }
   #[must_use]
+  pub fn ctor(&self) -> SystemCtor {
+    (self.0.ext.system_ctors().find(|c| c.decl.id == self.0.decl_id).cloned())
+      .expect("Ctor was used to create ext")
+  }
+  #[must_use]
   pub(crate) fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { self.0.ext.reqnot() }
   #[must_use]
   pub async fn get_tree(&self, id: api::TreeId) -> api::MemberKind {
@@ -78,6 +84,8 @@ impl System {
   pub fn has_lexer(&self) -> bool { !self.0.lex_filter.0.is_empty() }
   #[must_use]
   pub fn can_lex(&self, c: char) -> bool { char_filter_match(&self.0.lex_filter, c) }
+  #[must_use]
+  pub fn prelude(&self) -> Vec<Sym> { self.0.prelude.clone() }
   /// Have this system lex a part of the source. It is assumed that
   /// [Self::can_lex] was called and returned true.
   pub async fn lex<F: Future<Output = Option<api::SubLexed>>>(
@@ -147,10 +155,10 @@ impl System {
        };
        let name = ctx.i.ex(name).await;
        let mkind = match kind {
-          api::ParsedMemberKind::Module(items) => {
+          api::ParsedMemberKind::Module { lines, use_prelude } => {
            let items =
-              conv(items, module.push(name.clone()), callback, ctx).boxed_local().await?;
-            ParsedMemberKind::Mod(ParsedModule::new(items))
+              conv(lines, module.push(name.clone()), callback, ctx).boxed_local().await?;
+            ParsedMemberKind::Mod(ParsedModule::new(use_prelude, items))
          },
          api::ParsedMemberKind::Constant(cid) =>
            ParsedMemberKind::DeferredConst(cid, ctx.sys.clone()),
@@ -199,7 +207,7 @@ impl System {
     let orig = self.0.const_paths.get(&orig).expect("origin for find_names invalid").clone();
     let ctx = self.0.ctx.clone();
     async move |rel| {
-      let cwd = orig.split_last().1;
+      let cwd = orig.split_last_seg().1;
       let abs = absolute_path(cwd, rel, &ctx.i).await.ok()?;
       let root_data = &mut *root.0.write().await;
       let walk_ctx = &mut (ctx.clone(), &mut root_data.consts);
@@ -221,6 +229,7 @@ impl WeakSystem {
   pub fn upgrade(&self) -> Option<System> { self.0.upgrade().map(System) }
 }

+#[derive(Clone)]
 pub struct SystemCtor {
   pub(crate) decl: api::SystemDecl,
   pub(crate) ext: WeakExtension,
@@ -228,6 +237,10 @@ pub struct SystemCtor {
 impl SystemCtor {
   #[must_use]
   pub fn name(&self) -> &str { &self.decl.name }
+  pub async fn name_tok(&self) -> Sym {
+    (Sym::parse(&self.decl.name, &self.ext.upgrade().expect("ext dropped early").ctx().i).await)
+      .expect("System cannot have empty name")
+  }
   #[must_use]
   pub fn priority(&self) -> NotNan<f64> { self.decl.priority }
   #[must_use]
@@ -252,6 +265,7 @@ impl SystemCtor {
      line_types: join_all(sys_inst.line_types.iter().map(|m| Tok::from_api(*m, &ext.ctx().i)))
        .await,
      id,
+      prelude: join_all(sys_inst.prelude.iter().map(|tok| Sym::from_api(*tok, &ext.ctx().i))).await,
      const_paths: MemoMap::new(),
    }));
    let api_module_root = api::Module {
@@ -9,12 +9,11 @@ use futures::{FutureExt, StreamExt, stream};
 use hashbrown::HashMap;
 use hashbrown::hash_map::Entry;
 use itertools::Itertools;
-use orchid_api::FetchParsedConst;
 use orchid_base::clone;
 use orchid_base::error::{OrcRes, Reporter, mk_err, mk_errv};
 use orchid_base::interner::Tok;
-use orchid_base::location::{Pos, SrcRange};
-use orchid_base::name::{Sym, VPath};
+use orchid_base::location::{CodeGenInfo, Pos};
+use orchid_base::name::{NameLike, Sym, VPath};
 use orchid_base::reqnot::Requester;

 use crate::api;
@@ -60,11 +59,12 @@ impl Root {
     let mut ref_this = self.0.write().await;
     let this = &mut *ref_this;
     let mut deferred_consts = HashMap::new();
+    let mut consts = this.consts.clone();
     let mut tfpctx = FromParsedCtx {
       pars_root: parsed,
       deferred_consts: &mut deferred_consts,
+      consts: &mut consts,
       pars_prefix: pars_prefix.clone(),
-      consts: &mut this.consts,
       root: &this.root,
       ctx: &this.ctx,
       rep,
@@ -78,14 +78,13 @@ impl Root {
      )]);
      module = Module { imports: HashMap::new(), members }
    }
-    let mut consts = this.consts.clone();
    let root = (this.root.merge(&module, this.ctx.clone(), &mut consts).await)
      .expect("Merge conflict between parsed and existing module");
    let new = Root(Rc::new(RwLock::new(RootData { root, consts, ctx: this.ctx.clone() })));
    *this.ctx.root.write().await = new.downgrade();
    for (path, (sys_id, pc_id)) in deferred_consts {
      let sys = this.ctx.system_inst(sys_id).await.expect("System dropped since parsing");
-      let api_expr = sys.reqnot().request(FetchParsedConst { id: pc_id, sys: sys.id() }).await;
+      let api_expr = sys.reqnot().request(api::FetchParsedConst { id: pc_id, sys: sys.id() }).await;
      let mut xp_ctx = ExprParseCtx { ctx: &this.ctx, exprs: sys.ext().exprs() };
      let expr = Expr::from_api(&api_expr, PathSetBuilder::new(), &mut xp_ctx).await;
      new.0.write().await.consts.insert(path, expr);
@@ -154,7 +153,7 @@ impl<'a> TreeFromApiCtx<'a> {
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct ResolvedImport {
   target: Sym,
-  sr: SrcRange,
+  pos: Pos,
 }

 #[derive(Clone, Default)]
@@ -234,6 +233,22 @@ impl Module {
      }
    }
    let mut imports = HashMap::new();
+    if parsed.use_prelude {
+      let systems = ctx.ctx.systems.read().await;
+      for sys in systems.values().flat_map(|weak| weak.upgrade()) {
+        for prelude_item in sys.prelude() {
+          imports.insert(
+            prelude_item.last_seg(),
+            Ok(ResolvedImport {
+              target: prelude_item,
+              pos: CodeGenInfo::new_details(sys.ctor().name_tok().await, "In prelude", &ctx.ctx.i)
+                .await
+                .pos(),
+            }),
+          );
+        }
+      }
+    }
    let conflicting_imports_msg = ctx.ctx.i.i("Conflicting imports").await;
    for (key, values) in imports_by_name {
      if values.len() == 1 {
@@ -243,8 +258,10 @@ impl Module {
   match abs_path_res {
     Err(e) => ctx.rep.report(e.err_obj(&ctx.ctx.i, sr.pos(), &import.to_string()).await),
     Ok(abs_path) => {
-      imports
-        .insert(key, Ok(ResolvedImport { target: abs_path.to_sym(&ctx.ctx.i).await, sr }));
+      imports.insert(
+        key,
+        Ok(ResolvedImport { target: abs_path.to_sym(&ctx.ctx.i).await, pos: sr.pos() }),
+      );
     },
   }
 } else {
@@ -263,7 +280,10 @@ impl Module {
   let values = stream::iter(values)
     .then(|(n, sr)| {
       clone!(key; async move {
-        ResolvedImport { target: n.to_vname().suffix([key.clone()]).to_sym(i).await, sr }
+        ResolvedImport {
+          target: n.to_vname().suffix([key.clone()]).to_sym(i).await,
+          pos: sr.pos(),
+        }
       })
     })
     .collect::<Vec<_>>()
@@ -278,7 +298,7 @@ impl Module {
   ctx.rep.report(mk_err(
     self_referential_msg.clone(),
     format!("import {} points to itself or a path within itself", &import.target),
-    [import.sr.pos().into()],
+    [import.pos.clone().into()],
   ));
   }
 }
@@ -363,9 +383,9 @@ pub struct FromParsedCtx<'a> {
   pars_prefix: Sym,
   pars_root: &'a ParsedModule,
   root: &'a Module,
-  consts: &'a mut HashMap<Sym, Expr>,
   rep: &'a Reporter,
   ctx: &'a Ctx,
+  consts: &'a mut HashMap<Sym, Expr>,
   deferred_consts: &'a mut HashMap<Sym, (api::SysId, api::ParsedConstId)>,
 }
 
@@ -417,10 +437,6 @@ impl MemberKind {
   #[must_use]
   async fn from_parsed(parsed: &ParsedMemberKind, path: Sym, ctx: &mut FromParsedCtx<'_>) -> Self {
     match parsed {
-      ParsedMemberKind::ParsedConst(expr) => {
-        ctx.consts.insert(path, expr.clone());
-        MemberKind::Const
-      },
       ParsedMemberKind::DeferredConst(id, sys) => {
         ctx.deferred_consts.insert(path, (sys.id(), *id));
         MemberKind::Const
@@ -21,6 +21,7 @@ orchid-extension = { version = "0.1.0", path = "../orchid-extension", features =
 ] }
 ordered-float = "5.0.0"
 rust_decimal = "1.36.0"
+substack = "1.1.1"
 tokio = { version = "1.43.0", features = ["full"] }
 
 [dev-dependencies]
@@ -1,4 +1,4 @@
-// mod macros;
+mod macros;
 mod std;
 
 pub use std::number::num_atom::{Float, HomoArray, Int, Num};
orchid-std/src/macros/instantiate_tpl.rs (new file, 94 lines)
@@ -0,0 +1,94 @@
+use std::borrow::Cow;
+use std::pin::Pin;
+use std::rc::Rc;
+
+use futures::AsyncWrite;
+use itertools::Itertools;
+use never::Never;
+use orchid_extension::atom::{Atomic, TypAtom};
+use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant, get_own_instance};
+use orchid_extension::conv::{ToExpr, TryFromExpr};
+use orchid_extension::expr::Expr;
+use orchid_extension::gen_expr::GExpr;
+use orchid_extension::system::SysCtx;
+
+use crate::macros::mactree::{MacTok, MacTree};
+
+#[derive(Clone)]
+pub struct InstantiateTplCall {
+  pub(crate) tpl: MacTree,
+  pub(crate) argc: usize,
+  pub(crate) argv: Vec<MacTree>,
+}
+impl Atomic for InstantiateTplCall {
+  type Variant = OwnedVariant;
+  type Data = ();
+}
+impl OwnedAtom for InstantiateTplCall {
+  async fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(()) }
+  /// TODO: get serialization done for mactree
+  type Refs = Vec<Expr>;
+  async fn serialize(
+    &self,
+    ctx: SysCtx,
+    write: Pin<&mut (impl AsyncWrite + ?Sized)>,
+  ) -> Self::Refs {
+    todo!()
+  }
+  async fn deserialize(ctx: impl DeserializeCtx, refs: Self::Refs) -> Self { todo!() }
+  // Technically must be supported but shouldn't actually ever be called
+  async fn call_ref(&self, arg: Expr) -> GExpr {
+    eprintln!(
+      "Copying partially applied instantiate_tpl call. This is an internal value.\
+      \nIt should be fully consumed within generated code."
+    );
+    self.clone().call(arg).await
+  }
+  async fn call(mut self, arg: Expr) -> GExpr {
+    let arg = match TypAtom::try_from_expr(arg).await {
+      Err(e) => return Err::<Never, _>(e).to_expr(),
+      Ok(t) => get_own_instance(t).await,
+    };
+    self.argv.push(arg);
+    if self.argv.len() < self.argc {
+      return self.to_expr();
+    }
+    instantiate_tpl(&self.tpl, &mut self.argv.into_iter(), &mut false).to_expr()
+  }
+}
+fn instantiate_tpl(
+  tpl: &MacTree,
+  argv: &mut impl Iterator<Item = MacTree>,
+  changed: &mut bool,
+) -> MacTree {
+  let tok = match &*tpl.tok {
+    MacTok::Slot => {
+      *changed = true;
+      return argv.next().expect("Not enough arguments to fill all slots!");
+    },
+    MacTok::Lambda(arg, body) => MacTok::Lambda(
+      ro(changed, |changed| instantiate_tpl(arg, argv, changed)),
+      instantiate_tpl_v(body, argv, changed),
+    ),
+    MacTok::Name(_) | MacTok::Value(_) => return tpl.clone(),
+    MacTok::Ph(_) => panic!("instantiate_tpl received a placeholder"),
+    MacTok::S(p, body) => MacTok::S(*p, instantiate_tpl_v(body, argv, changed)),
+  };
+  if *changed { MacTree { pos: tpl.pos.clone(), tok: Rc::new(tok) } } else { tpl.clone() }
+}
+fn instantiate_tpl_v(
+  tpl: &[MacTree],
+  argv: &mut impl Iterator<Item = MacTree>,
+  changed: &mut bool,
+) -> Vec<MacTree> {
+  tpl.iter().map(|tree| ro(changed, |changed| instantiate_tpl(tree, argv, changed))).collect_vec()
+}
+
+/// reverse "or". Inside, the flag is always false, but raising it will raise
+/// the outside flag too.
+fn ro<T>(flag: &mut bool, cb: impl FnOnce(&mut bool) -> T) -> T {
+  let mut new_flag = false;
+  let val = cb(&mut new_flag);
+  *flag |= new_flag;
+  val
+}
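The `ro` helper above threads a "changed" flag downward while ensuring the callee only ever starts from a fresh `false` flag, so an already-raised flag cannot leak into the recursive call. A minimal usage sketch, for illustration only (the `caller` function is hypothetical, not part of this commit):

fn caller(changed: &mut bool) {
  // The closure gets a fresh flag; raising it raises the caller's flag as well.
  let doubled = ro(changed, |inner| {
    *inner = true; // pretend the recursion rewrote something
    21 * 2
  });
  assert_eq!(doubled, 42);
  assert!(*changed);
}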
orchid-std/src/macros/let_line.rs (new file, 59 lines)
@@ -0,0 +1,59 @@
+use futures::future::LocalBoxFuture;
+use itertools::Itertools;
+use orchid_base::error::{OrcRes, mk_errv};
+use orchid_base::interner::Tok;
+use orchid_base::name::Sym;
+use orchid_base::parse::{Comment, ParseCtx, Parsed, expect_tok, token_errv, try_pop_no_fluff};
+use orchid_extension::gen_expr::GExpr;
+use orchid_extension::parser::{
+  ConstCtx, GenSnippet, ParsCtx, ParsedLine, ParsedLineKind, ParsedMem, ParsedMemKind, Parser,
+};
+use substack::Substack;
+
+type ExprGenerator =
+  Box<dyn for<'a> FnOnce(ConstCtx, Substack<'a, Sym>) -> LocalBoxFuture<'a, GExpr>>;
+
+#[derive(Default)]
+pub struct LetLine;
+impl Parser for LetLine {
+  const LINE_HEAD: &'static str = "let";
+  async fn parse<'a>(
+    ctx: ParsCtx<'a>,
+    exported: bool,
+    comments: Vec<Comment>,
+    line: GenSnippet<'a>,
+  ) -> OrcRes<Vec<ParsedLine>> {
+    let Parsed { output: name_tok, tail } = try_pop_no_fluff(&ctx, line).await?;
+    let Some(name) = name_tok.as_name() else {
+      let err = token_errv(&ctx, name_tok, "Constant must have a name", |t| {
+        format!("Expected a name but found {t}")
+      });
+      return Err(err.await);
+    };
+    let Parsed { tail, .. } = expect_tok(&ctx, tail, ctx.i().i("=").await).await?;
+    fn do_tokv(line: GenSnippet<'_>) -> ExprGenerator {
+      let first: ExprGenerator = if let Some((idx, arg)) =
+        line.iter().enumerate().find_map(|(i, x)| Some((i, x.as_lambda()?)))
+      {
+        Box::new(move |ctx, stack| Box::pin(async move {
+          let name = ctx.names([])
+        }))
+      } else {
+
+      };
+      todo!()
+    }
+    let expr_generator = do_tokv(tail);
+    Ok(vec![ParsedLine {
+      comments,
+      sr: line.sr(),
+      kind: ParsedLineKind::Mem(ParsedMem {
+        exported,
+        name,
+        kind: ParsedMemKind::cnst(async |ctx| expr_generator(ctx, Substack::Bottom).await),
+      }),
+    }])
+  }
+}
+
+fn update_names(tree: MacTree)
orchid-std/src/macros/macro_lib.rs (new file, 20 lines)
@@ -0,0 +1,20 @@
+use orchid_extension::atom::TypAtom;
+use orchid_extension::atom_owned::get_own_instance;
+use orchid_extension::tree::{GenMember, comments, fun, prefix};
+
+use crate::Int;
+use crate::macros::instantiate_tpl::InstantiateTplCall;
+use crate::macros::mactree::MacTree;
+
+pub fn gen_macro_lib() -> Vec<GenMember> {
+  prefix("macros", [comments(
+    ["This is an internal function, you can't obtain a value of its argument type.", "hidden"],
+    fun(true, "instantiate_tpl", |tpl: TypAtom<MacTree>, right: Int| async move {
+      InstantiateTplCall {
+        tpl: get_own_instance(tpl).await,
+        argc: right.0.try_into().unwrap(),
+        argv: Vec::new(),
+      }
+    }),
+  )])
+}
@@ -1,14 +1,17 @@
 use never::Never;
+use orchid_base::interner::Interner;
+use orchid_base::name::Sym;
 use orchid_base::reqnot::Receipt;
 use orchid_extension::atom::AtomDynfo;
 use orchid_extension::entrypoint::ExtReq;
-use orchid_extension::fs::DeclFs;
 use orchid_extension::lexer::LexerObj;
 use orchid_extension::parser::ParserObj;
 use orchid_extension::system::{System, SystemCard};
 use orchid_extension::system_ctor::SystemCtor;
 use orchid_extension::tree::GenMember;
 
+use crate::macros::let_line::LetLine;
+use crate::macros::macro_lib::gen_macro_lib;
 use crate::macros::mactree_lexer::MacTreeLexer;
 
 #[derive(Default)]
@@ -27,8 +30,8 @@ impl SystemCard for MacroSystem {
 }
 impl System for MacroSystem {
   async fn request(_: ExtReq<'_>, req: Self::Req) -> Receipt<'_> { match req {} }
-  fn vfs() -> orchid_extension::fs::DeclFs { DeclFs::Mod(&[]) }
+  async fn prelude(_: &Interner) -> Vec<Sym> { vec![] }
   fn lexers() -> Vec<LexerObj> { vec![&MacTreeLexer] }
-  fn parsers() -> Vec<ParserObj> { vec![] }
+  fn parsers() -> Vec<ParserObj> { vec![&LetLine] }
-  fn env() -> Vec<GenMember> { vec![] }
+  fn env() -> Vec<GenMember> { gen_macro_lib() }
 }
@@ -39,7 +39,9 @@ pub enum MacTok {
   /// Only permitted in arguments to `instantiate_tpl`
   Slot,
   Value(Expr),
-  Lambda(Vec<MacTree>, Vec<MacTree>),
+  Lambda(MacTree, Vec<MacTree>),
+  /// Only permitted in "pattern" values produced by macro blocks, which are
+  /// never accessed as variables by usercode
   Ph(Ph),
 }
 impl Format for MacTok {
@@ -50,7 +52,7 @@ impl Format for MacTok {
   tl_cache!(Rc<Variants>: Rc::new(Variants::default()
     .unbounded("\\{0b}.{1l}")
     .bounded("(\\{0b}.{1b})"))),
-  [mtreev_fmt(arg, c).await, mtreev_fmt(b, c).await],
+  [arg.print(c).await, mtreev_fmt(b, c).await],
   ),
   Self::Name(n) => format!("{n}").into(),
   Self::Ph(ph) => format!("{ph}").into(),
@@ -96,3 +98,41 @@ pub enum PhKind {
   Scalar,
   Vector { at_least_one: bool, priority: u8 },
 }
+
+pub fn map_mactree(
+  tpl: &MacTree,
+  map: &mut impl FnMut(MacTree) -> Option<MacTree>,
+  argv: &mut impl Iterator<Item = MacTree>,
+  changed: &mut bool,
+) -> MacTree {
+  let tok = match &*tpl.tok {
+    MacTok::Slot => {
+      *changed = true;
+      return argv.next().expect("Not enough arguments to fill all slots!");
+    },
+    MacTok::Lambda(arg, body) => MacTok::Lambda(
+      ro(changed, |changed| instantiate_tpl(arg, argv, changed)),
+      instantiate_tpl_v(body, argv, changed),
+    ),
+    MacTok::Name(_) | MacTok::Value(_) => return tpl.clone(),
+    MacTok::Ph(_) => panic!("instantiate_tpl received a placeholder"),
+    MacTok::S(p, body) => MacTok::S(*p, instantiate_tpl_v(body, argv, changed)),
+  };
+  if *changed { MacTree { pos: tpl.pos.clone(), tok: Rc::new(tok) } } else { tpl.clone() }
+}
+pub fn map_mactree_v(
+  tpl: &[MacTree],
+  argv: &mut impl Iterator<Item = MacTree>,
+  changed: &mut bool,
+) -> Vec<MacTree> {
+  tpl.iter().map(|tree| ro(changed, |changed| instantiate_tpl(tree, argv, changed))).collect_vec()
+}
+
+/// reverse "or". Inside, the flag is always false, but raising it will raise
+/// the outside flag too.
+fn ro<T>(flag: &mut bool, cb: impl FnOnce(&mut bool) -> T) -> T {
+  let mut new_flag = false;
+  let val = cb(&mut new_flag);
+  *flag |= new_flag;
+  val
+}
@@ -1,3 +1,6 @@
+mod instantiate_tpl;
+mod let_line;
+mod macro_lib;
 mod macro_system;
 pub mod mactree;
 mod mactree_lexer;
@@ -1,5 +1,8 @@
 use never::Never;
+use orchid_base::interner::Interner;
+use orchid_base::name::Sym;
 use orchid_base::reqnot::Receipt;
+use orchid_base::sym;
 use orchid_extension::atom::{AtomDynfo, AtomicFeatures};
 use orchid_extension::entrypoint::ExtReq;
 use orchid_extension::lexer::LexerObj;
@@ -36,4 +39,5 @@ impl System for StdSystem {
   fn lexers() -> Vec<LexerObj> { vec![&StringLexer, &NumLexer] }
   fn parsers() -> Vec<ParserObj> { vec![] }
   fn env() -> Vec<GenMember> { merge_trivial([gen_num_lib(), gen_str_lib()]) }
+  async fn prelude(i: &Interner) -> Vec<Sym> { vec![sym!(std; i).await] }
 }
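The host keys each prelude entry by its last segment (see the `use_prelude` block earlier in this diff), so the `std` symbol declared above becomes visible as the name `std` in every prelude-enabled module. A hypothetical third-party system exposing a `mylib` root would follow the same pattern; this is an illustrative sketch, not part of the commit:

  // `mylib` is a made-up symbol; any system can contribute prelude names this way.
  async fn prelude(i: &Interner) -> Vec<Sym> { vec![sym!(mylib; i).await] }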
@@ -25,8 +25,10 @@ use orchid_host::expr::PathSetBuilder;
 use orchid_host::extension::Extension;
 use orchid_host::lex::lex;
 use orchid_host::parse::{HostParseCtxImpl, parse_expr, parse_items};
+use orchid_host::parsed::{Item, ItemKind, ParsedMember, ParsedMemberKind, ParsedModule};
 use orchid_host::subprocess::ext_command;
 use orchid_host::system::init_systems;
+use orchid_host::tree::Root;
 use substack::Substack;
 use tokio::task::{LocalSet, spawn_local};
 
@@ -194,6 +196,7 @@ async fn main() -> io::Result<ExitCode> {
   },
   Commands::Execute { proj, code } => {
     let reporter = Reporter::new();
+    let path = sym!(usercode::entrypoint; i).await;
     let (mut root, systems) = init_systems(&args.system, &extensions).await.unwrap();
     if let Some(proj_path) = proj {
       let path = PathBuf::from(proj_path.into_std_path_buf());
@@ -206,25 +209,18 @@ async fn main() -> io::Result<ExitCode> {
       },
     }
   }
-  let lexemes =
-    lex(i.i(code.trim()).await, sym!(usercode; i).await, &systems, ctx).await.unwrap();
+  let lexemes = lex(i.i(code.trim()).await, path.clone(), &systems, ctx).await.unwrap();
+  let snippet = Snippet::new(&lexemes[0], &lexemes);
   if args.logs {
     println!("lexed: {}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl { i }).await, true));
   }
-  let path = sym!(usercode; i).await;
   let parse_ctx = HostParseCtxImpl {
     ctx: ctx.clone(),
     rep: &reporter,
     src: path.clone(),
     systems: &systems[..],
   };
-  let parse_res = parse_expr(
-    &parse_ctx,
-    path.clone(),
-    PathSetBuilder::new(),
-    Snippet::new(&lexemes[0], &lexemes),
-  )
-  .await;
+  let parse_res = parse_expr(&parse_ctx, path.clone(), PathSetBuilder::new(), snippet).await;
   let expr = match reporter.merge(parse_res) {
     Ok(expr) => expr,
     Err(e) => {
@@ -233,6 +229,12 @@ async fn main() -> io::Result<ExitCode> {
       return;
     },
   };
+  let parsed_root = ParsedModule::new(true, [Item::new(
+    snippet.sr(),
+    ParsedMember::new(true, i.i("entrypoint").await, expr.clone()),
+  )]);
+  let reporter = Reporter::new();
+  let root = root.add_parsed(&parsed_root, sym!(usercode; i).await, &reporter).await;
   let mut xctx = ExecCtx::new(ctx.clone(), logger.clone(), root, expr).await;
   xctx.set_gas(Some(1000));
   xctx.execute().await;
@@ -63,7 +63,7 @@ pub async fn parse_folder(
       Ok(Some(module)) => items.push(module.default_item(name.clone(), sr.clone())),
     }
   }
-  Ok(Some(ParsedModule::new(items)))
+  Ok(Some(ParsedModule::new(false, items)))
 } else if path.extension() == Some(OsStr::new("orc")) {
   let name_os = path.file_stem().expect("If there is an extension, there must be a stem");
   let name = ctx.i.i(os_str_to_string(name_os, &ctx.i, [sr]).await?).await;
@@ -81,9 +81,9 @@ pub async fn parse_folder(
     ctx.systems.read().await.iter().filter_map(|(_, sys)| sys.upgrade()).collect_vec();
   let lexemes = lex(ctx.i.i(&text).await, ns.clone(), &systems, &ctx).await?;
   let hpctx = HostParseCtxImpl { ctx: ctx.clone(), rep, src: ns.clone(), systems: &systems };
-  let Some(fst) = lexemes.first() else { return Ok(Some(ParsedModule::new([]))) };
+  let Some(fst) = lexemes.first() else { return Ok(Some(ParsedModule::new(false, []))) };
   let items = parse_items(&hpctx, Substack::Bottom, Snippet::new(fst, &lexemes)).await?;
-  Ok(Some(ParsedModule::new(items)))
+  Ok(Some(ParsedModule::new(false, items)))
 } else {
   Ok(None)
 }