forked from Orchid/orchid
temp commit
@@ -1,9 +1,11 @@
[alias]
xtask = "run --quiet --package xtask --"
orcx = "xtask orcx"
orcxdb = "xtask orcxdb"

[env]
CARGO_WORKSPACE_DIR = { value = "", relative = true }
ORCHID_EXTENSIONS = "target/debug/orchid-std"
ORCHID_DEFAULT_SYSTEMS = "orchid::std"
ORCHID_LOG_BUFFERS = "true"
RUSTBACKTRACE = "1"
Cargo.lock | 62 (generated)
@@ -862,6 +862,25 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"

[[package]]
name = "include_dir"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "923d117408f1e49d914f1a379a309cffe4f18c05cf4e3d12e613a15fc81bd0dd"
dependencies = [
"include_dir_macros",
]

[[package]]
name = "include_dir_macros"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cab85a7ed0bd5f0e76d93846e0147172bed2e2d3f859bcc33a8d9699cad1a75"
dependencies = [
"proc-macro2 1.0.92",
"quote 1.0.38",
]

[[package]]
name = "indexmap"
version = "2.7.0"
@@ -872,6 +891,17 @@ dependencies = [
"hashbrown 0.15.2",
]

[[package]]
name = "io-uring"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013"
dependencies = [
"bitflags",
"cfg-if",
"libc",
]

[[package]]
name = "is_terminal_polyfill"
version = "1.70.1"
@@ -1142,6 +1172,7 @@ dependencies = [
"dyn-clone",
"futures",
"hashbrown 0.15.2",
"include_dir",
"itertools",
"konst",
"lazy_static",
@@ -1153,8 +1184,8 @@ dependencies = [
"orchid-api-traits",
"orchid-base",
"ordered-float",
"paste",
"some_executor 0.4.0",
"pastey",
"some_executor 0.5.1",
"substack",
"tokio",
"trait-set",
@@ -1174,6 +1205,7 @@ dependencies = [
"hashbrown 0.15.2",
"itertools",
"lazy_static",
"memo-map",
"never",
"num-traits",
"orchid-api",
@@ -1274,6 +1306,12 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"

[[package]]
name = "pastey"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3a8cb46bdc156b1c90460339ae6bfd45ba0394e5effbaa640badb4987fdc261"

[[package]]
name = "pin-project-lite"
version = "0.2.16"
@@ -1708,6 +1746,20 @@ dependencies = [
"web-time",
]

[[package]]
name = "some_executor"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb323f85458f395c28b5ea35a2626e9f46a35f1d730d37e6fa24dcf2848835ee"
dependencies = [
"atomic-waker",
"priority",
"wasm-bindgen",
"wasm_thread",
"web-sys",
"web-time",
]

[[package]]
name = "stdio-perftest"
version = "0.1.0"
@@ -1805,17 +1857,19 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"

[[package]]
name = "tokio"
version = "1.43.0"
version = "1.46.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d61fa4ffa3de412bfea335c6ecff681de2b609ba3c77ef3e00e521813a9ed9e"
checksum = "0cc3a2344dafbe23a245241fe8b09735b521110d30fcefbbd5feb1797ca35d17"
dependencies = [
"backtrace",
"bytes",
"io-uring",
"libc",
"mio",
"parking_lot",
"pin-project-lite",
"signal-hook-registry",
"slab",
"socket2",
"tokio-macros",
"windows-sys 0.52.0",

SWAP.md | 12
@@ -1,12 +1,10 @@
## Async conversion
Since the macro AST is built as a custom tokenizer inside the system, it needs access to the import set. On the other hand, import sets aren't available until after parsing. Need a way to place this ordering in a lexer without restricting the expression value of the lexer.

consider converting extension's SysCtx to a typed context bag

align fn atom and macros on both sides with new design. No global state.
The daft option of accepting import resolution queries at runtime is available, but consider better options.

## alternate extension mechanism

The Macro extension needs to be in the same compilation unit as the interpreter because the interpreter needs to proactively access its data structures (in particular, it needs to generate MacTree from TokTree).
The STD system will have a lot of traffic for trivial operations like algebra; stream IO will likely not be fast enough. A faster system is in order.

Ideally, it should reuse `orchid-extension` for message routing and decoding.

@@ -14,10 +12,8 @@ Ideally, it should reuse `orchid-extension` for message routing and decoding.

## Preprocessor extension

Must figure out how the preprocessor can both be a System and be referenced in the interpreter
The macro system will not be privileged; it can take control from the interpreter via a custom top-level "let" line type.

Must actually write the macro system as recorded in the note

At this point swappable preprocessors aren't a target because interaction with the module system sounds complicated

Check if any of this needs the interpreter; if so, start with that
@@ -4,11 +4,21 @@ use std::num::NonZeroU64;
use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request;

use crate::{HostExtReq, OrcResult, SysId, TStr, TStrv, TokenTree};
use crate::{
Expression, ExtHostReq, HostExtReq, OrcResult, SourceRange, SysId, TStr, TStrv, TokenTree,
};

#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
pub struct ParsId(pub NonZeroU64);

#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
pub struct ParsedConstId(pub NonZeroU64);

/// Parse a single source line. Return values can be modules, constants, or
/// token sequences for re-parsing. These re-parsed token sequences can also
/// represent raw language items such as modules, imports, and const. This is
/// how we enable generating imports without forcing import syntax to affect API
/// versioning
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)]
pub struct ParseLine {
@@ -23,7 +33,46 @@ pub struct ParseLine {
pub line: Vec<TokenTree>,
}
impl Request for ParseLine {
type Response = OrcResult<Vec<TokenTree>>;
type Response = OrcResult<Vec<ParsedLine>>;
}

#[derive(Clone, Debug, Coding)]
pub struct ParsedLine {
pub comments: Vec<Comment>,
pub source_range: SourceRange,
pub kind: ParsedLineKind,
}

#[derive(Clone, Debug, Coding)]
pub enum ParsedLineKind {
Recursive(Vec<TokenTree>),
Member(ParsedMember),
}

#[derive(Clone, Debug, Coding)]
pub struct ParsedMember {
pub name: TStr,
pub exported: bool,
pub kind: ParsedMemberKind,
}

#[derive(Clone, Debug, Coding)]
pub enum ParsedMemberKind {
Constant(ParsedConstId),
Module(Vec<ParsedLine>),
}

/// Obtain the value of a parsed constant. This is guaranteed to be called after
/// the last [ParseLine] but before any [crate::AtomReq]. As such, in principle
/// the macro engine could run here.
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)]
pub struct FetchParsedConst {
pub sys: SysId,
pub id: ParsedConstId,
}
impl Request for FetchParsedConst {
type Response = Expression;
}

#[derive(Clone, Debug, Coding)]
@@ -31,3 +80,25 @@ pub struct Comment {
pub text: TStr,
pub range: Range<u32>,
}

/// Resolve relative names from the perspective of a constant. This can only be
/// called during a [FetchParsedConst] call, but it can be called for a
/// different [ParsedConstId] from the one in [FetchParsedConst].
///
/// Each name is either resolved to an alias or existing constant `Some(TStrv)`
/// or not resolved `None`. An error is never raised, as names may have a
/// primary meaning such as a local binding which can be overridden by specific
/// true names such as those triggering macro keywords. It is not recommended to
/// define syntax that can break by defining arbitrary constants, as line
/// parsers can define new ones at will.
#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(ExtHostReq)]
pub struct ResolveNames {
pub sys: SysId,
pub constid: ParsedConstId,
pub names: Vec<TStrv>,
}

impl Request for ResolveNames {
type Response = Vec<Option<TStrv>>;
}

@@ -89,6 +89,7 @@ pub enum ExtHostReq {
ExprReq(expr::ExprReq),
SubLex(lexer::SubLex),
LsModule(tree::LsModule),
ResolveNames(parser::ResolveNames),
}

/// Notifications sent from the extension to the host
@@ -117,8 +118,9 @@ pub enum HostExtReq {
DeserAtom(atom::DeserAtom),
LexExpr(lexer::LexExpr),
ParseLine(parser::ParseLine),
FetchParsedConst(parser::FetchParsedConst),
GetMember(tree::GetMember),
VfsReq(vfs::VfsReq),
VfsRead(vfs::VfsRead),
}

/// Notifications sent from the host to the extension

@@ -5,7 +5,7 @@ use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request;
use ordered_float::NotNan;

use crate::{CharFilter, ExtHostReq, HostExtNotif, HostExtReq, MemberKind, TStr};
use crate::{CharFilter, EagerVfs, ExtHostReq, HostExtNotif, HostExtReq, MemberKind, TStr};

/// ID of a system type
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
@@ -63,6 +63,7 @@ pub struct NewSystemResponse {
pub lex_filter: CharFilter,
pub line_types: Vec<TStr>,
pub const_root: HashMap<TStr, MemberKind>,
pub vfs: HashMap<TStr, EagerVfs>,
}

#[derive(Clone, Debug, Coding, Hierarchy)]

@@ -72,7 +72,6 @@ pub struct Member {
pub enum MemberKind {
Const(Expression),
Module(Module),
Import(TStrv),
Lazy(TreeId),
}

@@ -105,10 +104,10 @@ pub enum LsModuleError {
TreeUnavailable,
}

/// Information about a module sent from the host to an extension. By necessity,
/// members and imports are non-overlapping.
#[derive(Clone, Debug, Coding)]
pub struct ModuleInfo {
/// If the name isn't a canonical name, returns the true name.
pub canonical: Option<TStrv>,
/// List the names defined in this module
pub members: HashMap<TStr, MemberInfo>,
}
@@ -116,9 +115,7 @@ pub struct ModuleInfo {
#[derive(Clone, Copy, Debug, Coding)]
pub struct MemberInfo {
/// true if the name is exported
pub exported: bool,
/// If it's imported, you can find the canonical name here
pub canonical: Option<TStrv>,
pub public: bool,
/// Whether the tree item is a constant value or a module
pub kind: MemberInfoKind,
}

@@ -19,7 +19,7 @@ pub enum Loaded {
}

#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(VfsReq, HostExtReq)]
#[extends(HostExtReq)]
pub struct VfsRead(pub SysId, pub VfsId, pub Vec<TStr>);
impl Request for VfsRead {
type Response = OrcResult<Loaded>;
@@ -30,18 +30,3 @@ pub enum EagerVfs {
Lazy(VfsId),
Eager(HashMap<TStr, EagerVfs>),
}

#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(VfsReq, HostExtReq)]
pub struct GetVfs(pub SysId);
impl Request for GetVfs {
type Response = EagerVfs;
}

#[derive(Clone, Debug, Coding, Hierarchy)]
#[extends(HostExtReq)]
#[extendable]
pub enum VfsReq {
GetVfs(GetVfs),
VfsRead(VfsRead),
}

@@ -14,7 +14,9 @@ pub type Spawner = Rc<dyn Fn(LocalBoxFuture<'static, ()>)>;
///
/// There are no ordering guarantees about these
pub trait ExtPort {
#[must_use]
fn send<'a>(&'a self, msg: &'a [u8]) -> LocalBoxFuture<'a, ()>;
#[must_use]
fn recv(&self) -> LocalBoxFuture<'_, Option<Vec<u8>>>;
}

@@ -39,6 +39,14 @@ impl ErrPos {
impl From<Pos> for ErrPos {
fn from(origin: Pos) -> Self { Self { position: origin, message: None } }
}
impl fmt::Display for ErrPos {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match &self.message {
Some(msg) => write!(f, "{}: {}", self.position, msg),
None => write!(f, "{}", self.position),
}
}
}

#[derive(Clone, Debug)]
pub struct OrcErr {
@@ -71,7 +79,7 @@ impl From<OrcErr> for Vec<OrcErr> {
}
impl fmt::Display for OrcErr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let pstr = self.positions.iter().map(|p| format!("{p:?}")).join("; ");
let pstr = self.positions.iter().map(|p| format!("{p}")).join("; ");
write!(f, "{}: {} @ {}", self.description, self.message, pstr)
}
}
@@ -161,12 +169,12 @@ pub fn mk_err(
}
}

pub fn mk_errv(
pub fn mk_errv<I: Into<ErrPos>>(
description: Tok<String>,
message: impl AsRef<str>,
posv: impl IntoIterator<Item = ErrPos>,
posv: impl IntoIterator<Item = I>,
) -> OrcErrv {
mk_err(description, message, posv).into()
mk_err(description, message, posv.into_iter().map_into()).into()
}

pub struct Reporter {
@@ -177,6 +185,14 @@ impl Reporter {
pub fn report(&self, e: impl Into<OrcErrv>) { self.errors.borrow_mut().extend(e.into()) }
pub fn new() -> Self { Self { errors: RefCell::new(vec![]) } }
pub fn errv(self) -> Option<OrcErrv> { OrcErrv::new(self.errors.into_inner()).ok() }
pub fn merge<T>(self, res: OrcRes<T>) -> OrcRes<T> {
match (res, self.errv()) {
(res, None) => res,
(Ok(_), Some(errv)) => Err(errv),
(Err(e), Some(errv)) => Err(e + errv),
}
}
pub fn is_empty(&self) -> bool { self.errors.borrow().is_empty() }
}

impl Default for Reporter {

@@ -13,6 +13,7 @@ use crate::interner::Interner;
use crate::{api, match_mapping};

#[derive(Clone, Debug, Hash, PartialEq, Eq)]
#[must_use]
pub struct FmtUnit {
pub subs: Vec<FmtUnit>,
pub variants: Rc<Variants>,
@@ -209,6 +210,9 @@ impl From<Rc<String>> for Variants {
impl From<String> for Variants {
fn from(value: String) -> Self { Self::from(Rc::new(value)) }
}
impl From<&str> for Variants {
fn from(value: &str) -> Self { Self::from(value.to_string()) }
}
impl FromStr for Variants {
type Err = Infallible;
fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(Self::default().bounded(s)) }
@@ -265,6 +269,7 @@ impl FmtCtx for FmtCtxImpl<'_> {
}

pub trait Format {
#[must_use]
fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> impl Future<Output = FmtUnit> + 'a;
}
impl Format for Never {

@@ -6,6 +6,7 @@ use std::ops::Range;

use trait_set::trait_set;

use crate::error::ErrPos;
use crate::interner::{Interner, Tok};
use crate::name::Sym;
use crate::{api, match_mapping, sym};
@@ -51,6 +52,17 @@ impl Pos {
})
}
}
impl fmt::Display for Pos {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Pos::Inherit => f.write_str("Unresolved inherited position"),
Pos::SlotTarget => f.write_str("Unresolved slot target position"),
Pos::None => f.write_str("N/A"),
Pos::Gen(g) => write!(f, "{g}"),
Pos::SrcRange(sr) => write!(f, "{sr}"),
}
}
}

/// Exact source code location. Includes where the code was loaded from, what
/// the original source code was, and a byte range.
@@ -90,13 +102,24 @@ impl SrcRange {
}
}
pub fn zw(path: Sym, pos: u32) -> Self { Self { path, range: pos..pos } }
async fn from_api(api: &api::SourceRange, i: &Interner) -> Self {
pub async fn from_api(api: &api::SourceRange, i: &Interner) -> Self {
Self { path: Sym::from_api(api.path, i).await, range: api.range.clone() }
}
fn to_api(&self) -> api::SourceRange {
pub fn to_api(&self) -> api::SourceRange {
api::SourceRange { path: self.path.to_api(), range: self.range.clone() }
}
}
impl From<SrcRange> for ErrPos {
fn from(val: SrcRange) -> Self { val.pos().into() }
}
impl fmt::Display for SrcRange {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.range.len() {
0 => write!(f, "{}:{}", self.path(), self.range.start),
n => write!(f, "{}:{}+{}", self.path(), self.range.start, n),
}
}
}

/// Information about a code generator attached to the generated code
#[derive(Clone, PartialEq, Eq, Hash)]

@@ -53,12 +53,12 @@ impl VPath {
pub fn into_name(self) -> Result<VName, EmptyNameError> { VName::new(self.0) }
/// Add a token to the path. Since now we know that it can't be empty, turn it
/// into a name.
pub fn name_with_prefix(self, name: Tok<String>) -> VName {
pub fn name_with_suffix(self, name: Tok<String>) -> VName {
VName(self.into_iter().chain([name]).collect())
}
/// Add a token to the beginning of the path. Since now we know that it can't be
/// empty, turn it into a name.
pub fn name_with_suffix(self, name: Tok<String>) -> VName {
pub fn name_with_prefix(self, name: Tok<String>) -> VName {
VName([name].into_iter().chain(self).collect())
}

@@ -236,6 +236,9 @@ impl Sym {
Self::from_tok(Tok::from_api(marker, i).await).expect("Empty sequence found for serialized Sym")
}
pub fn to_api(&self) -> api::TStrv { self.tok().to_api() }
pub async fn push(&self, tok: Tok<String>, i: &Interner) -> Sym {
Self::new(self.0.iter().cloned().chain([tok]), i).await.unwrap()
}
}
impl fmt::Debug for Sym {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Sym({self})") }

@@ -15,7 +15,9 @@ use crate::name::{Sym, VName, VPath};
use crate::tree::{ExprRepr, ExtraTok, Paren, TokTree, Token, ttv_range};

pub trait ParseCtx {
#[must_use]
fn i(&self) -> &Interner;
#[must_use]
fn reporter(&self) -> &Reporter;
}
pub struct ParseCtxImpl<'a> {
@@ -171,7 +173,7 @@ pub async fn try_pop_no_fluff<'a, A: ExprRepr, X: ExtraTok>(
None => Err(mk_errv(
ctx.i().i("Unexpected end").await,
"Line ends abruptly; more tokens were expected",
[snip.sr().pos().into()],
[snip.sr()],
)),
}
}
@@ -184,7 +186,7 @@ pub async fn expect_end(
Some(surplus) => Err(mk_errv(
ctx.i().i("Extra code after end of line").await,
"Code found after the end of the line",
[surplus.sr.pos().into()],
[surplus.sr.pos()],
)),
None => Ok(()),
}
@@ -201,7 +203,7 @@ pub async fn expect_tok<'a, A: ExprRepr, X: ExtraTok>(
t => Err(mk_errv(
ctx.i().i("Expected specific keyword").await,
format!("Expected {tok} but found {:?}", fmt(t, ctx.i()).await),
[head.sr.pos().into()],
[head.sr()],
)),
}
}
@@ -221,7 +223,7 @@ pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
return Err(mk_errv(
ctx.i().i("Expected token").await,
"Expected a name, a parenthesized list of names, or a globstar.",
[tail.sr().pos().into()],
[tail.sr().pos()],
));
};
let ret = rec(tt, ctx).await;
@@ -264,7 +266,7 @@ pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
return Err(mk_errv(
ctx.i().i("Unrecognized name end").await,
format!("Names cannot end with {:?} tokens", fmt(t, ctx.i()).await),
[ttpos.into()],
[ttpos],
));
},
}

@@ -23,12 +23,14 @@ use crate::{api, match_mapping, tl_cache};
pub trait TokenVariant<ApiEquiv: Clone + Debug + Coding>: Format + Clone + fmt::Debug {
type FromApiCtx<'a>;
type ToApiCtx<'a>;
#[must_use]
fn from_api(
api: &ApiEquiv,
ctx: &mut Self::FromApiCtx<'_>,
pos: SrcRange,
i: &Interner,
) -> impl Future<Output = Self>;
#[must_use]
fn into_api(self, ctx: &mut Self::ToApiCtx<'_>) -> impl Future<Output = ApiEquiv>;
}
impl<T: Clone + Debug + Coding> TokenVariant<T> for Never {
@@ -70,7 +72,9 @@ pub fn recur<H: ExprRepr, X: ExtraTok>(

pub trait AtomRepr: Clone + Format {
type Ctx: ?Sized;
#[must_use]
fn from_api(api: &api::Atom, pos: Pos, ctx: &mut Self::Ctx) -> impl Future<Output = Self>;
#[must_use]
fn to_api(&self) -> impl Future<Output = orchid_api::Atom> + '_;
}
impl AtomRepr for Never {
@@ -133,9 +137,9 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
NS(n.to_api(), b => Box::new(b.into_api(hctx, xctx).boxed_local().await)),
Bottom(e.to_api()),
Comment(c.clone()),
LambdaHead(arg => ttv_into_api(arg, hctx, xctx).await),
LambdaHead(arg => ttv_into_api(arg, hctx, xctx).boxed_local().await),
Name(nn.to_api()),
S(p, b => ttv_into_api(b, hctx, xctx).await),
S(p, b => ttv_into_api(b, hctx, xctx).boxed_local().await),
Handle(hand.into_api(hctx).await),
NewExpr(expr.into_api(xctx).await),
});
@@ -163,6 +167,7 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
body.insert(0, Token::LambdaHead(arg).at(arg_range));
Token::S(Paren::Round, body).at(s_range)
}
pub fn sr(&self) -> SrcRange { self.sr.clone() }
}
impl<H: ExprRepr, X: ExtraTok> Format for TokTree<H, X> {
async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {

@@ -14,6 +14,7 @@ derive_destructure = "1.0.0"
dyn-clone = "1.0.17"
futures = "0.3.31"
hashbrown = "0.15.2"
include_dir = { version = "0.7.4", optional = true }
itertools = "0.14.0"
konst = "0.3.16"
lazy_static = "1.5.0"
@@ -25,8 +26,8 @@ orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
orchid-base = { version = "0.1.0", path = "../orchid-base" }
ordered-float = "5.0.0"
paste = "1.0.15"
some_executor = "0.4.0"
pastey = "0.1.0"
some_executor = "0.5.1"
substack = "1.1.1"
tokio = { version = "1.43.0", optional = true }
tokio = { version = "1.46.1", optional = true }
trait-set = "0.3.0"

@@ -59,7 +59,7 @@ impl<'a> AtomReadGuard<'a> {
async fn new(id: api::AtomId, ctx: &'a SysCtx) -> Self {
let guard = ctx.get_or_default::<ObjStore>().objects.read().await;
let valid = guard.iter().map(|i| i.0).collect_vec();
assert!(guard.get(&id).is_some(), "Received invalid atom ID: {:?} not in {:?}", id, valid);
assert!(guard.get(&id).is_some(), "Received invalid atom ID: {id:?} not in {valid:?}");
Self { id, guard }
}
}

@@ -5,7 +5,7 @@ use std::num::NonZero;
|
||||
use std::pin::Pin;
|
||||
use std::rc::Rc;
|
||||
|
||||
use async_std::channel::{self, Receiver, RecvError, Sender};
|
||||
use async_std::channel::{self, Receiver, Sender};
|
||||
use async_std::stream;
|
||||
use async_std::sync::Mutex;
|
||||
use futures::future::{LocalBoxFuture, join_all};
|
||||
@@ -22,7 +22,7 @@ use orchid_base::logging::Logger;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::parse::{Comment, Snippet};
|
||||
use orchid_base::reqnot::{ReqNot, RequestHandle, Requester};
|
||||
use orchid_base::tree::{TokenVariant, ttv_from_api, ttv_into_api};
|
||||
use orchid_base::tree::{TokenVariant, ttv_from_api};
|
||||
use substack::Substack;
|
||||
use trait_set::trait_set;
|
||||
|
||||
@@ -32,6 +32,7 @@ use crate::atom_owned::take_atom;
|
||||
use crate::expr::{Expr, ExprHandle};
|
||||
use crate::fs::VirtFS;
|
||||
use crate::lexer::{LexContext, err_cascade, err_not_applicable};
|
||||
use crate::parser::{ParsCtx, get_const, linev_into_api};
|
||||
use crate::system::{SysCtx, atom_by_idx};
|
||||
use crate::system_ctor::{CtedObj, DynSystemCtor};
|
||||
use crate::tree::{GenTok, GenTokTree, LazyMemberFactory, TreeIntoApiCtxImpl};
|
||||
@@ -47,7 +48,6 @@ impl ExtensionData {
|
||||
pub fn new(name: &'static str, systems: &'static [&'static dyn DynSystemCtor]) -> Self {
|
||||
Self { name, systems }
|
||||
}
|
||||
// pub fn main(self) { extension_main(self) }
|
||||
}
|
||||
|
||||
pub enum MemberRecord {
|
||||
@@ -57,7 +57,6 @@ pub enum MemberRecord {
|
||||
|
||||
pub struct SystemRecord {
|
||||
vfses: HashMap<api::VfsId, &'static dyn VirtFS>,
|
||||
declfs: api::EagerVfs,
|
||||
lazy_members: HashMap<api::TreeId, MemberRecord>,
|
||||
ctx: SysCtx,
|
||||
}
|
||||
@@ -84,19 +83,6 @@ pub async fn with_atom_record<'a, F: Future<Output = SysCtx>, T>(
|
||||
cb(atom_record, ctx, id, data).await
|
||||
}
|
||||
|
||||
// pub fn extension_main(data: ExtensionData) {
|
||||
|
||||
// if thread::Builder::new()
|
||||
// .name(format!("ext-main:{}", data.name))
|
||||
// .spawn(|| extension_main_logic(data))
|
||||
// .unwrap()
|
||||
// .join()
|
||||
// .is_err()
|
||||
// {
|
||||
// process::exit(-1)
|
||||
// }
|
||||
// }
|
||||
|
||||
pub struct ExtensionOwner {
|
||||
_interner_cell: Rc<RefCell<Option<Interner>>>,
|
||||
_systems_lock: Rc<Mutex<HashMap<api::SysId, SystemRecord>>>,
|
||||
@@ -109,12 +95,7 @@ impl ExtPort for ExtensionOwner {
|
||||
Box::pin(async { self.out_send.send(msg.to_vec()).boxed_local().await.unwrap() })
|
||||
}
|
||||
fn recv(&self) -> LocalBoxFuture<'_, Option<Vec<u8>>> {
|
||||
Box::pin(async {
|
||||
match self.out_recv.recv().await {
|
||||
Ok(v) => Some(v),
|
||||
Err(RecvError) => None,
|
||||
}
|
||||
})
|
||||
Box::pin(async { (self.out_recv.recv().await).ok() })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -141,8 +122,7 @@ pub fn extension_init(
|
||||
let get_ctx = clone!(systems_weak; move |id: api::SysId| clone!(systems_weak; async move {
|
||||
let systems =
|
||||
systems_weak.upgrade().expect("System table dropped before request processing done");
|
||||
let x = systems.lock().await.get(&id).expect("System not found").ctx.clone();
|
||||
x
|
||||
systems.lock().await.get(&id).expect("System not found").ctx.clone()
|
||||
}));
|
||||
let init_ctx = {
|
||||
clone!(interner_weak, spawner, logger);
|
||||
@@ -200,32 +180,25 @@ pub fn extension_init(
|
||||
.then(|mem| {
|
||||
let (req, lazy_mems) = (&hand, &lazy_mems);
|
||||
clone!(i, ctx; async move {
|
||||
let name = i.i(&mem.name).await.to_api();
|
||||
let value = mem.kind.into_api(&mut TreeIntoApiCtxImpl {
|
||||
let mut tia_ctx = TreeIntoApiCtxImpl {
|
||||
lazy_members: &mut *lazy_mems.lock().await,
|
||||
sys: ctx,
|
||||
basepath: &[],
|
||||
path: Substack::Bottom,
|
||||
req
|
||||
})
|
||||
.await;
|
||||
(name, value)
|
||||
};
|
||||
(i.i(&mem.name).await.to_api(), mem.kind.into_api(&mut tia_ctx).await)
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
.await;
|
||||
let declfs = cted.inst().dyn_vfs().to_api_rec(&mut vfses, &i).await;
|
||||
let record =
|
||||
SystemRecord { declfs, vfses, ctx, lazy_members: lazy_mems.into_inner() };
|
||||
let vfs = cted.inst().dyn_vfs().to_api_rec(&mut vfses, &i).await;
|
||||
let record = SystemRecord { vfses, ctx, lazy_members: lazy_mems.into_inner() };
|
||||
let systems = systems_weak.upgrade().expect("System constructed during shutdown");
|
||||
systems.lock().await.insert(new_sys.id, record);
|
||||
hand
|
||||
.handle(&new_sys, &api::NewSystemResponse {
|
||||
lex_filter,
|
||||
const_root,
|
||||
line_types: vec![],
|
||||
})
|
||||
.await
|
||||
let response =
|
||||
api::NewSystemResponse { lex_filter, const_root, line_types: vec![], vfs };
|
||||
hand.handle(&new_sys, &response).await
|
||||
},
|
||||
api::HostExtReq::GetMember(get_tree @ api::GetMember(sys_id, tree_id)) => {
|
||||
let sys_ctx = get_ctx(sys_id).await;
|
||||
@@ -248,18 +221,13 @@ pub fn extension_init(
|
||||
};
|
||||
hand.handle(&get_tree, &tree.into_api(&mut tia_ctx).await).await
|
||||
},
|
||||
api::HostExtReq::VfsReq(api::VfsReq::GetVfs(get_vfs @ api::GetVfs(sys_id))) => {
|
||||
let systems = systems_weak.upgrade().expect("VFS root requested during shutdown");
|
||||
let systems_g = systems.lock().await;
|
||||
hand.handle(&get_vfs, &systems_g[&sys_id].declfs).await
|
||||
},
|
||||
api::HostExtReq::SysReq(api::SysReq::SysFwded(fwd)) => {
|
||||
let api::SysFwded(sys_id, payload) = fwd;
|
||||
let ctx = get_ctx(sys_id).await;
|
||||
let sys = ctx.cted().inst();
|
||||
sys.dyn_request(hand, payload).await
|
||||
},
|
||||
api::HostExtReq::VfsReq(api::VfsReq::VfsRead(vfs_read)) => {
|
||||
api::HostExtReq::VfsRead(vfs_read) => {
|
||||
let api::VfsRead(sys_id, vfs_id, path) = &vfs_read;
|
||||
let ctx = get_ctx(*sys_id).await;
|
||||
let systems = systems_weak.upgrade().expect("VFS requested during shutdown");
|
||||
@@ -308,13 +276,18 @@ pub fn extension_init(
|
||||
let parser =
|
||||
parsers.iter().find(|p| p.line_head() == **name).expect("No parser candidate");
|
||||
let module = Sym::from_api(*module, ctx.i()).await;
|
||||
let o_line = match parser.parse(ctx.clone(), module, *exported, comments, tail).await
|
||||
{
|
||||
let pctx = ParsCtx::new(ctx.clone(), module);
|
||||
let o_line = match parser.parse(pctx, *exported, comments, tail).await {
|
||||
Err(e) => Err(e.to_api()),
|
||||
Ok(t) => Ok(ttv_into_api(t, &mut (), &mut (ctx.clone(), &hand)).await),
|
||||
Ok(t) => Ok(linev_into_api(t, ctx.clone(), &hand).await),
|
||||
};
|
||||
hand.handle(&pline, &o_line).await
|
||||
},
|
||||
api::HostExtReq::FetchParsedConst(ref fpc @ api::FetchParsedConst { id, sys }) => {
|
||||
let ctx = get_ctx(sys).await;
|
||||
let cnst = get_const(id, ctx.clone()).await;
|
||||
hand.handle(fpc, &cnst.api_return(ctx, &hand).await).await
|
||||
},
|
||||
api::HostExtReq::AtomReq(atom_req) => {
|
||||
let atom = atom_req.get_atom();
|
||||
let atom_req = atom_req.clone();
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
use std::borrow::Cow;
|
||||
use std::num::NonZero;
|
||||
|
||||
use futures::FutureExt;
|
||||
use futures::future::LocalBoxFuture;
|
||||
use hashbrown::HashMap;
|
||||
use orchid_base::interner::{Interner, Tok};
|
||||
use orchid_base::pure_seq::pushed;
|
||||
|
||||
use crate::api;
|
||||
use crate::system::SysCtx;
|
||||
@@ -16,11 +18,58 @@ pub trait VirtFS: Send + Sync + 'static {
|
||||
) -> LocalBoxFuture<'a, api::OrcResult<api::Loaded>>;
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DeclVmod(Cow<'static, [(&'static str, DeclFs)]>);
|
||||
impl DeclVmod {
|
||||
pub fn new(items: &'static [(&'static str, DeclFs)]) -> DeclVmod {
|
||||
DeclVmod(Cow::Borrowed(items))
|
||||
}
|
||||
pub fn entry(
|
||||
key: &'static str,
|
||||
items: &'static [(&'static str, DeclFs)],
|
||||
) -> (&'static str, DeclVmod) {
|
||||
(key, DeclVmod(Cow::Borrowed(items)))
|
||||
}
|
||||
pub fn merge(&self, other: &Self) -> Result<Self, Vec<&'static str>> {
|
||||
let mut items = Vec::new();
|
||||
for (k, v1) in self.0.iter() {
|
||||
items.push((*k, match other.0.iter().find(|(k2, _)| k == k2) {
|
||||
Some((_, v2)) => v1.merge(v2).map_err(|e| pushed::<_, Vec<_>>(e, *k))?,
|
||||
None => v1.clone(),
|
||||
}));
|
||||
}
|
||||
for (k, v) in other.0.iter() {
|
||||
if !items.iter().any(|(k2, _)| k2 == k) {
|
||||
items.push((*k, v.clone()))
|
||||
}
|
||||
}
|
||||
Ok(Self(Cow::Owned(items)))
|
||||
}
|
||||
pub async fn to_api_rec(
|
||||
&self,
|
||||
vfses: &mut HashMap<api::VfsId, &'static dyn VirtFS>,
|
||||
i: &Interner,
|
||||
) -> std::collections::HashMap<api::TStr, api::EagerVfs> {
|
||||
let mut output = std::collections::HashMap::new();
|
||||
for (k, v) in self.0.iter() {
|
||||
output.insert(i.i::<String>(*k).await.to_api(), v.to_api_rec(vfses, i).boxed_local().await);
|
||||
}
|
||||
output
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum DeclFs {
|
||||
Lazy(&'static dyn VirtFS),
|
||||
Mod(&'static [(&'static str, DeclFs)]),
|
||||
Mod(DeclVmod),
|
||||
}
|
||||
impl DeclFs {
|
||||
pub fn merge(&self, other: &Self) -> Result<Self, Vec<&'static str>> {
|
||||
match (self, other) {
|
||||
(Self::Mod(m1), Self::Mod(m2)) => Ok(Self::Mod(m1.merge(m2)?)),
|
||||
(..) => Err(Vec::new()),
|
||||
}
|
||||
}
|
||||
pub async fn to_api_rec(
|
||||
&self,
|
||||
vfses: &mut HashMap<api::VfsId, &'static dyn VirtFS>,
|
||||
@@ -33,14 +82,7 @@ impl DeclFs {
|
||||
vfses.insert(id, *fs);
|
||||
api::EagerVfs::Lazy(id)
|
||||
},
|
||||
DeclFs::Mod(children) => {
|
||||
let mut output = std::collections::HashMap::new();
|
||||
for (k, v) in children.iter() {
|
||||
output
|
||||
.insert(i.i::<String>(*k).await.to_api(), v.to_api_rec(vfses, i).boxed_local().await);
|
||||
}
|
||||
api::EagerVfs::Eager(output)
|
||||
},
|
||||
DeclFs::Mod(m) => api::EagerVfs::Eager(m.to_api_rec(vfses, i).await),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -141,7 +141,7 @@ mod expr_func_derives {
|
||||
|
||||
macro_rules! expr_func_derive {
|
||||
($arity: tt, $($t:ident),*) => {
|
||||
paste::paste!{
|
||||
pastey::paste!{
|
||||
impl<
|
||||
$($t: TryFromExpr, )*
|
||||
Out: ToExpr,
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use std::fmt;
|
||||
use std::future::Future;
|
||||
use std::ops::RangeInclusive;
|
||||
|
||||
@@ -49,10 +50,15 @@ impl<'a> LexContext<'a> {
|
||||
}
|
||||
|
||||
pub fn pos(&self, tail: &'a str) -> u32 { (self.text.len() - tail.len()) as u32 }
|
||||
|
||||
pub fn tok_ran(&self, len: u32, tail: &'a str) -> SrcRange {
|
||||
SrcRange::new(self.pos(tail) - len..self.pos(tail), &self.src)
|
||||
pub fn pos_tt(&self, tail_with: &'a str, tail_without: &'a str) -> SrcRange {
|
||||
SrcRange::new(self.pos(tail_with)..self.pos(tail_without), &self.src)
|
||||
}
|
||||
|
||||
pub fn pos_lt(&self, len: impl TryInto<u32, Error: fmt::Debug>, tail: &'a str) -> SrcRange {
|
||||
SrcRange::new(self.pos(tail) - len.try_into().unwrap()..self.pos(tail), &self.src)
|
||||
}
|
||||
|
||||
pub fn i(&self) -> &Interner { self.ctx.i() }
|
||||
}
|
||||
|
||||
pub trait Lexer: Send + Sync + Sized + Default + 'static {
|
||||
|
||||
@@ -1,50 +1,179 @@
|
||||
use futures::future::LocalBoxFuture;
|
||||
use std::marker::PhantomData;
|
||||
|
||||
use futures::FutureExt;
|
||||
use futures::future::{LocalBoxFuture, join_all};
|
||||
use itertools::Itertools;
|
||||
use orchid_api::ResolveNames;
|
||||
use orchid_base::error::OrcRes;
|
||||
use orchid_base::id_store::IdStore;
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::location::SrcRange;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::parse::{Comment, Snippet};
|
||||
use orchid_base::reqnot::{ReqHandlish, Requester};
|
||||
use orchid_base::tree::ttv_into_api;
|
||||
|
||||
use crate::api;
|
||||
use crate::expr::Expr;
|
||||
use crate::gen_expr::GExpr;
|
||||
use crate::system::SysCtx;
|
||||
use crate::system::{SysCtx, SysCtxEntry};
|
||||
use crate::tree::GenTokTree;
|
||||
|
||||
pub type GenSnippet<'a> = Snippet<'a, Expr, GExpr>;
|
||||
|
||||
pub trait Parser: Send + Sync + Sized + Default + 'static {
|
||||
const LINE_HEAD: &'static str;
|
||||
fn parse(
|
||||
ctx: SysCtx,
|
||||
module: Sym,
|
||||
fn parse<'a>(
|
||||
ctx: ParsCtx<'a>,
|
||||
exported: bool,
|
||||
comments: Vec<Comment>,
|
||||
line: GenSnippet<'_>,
|
||||
) -> impl Future<Output = OrcRes<Vec<GenTokTree>>> + '_;
|
||||
line: GenSnippet<'a>,
|
||||
) -> impl Future<Output = OrcRes<Vec<ParsedLine>>> + 'a;
|
||||
}
|
||||
|
||||
pub trait DynParser: Send + Sync + 'static {
|
||||
fn line_head(&self) -> &'static str;
|
||||
fn parse<'a>(
|
||||
&self,
|
||||
ctx: SysCtx,
|
||||
module: Sym,
|
||||
ctx: ParsCtx<'a>,
|
||||
exported: bool,
|
||||
comments: Vec<Comment>,
|
||||
line: GenSnippet<'a>,
|
||||
) -> LocalBoxFuture<'a, OrcRes<Vec<GenTokTree>>>;
|
||||
) -> LocalBoxFuture<'a, OrcRes<Vec<ParsedLine>>>;
|
||||
}
|
||||
|
||||
impl<T: Parser> DynParser for T {
|
||||
fn line_head(&self) -> &'static str { Self::LINE_HEAD }
|
||||
fn parse<'a>(
|
||||
&self,
|
||||
ctx: SysCtx,
|
||||
module: Sym,
|
||||
ctx: ParsCtx<'a>,
|
||||
exported: bool,
|
||||
comments: Vec<Comment>,
|
||||
line: GenSnippet<'a>,
|
||||
) -> LocalBoxFuture<'a, OrcRes<Vec<GenTokTree>>> {
|
||||
Box::pin(async move { Self::parse(ctx, module, exported, comments, line).await })
|
||||
) -> LocalBoxFuture<'a, OrcRes<Vec<ParsedLine>>> {
|
||||
Box::pin(async move { Self::parse(ctx, exported, comments, line).await })
|
||||
}
|
||||
}
|
||||
|
||||
pub type ParserObj = &'static dyn DynParser;
|
||||
|
||||
pub struct ParsCtx<'a> {
|
||||
_parse: PhantomData<&'a mut ()>,
|
||||
ctx: SysCtx,
|
||||
module: Sym,
|
||||
}
|
||||
impl ParsCtx<'_> {
|
||||
pub(crate) fn new(ctx: SysCtx, module: Sym) -> Self { Self { _parse: PhantomData, ctx, module } }
|
||||
pub fn ctx(&self) -> &SysCtx { &self.ctx }
|
||||
pub fn module(&self) -> Sym { self.module.clone() }
|
||||
}
|
||||
|
||||
type BoxConstCallback = Box<dyn FnOnce(ConstCtx) -> LocalBoxFuture<'static, GExpr>>;
|
||||
|
||||
#[derive(Default)]
|
||||
struct ParsedConstCtxEntry {
|
||||
consts: IdStore<BoxConstCallback>,
|
||||
}
|
||||
impl SysCtxEntry for ParsedConstCtxEntry {}
|
||||
|
||||
pub struct ParsedLine {
|
||||
pub sr: SrcRange,
|
||||
pub comments: Vec<Comment>,
|
||||
pub kind: ParsedLineKind,
|
||||
}
|
||||
impl ParsedLine {
|
||||
pub async fn into_api(self, ctx: SysCtx, hand: &dyn ReqHandlish) -> api::ParsedLine {
|
||||
api::ParsedLine {
|
||||
comments: self.comments.into_iter().map(|c| c.to_api()).collect(),
|
||||
source_range: self.sr.to_api(),
|
||||
kind: match self.kind {
|
||||
ParsedLineKind::Mem(mem) => api::ParsedLineKind::Member(api::ParsedMember {
|
||||
name: mem.name.to_api(),
|
||||
exported: mem.exported,
|
||||
kind: match mem.kind {
|
||||
ParsedMemKind::Const(cb) => api::ParsedMemberKind::Constant(api::ParsedConstId(
|
||||
ctx.get_or_default::<ParsedConstCtxEntry>().consts.add(cb).id(),
|
||||
)),
|
||||
ParsedMemKind::Mod(plv) =>
|
||||
api::ParsedMemberKind::Module(linev_into_api(plv, ctx, hand).boxed_local().await),
|
||||
},
|
||||
}),
|
||||
ParsedLineKind::Rec(tv) =>
|
||||
api::ParsedLineKind::Recursive(ttv_into_api(tv, &mut (), &mut (ctx, hand)).await),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn linev_into_api(
|
||||
v: Vec<ParsedLine>,
|
||||
ctx: SysCtx,
|
||||
hand: &dyn ReqHandlish,
|
||||
) -> Vec<api::ParsedLine> {
|
||||
join_all(v.into_iter().map(|l| l.into_api(ctx.clone(), hand))).await
|
||||
}
|
||||
|
||||
pub enum ParsedLineKind {
|
||||
Mem(ParsedMem),
|
||||
Rec(Vec<GenTokTree>),
|
||||
}
|
||||
|
||||
pub struct ParsedMem {
|
||||
name: Tok<String>,
|
||||
exported: bool,
|
||||
kind: ParsedMemKind,
|
||||
}
|
||||
|
||||
pub enum ParsedMemKind {
|
||||
Const(BoxConstCallback),
|
||||
Mod(Vec<ParsedLine>),
|
||||
}
|
||||
|
||||
impl ParsedMemKind {
|
||||
pub fn cnst<F: AsyncFnOnce(ConstCtx) -> GExpr + 'static>(f: F) -> Self {
|
||||
Self::Const(Box::new(|ctx| Box::pin(f(ctx))))
|
||||
}
|
||||
}
|
||||
|
||||
/* TODO: how the macro runner uses the multi-stage loader
|
||||
|
||||
Since the macro runner actually has to invoke the interpreter,
|
||||
it'll run at const-time and not at postparse-time anyway.
|
||||
|
||||
parsing stage establishes the role of every constant as a macro keyword
|
||||
postparse / const load links up constants with every macro they can directly invoke
|
||||
the constants representing the keywords might not actually be postparsed,
|
||||
\ the connection is instead made by detecting in the macro system that the
|
||||
\ resolved name is owned by a macro
|
||||
the returned constant from this call is always an entrypoint call to
|
||||
\ the macro system
|
||||
the constants representing the keywords resolve to panic
|
||||
execute relies on these links detected in the extension to dispatch relevant macros
|
||||
*/
|
||||
|
||||
pub struct ConstCtx {
|
||||
ctx: SysCtx,
|
||||
constid: api::ParsedConstId,
|
||||
}
|
||||
impl ConstCtx {
|
||||
pub async fn names<const N: usize>(&self, names: [&Sym; N]) -> [Option<Sym>; N] {
|
||||
let resolve_names = ResolveNames {
|
||||
constid: self.constid,
|
||||
sys: self.ctx.sys_id(),
|
||||
names: names.into_iter().map(|n| n.to_api()).collect_vec(),
|
||||
};
|
||||
let names = self.ctx.reqnot().request(resolve_names).await;
|
||||
let mut results = [const { None }; N];
|
||||
for (i, name) in names.into_iter().enumerate().filter_map(|(i, n)| Some((i, n?))) {
|
||||
results[i] = Some(Sym::from_api(name, self.ctx.i()).await);
|
||||
}
|
||||
results
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn get_const(id: api::ParsedConstId, ctx: SysCtx) -> GExpr {
|
||||
let ent = ctx.get::<ParsedConstCtxEntry>();
|
||||
let rec = ent.consts.get(id.0).expect("Bad ID or double read of parsed const");
|
||||
let ctx = ConstCtx { constid: id, ctx: ctx.clone() };
|
||||
rec.remove()(ctx).await
|
||||
}
|
||||
|
||||
@@ -18,7 +18,7 @@ use orchid_base::reqnot::{Receipt, ReqNot};
|
||||
use crate::api;
|
||||
use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId, AtomicFeatures, ForeignAtom, TypAtom, get_info};
|
||||
use crate::entrypoint::ExtReq;
|
||||
use crate::fs::DeclFs;
|
||||
use crate::fs::DeclVmod;
|
||||
use crate::func_atom::Fun;
|
||||
use crate::lexer::LexerObj;
|
||||
use crate::parser::ParserObj;
|
||||
@@ -83,7 +83,7 @@ impl<T: SystemCard> DynSystemCard for T {
|
||||
/// System as defined by author
|
||||
pub trait System: Send + Sync + SystemCard + 'static {
|
||||
fn env() -> Vec<GenMember>;
|
||||
fn vfs() -> DeclFs;
|
||||
fn vfs() -> DeclVmod;
|
||||
fn lexers() -> Vec<LexerObj>;
|
||||
fn parsers() -> Vec<ParserObj>;
|
||||
fn request(hand: ExtReq<'_>, req: Self::Req) -> impl Future<Output = Receipt<'_>>;
|
||||
@@ -91,7 +91,7 @@ pub trait System: Send + Sync + SystemCard + 'static {
|
||||
|
||||
pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
|
||||
fn dyn_env(&self) -> Vec<GenMember>;
|
||||
fn dyn_vfs(&self) -> DeclFs;
|
||||
fn dyn_vfs(&self) -> DeclVmod;
|
||||
fn dyn_lexers(&self) -> Vec<LexerObj>;
|
||||
fn dyn_parsers(&self) -> Vec<ParserObj>;
|
||||
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>>;
|
||||
@@ -100,7 +100,7 @@ pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
|
||||
|
||||
impl<T: System> DynSystem for T {
|
||||
fn dyn_env(&self) -> Vec<GenMember> { Self::env() }
|
||||
fn dyn_vfs(&self) -> DeclFs { Self::vfs() }
|
||||
fn dyn_vfs(&self) -> DeclVmod { Self::vfs() }
|
||||
fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() }
|
||||
fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() }
|
||||
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>> {
|
||||
|
||||
@@ -90,7 +90,7 @@ impl<T: SystemCtor> DynSystemCtor for T {
|
||||
mod dep_set_tuple_impls {
|
||||
use orchid_base::box_chain;
|
||||
use orchid_base::boxed_iter::BoxedIter;
|
||||
use paste::paste;
|
||||
use pastey::paste;
|
||||
|
||||
use super::{DepDef, DepSat};
|
||||
use crate::api;
|
||||
|
||||
@@ -20,7 +20,7 @@ use crate::conv::ToExpr;
|
||||
use crate::entrypoint::MemberRecord;
|
||||
use crate::expr::{Expr, ExprHandle};
|
||||
use crate::func_atom::{ExprFunc, Fun};
|
||||
use crate::gen_expr::{GExpr, arg, call, lambda, seq};
|
||||
use crate::gen_expr::{GExpr, arg, call, lambda, seq, sym_ref};
|
||||
use crate::system::SysCtx;
|
||||
|
||||
pub type GenTokTree = TokTree<Expr, GExpr>;
|
||||
@@ -65,6 +65,9 @@ impl TokenVariant<api::ExprTicket> for Expr {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn x_tok(x: impl ToExpr) -> GenTok { GenTok::NewExpr(x.to_expr()) }
|
||||
pub fn ref_tok(path: Sym) -> GenTok { GenTok::NewExpr(sym_ref(path)) }
|
||||
|
||||
pub fn cnst(public: bool, name: &str, value: impl ToExpr) -> Vec<GenMember> {
|
||||
vec![GenMember {
|
||||
name: name.to_string(),
|
||||
|
||||
@@ -16,6 +16,7 @@ futures = "0.3.31"
|
||||
hashbrown = "0.15.2"
|
||||
itertools = "0.14.0"
|
||||
lazy_static = "1.5.0"
|
||||
memo-map = "0.3.3"
|
||||
never = "0.1.0"
|
||||
num-traits = "0.2.19"
|
||||
orchid-api = { version = "0.1.0", path = "../orchid-api" }
|
||||
|
||||
@@ -20,10 +20,12 @@ pub struct AtomData {
|
||||
data: Vec<u8>,
|
||||
}
|
||||
impl AtomData {
|
||||
#[must_use]
|
||||
fn api(self) -> api::Atom {
|
||||
let (owner, drop, data) = self.destructure();
|
||||
api::Atom { data, drop, owner: owner.id() }
|
||||
}
|
||||
#[must_use]
|
||||
fn api_ref(&self) -> api::Atom {
|
||||
api::Atom { data: self.data.clone(), drop: self.drop, owner: self.owner.id() }
|
||||
}
|
||||
@@ -48,6 +50,7 @@ impl fmt::Debug for AtomData {
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct AtomHand(Rc<AtomData>);
|
||||
impl AtomHand {
|
||||
#[must_use]
|
||||
pub(crate) async fn new(api::Atom { data, drop, owner }: api::Atom, ctx: &Ctx) -> Self {
|
||||
let create = || async {
|
||||
let owner = ctx.system_inst(owner).await.expect("Dropped system created atom");
|
||||
@@ -67,6 +70,7 @@ impl AtomHand {
|
||||
create().await
|
||||
}
|
||||
}
|
||||
#[must_use]
|
||||
pub async fn call(self, arg: Expr) -> api::Expression {
|
||||
let owner_sys = self.0.owner.clone();
|
||||
let reqnot = owner_sys.reqnot();
|
||||
@@ -76,13 +80,18 @@ impl AtomHand {
|
||||
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), arg.id())).await,
|
||||
}
|
||||
}
|
||||
#[must_use]
|
||||
pub fn sys(&self) -> &System { &self.0.owner }
|
||||
#[must_use]
|
||||
pub fn ext(&self) -> &Extension { self.sys().ext() }
|
||||
pub async fn req(&self, key: api::TStrv, req: Vec<u8>) -> Option<Vec<u8>> {
|
||||
self.0.owner.reqnot().request(api::Fwded(self.0.api_ref(), key, req)).await
|
||||
}
|
||||
#[must_use]
|
||||
pub fn api_ref(&self) -> api::Atom { self.0.api_ref() }
|
||||
#[must_use]
|
||||
pub async fn to_string(&self) -> String { take_first_fmt(self, &self.0.owner.ctx().i).await }
|
||||
#[must_use]
|
||||
pub fn downgrade(&self) -> WeakAtomHand { WeakAtomHand(Rc::downgrade(&self.0)) }
|
||||
}
|
||||
impl Format for AtomHand {
|
||||
@@ -100,5 +109,6 @@ impl AtomRepr for AtomHand {
|
||||
|
||||
pub struct WeakAtomHand(Weak<AtomData>);
|
||||
impl WeakAtomHand {
|
||||
#[must_use]
|
||||
pub fn upgrade(&self) -> Option<AtomHand> { self.0.upgrade().map(AtomHand) }
|
||||
}
|
||||
|
||||
@@ -3,7 +3,6 @@ use std::num::{NonZero, NonZeroU16};
|
||||
use std::rc::{Rc, Weak};
|
||||
use std::{fmt, ops};
|
||||
|
||||
use async_once_cell::OnceCell;
|
||||
use async_std::sync::RwLock;
|
||||
use hashbrown::HashMap;
|
||||
use orchid_api::SysId;
|
||||
@@ -13,17 +12,17 @@ use orchid_base::interner::Interner;
|
||||
use crate::api;
|
||||
use crate::atom::WeakAtomHand;
|
||||
use crate::expr_store::ExprStore;
|
||||
use crate::parsed::Root;
|
||||
use crate::system::{System, WeakSystem};
|
||||
use crate::tree::WeakRoot;
|
||||
|
||||
pub struct CtxData {
|
||||
pub i: Rc<Interner>,
|
||||
pub i: Interner,
|
||||
pub spawn: Spawner,
|
||||
pub systems: RwLock<HashMap<api::SysId, WeakSystem>>,
|
||||
pub system_id: RefCell<NonZeroU16>,
|
||||
pub owned_atoms: RwLock<HashMap<api::AtomId, WeakAtomHand>>,
|
||||
pub common_exprs: ExprStore,
|
||||
pub root: OnceCell<Weak<Root>>,
|
||||
pub root: RwLock<WeakRoot>,
|
||||
}
|
||||
#[derive(Clone)]
|
||||
pub struct Ctx(Rc<CtxData>);
|
||||
@@ -31,30 +30,39 @@ impl ops::Deref for Ctx {
|
||||
type Target = CtxData;
|
||||
fn deref(&self) -> &Self::Target { &self.0 }
|
||||
}
|
||||
#[derive(Clone)]
|
||||
pub struct WeakCtx(Weak<CtxData>);
|
||||
impl WeakCtx {
|
||||
#[must_use]
|
||||
pub fn try_upgrade(&self) -> Option<Ctx> { Some(Ctx(self.0.upgrade()?)) }
|
||||
#[must_use]
|
||||
pub fn upgrade(&self) -> Ctx { self.try_upgrade().expect("Ctx manually kept alive until exit") }
|
||||
}
|
||||
impl Ctx {
|
||||
#[must_use]
|
||||
pub fn new(spawn: Spawner) -> Self {
|
||||
Self(Rc::new(CtxData {
|
||||
spawn,
|
||||
i: Rc::default(),
|
||||
i: Interner::default(),
|
||||
systems: RwLock::default(),
|
||||
system_id: RefCell::new(NonZero::new(1).unwrap()),
|
||||
owned_atoms: RwLock::default(),
|
||||
common_exprs: ExprStore::default(),
|
||||
root: OnceCell::default(),
|
||||
root: RwLock::default(),
|
||||
}))
|
||||
}
|
||||
#[must_use]
|
||||
pub(crate) async fn system_inst(&self, id: api::SysId) -> Option<System> {
|
||||
self.systems.read().await.get(&id).and_then(WeakSystem::upgrade)
|
||||
}
|
||||
#[must_use]
|
||||
pub(crate) fn next_sys_id(&self) -> api::SysId {
|
||||
let mut g = self.system_id.borrow_mut();
|
||||
*g = g.checked_add(1).unwrap_or(NonZeroU16::new(1).unwrap());
|
||||
SysId(*g)
|
||||
}
|
||||
pub async fn set_root(&self, root: Weak<Root>) {
|
||||
assert!(self.root.get().is_none(), "Root already assigned");
|
||||
self.root.get_or_init(async { root }).await;
|
||||
}
|
||||
#[must_use]
|
||||
pub fn downgrade(&self) -> WeakCtx { WeakCtx(Rc::downgrade(&self.0)) }
|
||||
}
|
||||
impl fmt::Debug for Ctx {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
|
||||
@@ -1,16 +1,9 @@
|
||||
use std::collections::VecDeque;
|
||||
|
||||
use futures::FutureExt;
|
||||
use hashbrown::{HashMap, HashSet};
|
||||
use itertools::{Either, Itertools};
|
||||
use hashbrown::HashSet;
|
||||
use itertools::Itertools;
|
||||
use orchid_base::error::{OrcErr, OrcRes, Reporter, mk_err, mk_errv};
|
||||
use orchid_base::interner::{Interner, Tok};
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::name::{NameLike, Sym, VName};
|
||||
use substack::Substack;
|
||||
|
||||
use crate::expr::Expr;
|
||||
use crate::parsed::{ItemKind, ParsedMemberKind, ParsedModule};
|
||||
use orchid_base::name::VName;
|
||||
|
||||
/// Errors produced by absolute_path
|
||||
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
||||
@@ -48,29 +41,24 @@ impl AbsPathError {
|
||||
///
|
||||
/// if the relative path contains at least as many `super` segments as the
|
||||
/// length of the absolute path.
|
||||
pub fn absolute_path(
|
||||
pub async fn absolute_path(
|
||||
mut cwd: &[Tok<String>],
|
||||
mut rel: &[Tok<String>],
|
||||
i: &Interner,
|
||||
) -> Result<VName, AbsPathError> {
|
||||
let mut relative = false;
|
||||
if rel.first().map(|t| t.as_str()) == Some("self") {
|
||||
relative = true;
|
||||
rel = rel.split_first().expect("checked above").1;
|
||||
let i_self = i.i("self").await;
|
||||
let i_super = i.i("super").await;
|
||||
let relative = rel.first().is_some_and(|s| *s != i_self && *s != i_super);
|
||||
if let Some((_, tail)) = rel.split_first().filter(|(h, _)| **h != i_self) {
|
||||
rel = tail;
|
||||
} else {
|
||||
while rel.first().map(|t| t.as_str()) == Some("super") {
|
||||
match cwd.split_last() {
|
||||
Some((_, torso)) => cwd = torso,
|
||||
None => return Err(AbsPathError::TooManySupers),
|
||||
};
|
||||
rel = rel.split_first().expect("checked above").1;
|
||||
relative = true;
|
||||
while let Some((_, tail)) = rel.split_first().filter(|(h, _)| **h == i_super) {
|
||||
cwd = cwd.split_last().ok_or(AbsPathError::TooManySupers)?.1;
|
||||
rel = tail;
|
||||
}
|
||||
}
|
||||
match relative {
|
||||
true => VName::new(cwd.iter().chain(rel).cloned()),
|
||||
false => VName::new(rel.to_vec()),
|
||||
}
|
||||
.map_err(|_| AbsPathError::RootPath)
|
||||
if relative { VName::new(cwd.iter().chain(rel).cloned()) } else { VName::new(rel.to_vec()) }
|
||||
.map_err(|_| AbsPathError::RootPath)
|
||||
}
|
||||
|
||||
pub struct DealiasCtx<'a> {
|
||||
@@ -84,8 +72,7 @@ pub async fn resolv_glob<Mod: Tree>(
|
||||
abs_path: &[Tok<String>],
|
||||
pos: Pos,
|
||||
i: &Interner,
|
||||
rep: &Reporter,
|
||||
ctx: &mut Mod::Ctx,
|
||||
ctx: &mut Mod::Ctx<'_>,
|
||||
) -> OrcRes<HashSet<Tok<String>>> {
|
||||
let coprefix_len = cwd.iter().zip(abs_path).take_while(|(a, b)| a == b).count();
|
||||
let (co_prefix, diff_path) = abs_path.split_at(abs_path.len().min(coprefix_len + 1));
|
||||
@@ -96,35 +83,34 @@ pub async fn resolv_glob<Mod: Tree>(
|
||||
Err(e) => {
|
||||
let path = abs_path[..=coprefix_len + e.pos].iter().join("::");
|
||||
let (tk, msg) = match e.kind {
|
||||
ChildErrorKind::Constant =>
|
||||
(i.i("Invalid import path").await, format!("{path} is a const")),
|
||||
ChildErrorKind::Missing => (i.i("Invalid import path").await, format!("{path} not found")),
|
||||
ChildErrorKind::Private => (i.i("Import inaccessible").await, format!("{path} is private")),
|
||||
ChildErrorKind::Constant => ("Invalid import path", format!("{path} is a const")),
|
||||
ChildErrorKind::Missing => ("Invalid import path", format!("{path} not found")),
|
||||
ChildErrorKind::Private => ("Import inaccessible", format!("{path} is private")),
|
||||
};
|
||||
return Err(mk_errv(tk, msg, [pos.into()]));
|
||||
return Err(mk_errv(i.i(tk).await, msg, [pos]));
|
||||
},
|
||||
};
|
||||
Ok(target_module.children(coprefix_len < abs_path.len()))
|
||||
}
|
||||
|
||||
pub enum ChildResult<'a, T: Tree + ?Sized> {
|
||||
Value(&'a T),
|
||||
Err(ChildErrorKind),
|
||||
Alias(&'a [Tok<String>]),
|
||||
}
|
||||
pub type ChildResult<'a, T> = Result<&'a T, ChildErrorKind>;
|
||||
|
||||
pub trait Tree {
|
||||
type Ctx;
|
||||
type Ctx<'a>;
|
||||
#[must_use]
|
||||
fn children(&self, public_only: bool) -> HashSet<Tok<String>>;
|
||||
#[must_use]
|
||||
fn child(
|
||||
&self,
|
||||
key: Tok<String>,
|
||||
public_only: bool,
|
||||
ctx: &mut Self::Ctx,
|
||||
ctx: &mut Self::Ctx<'_>,
|
||||
) -> impl Future<Output = ChildResult<'_, Self>>;
|
||||
}
|
||||
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
||||
pub enum ChildErrorKind {
|
||||
Missing,
|
||||
/// Only thrown if public_only is true
|
||||
Private,
|
||||
Constant,
|
||||
}
|
||||
@@ -144,42 +130,17 @@ pub struct ChildError {
|
||||
//
|
||||
// caveat: we need to check EVERY IMPORT to ensure that all
|
||||
// errors are raised
|
||||
|
||||
async fn walk_no_access_chk<'a, T: Tree>(
|
||||
root: &'a T,
|
||||
cur: &mut &'a T,
|
||||
path: impl IntoIterator<Item = Tok<String>, IntoIter: DoubleEndedIterator>,
|
||||
ctx: &mut T::Ctx,
|
||||
) -> Result<(), ChildErrorKind> {
|
||||
// this VecDeque is used like a stack to leverage its Extend implementation.
|
||||
let mut path: VecDeque<Tok<String>> = path.into_iter().rev().collect();
|
||||
while let Some(step) = path.pop_back() {
|
||||
match cur.child(step, false, ctx).await {
|
||||
ChildResult::Alias(target) => {
|
||||
path.extend(target.iter().cloned().rev());
|
||||
*cur = root;
|
||||
},
|
||||
ChildResult::Err(e) => return Err(e),
|
||||
ChildResult::Value(v) => *cur = v,
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn walk<'a, T: Tree>(
|
||||
pub async fn walk<'a, T: Tree>(
|
||||
root: &'a T,
|
||||
public_only: bool,
|
||||
path: impl IntoIterator<Item = Tok<String>>,
|
||||
ctx: &mut T::Ctx,
|
||||
ctx: &mut T::Ctx<'_>,
|
||||
) -> Result<&'a T, ChildError> {
|
||||
let mut cur = root;
|
||||
for (i, item) in path.into_iter().enumerate() {
|
||||
match cur.child(item, public_only, ctx).await {
|
||||
ChildResult::Value(v) => cur = v,
|
||||
ChildResult::Ok(v) => cur = v,
|
||||
ChildResult::Err(kind) => return Err(ChildError { pos: i, kind }),
|
||||
ChildResult::Alias(path) => (walk_no_access_chk(root, &mut cur, path.iter().cloned(), ctx)
|
||||
.await)
|
||||
.map_err(|kind| ChildError { kind, pos: i })?,
|
||||
}
|
||||
}
|
||||
Ok(cur)
|
||||
|
||||
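
In walk above, ChildError::pos records the index of the segment that failed, which resolv_glob earlier turns into the offending prefix when formatting its message (offset by the shared prefix length, since its walk starts past it). A tiny illustration of that slicing with hypothetical values:

fn main() {
  let abs_path = ["app", "util", "missing", "leaf"];
  let pos = 2; // the third segment could not be resolved
  assert_eq!(abs_path[..=pos].join("::"), "app::util::missing");
}
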
@@ -10,6 +10,7 @@ use orchid_base::logging::Logger;
|
||||
|
||||
use crate::ctx::Ctx;
|
||||
use crate::expr::{Expr, ExprKind, ExprParseCtx, PathSet, PathSetBuilder, Step};
|
||||
use crate::tree::Root;
|
||||
|
||||
type ExprGuard = Bound<RwLockWriteGuard<'static, ExprKind>, Expr>;
|
||||
|
||||
@@ -36,16 +37,21 @@ pub struct ExecCtx {
|
||||
cur_pos: Pos,
|
||||
did_pop: bool,
|
||||
logger: Logger,
|
||||
root: Root,
|
||||
}
|
||||
impl ExecCtx {
|
||||
pub async fn new(ctx: Ctx, logger: Logger, init: Expr) -> Self {
|
||||
#[must_use]
|
||||
pub async fn new(ctx: Ctx, logger: Logger, root: Root, init: Expr) -> Self {
|
||||
let cur_pos = init.pos();
|
||||
let cur = Bound::async_new(init, |init| init.kind().write()).await;
|
||||
Self { ctx, gas: None, stack: vec![], cur, cur_pos, did_pop: false, logger }
|
||||
Self { ctx, gas: None, stack: vec![], cur, cur_pos, did_pop: false, logger, root }
|
||||
}
|
||||
#[must_use]
|
||||
pub fn remaining_gas(&self) -> u64 { self.gas.expect("queried remaining_gas but no gas was set") }
|
||||
pub fn set_gas(&mut self, gas: Option<u64>) { self.gas = gas }
|
||||
#[must_use]
|
||||
pub fn idle(&self) -> bool { self.did_pop }
|
||||
#[must_use]
|
||||
pub fn result(self) -> ExecResult {
|
||||
if self.idle() {
|
||||
match &*self.cur {
|
||||
@@ -56,15 +62,18 @@ impl ExecCtx {
|
||||
ExecResult::Gas(self)
|
||||
}
|
||||
}
|
||||
#[must_use]
|
||||
pub fn use_gas(&mut self, amount: u64) -> bool {
|
||||
if let Some(gas) = &mut self.gas {
|
||||
*gas -= amount;
|
||||
}
|
||||
self.gas != Some(0)
|
||||
}
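
use_gas above charges the budget with an unchecked subtraction and reports whether evaluation may continue, so callers are expected not to charge more than remains. A hedged sketch (not the crate's code) of a saturating variant that tolerates over-charging:

struct Gas { remaining: Option<u64> }

impl Gas {
  fn use_gas(&mut self, amount: u64) -> bool {
    if let Some(gas) = &mut self.remaining {
      // saturating_sub cannot underflow when a step costs more than is left
      *gas = gas.saturating_sub(amount);
    }
    self.remaining != Some(0)
  }
}

fn main() {
  let mut g = Gas { remaining: Some(3) };
  assert!(g.use_gas(2));   // 1 left, keep going
  assert!(!g.use_gas(5));  // saturates to 0, signals exhaustion
  let mut unlimited = Gas { remaining: None };
  assert!(unlimited.use_gas(100)); // no budget set: always continue
}
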
|
||||
#[must_use]
|
||||
pub async fn try_lock(&self, ex: &Expr) -> ExprGuard {
|
||||
Bound::async_new(ex.clone(), |ex| ex.kind().write()).await
|
||||
}
|
||||
#[must_use]
|
||||
pub async fn unpack_ident(&self, ex: &Expr) -> Expr {
|
||||
match ex.kind().try_write().as_deref_mut() {
|
||||
Some(ExprKind::Identity(ex)) => {
|
||||
@@ -89,14 +98,11 @@ impl ExecCtx {
|
||||
},
|
||||
ExprKind::Seq(a, b) if !self.did_pop => (ExprKind::Seq(a.clone(), b), StackOp::Push(a)),
|
||||
ExprKind::Seq(_, b) => (ExprKind::Identity(b), StackOp::Nop),
|
||||
ExprKind::Const(name) => {
|
||||
let root = (self.ctx.root.get().and_then(|v| v.upgrade()))
|
||||
.expect("Root not assigned before execute call");
|
||||
match root.get_const_value(name, self.cur_pos.clone(), self.ctx.clone()).await {
|
||||
ExprKind::Const(name) =>
|
||||
match self.root.get_const_value(name, self.cur_pos.clone()).await {
|
||||
Err(e) => (ExprKind::Bottom(e), StackOp::Pop),
|
||||
Ok(v) => (ExprKind::Identity(v), StackOp::Nop),
|
||||
}
|
||||
},
|
||||
},
|
||||
ExprKind::Arg => panic!("This should not appear outside function bodies"),
|
||||
ek @ ExprKind::Atom(_) => (ek, StackOp::Pop),
|
||||
ExprKind::Bottom(bot) => (ExprKind::Bottom(bot.clone()), StackOp::Unwind(bot)),
|
||||
@@ -105,7 +111,7 @@ impl ExecCtx {
|
||||
Ok(atom) => {
|
||||
let ext = atom.sys().ext().clone();
|
||||
let x_norm = self.unpack_ident(&x).await;
|
||||
let mut parse_ctx = ExprParseCtx { ctx: self.ctx.clone(), exprs: ext.exprs().clone() };
|
||||
let mut parse_ctx = ExprParseCtx { ctx: &self.ctx, exprs: ext.exprs() };
|
||||
let val =
|
||||
Expr::from_api(&atom.call(x_norm).await, PathSetBuilder::new(), &mut parse_ctx).await;
|
||||
(ExprKind::Identity(val.clone()), StackOp::Swap(val))
|
||||
@@ -117,12 +123,10 @@ impl ExecCtx {
|
||||
ExprKind::Atom(a) => {
|
||||
let ext = a.sys().ext().clone();
|
||||
let x_norm = self.unpack_ident(&x).await;
|
||||
let mut parse_ctx =
|
||||
ExprParseCtx { ctx: ext.ctx().clone(), exprs: ext.exprs().clone() };
|
||||
let val = Expr::from_api(
|
||||
&a.clone().call(x_norm).await,
|
||||
PathSetBuilder::new(),
|
||||
&mut parse_ctx,
|
||||
&mut ExprParseCtx { ctx: ext.ctx(), exprs: ext.exprs() },
|
||||
)
|
||||
.await;
|
||||
(ExprKind::Identity(val.clone()), StackOp::Swap(val))
|
||||
@@ -168,6 +172,7 @@ impl ExecCtx {
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
async fn substitute(
|
||||
src: &Expr,
|
||||
path: &[Step],
|
||||
|
||||
@@ -10,10 +10,11 @@ use hashbrown::HashSet;
|
||||
use itertools::Itertools;
|
||||
use orchid_base::error::OrcErrv;
|
||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::interner::Interner;
|
||||
use orchid_base::location::{Pos, SrcRange};
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::tl_cache;
|
||||
use orchid_base::tree::{AtomRepr, indent};
|
||||
use orchid_base::tree::{AtomRepr, TokenVariant, indent};
|
||||
use substack::Substack;
|
||||
|
||||
use crate::api;
|
||||
@@ -22,9 +23,9 @@ use crate::ctx::Ctx;
|
||||
use crate::expr_store::ExprStore;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ExprParseCtx {
|
||||
pub ctx: Ctx,
|
||||
pub exprs: ExprStore,
|
||||
pub struct ExprParseCtx<'a> {
|
||||
pub ctx: &'a Ctx,
|
||||
pub exprs: &'a ExprStore,
|
||||
}
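
ExprParseCtx now borrows its host context and expression store for the duration of a call instead of cloning reference-counted handles. A minimal stand-in showing the borrowed-context pattern (field types are placeholders):

struct Ctx;
struct ExprStore;

struct ExprParseCtx<'a> { ctx: &'a Ctx, exprs: &'a ExprStore }

fn parse_one(ctx: &Ctx, exprs: &ExprStore) {
  // Built on the stack per call site, e.g. ExprParseCtx { ctx: &self.ctx, exprs: ext.exprs() }
  // in execute.rs above; nothing is cloned and nothing outlives the call.
  let mut pctx = ExprParseCtx { ctx, exprs };
  let _ = &mut pctx;
}

fn main() { parse_one(&Ctx, &ExprStore); }
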
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -36,6 +37,7 @@ pub struct ExprData {
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Expr(Rc<ExprData>);
|
||||
impl Expr {
|
||||
#[must_use]
|
||||
pub fn pos(&self) -> Pos { self.0.pos.clone() }
|
||||
pub async fn try_into_owned_atom(self) -> Result<AtomHand, Self> {
|
||||
match Rc::try_unwrap(self.0) {
|
||||
@@ -46,25 +48,29 @@ impl Expr {
|
||||
},
|
||||
}
|
||||
}
|
||||
#[must_use]
|
||||
pub async fn as_atom(&self) -> Option<AtomHand> {
|
||||
if let ExprKind::Atom(a) = &*self.kind().read().await { Some(a.clone()) } else { None }
|
||||
}
|
||||
#[must_use]
|
||||
pub fn strong_count(&self) -> usize { Rc::strong_count(&self.0) }
|
||||
#[must_use]
|
||||
pub fn id(&self) -> api::ExprTicket {
|
||||
api::ExprTicket(
|
||||
NonZeroU64::new(self.0.as_ref() as *const ExprData as usize as u64)
|
||||
.expect("this is a ref, it cannot be null"),
|
||||
)
|
||||
}
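
Expr::id above derives the wire ticket from the address of the shared ExprData allocation, so every clone of one expression maps to the same ticket while distinct allocations never collide. A small sketch of the same identity scheme on a toy Rc (names hypothetical):

use std::num::NonZeroU64;
use std::rc::Rc;

struct Node(u32);

fn id(node: &Rc<Node>) -> NonZeroU64 {
  // A live allocation is never at address zero.
  NonZeroU64::new(Rc::as_ptr(node) as usize as u64).expect("null Rc allocation")
}

fn main() {
  let a = Rc::new(Node(1));
  let b = a.clone();
  let c = Rc::new(Node(1));
  assert_eq!(id(&a), id(&b)); // clones share the allocation, hence the id
  assert_ne!(id(&a), id(&c)); // equal contents, different allocation
}
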
|
||||
#[must_use]
|
||||
pub async fn from_api(
|
||||
api: &api::Expression,
|
||||
psb: PathSetBuilder<'_, u64>,
|
||||
ctx: &mut ExprParseCtx,
|
||||
ctx: &mut ExprParseCtx<'_>,
|
||||
) -> Self {
|
||||
let pos = Pos::from_api(&api.location, &ctx.ctx.i).await;
|
||||
let kind = match &api.kind {
|
||||
api::ExpressionKind::Arg(n) => {
|
||||
assert!(psb.register_arg(&n), "Arguments must be enclosed in a matching lambda");
|
||||
assert!(psb.register_arg(n), "Arguments must be enclosed in a matching lambda");
|
||||
ExprKind::Arg
|
||||
},
|
||||
api::ExpressionKind::Bottom(bot) =>
|
||||
@@ -72,14 +78,14 @@ impl Expr {
|
||||
api::ExpressionKind::Call(f, x) => {
|
||||
let (lpsb, rpsb) = psb.split();
|
||||
ExprKind::Call(
|
||||
Expr::from_api(&f, lpsb, ctx).boxed_local().await,
|
||||
Expr::from_api(&x, rpsb, ctx).boxed_local().await,
|
||||
Expr::from_api(f, lpsb, ctx).boxed_local().await,
|
||||
Expr::from_api(x, rpsb, ctx).boxed_local().await,
|
||||
)
|
||||
},
|
||||
api::ExpressionKind::Const(name) => ExprKind::Const(Sym::from_api(*name, &ctx.ctx.i).await),
|
||||
api::ExpressionKind::Lambda(x, body) => {
|
||||
let lbuilder = psb.lambda(&x);
|
||||
let body = Expr::from_api(&body, lbuilder.stack(), ctx).boxed_local().await;
|
||||
let lbuilder = psb.lambda(x);
|
||||
let body = Expr::from_api(body, lbuilder.stack(), ctx).boxed_local().await;
|
||||
ExprKind::Lambda(lbuilder.collect(), body)
|
||||
},
|
||||
api::ExpressionKind::NewAtom(a) =>
|
||||
@@ -88,13 +94,14 @@ impl Expr {
|
||||
api::ExpressionKind::Seq(a, b) => {
|
||||
let (apsb, bpsb) = psb.split();
|
||||
ExprKind::Seq(
|
||||
Expr::from_api(&a, apsb, ctx).boxed_local().await,
|
||||
Expr::from_api(&b, bpsb, ctx).boxed_local().await,
|
||||
Expr::from_api(a, apsb, ctx).boxed_local().await,
|
||||
Expr::from_api(b, bpsb, ctx).boxed_local().await,
|
||||
)
|
||||
},
|
||||
};
|
||||
Self(Rc::new(ExprData { pos, kind: RwLock::new(kind) }))
|
||||
}
|
||||
#[must_use]
|
||||
pub async fn to_api(&self) -> api::InspectedKind {
|
||||
use api::InspectedKind as K;
|
||||
match &*self.0.kind.read().await {
|
||||
@@ -104,6 +111,7 @@ impl Expr {
|
||||
_ => K::Opaque,
|
||||
}
|
||||
}
|
||||
#[must_use]
|
||||
pub fn kind(&self) -> &RwLock<ExprKind> { &self.0.kind }
|
||||
}
|
||||
impl Format for Expr {
|
||||
@@ -139,6 +147,7 @@ pub enum ExprKind {
|
||||
Missing,
|
||||
}
|
||||
impl ExprKind {
|
||||
#[must_use]
|
||||
pub fn at(self, pos: Pos) -> Expr { Expr(Rc::new(ExprData { pos, kind: RwLock::new(self) })) }
|
||||
}
|
||||
impl Format for ExprKind {
|
||||
@@ -198,13 +207,16 @@ pub enum PathSetFrame<'a, T: PartialEq> {
|
||||
#[derive(Clone)]
|
||||
pub struct PathSetBuilder<'a, T: PartialEq>(Substack<'a, PathSetFrame<'a, T>>);
|
||||
impl<'a, T: PartialEq> PathSetBuilder<'a, T> {
|
||||
#[must_use]
|
||||
pub fn new() -> Self { Self(Substack::Bottom) }
|
||||
#[must_use]
|
||||
pub fn split(&'a self) -> (Self, Self) {
|
||||
(
|
||||
Self(self.0.push(PathSetFrame::Step(Step::Left))),
|
||||
Self(self.0.push(PathSetFrame::Step(Step::Right))),
|
||||
)
|
||||
}
|
||||
#[must_use]
|
||||
pub fn lambda<'b>(self, arg: &'b T) -> LambdaBuilder<'b, T>
|
||||
where 'a: 'b {
|
||||
LambdaBuilder { arg, path: RefCell::default(), stack: self }
|
||||
@@ -264,15 +276,21 @@ impl<'a, T: PartialEq> PathSetBuilder<'a, T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: PartialEq> Default for PathSetBuilder<'a, T> {
|
||||
fn default() -> Self { Self::new() }
|
||||
}
|
||||
|
||||
pub struct LambdaBuilder<'a, T: PartialEq> {
|
||||
arg: &'a T,
|
||||
path: RefCell<Option<PathSet>>,
|
||||
stack: PathSetBuilder<'a, T>,
|
||||
}
|
||||
impl<'a, T: PartialEq> LambdaBuilder<'a, T> {
|
||||
#[must_use]
|
||||
pub fn stack(&'a self) -> PathSetBuilder<'a, T> {
|
||||
PathSetBuilder(self.stack.0.push(PathSetFrame::Lambda(self.arg, &self.path)))
|
||||
}
|
||||
#[must_use]
|
||||
pub fn collect(self) -> Option<PathSet> { self.path.into_inner() }
|
||||
}
|
||||
|
||||
@@ -285,6 +303,7 @@ pub struct PathSet {
|
||||
pub next: Option<(Box<PathSet>, Box<PathSet>)>,
|
||||
}
|
||||
impl PathSet {
|
||||
#[must_use]
|
||||
pub fn next(&self) -> Option<(&PathSet, &PathSet)> {
|
||||
self.next.as_ref().map(|(l, r)| (&**l, &**r))
|
||||
}
|
||||
@@ -305,6 +324,7 @@ impl fmt::Display for PathSet {
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn bot_expr(err: impl Into<OrcErrv>) -> Expr {
|
||||
let errv: OrcErrv = err.into();
|
||||
let pos = errv.pos_iter().next().map_or(Pos::None, |ep| ep.position.clone());
|
||||
@@ -313,5 +333,44 @@ pub fn bot_expr(err: impl Into<OrcErrv>) -> Expr {
|
||||
|
||||
pub struct WeakExpr(Weak<ExprData>);
|
||||
impl WeakExpr {
|
||||
#[must_use]
|
||||
pub fn upgrade(&self) -> Option<Expr> { self.0.upgrade().map(Expr) }
|
||||
}
|
||||
|
||||
impl TokenVariant<api::ExprTicket> for Expr {
|
||||
type FromApiCtx<'a> = ExprStore;
|
||||
async fn from_api(
|
||||
api: &api::ExprTicket,
|
||||
ctx: &mut Self::FromApiCtx<'_>,
|
||||
_: SrcRange,
|
||||
_: &Interner,
|
||||
) -> Self {
|
||||
ctx.get_expr(*api).expect("Invalid ticket")
|
||||
}
|
||||
type ToApiCtx<'a> = ExprStore;
|
||||
async fn into_api(self, ctx: &mut Self::ToApiCtx<'_>) -> api::ExprTicket {
|
||||
let id = self.id();
|
||||
ctx.give_expr(self);
|
||||
id
|
||||
}
|
||||
}
|
||||
|
||||
/// Acknowledgment that expr serialization is impossible and thus will panic.
|
||||
#[derive(Debug, Clone, Copy, Default)]
|
||||
pub struct ExprWillPanic;
|
||||
|
||||
impl TokenVariant<api::Expression> for Expr {
|
||||
type FromApiCtx<'a> = ExprParseCtx<'a>;
|
||||
async fn from_api(
|
||||
api: &api::Expression,
|
||||
ctx: &mut Self::FromApiCtx<'_>,
|
||||
_: SrcRange,
|
||||
_: &Interner,
|
||||
) -> Self {
|
||||
Self::from_api(api, PathSetBuilder::new(), ctx).await
|
||||
}
|
||||
type ToApiCtx<'a> = ExprWillPanic;
|
||||
async fn into_api(self, ExprWillPanic: &mut Self::ToApiCtx<'_>) -> api::Expression {
|
||||
panic!("Cannot serialize expr!")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,6 +16,7 @@ pub struct ExprStoreData {
|
||||
#[derive(Clone, Default)]
|
||||
pub struct ExprStore(Rc<ExprStoreData>);
|
||||
impl ExprStore {
|
||||
#[must_use]
|
||||
pub fn derive(&self) -> Self {
|
||||
Self(Rc::new(ExprStoreData { exprs: RefCell::default(), parent: Some(self.clone()) }))
|
||||
}
|
||||
@@ -31,6 +32,7 @@ impl ExprStore {
|
||||
(self.0.exprs.borrow_mut().entry(ticket))
|
||||
.and_replace_entry_with(|_, (rc, rt)| (1 < rc).then_some((rc - 1, rt)));
|
||||
}
|
||||
#[must_use]
|
||||
pub fn get_expr(&self, ticket: api::ExprTicket) -> Option<Expr> {
|
||||
(self.0.exprs.borrow().get(&ticket).map(|(_, expr)| expr.clone()))
|
||||
.or_else(|| self.0.parent.as_ref()?.get_expr(ticket))
|
||||
|
||||
@@ -13,7 +13,6 @@ use futures::future::{join, join_all};
|
||||
use futures::{StreamExt, stream};
|
||||
use hashbrown::HashMap;
|
||||
use itertools::Itertools;
|
||||
use orchid_api::{HostMsgSet, LsModule};
|
||||
use orchid_api_traits::Request;
|
||||
use orchid_base::builtin::ExtInit;
|
||||
use orchid_base::clone;
|
||||
@@ -26,8 +25,10 @@ use orchid_base::reqnot::{DynRequester, ReqNot, Requester as _};
|
||||
use crate::api;
|
||||
use crate::atom::AtomHand;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::dealias::{ChildError, ChildErrorKind, walk};
|
||||
use crate::expr_store::ExprStore;
|
||||
use crate::system::SystemCtor;
|
||||
use crate::tree::MemberKind;
|
||||
|
||||
pub struct ReqPair<R: Request>(R, Sender<R::Response>);
|
||||
|
||||
@@ -38,7 +39,6 @@ pub struct ReqPair<R: Request>(R, Sender<R::Response>);
|
||||
#[derive(destructure)]
|
||||
pub struct ExtensionData {
|
||||
ctx: Ctx,
|
||||
init: Rc<ExtInit>,
|
||||
reqnot: ReqNot<api::HostMsgSet>,
|
||||
systems: Vec<SystemCtor>,
|
||||
logger: Logger,
|
||||
@@ -85,7 +85,6 @@ impl Extension {
|
||||
.map(|decl| SystemCtor { decl, ext: WeakExtension(weak.clone()) })
|
||||
.collect(),
|
||||
logger: logger.clone(),
|
||||
init: init.clone(),
|
||||
next_pars: RefCell::new(NonZeroU64::new(1).unwrap()),
|
||||
lex_recur: Mutex::default(),
|
||||
reqnot: ReqNot::new(
|
||||
@@ -168,8 +167,52 @@ impl Extension {
|
||||
})
|
||||
.await
|
||||
},
|
||||
api::ExtHostReq::LsModule(ref ls @ LsModule(ref sys, ref path)) => {
|
||||
todo!() // TODO
|
||||
api::ExtHostReq::LsModule(ref ls @ api::LsModule(_sys, path)) => {
|
||||
let reply: <api::LsModule as Request>::Response = 'reply: {
|
||||
let path = i.ex(path).await;
|
||||
let root = (ctx.root.read().await.upgrade())
|
||||
.expect("LSModule called when root isn't in context");
|
||||
let root_data = &mut *root.0.write().await;
|
||||
let mut walk_ctx = (ctx.clone(), &mut root_data.consts);
|
||||
let module =
|
||||
match walk(&root_data.root, false, path.iter().cloned(), &mut walk_ctx)
|
||||
.await
|
||||
{
|
||||
Ok(module) => module,
|
||||
Err(ChildError { kind, .. }) =>
|
||||
break 'reply Err(match kind {
|
||||
ChildErrorKind::Private => panic!("Access checking was disabled"),
|
||||
ChildErrorKind::Constant => api::LsModuleError::IsConstant,
|
||||
ChildErrorKind::Missing => api::LsModuleError::InvalidPath,
|
||||
}),
|
||||
};
|
||||
let mut members = std::collections::HashMap::new();
|
||||
for (k, v) in &module.members {
|
||||
let kind = match v.kind(ctx.clone(), &mut root_data.consts).await {
|
||||
MemberKind::Const => api::MemberInfoKind::Constant,
|
||||
MemberKind::Module(_) => api::MemberInfoKind::Module,
|
||||
};
|
||||
members.insert(k.to_api(), api::MemberInfo { public: v.public, kind });
|
||||
}
|
||||
Ok(api::ModuleInfo { members })
|
||||
};
|
||||
hand.handle(ls, &reply).await
|
||||
},
|
||||
api::ExtHostReq::ResolveNames(ref rn) => {
|
||||
let api::ResolveNames { constid, names, sys } = rn;
|
||||
let mut resolver = {
|
||||
let systems = ctx.systems.read().await;
|
||||
let weak_sys = systems.get(sys).expect("ResolveNames for invalid sys");
|
||||
let sys = weak_sys.upgrade().expect("ResolveNames after sys drop");
|
||||
sys.name_resolver(*constid).await
|
||||
};
|
||||
let mut responses = vec![const { None }; names.len()];
|
||||
for (i, name) in names.iter().enumerate() {
|
||||
if let Some(abs) = resolver(&ctx.i.ex(*name).await[..]).await {
|
||||
responses[i] = Some(abs.to_sym(&ctx.i).await.to_api())
|
||||
}
|
||||
}
|
||||
hand.handle(rn, &responses).await
|
||||
},
|
||||
api::ExtHostReq::ExtAtomPrint(ref eap @ api::ExtAtomPrint(ref atom)) => {
|
||||
let atom = AtomHand::new(atom.clone(), &ctx).await;
|
||||
@@ -184,11 +227,16 @@ impl Extension {
|
||||
}
|
||||
})))
|
||||
}
|
||||
pub(crate) fn reqnot(&self) -> &ReqNot<HostMsgSet> { &self.0.reqnot }
|
||||
#[must_use]
|
||||
pub(crate) fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { &self.0.reqnot }
|
||||
#[must_use]
|
||||
pub fn ctx(&self) -> &Ctx { &self.0.ctx }
|
||||
#[must_use]
|
||||
pub fn logger(&self) -> &Logger { &self.0.logger }
|
||||
pub fn system_ctors(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() }
|
||||
#[must_use]
|
||||
pub fn exprs(&self) -> &ExprStore { &self.0.exprs }
|
||||
#[must_use]
|
||||
pub async fn is_own_sys(&self, id: api::SysId) -> bool {
|
||||
let sys = self.ctx().system_inst(id).await.expect("invalid sender sys id");
|
||||
Rc::ptr_eq(&self.0, &sys.ext().0)
|
||||
@@ -196,6 +244,7 @@ impl Extension {
|
||||
pub async fn assert_own_sys(&self, id: api::SysId) {
|
||||
assert!(self.is_own_sys(id).await, "Incoming message impersonates separate system");
|
||||
}
|
||||
#[must_use]
|
||||
pub fn next_pars(&self) -> NonZeroU64 {
|
||||
let mut next_pars = self.0.next_pars.borrow_mut();
|
||||
*next_pars = next_pars.checked_add(1).unwrap_or(NonZeroU64::new(1).unwrap());
|
||||
@@ -240,10 +289,12 @@ impl Extension {
|
||||
rc.ctx().systems.write().await.remove(&id);
|
||||
}))
|
||||
}
|
||||
#[must_use]
|
||||
pub fn downgrade(&self) -> WeakExtension { WeakExtension(Rc::downgrade(&self.0)) }
|
||||
}
|
||||
|
||||
pub struct WeakExtension(Weak<ExtensionData>);
|
||||
impl WeakExtension {
|
||||
#[must_use]
|
||||
pub fn upgrade(&self) -> Option<Extension> { self.0.upgrade().map(Extension) }
|
||||
}
|
||||
|
||||
93
orchid-host/src/fs.rs
Normal file
@@ -0,0 +1,93 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use async_std::path::PathBuf;
|
||||
use async_stream::stream;
|
||||
use futures::{FutureExt, StreamExt};
|
||||
use hashbrown::HashMap;
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::pure_seq::pushed;
|
||||
|
||||
use crate::api;
|
||||
use crate::system::System;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DeclMod(Rc<HashMap<Tok<String>, DeclFS>>);
|
||||
impl DeclMod {
|
||||
pub async fn from_api(
|
||||
api: &std::collections::HashMap<api::TStr, api::EagerVfs>,
|
||||
owner: System,
|
||||
) -> Self {
|
||||
let item_stream = stream! {
|
||||
for (key, value) in api {
|
||||
yield (
|
||||
owner.i().ex(*key).await,
|
||||
DeclFS::from_api(value, owner.clone()).boxed_local().await
|
||||
)
|
||||
}
|
||||
};
|
||||
Self(Rc::new(item_stream.collect().await))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum DeclFS {
|
||||
Lazy(System, api::VfsId),
|
||||
Eager(DeclMod),
|
||||
Path(PathBuf),
|
||||
}
|
||||
impl DeclFS {
|
||||
pub async fn from_api(api: &api::EagerVfs, owner: System) -> Self {
|
||||
match api {
|
||||
api::EagerVfs::Eager(items) => Self::Eager(DeclMod::from_api(items, owner.clone()).await),
|
||||
api::EagerVfs::Lazy(id) => Self::Lazy(owner.clone(), *id),
|
||||
}
|
||||
}
|
||||
pub fn merge(&self, other: &Self) -> Result<Self, Vec<Tok<String>>> {
|
||||
let (Self::Eager(m1), Self::Eager(m2)) = (self, other) else { return Err(Vec::new()) };
|
||||
let mut mix = m1.0.iter().map(|(k, v)| (k.clone(), v.clone())).collect::<HashMap<_, _>>();
|
||||
for (key, value) in m2.0.iter() {
|
||||
match mix.entry(key.clone()) {
|
||||
hashbrown::hash_map::Entry::Vacant(ent) => {
|
||||
ent.insert(value.clone());
|
||||
},
|
||||
hashbrown::hash_map::Entry::Occupied(mut ent) => match ent.get().merge(value) {
|
||||
Err(e) => return Err(pushed(e, key.clone())),
|
||||
Ok(new) => {
|
||||
ent.insert(new);
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
Ok(Self::Eager(DeclMod(Rc::new(mix))))
|
||||
}
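
merge above combines two eager trees recursively and reports the first conflicting path, innermost segment first (gather_fs below reverses it when building the Sym). A simplified analogue with String keys and a Leaf standing in for the Lazy/Path variants:

use std::collections::HashMap;
use std::collections::hash_map::Entry;

#[derive(Clone, Debug)]
enum Vfs { Dir(HashMap<String, Vfs>), Leaf }

fn merge(a: &Vfs, b: &Vfs) -> Result<Vfs, Vec<String>> {
  // Anything but Dir + Dir is a conflict at the current (empty) path.
  let (Vfs::Dir(m1), Vfs::Dir(m2)) = (a, b) else { return Err(Vec::new()) };
  let mut mix = m1.clone();
  for (key, value) in m2 {
    match mix.entry(key.clone()) {
      Entry::Vacant(slot) => { slot.insert(value.clone()); },
      Entry::Occupied(mut slot) => match merge(slot.get(), value) {
        Ok(merged) => { slot.insert(merged); },
        Err(mut path) => { path.push(key.clone()); return Err(path) },
      },
    }
  }
  Ok(Vfs::Dir(mix))
}

fn main() {
  let std_tree = Vfs::Dir(HashMap::from([(
    "std".to_string(),
    Vfs::Dir(HashMap::from([("list".to_string(), Vfs::Leaf)])),
  )]));
  let io_tree = Vfs::Dir(HashMap::from([(
    "std".to_string(),
    Vfs::Dir(HashMap::from([("io".to_string(), Vfs::Leaf)])),
  )]));
  assert!(merge(&std_tree, &io_tree).is_ok()); // disjoint leaves under a shared directory
  let clash = Vfs::Dir(HashMap::from([("std".to_string(), Vfs::Leaf)]));
  assert_eq!(merge(&std_tree, &clash).unwrap_err(), vec!["std".to_string()]);
}
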
|
||||
/// Walk through the eager fs. Returns when the path ends, when it hits anything
/// other than an eager node, or when the path is invalid.
pub fn walk<'a, 'b>(&'a self, path: &'b [Tok<String>]) -> (&'a DeclFS, &'b [Tok<String>]) {
let mut cur = self;
for (i, step) in path.iter().enumerate() {
// Advance `cur`; stop at the first non-eager node or unknown step.
match cur {
fs @ (DeclFS::Path(_) | DeclFS::Lazy(..)) => return (fs, &path[i..]),
fs @ DeclFS::Eager(m) => match m.0.get(step) {
None => return (fs, &path[i..]),
Some(next) => cur = next,
},
}
}
(cur, &[])
}
|
||||
}
|
||||
|
||||
pub async fn load_code(fs: &DeclFS, systems: &[System], path: &[Tok<String>]) -> OrcRes<Parsed
|
||||
|
||||
pub async fn gather_fs(systems: &[System]) -> Result<DeclFS, Sym> {
|
||||
let (head, tail) = systems.split_first().expect("Empty system list");
|
||||
let mut vfs = head.vfs().await;
|
||||
for sys in tail {
|
||||
match vfs.merge(&sys.vfs().await) {
|
||||
Err(e) => return Err(Sym::new(e.iter().rev().cloned(), head.i()).await.unwrap()),
|
||||
Ok(next) => vfs = next,
|
||||
}
|
||||
}
|
||||
Ok(vfs)
|
||||
}
|
||||
@@ -12,7 +12,7 @@ use orchid_base::tree::recur;
|
||||
|
||||
use crate::api;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::expr::{Expr, ExprParseCtx};
|
||||
use crate::expr::{Expr, ExprParseCtx, ExprWillPanic};
|
||||
use crate::parsed::{ParsTok, ParsTokTree};
|
||||
use crate::system::System;
|
||||
|
||||
@@ -25,6 +25,7 @@ pub struct LexCtx<'a> {
|
||||
pub ctx: &'a Ctx,
|
||||
}
|
||||
impl<'a> LexCtx<'a> {
|
||||
#[must_use]
|
||||
pub fn push<'b>(&'b mut self, pos: u32) -> LexCtx<'b>
|
||||
where 'a: 'b {
|
||||
LexCtx {
|
||||
@@ -36,11 +37,14 @@ impl<'a> LexCtx<'a> {
|
||||
ctx: self.ctx,
|
||||
}
|
||||
}
|
||||
#[must_use]
|
||||
pub fn get_pos(&self) -> u32 { self.end_pos() - self.tail.len() as u32 }
|
||||
#[must_use]
|
||||
pub fn end_pos(&self) -> u32 { self.source.len() as u32 }
|
||||
pub fn set_pos(&mut self, pos: u32) { self.tail = &self.source[pos as usize..] }
|
||||
pub fn push_pos(&mut self, delta: u32) { self.set_pos(self.get_pos() + delta) }
|
||||
pub fn set_tail(&mut self, tail: &'a str) { self.tail = tail }
|
||||
#[must_use]
|
||||
pub fn strip_prefix(&mut self, tgt: &str) -> bool {
|
||||
if let Some(src) = self.tail.strip_prefix(tgt) {
|
||||
self.tail = src;
|
||||
@@ -48,26 +52,29 @@ impl<'a> LexCtx<'a> {
|
||||
}
|
||||
false
|
||||
}
|
||||
#[must_use]
|
||||
pub async fn ser_subtree(&mut self, subtree: ParsTokTree) -> api::TokenTree {
|
||||
let mut exprs = self.ctx.common_exprs.clone();
|
||||
let foo = recur(subtree, &|tt, r| {
|
||||
let without_new_expr = recur(subtree, &|tt, r| {
|
||||
if let ParsTok::NewExpr(expr) = tt.tok {
|
||||
return ParsTok::Handle(expr).at(tt.sr);
|
||||
}
|
||||
r(tt)
|
||||
});
|
||||
foo.into_api(&mut exprs, &mut ()).await
|
||||
without_new_expr.into_api(&mut exprs, &mut ExprWillPanic).await
|
||||
}
|
||||
#[must_use]
|
||||
pub async fn des_subtree(&mut self, tree: &api::TokenTree) -> ParsTokTree {
|
||||
ParsTokTree::from_api(
|
||||
&tree,
|
||||
tree,
|
||||
&mut self.ctx.common_exprs.clone(),
|
||||
&mut ExprParseCtx { ctx: self.ctx.clone(), exprs: self.ctx.common_exprs.clone() },
|
||||
&mut ExprParseCtx { ctx: self.ctx, exprs: &self.ctx.common_exprs },
|
||||
self.path,
|
||||
&self.ctx.i,
|
||||
)
|
||||
.await
|
||||
}
|
||||
#[must_use]
|
||||
pub fn strip_char(&mut self, tgt: char) -> bool {
|
||||
if let Some(src) = self.tail.strip_prefix(tgt) {
|
||||
self.tail = src;
|
||||
@@ -79,6 +86,7 @@ impl<'a> LexCtx<'a> {
|
||||
self.tail = self.tail.trim_start_matches(filter);
|
||||
}
|
||||
pub fn trim_ws(&mut self) { self.trim(|c| c.is_whitespace() && !"\r\n".contains(c)) }
|
||||
#[must_use]
|
||||
pub fn get_start_matches(&mut self, filter: impl Fn(char) -> bool) -> &'a str {
|
||||
let rest = self.tail.trim_start_matches(filter);
|
||||
let matches = &self.tail[..self.tail.len() - rest.len()];
|
||||
@@ -100,6 +108,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
||||
.and_then(|t| t.trim_start_matches(name_char).strip_prefix("::"))
|
||||
{
|
||||
let name = &ctx.tail[..ctx.tail.len() - tail.len() - "::".len()];
|
||||
ctx.set_tail(tail);
|
||||
let body = lex_once(ctx).boxed_local().await?;
|
||||
ParsTok::NS(ctx.ctx.i.i(name).await, Box::new(body))
|
||||
} else if ctx.strip_prefix("--[") {
|
||||
@@ -107,7 +116,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
||||
return Err(mk_errv(
|
||||
ctx.ctx.i.i("Unterminated block comment").await,
|
||||
"This block comment has no ending ]--",
|
||||
[SrcRange::new(start..start + 3, ctx.path).pos().into()],
|
||||
[SrcRange::new(start..start + 3, ctx.path)],
|
||||
));
|
||||
};
|
||||
ctx.set_tail(tail);
|
||||
@@ -124,7 +133,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
||||
return Err(mk_errv(
|
||||
ctx.ctx.i.i("Unclosed lambda").await,
|
||||
"Lambdae started with \\ should separate arguments from body with .",
|
||||
[SrcRange::new(start..start + 1, ctx.path).pos().into()],
|
||||
[SrcRange::new(start..start + 1, ctx.path)],
|
||||
));
|
||||
}
|
||||
arg.push(lex_once(ctx).boxed_local().await?);
|
||||
@@ -139,7 +148,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
||||
return Err(mk_errv(
|
||||
ctx.ctx.i.i("unclosed paren").await,
|
||||
format!("this {lp} has no matching {rp}"),
|
||||
[SrcRange::new(start..start + 1, ctx.path).pos().into()],
|
||||
[SrcRange::new(start..start + 1, ctx.path)],
|
||||
));
|
||||
}
|
||||
body.push(lex_once(ctx).boxed_local().await?);
|
||||
@@ -189,7 +198,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
||||
return Err(mk_errv(
|
||||
ctx.ctx.i.i("Unrecognized character").await,
|
||||
"The following syntax is meaningless.",
|
||||
[SrcRange::new(start..start + 1, ctx.path).pos().into()],
|
||||
[SrcRange::new(start..start + 1, ctx.path)],
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
@@ -7,6 +7,7 @@ pub mod execute;
|
||||
pub mod expr;
|
||||
pub mod expr_store;
|
||||
pub mod extension;
|
||||
pub mod fs;
|
||||
pub mod lex;
|
||||
pub mod parse;
|
||||
pub mod parsed;
|
||||
|
||||
@@ -1,13 +1,10 @@
|
||||
use std::cell::RefCell;
|
||||
|
||||
use futures::FutureExt;
|
||||
use futures::future::join_all;
|
||||
use hashbrown::HashMap;
|
||||
use itertools::Itertools;
|
||||
use orchid_base::error::{OrcRes, Reporter, mk_err, mk_errv};
|
||||
use orchid_base::error::{OrcRes, Reporter, mk_errv};
|
||||
use orchid_base::format::fmt;
|
||||
use orchid_base::interner::{Interner, Tok};
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::name::{Sym, VPath};
|
||||
use orchid_base::parse::{
|
||||
Comment, Import, ParseCtx, Parsed, Snippet, expect_end, line_items, parse_multiname,
|
||||
try_pop_no_fluff,
|
||||
@@ -17,7 +14,7 @@ use substack::Substack;
|
||||
|
||||
use crate::ctx::Ctx;
|
||||
use crate::expr::{Expr, ExprKind, PathSetBuilder};
|
||||
use crate::parsed::{Item, ItemKind, ParsTokTree, ParsedMember, ParsedMemberKind, ParsedModule};
|
||||
use crate::parsed::{Item, ItemKind, ParsedMember, ParsedMemberKind, ParsedModule};
|
||||
use crate::system::System;
|
||||
|
||||
type ParsSnippet<'a> = Snippet<'a, Expr, Expr>;
|
||||
@@ -27,28 +24,23 @@ pub struct HostParseCtxImpl<'a> {
|
||||
pub src: Sym,
|
||||
pub systems: &'a [System],
|
||||
pub reporter: &'a Reporter,
|
||||
pub interner: &'a Interner,
|
||||
pub consts: RefCell<HashMap<Sym, Vec<ParsTokTree>>>,
|
||||
}
|
||||
|
||||
impl ParseCtx for HostParseCtxImpl<'_> {
|
||||
fn reporter(&self) -> &Reporter { self.reporter }
|
||||
fn i(&self) -> &Interner { self.interner }
|
||||
fn i(&self) -> &Interner { &self.ctx.i }
|
||||
}
|
||||
|
||||
impl HostParseCtx for HostParseCtxImpl<'_> {
|
||||
fn ctx(&self) -> &Ctx { &self.ctx }
|
||||
fn systems(&self) -> impl Iterator<Item = &System> { self.systems.iter() }
|
||||
async fn save_const(&self, path: Substack<'_, Tok<String>>, value: Vec<ParsTokTree>) {
|
||||
let name = Sym::new(path.unreverse(), self.interner).await.unwrap();
|
||||
self.consts.borrow_mut().insert(name, value);
|
||||
}
|
||||
}
|
||||
|
||||
pub trait HostParseCtx: ParseCtx {
|
||||
#[must_use]
|
||||
fn ctx(&self) -> &Ctx;
|
||||
#[must_use]
|
||||
fn systems(&self) -> impl Iterator<Item = &System>;
|
||||
async fn save_const(&self, path: Substack<'_, Tok<String>>, value: Vec<ParsTokTree>);
|
||||
}
|
||||
|
||||
pub async fn parse_items(
|
||||
@@ -73,33 +65,10 @@ pub async fn parse_item(
|
||||
n if *n == ctx.i().i("export").await => match try_pop_no_fluff(ctx, postdisc).await? {
|
||||
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } =>
|
||||
parse_exportable_item(ctx, path, comments, true, n.clone(), tail).await,
|
||||
Parsed { output: TokTree { tok: Token::S(Paren::Round, body), .. }, tail } => {
|
||||
expect_end(ctx, tail).await?;
|
||||
let mut ok = Vec::new();
|
||||
for tt in body {
|
||||
let sr = tt.sr.clone();
|
||||
match &tt.tok {
|
||||
Token::Name(n) =>
|
||||
ok.push(Item { comments: comments.clone(), sr, kind: ItemKind::Export(n.clone()) }),
|
||||
Token::NS(..) => ctx.reporter().report(mk_err(
|
||||
ctx.i().i("Compound export").await,
|
||||
"Cannot export compound names (names containing the :: separator)",
|
||||
[sr.pos().into()],
|
||||
)),
|
||||
t => ctx.reporter().report(mk_err(
|
||||
ctx.i().i("Invalid export").await,
|
||||
format!("Invalid export target {}", fmt(t, ctx.i()).await),
|
||||
[sr.pos().into()],
|
||||
)),
|
||||
}
|
||||
}
|
||||
expect_end(ctx, tail).await?;
|
||||
Ok(ok)
|
||||
},
|
||||
Parsed { output, tail: _ } => Err(mk_errv(
|
||||
ctx.i().i("Malformed export").await,
|
||||
"`export` can either prefix other lines or list names inside ( )",
|
||||
[output.sr.pos().into()],
|
||||
[output.sr()],
|
||||
)),
|
||||
},
|
||||
n if *n == ctx.i().i("import").await => {
|
||||
@@ -115,7 +84,7 @@ pub async fn parse_item(
|
||||
Some(_) => Err(mk_errv(
|
||||
ctx.i().i("Expected a line type").await,
|
||||
"All lines must begin with a keyword",
|
||||
[item.sr().pos().into()],
|
||||
[item.sr()],
|
||||
)),
|
||||
None => unreachable!("These lines are filtered and aggregated in earlier stages"),
|
||||
}
|
||||
@@ -138,22 +107,25 @@ pub async fn parse_exportable_item<'a>(
|
||||
discr: Tok<String>,
|
||||
tail: ParsSnippet<'a>,
|
||||
) -> OrcRes<Vec<Item>> {
|
||||
let path_sym = Sym::new(path.unreverse(), ctx.i()).await.expect("Files should have a namespace");
|
||||
let kind = if discr == ctx.i().i("mod").await {
|
||||
let (name, body) = parse_module(ctx, path, tail).await?;
|
||||
ItemKind::Member(ParsedMember { name, full_name: path_sym, kind: ParsedMemberKind::Mod(body) })
|
||||
ItemKind::Member(ParsedMember { name, exported, kind: ParsedMemberKind::Mod(body) })
|
||||
} else if discr == ctx.i().i("const").await {
|
||||
let name = parse_const(ctx, tail, path.clone()).await?;
|
||||
ItemKind::Member(ParsedMember { name, full_name: path_sym, kind: ParsedMemberKind::Const })
|
||||
let (name, expr) = parse_const(ctx, tail, path.clone()).await?;
|
||||
ItemKind::Member(ParsedMember { name, exported, kind: ParsedMemberKind::ParsedConst(expr) })
|
||||
} else if let Some(sys) = ctx.systems().find(|s| s.can_parse(discr.clone())) {
|
||||
let line = sys.parse(path_sym, tail.to_vec(), exported, comments).await?;
|
||||
return parse_items(ctx, path, Snippet::new(tail.prev(), &line)).await;
|
||||
return sys
|
||||
.parse(path, tail.to_vec(), exported, comments, &mut async |stack, lines| {
|
||||
let source = Snippet::new(lines.first().unwrap(), &lines);
|
||||
parse_items(ctx, stack, source).await
|
||||
})
|
||||
.await;
|
||||
} else {
|
||||
let ext_lines = ctx.systems().flat_map(System::line_types).join(", ");
|
||||
return Err(mk_errv(
|
||||
ctx.i().i("Unrecognized line type").await,
|
||||
format!("Line types are: const, mod, macro, grammar, {ext_lines}"),
|
||||
[tail.prev().sr.pos().into()],
|
||||
[tail.prev().sr()],
|
||||
));
|
||||
};
|
||||
Ok(vec![Item { comments, sr: tail.sr(), kind }])
|
||||
@@ -170,7 +142,7 @@ pub async fn parse_module<'a>(
|
||||
return Err(mk_errv(
|
||||
ctx.i().i("Missing module name").await,
|
||||
format!("A name was expected, {} was found", fmt(output, ctx.i()).await),
|
||||
[output.sr.pos().into()],
|
||||
[output.sr()],
|
||||
));
|
||||
},
|
||||
};
|
||||
@@ -180,7 +152,7 @@ pub async fn parse_module<'a>(
|
||||
return Err(mk_errv(
|
||||
ctx.i().i("Expected module body").await,
|
||||
format!("A ( block ) was expected, {} was found", fmt(output, ctx.i()).await),
|
||||
[output.sr.pos().into()],
|
||||
[output.sr()],
|
||||
));
|
||||
};
|
||||
let path = path.push(name.clone());
|
||||
@@ -191,13 +163,13 @@ pub async fn parse_const<'a>(
|
||||
ctx: &impl HostParseCtx,
|
||||
tail: ParsSnippet<'a>,
|
||||
path: Substack<'_, Tok<String>>,
|
||||
) -> OrcRes<Tok<String>> {
|
||||
) -> OrcRes<(Tok<String>, Expr)> {
|
||||
let Parsed { output, tail } = try_pop_no_fluff(ctx, tail).await?;
|
||||
let Some(name) = output.as_name() else {
|
||||
return Err(mk_errv(
|
||||
ctx.i().i("Missing module name").await,
|
||||
format!("A name was expected, {} was found", fmt(output, ctx.i()).await),
|
||||
[output.sr.pos().into()],
|
||||
[output.sr()],
|
||||
));
|
||||
};
|
||||
let Parsed { output, tail } = try_pop_no_fluff(ctx, tail).await?;
|
||||
@@ -205,12 +177,15 @@ pub async fn parse_const<'a>(
|
||||
return Err(mk_errv(
|
||||
ctx.i().i("Missing = separator").await,
|
||||
format!("Expected = , found {}", fmt(output, ctx.i()).await),
|
||||
[output.sr.pos().into()],
|
||||
[output.sr()],
|
||||
));
|
||||
}
|
||||
try_pop_no_fluff(ctx, tail).await?;
|
||||
ctx.save_const(path, tail[..].to_vec()).await;
|
||||
Ok(name)
|
||||
// ctx.save_const(path, tail[..].to_vec()).await;
|
||||
let final_path =
|
||||
VPath::new(path.unreverse()).name_with_suffix(name.clone()).to_sym(ctx.i()).await;
|
||||
let val = parse_expr(ctx, final_path, PathSetBuilder::new(), tail).await?;
|
||||
Ok((name, val))
|
||||
}
|
||||
|
||||
pub async fn parse_expr(
|
||||
@@ -223,7 +198,7 @@ pub async fn parse_expr(
|
||||
.or_else(|| tail.iter().enumerate().rev().find(|(_, tt)| !tt.is_fluff()))
|
||||
else {
|
||||
return Err(mk_errv(ctx.i().i("Empty expression").await, "Expression ends abruptly here", [
|
||||
tail.sr().pos().into(),
|
||||
tail.sr(),
|
||||
]));
|
||||
};
|
||||
let (function, value) = tail.split_at(last_idx as u32);
|
||||
@@ -241,7 +216,7 @@ pub async fn parse_expr(
|
||||
Token::Handle(expr) => Ok(expr.clone()),
|
||||
Token::NS(n, nametail) => {
|
||||
let mut nametail = nametail;
|
||||
let mut segments = path.iter().chain([n]).cloned().collect_vec();
|
||||
let mut segments = vec![n.clone()];
|
||||
while let Token::NS(n, newtail) = &nametail.tok {
|
||||
segments.push(n.clone());
|
||||
nametail = newtail;
|
||||
@@ -250,7 +225,7 @@ pub async fn parse_expr(
|
||||
return Err(mk_errv(
|
||||
ctx.i().i("Loose namespace prefix in constant").await,
|
||||
"Namespace prefixes in constants must be followed by names",
|
||||
[pos.into()],
|
||||
[pos],
|
||||
));
|
||||
};
|
||||
segments.push(n.clone());
|
||||
@@ -261,13 +236,27 @@ pub async fn parse_expr(
|
||||
return Err(mk_errv(
|
||||
ctx.i().i("Complex lambda binding in constant").await,
|
||||
"Lambda args in constants must be identified by a single name",
|
||||
[pos.into()],
|
||||
[pos],
|
||||
));
|
||||
};
|
||||
let lambda_builder = psb.lambda(arg);
|
||||
let body = parse_expr(ctx, path.clone(), lambda_builder.stack(), tail).boxed_local().await?;
|
||||
Ok(ExprKind::Lambda(lambda_builder.collect(), body).at(pos.clone()))
|
||||
},
|
||||
_ => todo!("AAAAAA"), // TODO: todo
|
||||
Token::S(Paren::Round, body) =>
|
||||
parse_expr(ctx, path, psb, Snippet::new(head, body)).boxed_local().await,
|
||||
Token::S(..) =>
|
||||
return Err(mk_errv(
|
||||
ctx.i().i("Constants may only contain (), not [] or {}").await,
|
||||
"It seems like you are trying to call a macro. Consider a 'let' line",
|
||||
[pos],
|
||||
)),
|
||||
Token::Name(n) =>
|
||||
if psb.register_arg(n) {
|
||||
Ok(ExprKind::Arg.at(pos))
|
||||
} else {
|
||||
Ok(ExprKind::Const(Sym::new([n.clone()], ctx.i()).await.unwrap()).at(pos))
|
||||
},
|
||||
Token::NewExpr(ex) => Ok(ex.clone()),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,67 +1,25 @@
|
||||
use std::fmt::Debug;
|
||||
use std::rc::Rc;
|
||||
|
||||
use async_once_cell::OnceCell;
|
||||
use async_std::sync::{Mutex, RwLock};
|
||||
use async_stream::stream;
|
||||
use futures::FutureExt;
|
||||
use futures::future::join_all;
|
||||
use futures::{FutureExt, StreamExt};
|
||||
use hashbrown::{HashMap, HashSet};
|
||||
use hashbrown::HashSet;
|
||||
use itertools::Itertools;
|
||||
use orchid_base::error::{OrcRes, mk_errv};
|
||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::location::{Pos, SrcRange};
|
||||
use orchid_base::name::{NameLike, Sym};
|
||||
use orchid_base::location::SrcRange;
|
||||
use orchid_base::parse::{Comment, Import};
|
||||
use orchid_base::tl_cache;
|
||||
use orchid_base::tree::{TokTree, Token, TokenVariant};
|
||||
use orchid_base::tree::{TokTree, Token};
|
||||
|
||||
use crate::api;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::dealias::{ChildErrorKind, ChildResult, Tree};
|
||||
use crate::expr::{Expr, ExprParseCtx, PathSetBuilder};
|
||||
use crate::expr_store::ExprStore;
|
||||
use crate::expr::Expr;
|
||||
use crate::system::System;
|
||||
|
||||
pub type ParsTokTree = TokTree<Expr, Expr>;
|
||||
pub type ParsTok = Token<Expr, Expr>;
|
||||
|
||||
impl TokenVariant<api::ExprTicket> for Expr {
|
||||
type ToApiCtx<'a> = ExprStore;
|
||||
async fn into_api(self, ctx: &mut Self::ToApiCtx<'_>) -> api::ExprTicket {
|
||||
ctx.give_expr(self.clone());
|
||||
self.id()
|
||||
}
|
||||
type FromApiCtx<'a> = ExprStore;
|
||||
async fn from_api(
|
||||
api: &api::ExprTicket,
|
||||
ctx: &mut Self::FromApiCtx<'_>,
|
||||
_: SrcRange,
|
||||
_: &orchid_base::interner::Interner,
|
||||
) -> Self {
|
||||
let expr = ctx.get_expr(*api).expect("Dangling expr");
|
||||
ctx.take_expr(*api);
|
||||
expr
|
||||
}
|
||||
}
|
||||
|
||||
impl TokenVariant<api::Expression> for Expr {
|
||||
type FromApiCtx<'a> = ExprParseCtx;
|
||||
async fn from_api(
|
||||
api: &api::Expression,
|
||||
ctx: &mut Self::FromApiCtx<'_>,
|
||||
_: SrcRange,
|
||||
_: &orchid_base::interner::Interner,
|
||||
) -> Self {
|
||||
Expr::from_api(api, PathSetBuilder::new(), ctx).await
|
||||
}
|
||||
type ToApiCtx<'a> = ();
|
||||
async fn into_api(self, (): &mut Self::ToApiCtx<'_>) -> api::Expression {
|
||||
panic!("Failed to replace NewExpr before returning sublexer value")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Item {
|
||||
pub sr: SrcRange,
|
||||
@@ -72,10 +30,10 @@ pub struct Item {
|
||||
#[derive(Debug)]
|
||||
pub enum ItemKind {
|
||||
Member(ParsedMember),
|
||||
Export(Tok<String>),
|
||||
Import(Import),
|
||||
}
|
||||
impl ItemKind {
|
||||
#[must_use]
|
||||
pub fn at(self, sr: SrcRange) -> Item { Item { comments: vec![], sr, kind: self } }
|
||||
}
|
||||
|
||||
@@ -84,11 +42,13 @@ impl Format for Item {
|
||||
let comment_text = self.comments.iter().join("\n");
|
||||
let item_text = match &self.kind {
|
||||
ItemKind::Import(i) => format!("import {i}").into(),
|
||||
ItemKind::Export(e) => format!("export {e}").into(),
|
||||
ItemKind::Member(mem) => match &mem.kind {
|
||||
ParsedMemberKind::Const =>
|
||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("const {0}")))
|
||||
.units([mem.name.rc().into()]),
|
||||
ParsedMemberKind::ParsedConst(expr) =>
|
||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("const {0} = {1l}")))
|
||||
.units([mem.name.rc().into(), expr.print(c).await]),
|
||||
ParsedMemberKind::DeferredConst(_, sys) =>
|
||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("const {0} via {1}")))
|
||||
.units([mem.name.rc().into(), sys.print(c).await]),
|
||||
ParsedMemberKind::Mod(module) =>
|
||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("module {0} {{\n\t{1}\n}}")))
|
||||
.units([mem.name.rc().into(), module.print(c).boxed_local().await]),
|
||||
@@ -101,10 +61,11 @@ impl Format for Item {
|
||||
|
||||
pub struct ParsedMember {
|
||||
pub name: Tok<String>,
|
||||
pub full_name: Sym,
|
||||
pub exported: bool,
|
||||
pub kind: ParsedMemberKind,
|
||||
}
|
||||
impl ParsedMember {
|
||||
#[must_use]
|
||||
pub fn name(&self) -> Tok<String> { self.name.clone() }
|
||||
}
|
||||
impl Debug for ParsedMember {
|
||||
@@ -118,7 +79,8 @@ impl Debug for ParsedMember {
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum ParsedMemberKind {
|
||||
Const,
|
||||
DeferredConst(api::ParsedConstId, System),
|
||||
ParsedConst(Expr),
|
||||
Mod(ParsedModule),
|
||||
}
|
||||
|
||||
@@ -131,13 +93,13 @@ pub struct ParsedModule {
|
||||
pub items: Vec<Item>,
|
||||
}
|
||||
impl ParsedModule {
|
||||
#[must_use]
|
||||
pub fn new(items: impl IntoIterator<Item = Item>) -> Self {
|
||||
let items = items.into_iter().collect_vec();
|
||||
let exports = (items.iter())
|
||||
.filter_map(|i| match &i.kind {
|
||||
ItemKind::Export(e) => Some(e.clone()),
|
||||
_ => None,
|
||||
})
|
||||
.filter_map(|i| if let ItemKind::Member(m) = &i.kind { Some(m) } else { None })
|
||||
.filter(|m| m.exported)
|
||||
.map(|m| m.name.clone())
|
||||
.collect_vec();
|
||||
Self { exports, items }
|
||||
}
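
ParsedModule::new no longer collects separate Export items; the export list now falls out of each member's exported flag. The same filter in miniature, with stand-in types:

struct Member { name: String, exported: bool }

fn exports(members: &[Member]) -> Vec<String> {
  members.iter().filter(|m| m.exported).map(|m| m.name.clone()).collect()
}

fn main() {
  let ms = vec![
    Member { name: "add".into(), exported: true },
    Member { name: "helper".into(), exported: false },
  ];
  assert_eq!(exports(&ms), vec!["add".to_string()]);
}
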
|
||||
@@ -146,32 +108,34 @@ impl ParsedModule {
|
||||
std::mem::swap(self, &mut swap);
|
||||
*self = ParsedModule::new(swap.items.into_iter().chain(other.items))
|
||||
}
|
||||
#[must_use]
|
||||
pub fn get_imports(&self) -> impl IntoIterator<Item = &Import> {
|
||||
(self.items.iter())
|
||||
.filter_map(|it| if let ItemKind::Import(i) = &it.kind { Some(i) } else { None })
|
||||
}
|
||||
}
|
||||
impl Tree for ParsedModule {
|
||||
type Ctx = ();
|
||||
type Ctx<'a> = ();
|
||||
async fn child(
|
||||
&self,
|
||||
key: Tok<String>,
|
||||
public_only: bool,
|
||||
ctx: &mut Self::Ctx,
|
||||
(): &mut Self::Ctx<'_>,
|
||||
) -> ChildResult<'_, Self> {
|
||||
let Some(member) = (self.items.iter())
|
||||
.filter_map(|it| if let ItemKind::Member(m) = &it.kind { Some(m) } else { None })
|
||||
.find(|m| m.name == key)
|
||||
else {
|
||||
return ChildResult::Err(ChildErrorKind::Missing);
|
||||
};
|
||||
if public_only && !self.exports.contains(&key) {
|
||||
return ChildResult::Err(ChildErrorKind::Private);
|
||||
}
|
||||
match &member.kind {
|
||||
ParsedMemberKind::Const => return ChildResult::Err(ChildErrorKind::Constant),
|
||||
ParsedMemberKind::Mod(m) => return ChildResult::Value(m),
|
||||
if let Some(member) = (self.items.iter())
|
||||
.filter_map(|it| if let ItemKind::Member(m) = &it.kind { Some(m) } else { None })
|
||||
.find(|m| m.name == key)
|
||||
{
|
||||
match &member.kind {
|
||||
ParsedMemberKind::DeferredConst(..) | ParsedMemberKind::ParsedConst(_) =>
|
||||
return ChildResult::Err(ChildErrorKind::Constant),
|
||||
ParsedMemberKind::Mod(m) => return ChildResult::Ok(m),
|
||||
}
|
||||
}
|
||||
ChildResult::Err(ChildErrorKind::Missing)
|
||||
}
|
||||
fn children(&self, public_only: bool) -> HashSet<Tok<String>> {
|
||||
let mut public: HashSet<_> = self.exports.iter().cloned().collect();
|
||||
@@ -214,44 +178,6 @@ pub struct ConstPath {
|
||||
steps: Tok<Vec<Tok<String>>>,
|
||||
}
|
||||
impl ConstPath {
|
||||
#[must_use]
|
||||
pub fn to_const(steps: Tok<Vec<Tok<String>>>) -> Self { Self { steps } }
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Root {
|
||||
tree: Rc<ParsedModule>,
|
||||
consts: Rc<RwLock<HashMap<Sym, Expr>>>,
|
||||
}
|
||||
impl Root {
|
||||
pub fn new(module: ParsedModule, consts: HashMap<Sym, Expr>) -> Self {
|
||||
Self { tree: Rc::new(module), consts: Rc::new(RwLock::new(consts)) }
|
||||
}
|
||||
pub async fn get_const_value(&self, name: Sym, pos: Pos, ctx: Ctx) -> OrcRes<Expr> {
|
||||
if let Some(val) = self.consts.read().await.get(&name) {
|
||||
return Ok(val.clone());
|
||||
}
|
||||
let (cn, mp) = name.split_last();
|
||||
let module = self.tree.walk(false, mp.iter().cloned(), &mut ()).await.unwrap();
|
||||
let member = (module.items.iter())
|
||||
.filter_map(|it| if let ItemKind::Member(m) = &it.kind { Some(m) } else { None })
|
||||
.find(|m| m.name() == cn);
|
||||
match member {
|
||||
None => Err(mk_errv(
|
||||
ctx.i.i("Constant does not exist").await,
|
||||
format!("{name} does not refer to a constant"),
|
||||
[pos.clone().into()],
|
||||
)),
|
||||
Some(mem) => match &mem.kind {
|
||||
ParsedMemberKind::Mod(_) => Err(mk_errv(
|
||||
ctx.i.i("module used as constant").await,
|
||||
format!("{name} is a module, not a constant"),
|
||||
[pos.clone().into()],
|
||||
)),
|
||||
ParsedMemberKind::Const => Ok(
|
||||
(self.consts.read().await.get(&name).cloned())
|
||||
.expect("Tree says the path is correct but no value was found"),
|
||||
),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use std::cell::RefCell;
|
||||
use std::io::Write;
|
||||
use std::pin::Pin;
|
||||
|
||||
use async_process::{self, Child, ChildStdin, ChildStdout};
|
||||
@@ -43,17 +44,19 @@ pub async fn ext_command(
|
||||
}
|
||||
}));
|
||||
Ok(ExtInit {
|
||||
header,
|
||||
port: Box::new(Subprocess {
|
||||
name: header.name.clone(),
|
||||
child: RefCell::new(Some(child)),
|
||||
stdin: Some(Mutex::new(Box::pin(stdin))),
|
||||
stdout: Mutex::new(Box::pin(stdout)),
|
||||
ctx,
|
||||
}),
|
||||
header,
|
||||
})
|
||||
}
|
||||
|
||||
pub struct Subprocess {
|
||||
name: String,
|
||||
child: RefCell<Option<Child>>,
|
||||
stdin: Option<Mutex<Pin<Box<ChildStdin>>>>,
|
||||
stdout: Mutex<Pin<Box<ChildStdout>>>,
|
||||
@@ -62,11 +65,20 @@ pub struct Subprocess {
|
||||
impl Drop for Subprocess {
|
||||
fn drop(&mut self) {
|
||||
let mut child = self.child.borrow_mut().take().unwrap();
|
||||
let name = self.name.clone();
|
||||
if std::thread::panicking() {
|
||||
eprintln!("Killing extension {name}");
|
||||
// we don't really care to handle errors here
|
||||
let _: Result<_, _> = std::io::stderr().flush();
|
||||
let _: Result<_, _> = child.kill();
|
||||
return;
|
||||
}
|
||||
let stdin = self.stdin.take().unwrap();
|
||||
(self.ctx.spawn)(Box::pin(async move {
|
||||
stdin.lock().await.close().await.unwrap();
|
||||
let status = child.status().await.expect("Extension exited with error");
|
||||
assert!(status.success(), "Extension exited with error {status}");
|
||||
let status = (child.status().await)
|
||||
.unwrap_or_else(|e| panic!("{e}, extension {name} exited with error"));
|
||||
assert!(status.success(), "Extension {name} exited with error {status}");
|
||||
}))
|
||||
}
|
||||
}
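
The reworked Drop for Subprocess splits shutdown into a hard kill while unwinding and a graceful close otherwise, with the graceful path handed to the executor. A hedged, synchronous sketch of the same split using std::process (the real type wraps async_process and closes stdin first; the example assumes a Unix-style `true` binary on PATH):

use std::process::{Child, Command};

struct Subprocess { name: String, child: Option<Child> }

impl Drop for Subprocess {
  fn drop(&mut self) {
    let mut child = self.child.take().expect("dropped twice");
    if std::thread::panicking() {
      // Already unwinding: kill hard, best effort only.
      eprintln!("Killing extension {}", self.name);
      let _ = child.kill();
      return;
    }
    // Normal shutdown: wait for the extension and insist on a clean exit.
    let status = child.wait().unwrap_or_else(|e| panic!("{e}, extension {} exited with error", self.name));
    assert!(status.success(), "Extension {} exited with error {status}", self.name);
  }
}

fn main() {
  let child = Command::new("true").spawn().expect("spawn `true`");
  drop(Subprocess { name: "demo".into(), child: Some(child) });
}
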
|
||||
|
||||
@@ -3,19 +3,18 @@ use std::fmt;
|
||||
use std::future::Future;
|
||||
use std::rc::{Rc, Weak};
|
||||
|
||||
use async_stream::stream;
|
||||
use derive_destructure::destructure;
|
||||
use futures::StreamExt;
|
||||
use futures::FutureExt;
|
||||
use futures::future::join_all;
|
||||
use hashbrown::HashMap;
|
||||
use itertools::Itertools;
|
||||
use memo_map::MemoMap;
|
||||
use orchid_base::char_filter::char_filter_match;
|
||||
use orchid_base::clone;
|
||||
use orchid_base::error::{OrcErrv, OrcRes};
|
||||
use orchid_base::format::{FmtCtx, FmtUnit, Format};
|
||||
use orchid_base::interner::{Interner, Tok};
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::location::SrcRange;
|
||||
use orchid_base::name::{NameLike, Sym, VName};
|
||||
use orchid_base::parse::Comment;
|
||||
use orchid_base::reqnot::{ReqNot, Requester};
|
||||
use orchid_base::tree::ttv_from_api;
|
||||
@@ -24,19 +23,25 @@ use substack::{Stackframe, Substack};
|
||||
|
||||
use crate::api;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::expr::{Expr, ExprParseCtx};
|
||||
use crate::dealias::{absolute_path, walk};
|
||||
use crate::expr::{ExprParseCtx, ExprWillPanic};
|
||||
use crate::expr_store::ExprStore;
|
||||
use crate::extension::{Extension, WeakExtension};
|
||||
use crate::parsed::{ItemKind, ParsTokTree, ParsedModule, Root};
|
||||
use crate::tree::Module;
|
||||
use crate::fs::{DeclFS, DeclMod};
|
||||
use crate::parsed::{Item, ItemKind, ParsTokTree, ParsedMember, ParsedMemberKind, ParsedModule};
|
||||
use crate::tree::Root;
|
||||
|
||||
#[derive(destructure)]
|
||||
struct SystemInstData {
|
||||
deps: Vec<System>,
|
||||
ctx: Ctx,
|
||||
ext: Extension,
|
||||
decl_id: api::SysDeclId,
|
||||
lex_filter: api::CharFilter,
|
||||
id: api::SysId,
|
||||
line_types: Vec<Tok<String>>,
|
||||
vfs: std::collections::HashMap<api::TStr, api::EagerVfs>,
|
||||
pub(crate) const_paths: MemoMap<api::ParsedConstId, Sym>,
|
||||
}
|
||||
impl Drop for SystemInstData {
|
||||
fn drop(&mut self) { self.ext.system_drop(self.id); }
|
||||
@@ -55,15 +60,29 @@ impl fmt::Debug for SystemInstData {
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct System(Rc<SystemInstData>);
|
||||
impl System {
|
||||
#[must_use]
|
||||
pub fn id(&self) -> api::SysId { self.0.id }
|
||||
#[must_use]
|
||||
pub fn ext(&self) -> &Extension { &self.0.ext }
|
||||
#[must_use]
|
||||
pub fn ctx(&self) -> &Ctx { &self.0.ctx }
|
||||
#[must_use]
|
||||
pub fn i(&self) -> &Interner { &self.0.ctx.i }
|
||||
#[must_use]
|
||||
pub fn deps(&self) -> &[System] { &self.0.deps }
|
||||
#[must_use]
|
||||
pub(crate) fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { self.0.ext.reqnot() }
|
||||
#[must_use]
|
||||
pub async fn get_tree(&self, id: api::TreeId) -> api::MemberKind {
|
||||
self.reqnot().request(api::GetMember(self.0.id, id)).await
|
||||
}
|
||||
#[must_use]
|
||||
pub fn has_lexer(&self) -> bool { !self.0.lex_filter.0.is_empty() }
|
||||
#[must_use]
|
||||
pub async fn vfs(&self) -> DeclFS {
|
||||
DeclFS::Eager(DeclMod::from_api(&self.0.vfs, self.clone()).await)
|
||||
}
|
||||
#[must_use]
|
||||
pub fn can_lex(&self, c: char) -> bool { char_filter_match(&self.0.lex_filter, c) }
|
||||
/// Have this system lex a part of the source. It is assumed that
|
||||
/// [Self::can_lex] was called and returned true.
|
||||
@@ -76,24 +95,26 @@ impl System {
|
||||
) -> api::OrcResult<Option<api::LexedExpr>> {
|
||||
self.0.ext.lex_req(source, src, pos, self.id(), r).await
|
||||
}
|
||||
#[must_use]
|
||||
pub fn can_parse(&self, ltyp: Tok<String>) -> bool { self.0.line_types.contains(<yp) }
|
||||
pub fn line_types(&self) -> impl Iterator<Item = &Tok<String>> + '_ { self.0.line_types.iter() }
|
||||
pub async fn parse(
|
||||
&self,
|
||||
module: Sym,
|
||||
path: Substack<'_, Tok<String>>,
|
||||
line: Vec<ParsTokTree>,
|
||||
exported: bool,
|
||||
comments: Vec<Comment>,
|
||||
) -> OrcRes<Vec<ParsTokTree>> {
|
||||
callback: &mut impl AsyncFnMut(Substack<'_, Tok<String>>, Vec<ParsTokTree>) -> OrcRes<Vec<Item>>,
|
||||
) -> OrcRes<Vec<Item>> {
|
||||
let src_path = line.first().expect("cannot be empty").sr.path();
|
||||
let line = join_all(line.into_iter().map(|t| async {
|
||||
let mut expr_store = self.0.ext.exprs().clone();
|
||||
t.into_api(&mut expr_store, &mut ()).await
|
||||
t.into_api(&mut expr_store, &mut ExprWillPanic).await
|
||||
}))
|
||||
.await;
|
||||
let comments = comments.iter().map(Comment::to_api).collect_vec();
|
||||
let req = api::ParseLine {
|
||||
module: module.to_api(),
|
||||
module: self.i().i(&path.unreverse()).await.to_api(),
|
||||
src: src_path.to_api(),
|
||||
exported,
|
||||
sys: self.id(),
|
||||
@@ -101,14 +122,69 @@ impl System {
|
||||
line,
|
||||
};
|
||||
match self.reqnot().request(req).await {
|
||||
Ok(parsed) => {
|
||||
let mut pctx = ExprParseCtx { ctx: self.ctx().clone(), exprs: self.ext().exprs().clone() };
|
||||
Ok(parsed_v) => {
|
||||
let mut ext_exprs = self.ext().exprs().clone();
|
||||
Ok(ttv_from_api(parsed, &mut ext_exprs, &mut pctx, &src_path, self.i()).await)
|
||||
struct ConvCtx<'a> {
|
||||
sys: &'a System,
|
||||
src_path: &'a Sym,
|
||||
i: &'a Interner,
|
||||
ext_exprs: &'a mut ExprStore,
|
||||
pctx: &'a mut ExprParseCtx<'a>,
|
||||
}
|
||||
async fn conv(
|
||||
parsed_v: Vec<api::ParsedLine>,
|
||||
module: Substack<'_, Tok<String>>,
|
||||
callback: &'_ mut impl AsyncFnMut(
|
||||
Substack<'_, Tok<String>>,
|
||||
Vec<ParsTokTree>,
|
||||
) -> OrcRes<Vec<Item>>,
|
||||
ctx: &mut ConvCtx<'_>,
|
||||
) -> OrcRes<Vec<Item>> {
|
||||
let mut items = Vec::new();
|
||||
for parsed in parsed_v {
|
||||
let (name, exported, kind) = match parsed.kind {
|
||||
api::ParsedLineKind::Member(api::ParsedMember { name, exported, kind }) =>
|
||||
(name, exported, kind),
|
||||
api::ParsedLineKind::Recursive(rec) => {
|
||||
let tokens = ttv_from_api(rec, ctx.ext_exprs, ctx.pctx, ctx.src_path, ctx.i).await;
|
||||
items.extend(callback(module.clone(), tokens).await?);
|
||||
continue;
|
||||
},
|
||||
};
|
||||
let name = ctx.i.ex(name).await;
|
||||
let mkind = match kind {
|
||||
api::ParsedMemberKind::Module(items) => {
|
||||
let items =
|
||||
conv(items, module.push(name.clone()), callback, ctx).boxed_local().await?;
|
||||
ParsedMemberKind::Mod(ParsedModule::new(items))
|
||||
},
|
||||
api::ParsedMemberKind::Constant(cid) =>
|
||||
ParsedMemberKind::DeferredConst(cid, ctx.sys.clone()),
|
||||
};
|
||||
items.push(Item {
|
||||
comments: join_all(
|
||||
parsed.comments.iter().map(|c| Comment::from_api(c, ctx.src_path.clone(), ctx.i)),
|
||||
)
|
||||
.await,
|
||||
sr: SrcRange::from_api(&parsed.source_range, ctx.i).await,
|
||||
kind: ItemKind::Member(ParsedMember { name, exported, kind: mkind }),
|
||||
})
|
||||
}
|
||||
Ok(items)
|
||||
}
|
||||
conv(parsed_v, path, callback, &mut ConvCtx {
|
||||
i: self.i(),
|
||||
ext_exprs: &mut ext_exprs,
|
||||
pctx: &mut ExprParseCtx { ctx: self.ctx(), exprs: self.ext().exprs() },
|
||||
src_path: &src_path,
|
||||
sys: self,
|
||||
})
|
||||
.await
|
||||
},
|
||||
Err(e) => Err(OrcErrv::from_api(&e, &self.ctx().i).await),
|
||||
}
|
||||
}
|
||||
#[must_use]
|
||||
pub async fn request(&self, req: Vec<u8>) -> Vec<u8> {
|
||||
self.reqnot().request(api::SysFwded(self.id(), req)).await
|
||||
}
|
||||
@@ -118,7 +194,24 @@ impl System {
|
||||
this.ctx.owned_atoms.write().await.remove(&drop);
|
||||
}))
|
||||
}
|
||||
#[must_use]
|
||||
pub fn downgrade(&self) -> WeakSystem { WeakSystem(Rc::downgrade(&self.0)) }
|
||||
/// Implementation of [api::ResolveNames]
|
||||
pub(crate) async fn name_resolver(
|
||||
&self,
|
||||
orig: api::ParsedConstId,
|
||||
) -> impl AsyncFnMut(&[Tok<String>]) -> Option<VName> + use<> {
|
||||
let root = self.0.ctx.root.read().await.upgrade().expect("find_names when root not in context");
|
||||
let orig = self.0.const_paths.get(&orig).expect("origin for find_names invalid").clone();
|
||||
let ctx = self.0.ctx.clone();
|
||||
async move |rel| {
|
||||
let cwd = orig.split_last().1;
|
||||
let abs = absolute_path(cwd, rel, &ctx.i).await.ok()?;
|
||||
let root_data = &mut *root.0.write().await;
|
||||
let walk_ctx = &mut (ctx.clone(), &mut root_data.consts);
|
||||
walk(&root_data.root, false, abs.iter(), walk_ctx).await.is_ok().then_some(abs)
|
||||
}
|
||||
}
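
name_resolver above hands the extension a closure that joins a relative name onto the constant's enclosing module and only returns it when the walk over the root succeeds. A conceptual stand-in (synchronous, String segments, hypothetical exists predicate):

fn resolve(cwd: &[&str], rel: &[&str], exists: impl Fn(&[String]) -> bool) -> Option<Vec<String>> {
  let abs: Vec<String> = cwd.iter().chain(rel).map(|s| s.to_string()).collect();
  exists(abs.as_slice()).then_some(abs)
}

fn main() {
  // Pretend app::util::add is the only name in the tree.
  let tree = |path: &[String]| path.join("::") == "app::util::add";
  assert_eq!(
    resolve(&["app", "util"], &["add"], &tree),
    Some(vec!["app".to_string(), "util".to_string(), "add".to_string()])
  );
  assert_eq!(resolve(&["app", "util"], &["missing"], &tree), None);
}
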
|
||||
}
|
||||
impl Format for System {
|
||||
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||
@@ -130,6 +223,7 @@ impl Format for System {
|
||||
|
||||
pub struct WeakSystem(Weak<SystemInstData>);
|
||||
impl WeakSystem {
|
||||
#[must_use]
|
||||
pub fn upgrade(&self) -> Option<System> { self.0.upgrade().map(System) }
|
||||
}
|
||||
|
||||
@@ -138,56 +232,42 @@ pub struct SystemCtor {
|
||||
pub(crate) ext: WeakExtension,
|
||||
}
|
||||
impl SystemCtor {
|
||||
#[must_use]
|
||||
pub fn name(&self) -> &str { &self.decl.name }
|
||||
#[must_use]
|
||||
pub fn priority(&self) -> NotNan<f64> { self.decl.priority }
|
||||
#[must_use]
|
||||
pub fn depends(&self) -> impl ExactSizeIterator<Item = &str> {
|
||||
self.decl.depends.iter().map(|s| &**s)
|
||||
}
|
||||
#[must_use]
|
||||
pub fn id(&self) -> api::SysDeclId { self.decl.id }
|
||||
pub async fn run<'a>(
|
||||
&self,
|
||||
depends: impl IntoIterator<Item = &'a System>,
|
||||
consts: &mut HashMap<Sym, Expr>,
|
||||
) -> (Module, System) {
|
||||
let depends = depends.into_iter().map(|si| si.id()).collect_vec();
|
||||
#[must_use]
|
||||
pub async fn run(&self, deps: Vec<System>) -> (Root, System) {
|
||||
let depends = deps.iter().map(|si| si.id()).collect_vec();
|
||||
debug_assert_eq!(depends.len(), self.decl.depends.len(), "Wrong number of deps provided");
|
||||
let ext = self.ext.upgrade().expect("SystemCtor should be freed before Extension");
|
||||
let id = ext.ctx().next_sys_id();
|
||||
let sys_inst = ext.reqnot().request(api::NewSystem { depends, id, system: self.decl.id }).await;
|
||||
let data = System(Rc::new(SystemInstData {
|
||||
deps,
|
||||
decl_id: self.decl.id,
|
||||
ext: ext.clone(),
|
||||
ctx: ext.ctx().clone(),
|
||||
lex_filter: sys_inst.lex_filter,
|
||||
vfs: sys_inst.vfs,
|
||||
line_types: join_all(sys_inst.line_types.iter().map(|m| Tok::from_api(*m, &ext.ctx().i)))
|
||||
.await,
|
||||
id,
|
||||
const_paths: MemoMap::new(),
|
||||
}));
|
||||
let const_root =
|
||||
Module::from_api((sys_inst.const_root.into_iter()).map(|(k, v)| api::Member {
|
||||
name: k,
|
||||
kind: v,
|
||||
comments: vec![],
|
||||
exported: true,
|
||||
}))
|
||||
.await;
|
||||
let const_root = clone!(data, ext; stream! {
|
||||
for (k, v) in sys_inst.const_root {
|
||||
yield Member::from_api(
|
||||
,
|
||||
&mut ParsedFromApiCx {
|
||||
consts,
|
||||
path: ext.ctx().i.i(&[]).await,
|
||||
sys: &data,
|
||||
}
|
||||
).await;
|
||||
}
|
||||
})
|
||||
.map(|mem| ItemKind::Member(mem).at(Pos::None))
|
||||
.collect::<Vec<_>>()
|
||||
.await;
|
||||
let api_module_root = api::Module {
|
||||
members: (sys_inst.const_root.into_iter())
|
||||
.map(|(k, v)| api::Member { name: k, kind: v, comments: vec![], exported: true })
|
||||
.collect_vec(),
|
||||
};
|
||||
let root = Root::from_api(api_module_root, &data).await;
|
||||
ext.ctx().systems.write().await.insert(id, data.downgrade());
|
||||
let root = ParsedModule::new(const_root);
|
||||
(root, data)
|
||||
}
|
||||
}
|
||||
@@ -219,7 +299,6 @@ pub async fn init_systems(
|
||||
fn walk_deps<'a>(
|
||||
graph: &mut HashMap<&str, &'a SystemCtor>,
|
||||
list: &mut Vec<&'a SystemCtor>,
|
||||
consts: &mut HashMap<Sym, Expr>,
|
||||
chain: Stackframe<&str>,
|
||||
) -> Result<(), SysResolvErr> {
|
||||
if let Some(ctor) = graph.remove(chain.item) {
|
||||
@@ -231,22 +310,21 @@ pub async fn init_systems(
|
||||
circle.extend(Substack::Frame(chain).iter().map(|s| s.to_string()));
|
||||
return Err(SysResolvErr::Loop(circle));
|
||||
}
|
||||
walk_deps(graph, list, consts, Substack::Frame(chain).new_frame(dep))?
|
||||
walk_deps(graph, list, Substack::Frame(chain).new_frame(dep))?
|
||||
}
|
||||
list.push(ctor);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
let mut consts = HashMap::new();
|
||||
for tgt in tgts {
|
||||
walk_deps(&mut to_load, &mut to_load_ordered, &mut consts, Substack::Bottom.new_frame(tgt))?;
|
||||
walk_deps(&mut to_load, &mut to_load_ordered, Substack::Bottom.new_frame(tgt))?;
|
||||
}
|
||||
let mut systems = HashMap::<&str, System>::new();
|
||||
let mut root_mod = ParsedModule::default();
|
||||
let mut root = Root::new(exts.first().unwrap().ctx().clone());
|
||||
for ctor in to_load_ordered.iter() {
|
||||
let (sys_root, sys) = ctor.run(ctor.depends().map(|n| &systems[n]), &mut consts).await;
|
||||
let (sys_root, sys) = ctor.run(ctor.depends().map(|n| systems[n].clone()).collect()).await;
|
||||
systems.insert(ctor.name(), sys);
|
||||
root_mod.merge(sys_root);
|
||||
root = root.merge(&sys_root).await.expect("Conflicting roots");
|
||||
}
|
||||
Ok((Root::new(root_mod, consts), systems.into_values().collect_vec()))
|
||||
Ok((root, systems.into_values().collect_vec()))
|
||||
}
|
||||
|
||||
@@ -1,24 +1,142 @@
|
||||
//! This tree isn't Clone because lazy subtrees are guaranteed to only be loaded
|
||||
//! once
|
||||
use std::cell::RefCell;
|
||||
use std::rc::{Rc, Weak};
|
||||
|
||||
use async_once_cell::OnceCell;
|
||||
use futures::FutureExt;
|
||||
use async_std::sync::RwLock;
|
||||
use futures::{FutureExt, StreamExt, stream};
|
||||
use hashbrown::HashMap;
|
||||
use hashbrown::hash_map::Entry;
|
||||
use itertools::Itertools;
|
||||
use orchid_base::error::{OrcRes, Reporter};
|
||||
use orchid_api::FetchParsedConst;
|
||||
use orchid_base::clone;
|
||||
use orchid_base::error::{OrcRes, Reporter, mk_err, mk_errv};
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::location::{Pos, SrcRange};
|
||||
use orchid_base::name::{Sym, VPath};
|
||||
use orchid_base::reqnot::Requester;
|
||||
|
||||
use crate::api;
|
||||
use crate::ctx::Ctx;
|
||||
use crate::dealias::{DealiasCtx, absolute_path, resolv_glob};
|
||||
use crate::dealias::{ChildErrorKind, Tree, absolute_path, resolv_glob, walk};
|
||||
use crate::expr::{Expr, ExprParseCtx, PathSetBuilder};
|
||||
use crate::parsed::{ParsedMemberKind, ParsedModule, Tree, WalkError, WalkErrorKind};
|
||||
use crate::parsed::{ItemKind, ParsedMemberKind, ParsedModule};
|
||||
use crate::system::System;
|
||||
|
||||
pub struct Tree(Rc<RefCell<Module>>);
|
||||
pub struct RootData {
|
||||
pub root: Module,
|
||||
pub consts: HashMap<Sym, Expr>,
|
||||
pub ctx: Ctx,
|
||||
}
|
||||
#[derive(Clone)]
|
||||
pub struct Root(pub Rc<RwLock<RootData>>);
|
||||
impl Root {
|
||||
#[must_use]
|
||||
pub fn new(ctx: Ctx) -> Self {
|
||||
Root(Rc::new(RwLock::new(RootData {
|
||||
root: Module::default(),
|
||||
consts: HashMap::default(),
|
||||
ctx,
|
||||
})))
|
||||
}
|
||||
#[must_use]
|
||||
pub async fn from_api(api: api::Module, sys: &System) -> Self {
|
||||
let mut consts = HashMap::new();
|
||||
let mut tfac = TreeFromApiCtx { consts: &mut consts, path: sys.i().i(&[][..]).await, sys };
|
||||
let root = Module::from_api(api, &mut tfac).await;
|
||||
Root(Rc::new(RwLock::new(RootData { root, consts, ctx: sys.ctx().clone() })))
|
||||
}
|
||||
pub async fn merge(&self, new: &Root) -> Result<Self, MergeErr> {
|
||||
let this = self.0.read().await;
|
||||
let that = new.0.read().await;
|
||||
let mut consts =
|
||||
this.consts.iter().chain(&that.consts).map(|(k, v)| (k.clone(), v.clone())).collect();
|
||||
let root = this.root.merge(&that.root, this.ctx.clone(), &mut consts).await?;
|
||||
Ok(Self(Rc::new(RwLock::new(RootData { root, consts, ctx: this.ctx.clone() }))))
|
||||
}
|
||||
#[must_use]
|
||||
pub async fn add_parsed(&self, parsed: &ParsedModule, pars_prefix: Sym, rep: &Reporter) -> Self {
|
||||
let mut ref_this = self.0.write().await;
|
||||
let this = &mut *ref_this;
|
||||
let mut deferred_consts = HashMap::new();
|
||||
let mut tfpctx = FromParsedCtx {
|
||||
pars_root: parsed,
|
||||
deferred_consts: &mut deferred_consts,
|
||||
pars_prefix: pars_prefix.clone(),
|
||||
consts: &mut this.consts,
|
||||
root: &this.root,
|
||||
ctx: &this.ctx,
|
||||
rep,
|
||||
};
|
||||
let mut module = Module::from_parsed(parsed, pars_prefix.clone(), &mut tfpctx).await;
|
||||
for step in pars_prefix.iter().rev() {
|
||||
let kind = OnceCell::from(MemberKind::Module(module));
|
||||
let members = HashMap::from([(
|
||||
step.clone(),
|
||||
Rc::new(Member { public: true, lazy: RefCell::new(None), kind }),
|
||||
)]);
|
||||
module = Module { imports: HashMap::new(), members }
|
||||
}
|
||||
let mut consts = this.consts.clone();
|
||||
let root = (this.root.merge(&module, this.ctx.clone(), &mut consts).await)
|
||||
.expect("Merge conflict between parsed and existing module");
|
||||
let new = Root(Rc::new(RwLock::new(RootData { root, consts, ctx: this.ctx.clone() })));
|
||||
*this.ctx.root.write().await = new.downgrade();
|
||||
for (path, (sys_id, pc_id)) in deferred_consts {
|
||||
let sys = this.ctx.system_inst(sys_id).await.expect("System dropped since parsing");
|
||||
let api_expr = sys.reqnot().request(FetchParsedConst { id: pc_id, sys: sys.id() }).await;
|
||||
let mut xp_ctx = ExprParseCtx { ctx: &this.ctx, exprs: sys.ext().exprs() };
|
||||
let expr = Expr::from_api(&api_expr, PathSetBuilder::new(), &mut xp_ctx).await;
|
||||
new.0.write().await.consts.insert(path, expr);
|
||||
}
|
||||
new
|
||||
}
|
||||
pub async fn get_const_value(&self, name: Sym, pos: Pos) -> OrcRes<Expr> {
|
||||
let this = &mut *self.0.write().await;
|
||||
// shortcut for previously visited
|
||||
if let Some(val) = this.consts.get(&name) {
|
||||
return Ok(val.clone());
|
||||
}
|
||||
// load the node, then check if this "walk" call added it to the map
|
||||
let ctx = this.ctx.clone();
|
||||
let module =
|
||||
walk(&this.root, false, name.iter().cloned(), &mut (ctx.clone(), &mut this.consts)).await;
|
||||
if let Some(val) = this.consts.get(&name) {
|
||||
return Ok(val.clone());
|
||||
}
|
||||
match module {
|
||||
Ok(_) => Err(mk_errv(
|
||||
ctx.i.i("module used as constant").await,
|
||||
format!("{name} is a module, not a constant"),
|
||||
[pos],
|
||||
)),
|
||||
Err(e) => match e.kind {
|
||||
ChildErrorKind::Private => panic!("public_only is false"),
|
||||
ChildErrorKind::Constant => panic!("Tree refers to constant not in table"),
|
||||
ChildErrorKind::Missing => Err(mk_errv(
|
||||
ctx.i.i("Constant does not exist").await,
|
||||
format!("{name} does not refer to a constant"),
|
||||
[pos],
|
||||
)),
|
||||
},
|
||||
}
|
||||
}
|
||||
#[must_use]
|
||||
pub fn downgrade(&self) -> WeakRoot { WeakRoot(Rc::downgrade(&self.0)) }
|
||||
}
|
||||
|
||||
pub struct WeakTree(Weak<RefCell<Module>>);
|
||||
#[derive(Clone)]
|
||||
pub struct WeakRoot(Weak<RwLock<RootData>>);
|
||||
impl WeakRoot {
|
||||
#[must_use]
|
||||
pub fn new() -> Self { Self(Weak::new()) }
|
||||
#[must_use]
|
||||
pub fn upgrade(&self) -> Option<Root> { Some(Root(self.0.upgrade()?)) }
|
||||
}
|
||||
impl Default for WeakRoot {
|
||||
fn default() -> Self { Self::new() }
|
||||
}
|
||||
|
||||
pub struct TreeFromApiCtx<'a> {
|
||||
pub sys: &'a System,
|
||||
@@ -26,16 +144,26 @@ pub struct TreeFromApiCtx<'a> {
|
||||
pub path: Tok<Vec<Tok<String>>>,
|
||||
}
|
||||
impl<'a> TreeFromApiCtx<'a> {
|
||||
#[must_use]
|
||||
pub async fn push<'c>(&'c mut self, name: Tok<String>) -> TreeFromApiCtx<'c> {
|
||||
let path = self.sys.ctx().i.i(&self.path.iter().cloned().chain([name]).collect_vec()).await;
|
||||
TreeFromApiCtx { path, consts: &mut *self.consts, sys: self.sys }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct ResolvedImport {
|
||||
target: Sym,
|
||||
sr: SrcRange,
|
||||
}
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct Module {
|
||||
pub imports: HashMap<Tok<String>, Result<ResolvedImport, Vec<ResolvedImport>>>,
|
||||
pub members: HashMap<Tok<String>, Rc<Member>>,
|
||||
}
|
||||
impl Module {
|
||||
#[must_use]
|
||||
pub async fn from_api(api: api::Module, ctx: &mut TreeFromApiCtx<'_>) -> Self {
|
||||
let mut members = HashMap::new();
|
||||
for mem in api.members {
|
||||
@@ -44,10 +172,9 @@ impl Module {
|
||||
let name = vname.to_sym(ctx.sys.i()).await;
|
||||
let (lazy, kind) = match mem.kind {
|
||||
api::MemberKind::Lazy(id) =>
|
||||
(Some(LazyMemberHandle { id, sys: ctx.sys.clone(), path: name.clone() }), None),
|
||||
(Some(LazyMemberHandle { id, sys: ctx.sys.id(), path: name.clone() }), None),
|
||||
api::MemberKind::Const(val) => {
|
||||
let mut expr_ctx =
|
||||
ExprParseCtx { ctx: ctx.sys.ctx().clone(), exprs: ctx.sys.ext().exprs().clone() };
|
||||
let mut expr_ctx = ExprParseCtx { ctx: ctx.sys.ctx(), exprs: ctx.sys.ext().exprs() };
|
||||
let expr = Expr::from_api(&val, PathSetBuilder::new(), &mut expr_ctx).await;
|
||||
ctx.consts.insert(name.clone(), expr);
|
||||
(None, Some(MemberKind::Const))
|
||||
@@ -56,99 +183,227 @@ impl Module {
|
||||
let m = Self::from_api(m, &mut ctx.push(mem_name.clone()).await).boxed_local().await;
|
||||
(None, Some(MemberKind::Module(m)))
|
||||
},
|
||||
api::MemberKind::Import(import_path) =>
|
||||
(None, Some(MemberKind::Alias(Sym::from_api(import_path, ctx.sys.i()).await))),
|
||||
};
|
||||
members.insert(
|
||||
mem_name.clone(),
|
||||
Rc::new(Member {
|
||||
path: name.clone(),
|
||||
public: mem.exported,
|
||||
lazy: RefCell::new(lazy),
|
||||
kind: kind.map_or_else(OnceCell::new, OnceCell::from),
|
||||
}),
|
||||
);
|
||||
}
|
||||
Self { members }
|
||||
Self { members, imports: HashMap::new() }
|
||||
}
|
||||
async fn walk(&self, mut path: impl Iterator<Item = Tok<String>>) -> &Self { todo!() }
|
||||
async fn from_parsed(
|
||||
parsed: &ParsedModule,
|
||||
path: Sym,
|
||||
pars_root_path: Sym,
|
||||
pars_root: &ParsedModule,
|
||||
root: &Module,
|
||||
preload: &mut HashMap<Sym, Module>,
|
||||
ctx: &Ctx,
|
||||
rep: &Reporter,
|
||||
) -> Self {
|
||||
let mut imported_names = HashMap::new();
|
||||
for import in parsed.get_imports() {
|
||||
if let Some(n) = import.name.clone() {
|
||||
imported_names.push(n);
|
||||
continue;
|
||||
}
|
||||
// the path in a wildcard import has to be a module
|
||||
if import.path.is_empty() {
|
||||
panic!("Imported root")
|
||||
}
|
||||
let abs_path = match absolute_path(&path, &import.path) {
|
||||
Ok(p) => p,
|
||||
Err(e) => {
|
||||
rep.report(e.err_obj(&ctx.i, import.sr.pos(), &path.to_string()).await);
|
||||
continue;
|
||||
#[must_use]
|
||||
async fn from_parsed(parsed: &ParsedModule, path: Sym, ctx: &mut FromParsedCtx<'_>) -> Self {
|
||||
let imports_by_name = (parsed.get_imports().into_iter())
|
||||
.filter_map(|i| Some((i.name.clone()?, i)))
|
||||
.into_group_map();
|
||||
let mut glob_imports_by_name = HashMap::<_, Vec<_>>::new();
|
||||
for import in parsed.get_imports().into_iter().filter(|i| i.name.is_none()) {
|
||||
let pos = import.sr.pos();
|
||||
match absolute_path(&path, &import.path, &ctx.ctx.i).await {
|
||||
Err(e) => ctx.rep.report(e.err_obj(&ctx.ctx.i, pos, &import.path.to_string()).await),
|
||||
Ok(abs_path) => {
|
||||
let names_res = match abs_path.strip_prefix(&ctx.pars_prefix[..]) {
|
||||
None => {
|
||||
let mut tree_ctx = (ctx.ctx.clone(), &mut *ctx.consts);
|
||||
resolv_glob(&path, ctx.root, &abs_path, pos, &ctx.ctx.i, &mut tree_ctx).await
|
||||
},
|
||||
Some(sub_tgt) => {
|
||||
let sub_path = (path.strip_prefix(&ctx.pars_prefix[..]))
|
||||
.expect("from_parsed called with path outside pars_prefix");
|
||||
resolv_glob(sub_path, ctx.pars_root, sub_tgt, pos, &ctx.ctx.i, &mut ()).await
|
||||
},
|
||||
};
|
||||
let abs_path = abs_path.to_sym(&ctx.ctx.i).await;
|
||||
match names_res {
|
||||
Err(e) => ctx.rep.report(e),
|
||||
Ok(names) =>
|
||||
for name in names {
|
||||
match glob_imports_by_name.entry(name) {
|
||||
Entry::Occupied(mut o) => o.get_mut().push((abs_path.clone(), import.sr.clone())),
|
||||
Entry::Vacant(v) => {
|
||||
v.insert_entry(vec![(abs_path.clone(), import.sr.clone())]);
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
};
|
||||
let names = if let Some(subpath) = abs_path.strip_prefix(&pars_root_path[..]) {
|
||||
let pars_path = (path.strip_prefix(&pars_root_path[..]))
|
||||
.expect("pars path outside pars root");
|
||||
resolv_glob(&pars_path, pars_root, &subpath, import.sr.pos(), &ctx.i, rep, &mut ()).await
|
||||
} else {
|
||||
resolv_glob(&path, root, &abs_path, import.sr.pos(), &ctx.i, rep, &mut ()).await
|
||||
}
|
||||
}
|
||||
todo!()
|
||||
let mut imports = HashMap::new();
|
||||
let conflicting_imports_msg = ctx.ctx.i.i("Conflicting imports").await;
|
||||
for (key, values) in imports_by_name {
|
||||
if values.len() == 1 {
|
||||
let import = values.into_iter().next().unwrap();
|
||||
let sr = import.sr.clone();
|
||||
let abs_path_res = absolute_path(&path, &import.clone().mspath(), &ctx.ctx.i).await;
|
||||
match abs_path_res {
|
||||
Err(e) => ctx.rep.report(e.err_obj(&ctx.ctx.i, sr.pos(), &import.to_string()).await),
|
||||
Ok(abs_path) => {
|
||||
imports
|
||||
.insert(key, Ok(ResolvedImport { target: abs_path.to_sym(&ctx.ctx.i).await, sr }));
|
||||
},
|
||||
}
|
||||
} else {
|
||||
for item in values {
|
||||
ctx.rep.report(mk_err(
|
||||
conflicting_imports_msg.clone(),
|
||||
format!("{key} is imported multiple times from different modules"),
|
||||
[item.sr.pos().into()],
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
for (key, values) in glob_imports_by_name {
|
||||
if !imports.contains_key(&key) {
|
||||
let i = &ctx.ctx.i;
|
||||
let values = stream::iter(values)
|
||||
.then(|(n, sr)| {
|
||||
clone!(key; async move {
|
||||
ResolvedImport { target: n.to_vname().suffix([key.clone()]).to_sym(i).await, sr }
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.await;
|
||||
imports.insert(key, if values.len() == 1 { Ok(values[0].clone()) } else { Err(values) });
|
||||
}
|
||||
}
|
||||
let self_referential_msg = ctx.ctx.i.i("Self-referential import").await;
|
||||
for (key, value) in imports.iter() {
|
||||
let Ok(import) = value else { continue };
|
||||
if import.target.strip_prefix(&path[..]).is_some_and(|t| t.starts_with(&[key.clone()])) {
|
||||
ctx.rep.report(mk_err(
|
||||
self_referential_msg.clone(),
|
||||
format!("import {} points to itself or a path within itself", &import.target),
|
||||
[import.sr.pos().into()],
|
||||
));
|
||||
}
|
||||
}
|
||||
let mut members = HashMap::new();
|
||||
for item in &parsed.items {
|
||||
match &item.kind {
|
||||
ItemKind::Member(mem) => {
|
||||
let path = path.to_vname().suffix([mem.name.clone()]).to_sym(&ctx.ctx.i).await;
|
||||
let kind = OnceCell::from(MemberKind::from_parsed(&mem.kind, path.clone(), ctx).await);
|
||||
members.insert(
|
||||
mem.name.clone(),
|
||||
Rc::new(Member { kind, lazy: RefCell::default(), public: mem.exported }),
|
||||
);
|
||||
},
|
||||
ItemKind::Import(_) => (),
|
||||
}
|
||||
}
|
||||
Module { imports, members }
|
||||
}
|
||||
pub async fn merge(
|
||||
&self,
|
||||
other: &Module,
|
||||
ctx: Ctx,
|
||||
consts: &mut HashMap<Sym, Expr>,
|
||||
) -> Result<Module, MergeErr> {
|
||||
if !self.imports.is_empty() || !other.imports.is_empty() {
|
||||
return Err(MergeErr { path: VPath::new([]), kind: MergeErrKind::Imports });
|
||||
}
|
||||
let mut members = HashMap::new();
|
||||
for (key, mem) in &other.members {
|
||||
let Some(own) = self.members.get(key) else {
|
||||
members.insert(key.clone(), mem.clone());
|
||||
continue;
|
||||
};
|
||||
if own.public != mem.public {
|
||||
return Err(MergeErr { path: VPath::new([]), kind: MergeErrKind::Visibility });
|
||||
}
|
||||
match (own.kind(ctx.clone(), consts).await, mem.kind(ctx.clone(), consts).await) {
|
||||
(MemberKind::Module(own_sub), MemberKind::Module(sub)) => {
|
||||
match own_sub.merge(sub, ctx.clone(), consts).boxed_local().await {
|
||||
Ok(module) => {
|
||||
members.insert(
|
||||
key.clone(),
|
||||
Rc::new(Member {
|
||||
lazy: RefCell::new(None),
|
||||
public: own.public,
|
||||
kind: OnceCell::from(MemberKind::Module(module)),
|
||||
}),
|
||||
);
|
||||
},
|
||||
Err(mut e) => {
|
||||
e.path = e.path.prefix([key.clone()]);
|
||||
return Err(e);
|
||||
},
|
||||
}
|
||||
},
|
||||
_ => return Err(MergeErr { path: VPath::new([key.clone()]), kind: MergeErrKind::Const }),
|
||||
}
|
||||
}
|
||||
for (key, mem) in &self.members {
|
||||
if let Entry::Vacant(slot) = members.entry(key.clone()) {
|
||||
slot.insert(mem.clone());
|
||||
}
|
||||
}
|
||||
Ok(Module { imports: HashMap::new(), members })
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct MergeErr {
|
||||
pub path: VPath,
|
||||
pub kind: MergeErrKind,
|
||||
}
|
||||
#[derive(Debug)]
|
||||
pub enum MergeErrKind {
|
||||
Imports,
|
||||
Visibility,
|
||||
Const,
|
||||
}
|
||||
|
||||
pub struct FromParsedCtx<'a> {
|
||||
pars_prefix: Sym,
|
||||
pars_root: &'a ParsedModule,
|
||||
root: &'a Module,
|
||||
consts: &'a mut HashMap<Sym, Expr>,
|
||||
rep: &'a Reporter,
|
||||
ctx: &'a Ctx,
|
||||
deferred_consts: &'a mut HashMap<Sym, (api::SysId, api::ParsedConstId)>,
|
||||
}
|
||||
|
||||
impl Tree for Module {
|
||||
type Ctx = HashMap<Sym, Expr>;
|
||||
async fn walk<I: IntoIterator<Item = Tok<String>>>(
|
||||
type Ctx<'a> = (Ctx, &'a mut HashMap<Sym, Expr>);
|
||||
async fn child(
|
||||
&self,
|
||||
key: Tok<String>,
|
||||
public_only: bool,
|
||||
path: I,
|
||||
ctx: &'_ mut Self::Ctx,
|
||||
) -> Result<&Self, crate::parsed::WalkError> {
|
||||
let mut cur = self;
|
||||
for (pos, step) in path.into_iter().enumerate() {
|
||||
let Some(member) = self.members.get(&step) else {
|
||||
return Err(WalkError{ pos, kind: WalkErrorKind::Missing })
|
||||
};
|
||||
if public_only && !member.public {
|
||||
return Err(WalkError { pos, kind: WalkErrorKind::Private })
|
||||
}
|
||||
match &member.kind {
|
||||
MemberKind::Module(m) => cur = m,
|
||||
MemberKind::Alias()
|
||||
}
|
||||
(ctx, consts): &mut Self::Ctx<'_>,
|
||||
) -> crate::dealias::ChildResult<'_, Self> {
|
||||
let Some(member) = self.members.get(&key) else {
|
||||
return Err(ChildErrorKind::Missing);
|
||||
};
|
||||
if public_only && !member.public {
|
||||
return Err(ChildErrorKind::Private);
|
||||
}
|
||||
match &member.kind(ctx.clone(), consts).await {
|
||||
MemberKind::Module(m) => Ok(m),
|
||||
MemberKind::Const => Err(ChildErrorKind::Constant),
|
||||
}
|
||||
}
|
||||
fn children(&self, public_only: bool) -> hashbrown::HashSet<Tok<String>> {
|
||||
self.members.iter().filter(|(_, v)| !public_only || v.public).map(|(k, _)| k.clone()).collect()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Member {
|
||||
pub public: bool,
|
||||
pub path: Sym,
|
||||
pub lazy: RefCell<Option<LazyMemberHandle>>,
|
||||
pub kind: OnceCell<MemberKind>,
|
||||
}
|
||||
impl Member {
|
||||
pub async fn kind_mut(&mut self, consts: &mut HashMap<Sym, Expr>) -> &mut MemberKind {
|
||||
self.kind(consts).await;
|
||||
self.kind.get_mut().expect("The above line should have initialized it")
|
||||
}
|
||||
pub async fn kind(&self, consts: &mut HashMap<Sym, Expr>) -> &MemberKind {
|
||||
#[must_use]
|
||||
pub async fn kind<'a>(&'a self, ctx: Ctx, consts: &mut HashMap<Sym, Expr>) -> &'a MemberKind {
|
||||
(self.kind.get_or_init(async {
|
||||
let handle = self.lazy.borrow_mut().take().expect("If kind is uninit, lazy must be Some");
|
||||
handle.run(consts).await
|
||||
handle.run(ctx, consts).await
|
||||
}))
|
||||
.await
|
||||
}
|
||||
@@ -157,65 +412,48 @@ impl Member {
|
||||
pub enum MemberKind {
|
||||
Const,
|
||||
Module(Module),
|
||||
/// This must be pointing at the final value, not a second alias.
|
||||
Alias(Sym),
|
||||
}
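// For illustration only (not part of this commit, paths made up): if `a::b`
// re-exports `x::y`, and `x::y` in turn re-exports `z::w`, then the member for
// `a::b` must be stored as `MemberKind::Alias` of `z::w` directly, so lookups
// never have to chase a chain of aliases.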
|
||||
impl MemberKind {
|
||||
async fn from_parsed(parsed: &ParsedMemberKind, root: &ParsedModule) -> Self {
|
||||
#[must_use]
|
||||
async fn from_parsed(parsed: &ParsedMemberKind, path: Sym, ctx: &mut FromParsedCtx<'_>) -> Self {
|
||||
match parsed {
|
||||
ParsedMemberKind::Const => MemberKind::Const,
|
||||
ParsedMemberKind::Mod(m) => MemberKind::Module(Module::from_parsed(m, root).await),
|
||||
ParsedMemberKind::ParsedConst(expr) => {
|
||||
ctx.consts.insert(path, expr.clone());
|
||||
MemberKind::Const
|
||||
},
|
||||
ParsedMemberKind::DeferredConst(id, sys) => {
|
||||
ctx.deferred_consts.insert(path, (sys.id(), *id));
|
||||
MemberKind::Const
|
||||
},
|
||||
ParsedMemberKind::Mod(m) =>
|
||||
MemberKind::Module(Module::from_parsed(m, path, ctx).boxed_local().await),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct LazyMemberHandle {
|
||||
id: api::TreeId,
|
||||
sys: System,
|
||||
sys: api::SysId,
|
||||
path: Sym,
|
||||
}
|
||||
impl LazyMemberHandle {
|
||||
pub async fn run(self, consts: &mut HashMap<Sym, Expr>) -> MemberKind {
|
||||
match self.sys.get_tree(self.id).await {
|
||||
#[must_use]
|
||||
pub async fn run(self, ctx: Ctx, consts: &mut HashMap<Sym, Expr>) -> MemberKind {
|
||||
let sys = ctx.system_inst(self.sys).await.expect("Missing system for lazy member");
|
||||
match sys.get_tree(self.id).await {
|
||||
api::MemberKind::Const(c) => {
|
||||
let mut pctx =
|
||||
ExprParseCtx { ctx: self.sys.ctx().clone(), exprs: self.sys.ext().exprs().clone() };
|
||||
let mut pctx = ExprParseCtx { ctx: &ctx, exprs: sys.ext().exprs() };
|
||||
consts.insert(self.path, Expr::from_api(&c, PathSetBuilder::new(), &mut pctx).await);
|
||||
MemberKind::Const
|
||||
},
|
||||
api::MemberKind::Module(m) => MemberKind::Module(
|
||||
Module::from_api(m, &mut TreeFromApiCtx { sys: &self.sys, consts, path: self.path.tok() })
|
||||
.await,
|
||||
Module::from_api(m, &mut TreeFromApiCtx { sys: &sys, consts, path: self.path.tok() }).await,
|
||||
),
|
||||
api::MemberKind::Lazy(id) => Self { id, ..self }.run(consts).boxed_local().await,
|
||||
api::MemberKind::Import(path) => MemberKind::Alias(Sym::from_api(path, self.sys.i()).await),
|
||||
api::MemberKind::Lazy(id) => Self { id, ..self }.run(ctx, consts).boxed_local().await,
|
||||
}
|
||||
}
|
||||
pub async fn into_member(self, public: bool, path: Sym) -> Member {
|
||||
Member { public, path, kind: OnceCell::new(), lazy: RefCell::new(Some(self)) }
|
||||
#[must_use]
|
||||
pub async fn into_member(self, public: bool) -> Member {
|
||||
Member { public, kind: OnceCell::new(), lazy: RefCell::new(Some(self)) }
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: this one should own but not execute the lazy handle.
|
||||
// Lazy handles should run
|
||||
// - in the tree converter function as needed to resolve imports
|
||||
// - in the tree itself when a constant is loaded
|
||||
// - when a different lazy subtree references them in a wildcard import and
|
||||
// forces the enumeration.
|
||||
//
|
||||
// do we actually need to allow wildcard imports in lazy trees? maybe a
|
||||
// different kind of import is sufficient. Source code never becomes a lazy
|
||||
// tree. What does?
|
||||
// - Systems subtrees rarely reference each other at all. They can't use macros
|
||||
// and they usually point to constants with an embedded expr.
|
||||
// - Compiled libraries on the long run. The code as written may reference
|
||||
// constants by indirect path. But this is actually the same as the above,
|
||||
// they also wouldn't use regular imports because they are distributed as
|
||||
// exprs.
|
||||
//
|
||||
// Everything is distributed either as source code or as exprs. Line parsers
|
||||
// also operate on tokens.
|
||||
//
|
||||
// TODO: The trees produced by systems can be safely changed
|
||||
// to the new kind of tree. This data structure does not need to support the lazy
|
||||
// handle.
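//
// A minimal sketch of that idea, purely illustrative and not part of this
// commit: the entry owns the handle but never runs it on its own; whoever
// walks the tree decides when to force it. `LazyEntry` and `force` are
// made-up names; only `LazyMemberHandle::run` above is real.
enum LazyEntry {
  Ready(MemberKind),
  Pending(LazyMemberHandle),
}
impl LazyEntry {
  // Resolve the entry on demand; `run` consumes the handle, so it fires at most once.
  async fn force(self, ctx: Ctx, consts: &mut HashMap<Sym, Expr>) -> MemberKind {
    match self {
      LazyEntry::Ready(kind) => kind,
      LazyEntry::Pending(handle) => handle.run(ctx, consts).await,
    }
  }
}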
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
mod macros;
|
||||
// mod macros;
|
||||
mod std;
|
||||
|
||||
pub use std::number::num_atom::{Float, HomoArray, Int, Num};
|
||||
|
||||
@@ -7,7 +7,9 @@ use orchid_extension::lexer::LexerObj;
|
||||
use orchid_extension::parser::ParserObj;
|
||||
use orchid_extension::system::{System, SystemCard};
|
||||
use orchid_extension::system_ctor::SystemCtor;
|
||||
use orchid_extension::tree::GenItem;
|
||||
use orchid_extension::tree::GenMember;
|
||||
|
||||
use crate::macros::mactree_lexer::MacTreeLexer;
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct MacroSystem;
|
||||
@@ -26,7 +28,7 @@ impl SystemCard for MacroSystem {
|
||||
impl System for MacroSystem {
|
||||
async fn request(_: ExtReq<'_>, req: Self::Req) -> Receipt<'_> { match req {} }
|
||||
fn vfs() -> orchid_extension::fs::DeclFs { DeclFs::Mod(&[]) }
|
||||
fn lexers() -> Vec<LexerObj> { vec![] }
|
||||
fn lexers() -> Vec<LexerObj> { vec![&MacTreeLexer] }
|
||||
fn parsers() -> Vec<ParserObj> { vec![] }
|
||||
fn env() -> Vec<GenItem> { vec![] }
|
||||
fn env() -> Vec<GenMember> { vec![] }
|
||||
}
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
use std::borrow::Cow;
|
||||
use std::fmt::Display;
|
||||
use std::rc::Rc;
|
||||
|
||||
use futures::future::join_all;
|
||||
use orchid_api::Paren;
|
||||
use orchid_base::error::OrcErrv;
|
||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::tl_cache;
|
||||
use orchid_base::tree::Ph;
|
||||
use orchid_extension::atom::{Atomic, MethodSetBuilder};
|
||||
use orchid_extension::atom::Atomic;
|
||||
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
|
||||
use orchid_extension::expr::Expr;
|
||||
|
||||
@@ -62,7 +62,7 @@ impl Format for MacTok {
|
||||
},
|
||||
[mtreev_fmt(body, c).await],
|
||||
),
|
||||
Self::Slot => format!("SLOT").into(),
|
||||
Self::Slot => "SLOT".into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -73,3 +73,26 @@ pub async fn mtreev_fmt<'b>(
|
||||
) -> FmtUnit {
|
||||
FmtUnit::sequence(" ", None, join_all(v.into_iter().map(|t| t.print(c))).await)
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
||||
pub struct Ph {
|
||||
pub name: Tok<String>,
|
||||
pub kind: PhKind,
|
||||
}
|
||||
impl Display for Ph {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self.kind {
|
||||
PhKind::Scalar => write!(f, "${}", self.name),
|
||||
PhKind::Vector { at_least_one: false, priority: 0 } => write!(f, "..${}", self.name),
|
||||
PhKind::Vector { at_least_one: true, priority: 0 } => write!(f, "...${}", self.name),
|
||||
PhKind::Vector { at_least_one: false, priority } => write!(f, "..${}:{priority}", self.name),
|
||||
PhKind::Vector { at_least_one: true, priority } => write!(f, "...${}:{priority}", self.name),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
|
||||
pub enum PhKind {
|
||||
Scalar,
|
||||
Vector { at_least_one: bool, priority: u8 },
|
||||
}
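// For illustration (derived from the Display impl above): a Scalar placeholder
// named `x` prints as `$x`; a Vector one prints as `..$x`, or `...$x` when
// at_least_one is set, with any nonzero priority appended as in `..$x:2`.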
|
||||
|
||||
@@ -3,13 +3,9 @@ use std::rc::Rc;
|
||||
|
||||
use futures::FutureExt;
|
||||
use orchid_base::error::{OrcRes, mk_errv};
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::parse::name_start;
|
||||
use orchid_base::tokens::PARENS;
|
||||
use orchid_extension::atom::AtomicFeatures;
|
||||
use orchid_extension::gen_expr::atom;
|
||||
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
|
||||
use orchid_extension::tree::{GenTok, GenTokTree};
|
||||
use orchid_extension::tree::{GenTok, GenTokTree, x_tok};
|
||||
|
||||
use crate::macros::mactree::{MacTok, MacTree};
|
||||
|
||||
@@ -17,15 +13,14 @@ use crate::macros::mactree::{MacTok, MacTree};
|
||||
pub struct MacTreeLexer;
|
||||
impl Lexer for MacTreeLexer {
|
||||
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['\''..='\''];
|
||||
async fn lex<'a>(tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> {
|
||||
async fn lex<'a>(tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
|
||||
let Some(tail2) = tail.strip_prefix('\'') else {
|
||||
return Err(err_not_applicable(ctx.i).await.into());
|
||||
return Err(err_not_applicable(ctx.i()).await.into());
|
||||
};
|
||||
let tail3 = tail2.trim_start();
|
||||
return match mac_tree(tail3, ctx).await {
|
||||
Ok((tail4, mactree)) =>
|
||||
Ok((tail4, GenTok::X(mactree.factory()).at(ctx.pos(tail)..ctx.pos(tail4)))),
|
||||
Err(e) => Ok((tail2, GenTok::Bottom(e).at(ctx.tok_ran(1, tail2)))),
|
||||
Ok((tail4, mactree)) => Ok((tail4, x_tok(mactree).at(ctx.pos_tt(tail, tail4)))),
|
||||
Err(e) => Ok((tail2, GenTok::Bottom(e).at(ctx.pos_lt(1, tail2)))),
|
||||
};
|
||||
async fn mac_tree<'a>(tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, MacTree)> {
|
||||
for (lp, rp, paren) in PARENS {
|
||||
@@ -35,14 +30,14 @@ impl Lexer for MacTreeLexer {
|
||||
let tail2 = body_tail.trim();
|
||||
if let Some(tail3) = tail2.strip_prefix(*rp) {
|
||||
break Ok((tail3, MacTree {
|
||||
pos: Pos::Range(ctx.pos(tail)..ctx.pos(tail3)),
|
||||
pos: ctx.pos_tt(tail, tail3).pos(),
|
||||
tok: Rc::new(MacTok::S(*paren, items)),
|
||||
}));
|
||||
} else if tail2.is_empty() {
|
||||
return Err(mk_errv(
|
||||
ctx.i.i("Unclosed block").await,
|
||||
ctx.i().i("Unclosed block").await,
|
||||
format!("Expected closing {rp}"),
|
||||
[Pos::Range(ctx.tok_ran(1, tail)).into()],
|
||||
[ctx.pos_lt(1, tail)],
|
||||
));
|
||||
}
|
||||
let (new_tail, new_item) = mac_tree(tail2, ctx).boxed_local().await?;
|
||||
@@ -53,11 +48,13 @@ impl Lexer for MacTreeLexer {
|
||||
const INTERPOL: &[&str] = &["$", "..$"];
|
||||
for pref in INTERPOL {
|
||||
let Some(code) = tail.strip_prefix(pref) else { continue };
|
||||
todo!("Register parameter, and push this onto the argument stack held in the atom")
|
||||
}
|
||||
todo!("recursive lexer call");
|
||||
return Err(mk_errv(
|
||||
ctx.i.i("Expected token after '").await,
|
||||
ctx.i().i("Expected token after '").await,
|
||||
format!("Expected a token after ', found {tail:?}"),
|
||||
[Pos::Range(ctx.tok_ran(1, tail)).into()],
|
||||
[ctx.pos_lt(1, tail)],
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
use itertools::Itertools;
|
||||
use orchid_api::PhKind;
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::side::Side;
|
||||
use orchid_base::tree::Ph;
|
||||
|
||||
use super::shared::{AnyMatcher, ScalMatcher, VecMatcher};
|
||||
use super::vec_attrs::vec_attrs;
|
||||
use crate::macros::mactree::{Ph, PhKind};
|
||||
use crate::macros::{MacTok, MacTree};
|
||||
|
||||
pub type MaxVecSplit<'a> = (&'a [MacTree], (Tok<String>, u8, bool), &'a [MacTree]);
|
||||
@@ -108,24 +107,22 @@ fn mk_scalar(pattern: &MacTree) -> ScalMatcher {
|
||||
mod test {
|
||||
use std::rc::Rc;
|
||||
|
||||
use orchid_api::PhKind;
|
||||
use orchid_base::interner::Interner;
|
||||
use orchid_base::location::SrcRange;
|
||||
use orchid_base::sym;
|
||||
use orchid_base::tokens::Paren;
|
||||
use orchid_base::tree::Ph;
|
||||
use test_executors::spin_on;
|
||||
|
||||
use super::mk_any;
|
||||
use crate::macros::mactree::{Ph, PhKind};
|
||||
use crate::macros::{MacTok, MacTree};
|
||||
|
||||
#[test]
|
||||
fn test_scan() {
|
||||
spin_on(async {
|
||||
let i = Interner::new_master();
|
||||
let ex = |tok: MacTok| async {
|
||||
MacTree { tok: Rc::new(tok), pos: SrcRange::mock(&i).await.pos() }
|
||||
};
|
||||
let ex =
|
||||
|tok: MacTok| async { MacTree { tok: Rc::new(tok), pos: SrcRange::mock(&i).await.pos() } };
|
||||
let pattern = vec![
|
||||
ex(MacTok::Ph(Ph {
|
||||
kind: PhKind::Vector { priority: 0, at_least_one: false },
|
||||
|
||||
@@ -2,11 +2,9 @@ use std::fmt;
|
||||
use std::rc::Rc;
|
||||
|
||||
use itertools::Itertools;
|
||||
use orchid_api::PhKind;
|
||||
use orchid_base::interner::Interner;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::tree::Ph;
|
||||
|
||||
use super::any_match::any_match;
|
||||
use super::build::{mk_any, mk_vec};
|
||||
@@ -14,6 +12,7 @@ use super::shared::{AnyMatcher, VecMatcher};
|
||||
use super::state::{MatchState, StateEntry};
|
||||
use super::vec_attrs::vec_attrs;
|
||||
use super::vec_match::vec_match;
|
||||
use crate::macros::mactree::{Ph, PhKind};
|
||||
use crate::macros::{MacTok, MacTree};
|
||||
|
||||
pub fn first_is_vec(pattern: &[MacTree]) -> bool { vec_attrs(pattern.first().unwrap()).is_some() }
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use orchid_api::PhKind;
|
||||
use orchid_base::interner::Tok;
|
||||
use orchid_base::tree::Ph;
|
||||
|
||||
use crate::macros::mactree::{Ph, PhKind};
|
||||
use crate::macros::{MacTok, MacTree};
|
||||
|
||||
/// Returns the name, priority and at_least_one of the expression if it is
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
use std::mem;
|
||||
use std::rc::Rc;
|
||||
|
||||
use orchid_extension::entrypoint::ExtensionData;
|
||||
use orchid_extension::tokio::tokio_main;
|
||||
use orchid_std::StdSystem;
|
||||
use tokio::task::{LocalSet, spawn_local};
|
||||
|
||||
#[tokio::main(flavor = "current_thread")]
|
||||
pub async fn main() { tokio_main(ExtensionData::new("orchid-std::main", &[&StdSystem])).await }
|
||||
|
||||
@@ -2,9 +2,7 @@ use orchid_api_derive::Coding;
|
||||
use orchid_base::error::OrcRes;
|
||||
use orchid_base::format::FmtUnit;
|
||||
use orchid_base::number::Numeric;
|
||||
use orchid_extension::atom::{
|
||||
AtomFactory, Atomic, AtomicFeatures, MethodSetBuilder, ToAtom, TypAtom,
|
||||
};
|
||||
use orchid_extension::atom::{AtomFactory, Atomic, AtomicFeatures, ToAtom, TypAtom};
|
||||
use orchid_extension::atom_thin::{ThinAtom, ThinVariant};
|
||||
use orchid_extension::conv::TryFromExpr;
|
||||
use orchid_extension::expr::Expr;
|
||||
|
||||
@@ -4,7 +4,7 @@ use orchid_base::error::OrcRes;
|
||||
use orchid_base::number::{num_to_err, parse_num};
|
||||
use orchid_extension::atom::ToAtom;
|
||||
use orchid_extension::lexer::{LexContext, Lexer};
|
||||
use orchid_extension::tree::{GenTok, GenTokTree};
|
||||
use orchid_extension::tree::{GenTokTree, x_tok};
|
||||
|
||||
use super::num_atom::Num;
|
||||
|
||||
@@ -12,13 +12,13 @@ use super::num_atom::Num;
|
||||
pub struct NumLexer;
|
||||
impl Lexer for NumLexer {
|
||||
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['0'..='9'];
|
||||
async fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> {
|
||||
async fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
|
||||
let ends_at = all.find(|c: char| !c.is_ascii_hexdigit() && !"xX._pP".contains(c));
|
||||
let (chars, tail) = all.split_at(ends_at.unwrap_or(all.len()));
|
||||
let fac = match parse_num(chars) {
|
||||
Ok(numeric) => Num(numeric).to_atom_factory(),
|
||||
Err(e) => return Err(num_to_err(e, ctx.pos(all), ctx.i).await.into()),
|
||||
Err(e) => return Err(num_to_err(e, ctx.pos(all), &ctx.src, ctx.ctx.i()).await.into()),
|
||||
};
|
||||
Ok((tail, GenTok::X(fac).at(ctx.pos(all)..ctx.pos(tail))))
|
||||
Ok((tail, x_tok(fac).at(ctx.pos_lt(chars.len(), tail))))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
use orchid_base::number::Numeric;
|
||||
use orchid_extension::tree::{GenItem, fun, prefix};
|
||||
use orchid_extension::tree::{GenMember, fun, prefix};
|
||||
use ordered_float::NotNan;
|
||||
|
||||
use super::num_atom::{Float, HomoArray, Int, Num};
|
||||
|
||||
pub fn gen_num_lib() -> Vec<GenItem> {
|
||||
pub fn gen_num_lib() -> Vec<GenMember> {
|
||||
prefix("std::number", [
|
||||
fun(true, "add", |a: Num, b: Num| async move {
|
||||
Num(match HomoArray::new([a.0, b.0]) {
|
||||
|
||||
@@ -2,12 +2,12 @@ use never::Never;
|
||||
use orchid_base::reqnot::Receipt;
|
||||
use orchid_extension::atom::{AtomDynfo, AtomicFeatures};
|
||||
use orchid_extension::entrypoint::ExtReq;
|
||||
use orchid_extension::fs::DeclFs;
|
||||
use orchid_extension::fs::DeclVmod;
|
||||
use orchid_extension::lexer::LexerObj;
|
||||
use orchid_extension::parser::ParserObj;
|
||||
use orchid_extension::system::{System, SystemCard};
|
||||
use orchid_extension::system_ctor::SystemCtor;
|
||||
use orchid_extension::tree::{GenItem, merge_trivial};
|
||||
use orchid_extension::tree::{GenMember, merge_trivial};
|
||||
|
||||
use super::number::num_lib::gen_num_lib;
|
||||
use super::string::str_atom::{IntStrAtom, StrAtom};
|
||||
@@ -36,6 +36,6 @@ impl System for StdSystem {
|
||||
async fn request(_: ExtReq<'_>, req: Self::Req) -> Receipt<'_> { match req {} }
|
||||
fn lexers() -> Vec<LexerObj> { vec![&StringLexer, &NumLexer] }
|
||||
fn parsers() -> Vec<ParserObj> { vec![] }
|
||||
fn vfs() -> DeclFs { DeclFs::Mod(&[]) }
|
||||
fn env() -> Vec<GenItem> { merge_trivial([gen_num_lib(), gen_str_lib()]) }
|
||||
fn vfs() -> DeclVmod { DeclVmod::new(&[]) }
|
||||
fn env() -> Vec<GenMember> { merge_trivial([gen_num_lib(), gen_str_lib()]) }
|
||||
}
|
||||
|
||||
@@ -82,17 +82,17 @@ impl OwnedAtom for IntStrAtom {
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct OrcString<'a> {
|
||||
kind: OrcStringKind<'a>,
|
||||
pub struct OrcString {
|
||||
kind: OrcStringKind,
|
||||
ctx: SysCtx,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum OrcStringKind<'a> {
|
||||
Val(TypAtom<'a, StrAtom>),
|
||||
Int(TypAtom<'a, IntStrAtom>),
|
||||
pub enum OrcStringKind {
|
||||
Val(TypAtom<StrAtom>),
|
||||
Int(TypAtom<IntStrAtom>),
|
||||
}
|
||||
impl OrcString<'_> {
|
||||
impl OrcString {
|
||||
pub async fn get_string(&self) -> Rc<String> {
|
||||
match &self.kind {
|
||||
OrcStringKind::Int(tok) => self.ctx.i().ex(**tok).await.rc(),
|
||||
@@ -101,8 +101,8 @@ impl OrcString<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFromExpr for OrcString<'static> {
|
||||
async fn try_from_expr(expr: Expr) -> OrcRes<OrcString<'static>> {
|
||||
impl TryFromExpr for OrcString {
|
||||
async fn try_from_expr(expr: Expr) -> OrcRes<OrcString> {
|
||||
if let Ok(v) = TypAtom::<StrAtom>::try_from_expr(expr.clone()).await {
|
||||
return Ok(OrcString { ctx: expr.ctx(), kind: OrcStringKind::Val(v) });
|
||||
}
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
use itertools::Itertools;
|
||||
use orchid_base::error::{OrcErr, OrcRes, mk_err, mk_errv};
|
||||
use orchid_base::interner::Interner;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::location::SrcRange;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::sym;
|
||||
use orchid_base::tree::wrap_tokv;
|
||||
use orchid_extension::atom::AtomicFeatures;
|
||||
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
|
||||
use orchid_extension::tree::{GenTok, GenTokTree};
|
||||
use orchid_extension::tree::{GenTokTree, ref_tok, x_tok};
|
||||
|
||||
use super::str_atom::IntStrAtom;
|
||||
|
||||
@@ -32,7 +32,7 @@ struct StringError {
|
||||
|
||||
impl StringError {
|
||||
/// Convert into project error for reporting
|
||||
pub async fn into_proj(self, pos: u32, i: &Interner) -> OrcErr {
|
||||
pub async fn into_proj(self, path: &Sym, pos: u32, i: &Interner) -> OrcErr {
|
||||
let start = pos + self.pos;
|
||||
mk_err(
|
||||
i.i("Failed to parse string").await,
|
||||
@@ -41,7 +41,7 @@ impl StringError {
|
||||
StringErrorKind::BadCodePoint => "The specified number is not a Unicode code point",
|
||||
StringErrorKind::BadEscSeq => "Unrecognized escape sequence",
|
||||
},
|
||||
[Pos::Range(start..start + 1).into()],
|
||||
[SrcRange::new(start..start + 1, path).pos().into()],
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -95,9 +95,9 @@ fn parse_string(str: &str) -> Result<String, StringError> {
|
||||
pub struct StringLexer;
|
||||
impl Lexer for StringLexer {
|
||||
const CHAR_FILTER: &'static [std::ops::RangeInclusive<char>] = &['"'..='"', '`'..='`'];
|
||||
async fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> {
|
||||
async fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
|
||||
let Some(mut tail) = all.strip_prefix('"') else {
|
||||
return Err(err_not_applicable(ctx.i).await.into());
|
||||
return Err(err_not_applicable(ctx.ctx.i()).await.into());
|
||||
};
|
||||
let mut ret = None;
|
||||
let mut cur = String::new();
|
||||
@@ -107,19 +107,19 @@ impl Lexer for StringLexer {
|
||||
tail: &str,
|
||||
err: &mut Vec<OrcErr>,
|
||||
ctx: &'a LexContext<'a>,
|
||||
) -> GenTokTree<'a> {
|
||||
) -> GenTokTree {
|
||||
let str_val_res = parse_string(&str.split_off(0));
|
||||
if let Err(e) = &str_val_res {
|
||||
err.push(e.clone().into_proj(ctx.pos(tail) - str.len() as u32, ctx.i).await);
|
||||
err.push(e.clone().into_proj(&ctx.src, ctx.pos(tail) - str.len() as u32, ctx.i()).await);
|
||||
}
|
||||
let str_val = str_val_res.unwrap_or_default();
|
||||
GenTok::X(IntStrAtom::from(ctx.i.i(&*str_val).await).factory())
|
||||
.at(ctx.tok_ran(str.len() as u32, tail)) as GenTokTree<'a>
|
||||
x_tok(IntStrAtom::from(ctx.i().i(&*str_val).await)).at(ctx.pos_lt(str.len() as u32, tail))
|
||||
as GenTokTree
|
||||
}
|
||||
let add_frag = |prev: Option<GenTokTree<'a>>, new: GenTokTree<'a>| async {
|
||||
let add_frag = |prev: Option<GenTokTree>, new: GenTokTree| async {
|
||||
let Some(prev) = prev else { return new };
|
||||
let concat_fn =
|
||||
GenTok::Reference(sym!(std::string::concat; ctx.i).await).at(prev.sr.start..prev.sr.start);
|
||||
let concat_fn = ref_tok(sym!(std::string::concat; ctx.i()).await)
|
||||
.at(SrcRange::zw(prev.sr.path(), prev.sr.start()));
|
||||
wrap_tokv([concat_fn, prev, new])
|
||||
};
|
||||
loop {
|
||||
@@ -141,9 +141,9 @@ impl Lexer for StringLexer {
|
||||
} else {
|
||||
let range = ctx.pos(all)..ctx.pos("");
|
||||
return Err(mk_errv(
|
||||
ctx.i.i("No string end").await,
|
||||
ctx.i().i("No string end").await,
|
||||
"String never terminated with \"",
|
||||
[Pos::Range(range.clone()).into()],
|
||||
[SrcRange::new(range.clone(), &ctx.src)],
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use orchid_extension::tree::{GenItem, comments, fun, prefix};
|
||||
use orchid_extension::tree::{GenMember, comments, fun, prefix};
|
||||
|
||||
use super::str_atom::StrAtom;
|
||||
use crate::OrcString;
|
||||
|
||||
pub fn gen_str_lib() -> Vec<GenItem> {
|
||||
pub fn gen_str_lib() -> Vec<GenMember> {
|
||||
prefix("std::string", [comments(
|
||||
["Concatenate two strings"],
|
||||
fun(true, "concat", |left: OrcString<'static>, right: OrcString<'static>| async move {
|
||||
fun(true, "concat", |left: OrcString, right: OrcString| async move {
|
||||
StrAtom::new(Rc::new(left.get_string().await.to_string() + &right.get_string().await))
|
||||
}),
|
||||
)])
|
||||
|
||||
orcx/src/main.rs
@@ -10,19 +10,18 @@ use async_stream::try_stream;
|
||||
use camino::Utf8PathBuf;
|
||||
use clap::{Parser, Subcommand};
|
||||
use futures::{Stream, TryStreamExt, io};
|
||||
use orchid_base::error::ReporterImpl;
|
||||
use orchid_base::error::Reporter;
|
||||
use orchid_base::format::{FmtCtxImpl, Format, take_first};
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::logging::{LogStrategy, Logger};
|
||||
use orchid_base::macros::mtreev_fmt;
|
||||
use orchid_base::parse::Snippet;
|
||||
use orchid_base::sym;
|
||||
use orchid_base::tree::ttv_fmt;
|
||||
use orchid_host::ctx::Ctx;
|
||||
use orchid_host::execute::{ExecCtx, ExecResult};
|
||||
use orchid_host::expr::mtreev_to_expr;
|
||||
use orchid_host::expr::PathSetBuilder;
|
||||
use orchid_host::extension::Extension;
|
||||
use orchid_host::lex::lex;
|
||||
use orchid_host::parse::{HostParseCtxImpl, parse_items, parse_mtree};
|
||||
use orchid_host::parse::{HostParseCtxImpl, parse_expr, parse_items};
|
||||
use orchid_host::subprocess::ext_command;
|
||||
use orchid_host::system::init_systems;
|
||||
use substack::Substack;
|
||||
@@ -83,9 +82,9 @@ fn get_all_extensions<'a>(
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> io::Result<ExitCode> {
|
||||
let code = Rc::new(RefCell::new(ExitCode::SUCCESS));
|
||||
let exit_code = Rc::new(RefCell::new(ExitCode::SUCCESS));
|
||||
let local_set = LocalSet::new();
|
||||
let code1 = code.clone();
|
||||
let exit_code1 = exit_code.clone();
|
||||
local_set.spawn_local(async move {
|
||||
let args = Args::parse();
|
||||
let ctx = &Ctx::new(Rc::new(|fut| mem::drop(spawn_local(fut))));
|
||||
@@ -103,7 +102,7 @@ async fn main() -> io::Result<ExitCode> {
|
||||
let mut file = File::open(file.as_std_path()).unwrap();
|
||||
let mut buf = String::new();
|
||||
file.read_to_string(&mut buf).unwrap();
|
||||
let lexemes = lex(i.i(&buf).await, &systems, ctx).await.unwrap();
|
||||
let lexemes = lex(i.i(&buf).await, sym!(usercode; i).await, &systems, ctx).await.unwrap();
|
||||
println!("{}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl { i }).await, true))
|
||||
},
|
||||
Commands::Parse { file } => {
|
||||
@@ -111,23 +110,28 @@ async fn main() -> io::Result<ExitCode> {
|
||||
let mut file = File::open(file.as_std_path()).unwrap();
|
||||
let mut buf = String::new();
|
||||
file.read_to_string(&mut buf).unwrap();
|
||||
let lexemes = lex(i.i(&buf).await, &systems, ctx).await.unwrap();
|
||||
let lexemes = lex(i.i(&buf).await, sym!(usercode; i).await, &systems, ctx).await.unwrap();
|
||||
let Some(first) = lexemes.first() else {
|
||||
println!("File empty!");
|
||||
return;
|
||||
};
|
||||
let reporter = ReporterImpl::new();
|
||||
let pctx = HostParseCtxImpl { reporter: &reporter, systems: &systems };
|
||||
let snip = Snippet::new(first, &lexemes, i);
|
||||
let reporter = Reporter::new();
|
||||
let pctx = HostParseCtxImpl {
|
||||
reporter: &reporter,
|
||||
systems: &systems,
|
||||
ctx: ctx.clone(),
|
||||
src: sym!(usercode; i).await,
|
||||
};
|
||||
let snip = Snippet::new(first, &lexemes);
|
||||
let ptree = parse_items(&pctx, Substack::Bottom, snip).await.unwrap();
|
||||
if let Some(errv) = reporter.errv() {
|
||||
eprintln!("{errv}");
|
||||
*code1.borrow_mut() = ExitCode::FAILURE;
|
||||
*exit_code1.borrow_mut() = ExitCode::FAILURE;
|
||||
return;
|
||||
}
|
||||
if ptree.is_empty() {
|
||||
eprintln!("File empty only after parsing, but no errors were reported");
|
||||
*code1.borrow_mut() = ExitCode::FAILURE;
|
||||
*exit_code1.borrow_mut() = ExitCode::FAILURE;
|
||||
return;
|
||||
}
|
||||
for item in ptree {
|
||||
@@ -140,23 +144,38 @@ async fn main() -> io::Result<ExitCode> {
|
||||
std::io::stdout().flush().unwrap();
|
||||
let mut prompt = String::new();
|
||||
stdin().read_line(&mut prompt).await.unwrap();
|
||||
let lexemes = lex(i.i(prompt.trim()).await, &systems, ctx).await.unwrap();
|
||||
eprintln!("lexing");
|
||||
let lexemes =
|
||||
lex(i.i(prompt.trim()).await, sym!(usercode; i).await, &systems, ctx).await.unwrap();
|
||||
eprintln!("lexed");
|
||||
if args.logs {
|
||||
println!("lexed: {}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl { i }).await, true));
|
||||
}
|
||||
let mtreev = parse_mtree(
|
||||
Snippet::new(&lexemes[0], &lexemes, i),
|
||||
Substack::Bottom.push(i.i("orcx").await).push(i.i("input").await),
|
||||
let path = sym!(usercode; i).await;
|
||||
let reporter = Reporter::new();
|
||||
let parse_ctx = HostParseCtxImpl {
|
||||
ctx: ctx.clone(),
|
||||
reporter: &reporter,
|
||||
src: path.clone(),
|
||||
systems: &systems[..],
|
||||
};
|
||||
let parse_res = parse_expr(
|
||||
&parse_ctx,
|
||||
path.clone(),
|
||||
PathSetBuilder::new(),
|
||||
Snippet::new(&lexemes[0], &lexemes),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
if args.logs {
|
||||
let fmt = mtreev_fmt(&mtreev, &FmtCtxImpl { i }).await;
|
||||
println!("parsed: {}", take_first(&fmt, true));
|
||||
}
|
||||
let expr = mtreev_to_expr(&mtreev, Substack::Bottom, ctx).await;
|
||||
let mut xctx =
|
||||
ExecCtx::new(ctx.clone(), logger.clone(), root.clone(), expr.at(Pos::None)).await;
|
||||
.await;
|
||||
eprintln!("parsed");
|
||||
let expr = match reporter.merge(parse_res) {
|
||||
Ok(expr) => expr,
|
||||
Err(e) => {
|
||||
eprintln!("{e}");
|
||||
continue;
|
||||
},
|
||||
};
|
||||
let mut xctx = ExecCtx::new(ctx.clone(), logger.clone(), root.clone(), expr).await;
|
||||
eprintln!("executed");
|
||||
xctx.set_gas(Some(1000));
|
||||
xctx.execute().await;
|
||||
match xctx.result() {
|
||||
@@ -168,18 +187,35 @@ async fn main() -> io::Result<ExitCode> {
|
||||
},
|
||||
Commands::Execute { code } => {
|
||||
let (root, systems) = init_systems(&args.system, &extensions).await.unwrap();
|
||||
let lexemes = lex(i.i(code.trim()).await, &systems, ctx).await.unwrap();
|
||||
let lexemes =
|
||||
lex(i.i(code.trim()).await, sym!(usercode; i).await, &systems, ctx).await.unwrap();
|
||||
if args.logs {
|
||||
println!("lexed: {}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl { i }).await, true));
|
||||
}
|
||||
let mtreev =
|
||||
parse_mtree(Snippet::new(&lexemes[0], &lexemes, i), Substack::Bottom).await.unwrap();
|
||||
if args.logs {
|
||||
let fmt = mtreev_fmt(&mtreev, &FmtCtxImpl { i }).await;
|
||||
println!("parsed: {}", take_first(&fmt, true));
|
||||
}
|
||||
let expr = mtreev_to_expr(&mtreev, Substack::Bottom, ctx).await;
|
||||
let mut xctx = ExecCtx::new(ctx.clone(), logger.clone(), root, expr.at(Pos::None)).await;
|
||||
let path = sym!(usercode; i).await;
|
||||
let reporter = Reporter::new();
|
||||
let parse_ctx = HostParseCtxImpl {
|
||||
ctx: ctx.clone(),
|
||||
reporter: &reporter,
|
||||
src: path.clone(),
|
||||
systems: &systems[..],
|
||||
};
|
||||
let parse_res = parse_expr(
|
||||
&parse_ctx,
|
||||
path.clone(),
|
||||
PathSetBuilder::new(),
|
||||
Snippet::new(&lexemes[0], &lexemes),
|
||||
)
|
||||
.await;
|
||||
let expr = match reporter.merge(parse_res) {
|
||||
Ok(expr) => expr,
|
||||
Err(e) => {
|
||||
eprintln!("{e}");
|
||||
*exit_code1.borrow_mut() = ExitCode::FAILURE;
|
||||
return;
|
||||
},
|
||||
};
|
||||
let mut xctx = ExecCtx::new(ctx.clone(), logger.clone(), root, expr).await;
|
||||
xctx.set_gas(Some(1000));
|
||||
xctx.execute().await;
|
||||
match xctx.result() {
|
||||
@@ -192,6 +228,6 @@ async fn main() -> io::Result<ExitCode> {
|
||||
}
|
||||
});
|
||||
local_set.await;
|
||||
let x = *code.borrow();
|
||||
let x = *exit_code.borrow();
|
||||
Ok(x)
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ max_width = 100
|
||||
error_on_unformatted = true
|
||||
format_macro_matchers = true
|
||||
newline_style = "Unix"
|
||||
normalize_comments = true
|
||||
normalize_comments = false
|
||||
wrap_comments = true
|
||||
comment_width = 80
|
||||
doc_comment_code_block_width = 80
|
||||
|
||||
@@ -9,6 +9,8 @@ use check_api_refs::check_api_refs;
|
||||
use clap::{Parser, Subcommand};
|
||||
use orcx::orcx;
|
||||
|
||||
use crate::orcx::orcxdb;
|
||||
|
||||
#[derive(Parser)]
|
||||
pub struct Args {
|
||||
#[arg(short, long)]
|
||||
@@ -22,7 +24,11 @@ pub enum Commands {
|
||||
CheckApiRefs,
|
||||
Orcx {
|
||||
#[arg(trailing_var_arg = true, num_args = 1..)]
|
||||
orcx_argv: Vec<String>,
|
||||
argv: Vec<String>,
|
||||
},
|
||||
Orcxdb {
|
||||
#[arg(trailing_var_arg = true, num_args = 1..)]
|
||||
argv: Vec<String>,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -35,7 +41,8 @@ fn main() -> io::Result<ExitCode> {
|
||||
let args = Args::parse();
|
||||
match &args.command {
|
||||
Commands::CheckApiRefs => check_api_refs(&args)?,
|
||||
Commands::Orcx { orcx_argv } => orcx(&args, orcx_argv)?,
|
||||
Commands::Orcx { argv } => orcx(&args, argv)?,
|
||||
Commands::Orcxdb { argv } => orcxdb(&args, argv)?,
|
||||
}
|
||||
Ok(if EXIT_OK.load(Ordering::Relaxed) { ExitCode::SUCCESS } else { ExitCode::FAILURE })
|
||||
}
|
||||
|
||||
@@ -14,3 +14,15 @@ pub fn orcx(_args: &Args, argv: &[String]) -> io::Result<()> {
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn orcxdb(_args: &Args, argv: &[String]) -> io::Result<()> {
|
||||
if !Command::new("cargo").args(["build", "-p", "orchid-std"]).status()?.success() {
|
||||
EXIT_OK.store(false, Ordering::Relaxed);
|
||||
return Ok(());
|
||||
}
|
||||
let path = format!("./target/debug/orcx{}", std::env::consts::EXE_SUFFIX);
|
||||
if !Command::new("lldb").args([&path]).args(argv).status()?.success() {
|
||||
EXIT_OK.store(false, Ordering::Relaxed);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||