temp commit
@@ -1,9 +1,11 @@
 [alias]
 xtask = "run --quiet --package xtask --"
 orcx = "xtask orcx"
+orcxdb = "xtask orcxdb"
 
 [env]
 CARGO_WORKSPACE_DIR = { value = "", relative = true }
 ORCHID_EXTENSIONS = "target/debug/orchid-std"
 ORCHID_DEFAULT_SYSTEMS = "orchid::std"
 ORCHID_LOG_BUFFERS = "true"
+RUSTBACKTRACE = "1"
Cargo.lock (generated, 62 lines changed)
@@ -862,6 +862,25 @@ version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
 
+[[package]]
+name = "include_dir"
+version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "923d117408f1e49d914f1a379a309cffe4f18c05cf4e3d12e613a15fc81bd0dd"
+dependencies = [
+ "include_dir_macros",
+]
+
+[[package]]
+name = "include_dir_macros"
+version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7cab85a7ed0bd5f0e76d93846e0147172bed2e2d3f859bcc33a8d9699cad1a75"
+dependencies = [
+ "proc-macro2 1.0.92",
+ "quote 1.0.38",
+]
+
 [[package]]
 name = "indexmap"
 version = "2.7.0"

@@ -872,6 +891,17 @@ dependencies = [
  "hashbrown 0.15.2",
 ]
 
+[[package]]
+name = "io-uring"
+version = "0.7.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013"
+dependencies = [
+ "bitflags",
+ "cfg-if",
+ "libc",
+]
+
 [[package]]
 name = "is_terminal_polyfill"
 version = "1.70.1"

@@ -1142,6 +1172,7 @@ dependencies = [
  "dyn-clone",
  "futures",
  "hashbrown 0.15.2",
+ "include_dir",
  "itertools",
  "konst",
  "lazy_static",

@@ -1153,8 +1184,8 @@ dependencies = [
  "orchid-api-traits",
  "orchid-base",
  "ordered-float",
- "paste",
- "some_executor 0.4.0",
+ "pastey",
+ "some_executor 0.5.1",
  "substack",
  "tokio",
  "trait-set",

@@ -1174,6 +1205,7 @@ dependencies = [
  "hashbrown 0.15.2",
  "itertools",
  "lazy_static",
+ "memo-map",
  "never",
  "num-traits",
  "orchid-api",

@@ -1274,6 +1306,12 @@ version = "1.0.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
 
+[[package]]
+name = "pastey"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b3a8cb46bdc156b1c90460339ae6bfd45ba0394e5effbaa640badb4987fdc261"
+
 [[package]]
 name = "pin-project-lite"
 version = "0.2.16"

@@ -1708,6 +1746,20 @@ dependencies = [
  "web-time",
 ]
 
+[[package]]
+name = "some_executor"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eb323f85458f395c28b5ea35a2626e9f46a35f1d730d37e6fa24dcf2848835ee"
+dependencies = [
+ "atomic-waker",
+ "priority",
+ "wasm-bindgen",
+ "wasm_thread",
+ "web-sys",
+ "web-time",
+]
+
 [[package]]
 name = "stdio-perftest"
 version = "0.1.0"

@@ -1805,17 +1857,19 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 
 [[package]]
 name = "tokio"
-version = "1.43.0"
+version = "1.46.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d61fa4ffa3de412bfea335c6ecff681de2b609ba3c77ef3e00e521813a9ed9e"
+checksum = "0cc3a2344dafbe23a245241fe8b09735b521110d30fcefbbd5feb1797ca35d17"
 dependencies = [
  "backtrace",
  "bytes",
+ "io-uring",
  "libc",
  "mio",
  "parking_lot",
  "pin-project-lite",
  "signal-hook-registry",
+ "slab",
  "socket2",
  "tokio-macros",
  "windows-sys 0.52.0",
SWAP.md (12 lines changed)
@@ -1,12 +1,10 @@
-## Async conversion
+Since the macro AST is built as a custom tokenizer inside the system, it needs access to the import set. On the other hand, import sets aren't available until after parsing. Need a way to place this order in a lexer without restricting the expression value of the lexer.
 
-consider converting extension's SysCtx to a typed context bag
+The daft option of accepting import resolution queries at runtime is available but consider better options.
 
-align fn atom and macros on both sides with new design. No global state.
-
 ## alternate extension mechanism
 
-The Macro extension needs to be in the same compilation unit as the interpreter because the interpreter needs to proactively access its datastructures (in particular, it needs to generate MacTree from TokTree)
+The STD system will have a lot of traffic for trivial operations like algebra, stream IO will likely not be fast enough. A faster system is in order.
 
 Ideally, it should reuse `orchid-extension` for message routing and decoding.
 

@@ -14,10 +12,8 @@ Ideally, it should reuse `orchid-extension` for message routing and decoding.
 
 ## Preprocessor extension
 
-Must figure out how preprocessor can both be a System and referenced in the interpreter
+The macro system will not be privileged, it can take control from the interpreter via a custom top-level "let" line type.
 
 Must actually write macro system as recorded in note
 
-At this point swappable preprocessors aren't a target because interaction with module system sounds complicated
-
 Check if any of this needs interpreter, if so, start with that
@@ -4,11 +4,21 @@ use std::num::NonZeroU64;
 use orchid_api_derive::{Coding, Hierarchy};
 use orchid_api_traits::Request;
 
-use crate::{HostExtReq, OrcResult, SysId, TStr, TStrv, TokenTree};
+use crate::{
+  Expression, ExtHostReq, HostExtReq, OrcResult, SourceRange, SysId, TStr, TStrv, TokenTree,
+};
 
 #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
 pub struct ParsId(pub NonZeroU64);
 
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]
+pub struct ParsedConstId(pub NonZeroU64);
+
+/// Parse a single source line. Return values can be modules, constants, or
+/// token sequences for re-parsing. These re-parsed token sequences can also
+/// represent raw language items such as modules, imports, and const. This is
+/// how we enable generating imports without forcing import syntax to affect API
+/// versioning
 #[derive(Clone, Debug, Coding, Hierarchy)]
 #[extends(HostExtReq)]
 pub struct ParseLine {

@@ -23,7 +33,46 @@ pub struct ParseLine {
   pub line: Vec<TokenTree>,
 }
 impl Request for ParseLine {
-  type Response = OrcResult<Vec<TokenTree>>;
+  type Response = OrcResult<Vec<ParsedLine>>;
+}
+
+#[derive(Clone, Debug, Coding)]
+pub struct ParsedLine {
+  pub comments: Vec<Comment>,
+  pub source_range: SourceRange,
+  pub kind: ParsedLineKind,
+}
+
+#[derive(Clone, Debug, Coding)]
+pub enum ParsedLineKind {
+  Recursive(Vec<TokenTree>),
+  Member(ParsedMember),
+}
+
+#[derive(Clone, Debug, Coding)]
+pub struct ParsedMember {
+  pub name: TStr,
+  pub exported: bool,
+  pub kind: ParsedMemberKind,
+}
+
+#[derive(Clone, Debug, Coding)]
+pub enum ParsedMemberKind {
+  Constant(ParsedConstId),
+  Module(Vec<ParsedLine>),
+}
+
+/// Obtain the value of a parsed constant. This is guaranteed to be called after
+/// the last [ParseLine] but before any [crate::AtomReq]. As such, in principle
+/// the macro engine could run here.
+#[derive(Clone, Debug, Coding, Hierarchy)]
+#[extends(HostExtReq)]
+pub struct FetchParsedConst {
+  pub sys: SysId,
+  pub id: ParsedConstId,
+}
+impl Request for FetchParsedConst {
+  type Response = Expression;
 }
 
 #[derive(Clone, Debug, Coding)]

@@ -31,3 +80,25 @@ pub struct Comment {
   pub text: TStr,
   pub range: Range<u32>,
 }
+
+/// Resolve relative names from the perspective of a constant. This can only be
+/// called during a [FetchParsedConst] call, but it can be called for a
+/// different [ParsedConstId] from the one in [FetchParsedConst].
+///
+/// Each name is either resolved to an alias or existing constant `Some(TStrv)`
+/// or not resolved `None`. An error is never raised, as names may have a
+/// primary meaning such as a local binding which can be overridden by specific
+/// true names such as those triggering macro keywords. It is not recommended to
+/// define syntax that can break by defining arbitrary constants, as line
+/// parsers can define new ones at will.
+#[derive(Clone, Debug, Coding, Hierarchy)]
+#[extends(ExtHostReq)]
+pub struct ResolveNames {
+  pub sys: SysId,
+  pub constid: ParsedConstId,
+  pub names: Vec<TStrv>,
+}
+
+impl Request for ResolveNames {
+  type Response = Vec<Option<TStrv>>;
+}
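Note on the new parser protocol above: a ParseLine response is now a tree of ParsedLine values; a member either carries nested lines (a module) or a ParsedConstId handle that the host redeems later through FetchParsedConst. A minimal sketch of walking that tree, using a hypothetical helper that is not part of this commit:

use crate::{ParsedConstId, ParsedLine, ParsedLineKind, ParsedMemberKind};

/// Collect every constant handle in a parsed-line tree so the host can
/// schedule a FetchParsedConst request for each one once parsing is done.
fn collect_const_ids(lines: &[ParsedLine], out: &mut Vec<ParsedConstId>) {
  for line in lines {
    match &line.kind {
      // Recursive token sequences are re-parsed later; they hold no constants yet.
      ParsedLineKind::Recursive(_tokens) => {}
      ParsedLineKind::Member(member) => match &member.kind {
        ParsedMemberKind::Constant(id) => out.push(*id),
        ParsedMemberKind::Module(sub) => collect_const_ids(sub, out),
      },
    }
  }
}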
@@ -89,6 +89,7 @@ pub enum ExtHostReq {
   ExprReq(expr::ExprReq),
   SubLex(lexer::SubLex),
   LsModule(tree::LsModule),
+  ResolveNames(parser::ResolveNames),
 }
 
 /// Notifications sent from the extension to the host

@@ -117,8 +118,9 @@ pub enum HostExtReq {
   DeserAtom(atom::DeserAtom),
   LexExpr(lexer::LexExpr),
   ParseLine(parser::ParseLine),
+  FetchParsedConst(parser::FetchParsedConst),
   GetMember(tree::GetMember),
-  VfsReq(vfs::VfsReq),
+  VfsRead(vfs::VfsRead),
 }
 
 /// Notifications sent from the host to the extension
@@ -5,7 +5,7 @@ use orchid_api_derive::{Coding, Hierarchy};
 use orchid_api_traits::Request;
 use ordered_float::NotNan;
 
-use crate::{CharFilter, ExtHostReq, HostExtNotif, HostExtReq, MemberKind, TStr};
+use crate::{CharFilter, EagerVfs, ExtHostReq, HostExtNotif, HostExtReq, MemberKind, TStr};
 
 /// ID of a system type
 #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Coding)]

@@ -63,6 +63,7 @@ pub struct NewSystemResponse {
   pub lex_filter: CharFilter,
   pub line_types: Vec<TStr>,
   pub const_root: HashMap<TStr, MemberKind>,
+  pub vfs: HashMap<TStr, EagerVfs>,
 }
 
 #[derive(Clone, Debug, Coding, Hierarchy)]
@@ -72,7 +72,6 @@ pub struct Member {
 pub enum MemberKind {
   Const(Expression),
   Module(Module),
-  Import(TStrv),
   Lazy(TreeId),
 }
 

@@ -105,10 +104,10 @@ pub enum LsModuleError {
   TreeUnavailable,
 }
 
+/// Information about a module sent from the host to an extension. By necessity,
+/// members and imports are non-overlapping.
 #[derive(Clone, Debug, Coding)]
 pub struct ModuleInfo {
-  /// If the name isn't a canonical name, returns the true name.
-  pub canonical: Option<TStrv>,
   /// List the names defined in this module
   pub members: HashMap<TStr, MemberInfo>,
 }

@@ -116,9 +115,7 @@ pub struct ModuleInfo {
 #[derive(Clone, Copy, Debug, Coding)]
 pub struct MemberInfo {
   /// true if the name is exported
-  pub exported: bool,
-  /// If it's imported, you can find the canonical name here
-  pub canonical: Option<TStrv>,
+  pub public: bool,
   /// Whether the tree item is a constant value or a module
   pub kind: MemberInfoKind,
 }
@@ -19,7 +19,7 @@ pub enum Loaded {
 }
 
 #[derive(Clone, Debug, Coding, Hierarchy)]
-#[extends(VfsReq, HostExtReq)]
+#[extends(HostExtReq)]
 pub struct VfsRead(pub SysId, pub VfsId, pub Vec<TStr>);
 impl Request for VfsRead {
   type Response = OrcResult<Loaded>;

@@ -30,18 +30,3 @@ pub enum EagerVfs {
   Lazy(VfsId),
   Eager(HashMap<TStr, EagerVfs>),
 }
-
-#[derive(Clone, Debug, Coding, Hierarchy)]
-#[extends(VfsReq, HostExtReq)]
-pub struct GetVfs(pub SysId);
-impl Request for GetVfs {
-  type Response = EagerVfs;
-}
-
-#[derive(Clone, Debug, Coding, Hierarchy)]
-#[extends(HostExtReq)]
-#[extendable]
-pub enum VfsReq {
-  GetVfs(GetVfs),
-  VfsRead(VfsRead),
-}
@@ -14,7 +14,9 @@ pub type Spawner = Rc<dyn Fn(LocalBoxFuture<'static, ()>)>;
 ///
 /// There are no ordering guarantees about these
 pub trait ExtPort {
+  #[must_use]
   fn send<'a>(&'a self, msg: &'a [u8]) -> LocalBoxFuture<'a, ()>;
+  #[must_use]
   fn recv(&self) -> LocalBoxFuture<'_, Option<Vec<u8>>>;
 }
 
@@ -39,6 +39,14 @@ impl ErrPos {
 impl From<Pos> for ErrPos {
   fn from(origin: Pos) -> Self { Self { position: origin, message: None } }
 }
+impl fmt::Display for ErrPos {
+  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+    match &self.message {
+      Some(msg) => write!(f, "{}: {}", self.position, msg),
+      None => write!(f, "{}", self.position),
+    }
+  }
+}
 
 #[derive(Clone, Debug)]
 pub struct OrcErr {

@@ -71,7 +79,7 @@ impl From<OrcErr> for Vec<OrcErr> {
 }
 impl fmt::Display for OrcErr {
   fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-    let pstr = self.positions.iter().map(|p| format!("{p:?}")).join("; ");
+    let pstr = self.positions.iter().map(|p| format!("{p}")).join("; ");
     write!(f, "{}: {} @ {}", self.description, self.message, pstr)
   }
 }

@@ -161,12 +169,12 @@ pub fn mk_err(
   }
 }
 
-pub fn mk_errv(
+pub fn mk_errv<I: Into<ErrPos>>(
   description: Tok<String>,
   message: impl AsRef<str>,
-  posv: impl IntoIterator<Item = ErrPos>,
+  posv: impl IntoIterator<Item = I>,
 ) -> OrcErrv {
-  mk_err(description, message, posv).into()
+  mk_err(description, message, posv.into_iter().map_into()).into()
 }
 
 pub struct Reporter {

@@ -177,6 +185,14 @@ impl Reporter {
   pub fn report(&self, e: impl Into<OrcErrv>) { self.errors.borrow_mut().extend(e.into()) }
   pub fn new() -> Self { Self { errors: RefCell::new(vec![]) } }
   pub fn errv(self) -> Option<OrcErrv> { OrcErrv::new(self.errors.into_inner()).ok() }
+  pub fn merge<T>(self, res: OrcRes<T>) -> OrcRes<T> {
+    match (res, self.errv()) {
+      (res, None) => res,
+      (Ok(_), Some(errv)) => Err(errv),
+      (Err(e), Some(errv)) => Err(e + errv),
+    }
+  }
+  pub fn is_empty(&self) -> bool { self.errors.borrow().is_empty() }
 }
 
 impl Default for Reporter {
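With mk_errv generic over I: Into&lt;ErrPos&gt;, call sites can pass a SrcRange or Pos directly instead of the `.pos().into()` chain, and Reporter::merge folds any collected errors into a result. A hedged sketch of a call site; the function name, message and returned value are invented for illustration:

// Hypothetical call site built only on items visible in this diff.
async fn demo_report(i: &Interner, sr: SrcRange) -> OrcRes<u32> {
  let reporter = Reporter::new();
  reporter.report(mk_errv(
    i.i("Unexpected end").await,
    "Line ends abruptly; more tokens were expected",
    // SrcRange now converts straight into ErrPos, so no `.pos().into()` is needed.
    [sr],
  ));
  // merge attaches whatever was reported to an otherwise successful result.
  reporter.merge(Ok(42))
}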
@@ -13,6 +13,7 @@ use crate::interner::Interner;
 use crate::{api, match_mapping};
 
 #[derive(Clone, Debug, Hash, PartialEq, Eq)]
+#[must_use]
 pub struct FmtUnit {
   pub subs: Vec<FmtUnit>,
   pub variants: Rc<Variants>,

@@ -209,6 +210,9 @@ impl From<Rc<String>> for Variants {
 impl From<String> for Variants {
   fn from(value: String) -> Self { Self::from(Rc::new(value)) }
 }
+impl From<&str> for Variants {
+  fn from(value: &str) -> Self { Self::from(value.to_string()) }
+}
 impl FromStr for Variants {
   type Err = Infallible;
   fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(Self::default().bounded(s)) }

@@ -265,6 +269,7 @@ impl FmtCtx for FmtCtxImpl<'_> {
 }
 
 pub trait Format {
+  #[must_use]
   fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> impl Future<Output = FmtUnit> + 'a;
 }
 impl Format for Never {
@@ -6,6 +6,7 @@ use std::ops::Range;
 
 use trait_set::trait_set;
 
+use crate::error::ErrPos;
 use crate::interner::{Interner, Tok};
 use crate::name::Sym;
 use crate::{api, match_mapping, sym};

@@ -51,6 +52,17 @@ impl Pos {
     })
   }
 }
+impl fmt::Display for Pos {
+  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+    match self {
+      Pos::Inherit => f.write_str("Unresolved inherited position"),
+      Pos::SlotTarget => f.write_str("Unresolved slot target position"),
+      Pos::None => f.write_str("N/A"),
+      Pos::Gen(g) => write!(f, "{g}"),
+      Pos::SrcRange(sr) => write!(f, "{sr}"),
+    }
+  }
+}
 
 /// Exact source code location. Includes where the code was loaded from, what
 /// the original source code was, and a byte range.

@@ -90,13 +102,24 @@ impl SrcRange {
     }
   }
   pub fn zw(path: Sym, pos: u32) -> Self { Self { path, range: pos..pos } }
-  async fn from_api(api: &api::SourceRange, i: &Interner) -> Self {
+  pub async fn from_api(api: &api::SourceRange, i: &Interner) -> Self {
     Self { path: Sym::from_api(api.path, i).await, range: api.range.clone() }
   }
-  fn to_api(&self) -> api::SourceRange {
+  pub fn to_api(&self) -> api::SourceRange {
     api::SourceRange { path: self.path.to_api(), range: self.range.clone() }
   }
 }
+impl From<SrcRange> for ErrPos {
+  fn from(val: SrcRange) -> Self { val.pos().into() }
+}
+impl fmt::Display for SrcRange {
+  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+    match self.range.len() {
+      0 => write!(f, "{}:{}", self.path(), self.range.start),
+      n => write!(f, "{}:{}+{}", self.path(), self.range.start, n),
+    }
+  }
+}
 
 /// Information about a code generator attached to the generated code
 #[derive(Clone, PartialEq, Eq, Hash)]
@@ -53,12 +53,12 @@ impl VPath {
   pub fn into_name(self) -> Result<VName, EmptyNameError> { VName::new(self.0) }
   /// Add a token to the path. Since now we know that it can't be empty, turn it
   /// into a name.
-  pub fn name_with_prefix(self, name: Tok<String>) -> VName {
+  pub fn name_with_suffix(self, name: Tok<String>) -> VName {
     VName(self.into_iter().chain([name]).collect())
   }
   /// Add a token to the beginning of the. Since now we know that it can't be
   /// empty, turn it into a name.
-  pub fn name_with_suffix(self, name: Tok<String>) -> VName {
+  pub fn name_with_prefix(self, name: Tok<String>) -> VName {
     VName([name].into_iter().chain(self).collect())
   }
 

@@ -236,6 +236,9 @@ impl Sym {
     Self::from_tok(Tok::from_api(marker, i).await).expect("Empty sequence found for serialized Sym")
   }
   pub fn to_api(&self) -> api::TStrv { self.tok().to_api() }
+  pub async fn push(&self, tok: Tok<String>, i: &Interner) -> Sym {
+    Self::new(self.0.iter().cloned().chain([tok]), i).await.unwrap()
+  }
 }
 impl fmt::Debug for Sym {
   fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Sym({self})") }
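The two VPath helpers above keep their bodies and only swap names, so each name now matches its behaviour: name_with_suffix appends the new segment, name_with_prefix prepends it. A small sketch, with the path and token assumed to come from the caller:

// e.g. path = ["std", "string"], tok = "concat" gives std::string::concat;
// name_with_prefix with the same arguments would give concat::std::string.
fn demo_suffix(path: VPath, tok: Tok<String>) -> VName {
  path.name_with_suffix(tok)
}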
@@ -15,7 +15,9 @@ use crate::name::{Sym, VName, VPath};
 use crate::tree::{ExprRepr, ExtraTok, Paren, TokTree, Token, ttv_range};
 
 pub trait ParseCtx {
+  #[must_use]
   fn i(&self) -> &Interner;
+  #[must_use]
   fn reporter(&self) -> &Reporter;
 }
 pub struct ParseCtxImpl<'a> {

@@ -171,7 +173,7 @@ pub async fn try_pop_no_fluff<'a, A: ExprRepr, X: ExtraTok>(
     None => Err(mk_errv(
       ctx.i().i("Unexpected end").await,
       "Line ends abruptly; more tokens were expected",
-      [snip.sr().pos().into()],
+      [snip.sr()],
     )),
   }
 }

@@ -184,7 +186,7 @@ pub async fn expect_end(
     Some(surplus) => Err(mk_errv(
       ctx.i().i("Extra code after end of line").await,
       "Code found after the end of the line",
-      [surplus.sr.pos().into()],
+      [surplus.sr.pos()],
     )),
     None => Ok(()),
   }

@@ -201,7 +203,7 @@ pub async fn expect_tok<'a, A: ExprRepr, X: ExtraTok>(
     t => Err(mk_errv(
       ctx.i().i("Expected specific keyword").await,
       format!("Expected {tok} but found {:?}", fmt(t, ctx.i()).await),
-      [head.sr.pos().into()],
+      [head.sr()],
     )),
   }
 }

@@ -221,7 +223,7 @@ pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
     return Err(mk_errv(
       ctx.i().i("Expected token").await,
       "Expected a name, a parenthesized list of names, or a globstar.",
-      [tail.sr().pos().into()],
+      [tail.sr().pos()],
     ));
   };
   let ret = rec(tt, ctx).await;

@@ -264,7 +266,7 @@ pub async fn parse_multiname<'a, A: ExprRepr, X: ExtraTok>(
     return Err(mk_errv(
       ctx.i().i("Unrecognized name end").await,
       format!("Names cannot end with {:?} tokens", fmt(t, ctx.i()).await),
-      [ttpos.into()],
+      [ttpos],
     ));
   },
 }
@@ -23,12 +23,14 @@ use crate::{api, match_mapping, tl_cache};
 pub trait TokenVariant<ApiEquiv: Clone + Debug + Coding>: Format + Clone + fmt::Debug {
   type FromApiCtx<'a>;
   type ToApiCtx<'a>;
+  #[must_use]
   fn from_api(
     api: &ApiEquiv,
     ctx: &mut Self::FromApiCtx<'_>,
     pos: SrcRange,
     i: &Interner,
   ) -> impl Future<Output = Self>;
+  #[must_use]
   fn into_api(self, ctx: &mut Self::ToApiCtx<'_>) -> impl Future<Output = ApiEquiv>;
 }
 impl<T: Clone + Debug + Coding> TokenVariant<T> for Never {

@@ -70,7 +72,9 @@ pub fn recur<H: ExprRepr, X: ExtraTok>(
 
 pub trait AtomRepr: Clone + Format {
   type Ctx: ?Sized;
+  #[must_use]
   fn from_api(api: &api::Atom, pos: Pos, ctx: &mut Self::Ctx) -> impl Future<Output = Self>;
+  #[must_use]
   fn to_api(&self) -> impl Future<Output = orchid_api::Atom> + '_;
 }
 impl AtomRepr for Never {

@@ -133,9 +137,9 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
       NS(n.to_api(), b => Box::new(b.into_api(hctx, xctx).boxed_local().await)),
       Bottom(e.to_api()),
       Comment(c.clone()),
-      LambdaHead(arg => ttv_into_api(arg, hctx, xctx).await),
+      LambdaHead(arg => ttv_into_api(arg, hctx, xctx).boxed_local().await),
       Name(nn.to_api()),
-      S(p, b => ttv_into_api(b, hctx, xctx).await),
+      S(p, b => ttv_into_api(b, hctx, xctx).boxed_local().await),
       Handle(hand.into_api(hctx).await),
       NewExpr(expr.into_api(xctx).await),
     });

@@ -163,6 +167,7 @@ impl<H: ExprRepr, X: ExtraTok> TokTree<H, X> {
     body.insert(0, Token::LambdaHead(arg).at(arg_range));
     Token::S(Paren::Round, body).at(s_range)
   }
+  pub fn sr(&self) -> SrcRange { self.sr.clone() }
 }
 impl<H: ExprRepr, X: ExtraTok> Format for TokTree<H, X> {
   async fn print<'a>(&'a self, c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
@@ -14,6 +14,7 @@ derive_destructure = "1.0.0"
 dyn-clone = "1.0.17"
 futures = "0.3.31"
 hashbrown = "0.15.2"
+include_dir = { version = "0.7.4", optional = true }
 itertools = "0.14.0"
 konst = "0.3.16"
 lazy_static = "1.5.0"

@@ -25,8 +26,8 @@ orchid-api-derive = { version = "0.1.0", path = "../orchid-api-derive" }
 orchid-api-traits = { version = "0.1.0", path = "../orchid-api-traits" }
 orchid-base = { version = "0.1.0", path = "../orchid-base" }
 ordered-float = "5.0.0"
-paste = "1.0.15"
-some_executor = "0.4.0"
+pastey = "0.1.0"
+some_executor = "0.5.1"
 substack = "1.1.1"
-tokio = { version = "1.43.0", optional = true }
+tokio = { version = "1.46.1", optional = true }
 trait-set = "0.3.0"
@@ -59,7 +59,7 @@ impl<'a> AtomReadGuard<'a> {
   async fn new(id: api::AtomId, ctx: &'a SysCtx) -> Self {
     let guard = ctx.get_or_default::<ObjStore>().objects.read().await;
     let valid = guard.iter().map(|i| i.0).collect_vec();
-    assert!(guard.get(&id).is_some(), "Received invalid atom ID: {:?} not in {:?}", id, valid);
+    assert!(guard.get(&id).is_some(), "Received invalid atom ID: {id:?} not in {valid:?}");
     Self { id, guard }
   }
 }
@@ -5,7 +5,7 @@ use std::num::NonZero;
 use std::pin::Pin;
 use std::rc::Rc;
 
-use async_std::channel::{self, Receiver, RecvError, Sender};
+use async_std::channel::{self, Receiver, Sender};
 use async_std::stream;
 use async_std::sync::Mutex;
 use futures::future::{LocalBoxFuture, join_all};

@@ -22,7 +22,7 @@ use orchid_base::logging::Logger;
 use orchid_base::name::Sym;
 use orchid_base::parse::{Comment, Snippet};
 use orchid_base::reqnot::{ReqNot, RequestHandle, Requester};
-use orchid_base::tree::{TokenVariant, ttv_from_api, ttv_into_api};
+use orchid_base::tree::{TokenVariant, ttv_from_api};
 use substack::Substack;
 use trait_set::trait_set;
 

@@ -32,6 +32,7 @@ use crate::atom_owned::take_atom;
 use crate::expr::{Expr, ExprHandle};
 use crate::fs::VirtFS;
 use crate::lexer::{LexContext, err_cascade, err_not_applicable};
+use crate::parser::{ParsCtx, get_const, linev_into_api};
 use crate::system::{SysCtx, atom_by_idx};
 use crate::system_ctor::{CtedObj, DynSystemCtor};
 use crate::tree::{GenTok, GenTokTree, LazyMemberFactory, TreeIntoApiCtxImpl};

@@ -47,7 +48,6 @@ impl ExtensionData {
   pub fn new(name: &'static str, systems: &'static [&'static dyn DynSystemCtor]) -> Self {
     Self { name, systems }
   }
-  // pub fn main(self) { extension_main(self) }
 }
 
 pub enum MemberRecord {

@@ -57,7 +57,6 @@ pub enum MemberRecord {
 
 pub struct SystemRecord {
   vfses: HashMap<api::VfsId, &'static dyn VirtFS>,
-  declfs: api::EagerVfs,
   lazy_members: HashMap<api::TreeId, MemberRecord>,
   ctx: SysCtx,
 }

@@ -84,19 +83,6 @@ pub async fn with_atom_record<'a, F: Future<Output = SysCtx>, T>(
   cb(atom_record, ctx, id, data).await
 }
 
-// pub fn extension_main(data: ExtensionData) {
-
-// if thread::Builder::new()
-// .name(format!("ext-main:{}", data.name))
-// .spawn(|| extension_main_logic(data))
-// .unwrap()
-// .join()
-// .is_err()
-// {
-// process::exit(-1)
-// }
-// }
-
 pub struct ExtensionOwner {
   _interner_cell: Rc<RefCell<Option<Interner>>>,
   _systems_lock: Rc<Mutex<HashMap<api::SysId, SystemRecord>>>,

@@ -109,12 +95,7 @@ impl ExtPort for ExtensionOwner {
     Box::pin(async { self.out_send.send(msg.to_vec()).boxed_local().await.unwrap() })
   }
   fn recv(&self) -> LocalBoxFuture<'_, Option<Vec<u8>>> {
-    Box::pin(async {
-      match self.out_recv.recv().await {
-        Ok(v) => Some(v),
-        Err(RecvError) => None,
-      }
-    })
+    Box::pin(async { (self.out_recv.recv().await).ok() })
   }
 }
 

@@ -141,8 +122,7 @@ pub fn extension_init(
   let get_ctx = clone!(systems_weak; move |id: api::SysId| clone!(systems_weak; async move {
     let systems =
       systems_weak.upgrade().expect("System table dropped before request processing done");
-    let x = systems.lock().await.get(&id).expect("System not found").ctx.clone();
-    x
+    systems.lock().await.get(&id).expect("System not found").ctx.clone()
   }));
   let init_ctx = {
     clone!(interner_weak, spawner, logger);

@@ -200,32 +180,25 @@ pub fn extension_init(
           .then(|mem| {
             let (req, lazy_mems) = (&hand, &lazy_mems);
             clone!(i, ctx; async move {
-              let name = i.i(&mem.name).await.to_api();
-              let value = mem.kind.into_api(&mut TreeIntoApiCtxImpl {
+              let mut tia_ctx = TreeIntoApiCtxImpl {
                 lazy_members: &mut *lazy_mems.lock().await,
                 sys: ctx,
                 basepath: &[],
                 path: Substack::Bottom,
                 req
-              })
-              .await;
-              (name, value)
+              };
+              (i.i(&mem.name).await.to_api(), mem.kind.into_api(&mut tia_ctx).await)
             })
           })
           .collect()
          .await;
-        let declfs = cted.inst().dyn_vfs().to_api_rec(&mut vfses, &i).await;
-        let record =
-          SystemRecord { declfs, vfses, ctx, lazy_members: lazy_mems.into_inner() };
+        let vfs = cted.inst().dyn_vfs().to_api_rec(&mut vfses, &i).await;
+        let record = SystemRecord { vfses, ctx, lazy_members: lazy_mems.into_inner() };
         let systems = systems_weak.upgrade().expect("System constructed during shutdown");
         systems.lock().await.insert(new_sys.id, record);
-        hand
-          .handle(&new_sys, &api::NewSystemResponse {
-            lex_filter,
-            const_root,
-            line_types: vec![],
-          })
-          .await
+        let response =
+          api::NewSystemResponse { lex_filter, const_root, line_types: vec![], vfs };
+        hand.handle(&new_sys, &response).await
       },
       api::HostExtReq::GetMember(get_tree @ api::GetMember(sys_id, tree_id)) => {
        let sys_ctx = get_ctx(sys_id).await;

@@ -248,18 +221,13 @@ pub fn extension_init(
        };
        hand.handle(&get_tree, &tree.into_api(&mut tia_ctx).await).await
      },
-      api::HostExtReq::VfsReq(api::VfsReq::GetVfs(get_vfs @ api::GetVfs(sys_id))) => {
-        let systems = systems_weak.upgrade().expect("VFS root requested during shutdown");
-        let systems_g = systems.lock().await;
-        hand.handle(&get_vfs, &systems_g[&sys_id].declfs).await
-      },
      api::HostExtReq::SysReq(api::SysReq::SysFwded(fwd)) => {
        let api::SysFwded(sys_id, payload) = fwd;
        let ctx = get_ctx(sys_id).await;
        let sys = ctx.cted().inst();
        sys.dyn_request(hand, payload).await
      },
-      api::HostExtReq::VfsReq(api::VfsReq::VfsRead(vfs_read)) => {
+      api::HostExtReq::VfsRead(vfs_read) => {
        let api::VfsRead(sys_id, vfs_id, path) = &vfs_read;
        let ctx = get_ctx(*sys_id).await;
        let systems = systems_weak.upgrade().expect("VFS requested during shutdoown");

@@ -308,13 +276,18 @@ pub fn extension_init(
        let parser =
          parsers.iter().find(|p| p.line_head() == **name).expect("No parser candidate");
        let module = Sym::from_api(*module, ctx.i()).await;
-        let o_line = match parser.parse(ctx.clone(), module, *exported, comments, tail).await
-        {
+        let pctx = ParsCtx::new(ctx.clone(), module);
+        let o_line = match parser.parse(pctx, *exported, comments, tail).await {
          Err(e) => Err(e.to_api()),
-          Ok(t) => Ok(ttv_into_api(t, &mut (), &mut (ctx.clone(), &hand)).await),
+          Ok(t) => Ok(linev_into_api(t, ctx.clone(), &hand).await),
        };
        hand.handle(&pline, &o_line).await
      },
+      api::HostExtReq::FetchParsedConst(ref fpc @ api::FetchParsedConst { id, sys }) => {
+        let ctx = get_ctx(sys).await;
+        let cnst = get_const(id, ctx.clone()).await;
+        hand.handle(fpc, &cnst.api_return(ctx, &hand).await).await
+      },
      api::HostExtReq::AtomReq(atom_req) => {
        let atom = atom_req.get_atom();
        let atom_req = atom_req.clone();
@@ -1,9 +1,11 @@
+use std::borrow::Cow;
 use std::num::NonZero;
 
 use futures::FutureExt;
 use futures::future::LocalBoxFuture;
 use hashbrown::HashMap;
 use orchid_base::interner::{Interner, Tok};
+use orchid_base::pure_seq::pushed;
 
 use crate::api;
 use crate::system::SysCtx;

@@ -16,11 +18,58 @@ pub trait VirtFS: Send + Sync + 'static {
   ) -> LocalBoxFuture<'a, api::OrcResult<api::Loaded>>;
 }
 
+#[derive(Clone)]
+pub struct DeclVmod(Cow<'static, [(&'static str, DeclFs)]>);
+impl DeclVmod {
+  pub fn new(items: &'static [(&'static str, DeclFs)]) -> DeclVmod {
+    DeclVmod(Cow::Borrowed(items))
+  }
+  pub fn entry(
+    key: &'static str,
+    items: &'static [(&'static str, DeclFs)],
+  ) -> (&'static str, DeclVmod) {
+    (key, DeclVmod(Cow::Borrowed(items)))
+  }
+  pub fn merge(&self, other: &Self) -> Result<Self, Vec<&'static str>> {
+    let mut items = Vec::new();
+    for (k, v1) in self.0.iter() {
+      items.push((*k, match other.0.iter().find(|(k2, _)| k == k2) {
+        Some((_, v2)) => v1.merge(v2).map_err(|e| pushed::<_, Vec<_>>(e, *k))?,
+        None => v1.clone(),
+      }));
+    }
+    for (k, v) in other.0.iter() {
+      if !items.iter().any(|(k2, _)| k2 == k) {
+        items.push((*k, v.clone()))
+      }
+    }
+    Ok(Self(Cow::Owned(items)))
+  }
+  pub async fn to_api_rec(
+    &self,
+    vfses: &mut HashMap<api::VfsId, &'static dyn VirtFS>,
+    i: &Interner,
+  ) -> std::collections::HashMap<api::TStr, api::EagerVfs> {
+    let mut output = std::collections::HashMap::new();
+    for (k, v) in self.0.iter() {
+      output.insert(i.i::<String>(*k).await.to_api(), v.to_api_rec(vfses, i).boxed_local().await);
+    }
+    output
+  }
+}
+
+#[derive(Clone)]
 pub enum DeclFs {
   Lazy(&'static dyn VirtFS),
-  Mod(&'static [(&'static str, DeclFs)]),
+  Mod(DeclVmod),
 }
 impl DeclFs {
+  pub fn merge(&self, other: &Self) -> Result<Self, Vec<&'static str>> {
+    match (self, other) {
+      (Self::Mod(m1), Self::Mod(m2)) => Ok(Self::Mod(m1.merge(m2)?)),
+      (..) => Err(Vec::new()),
+    }
+  }
   pub async fn to_api_rec(
     &self,
     vfses: &mut HashMap<api::VfsId, &'static dyn VirtFS>,

@@ -33,14 +82,7 @@ impl DeclFs {
         vfses.insert(id, *fs);
         api::EagerVfs::Lazy(id)
       },
-      DeclFs::Mod(children) => {
-        let mut output = std::collections::HashMap::new();
-        for (k, v) in children.iter() {
-          output
-            .insert(i.i::<String>(*k).await.to_api(), v.to_api_rec(vfses, i).boxed_local().await);
-        }
-        api::EagerVfs::Eager(output)
-      },
+      DeclFs::Mod(m) => api::EagerVfs::Eager(m.to_api_rec(vfses, i).await),
     }
   }
 }
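DeclFs::Mod now wraps the new DeclVmod, whose merge combines declared virtual module trees from several sources and whose to_api_rec flattens the tree into the EagerVfs map that NewSystemResponse carries in its new vfs field, replacing the removed GetVfs round trip. A rough sketch of declaring such a tree; PRELUDE_FS and TEXT_FS are invented placeholders for real statics implementing VirtFS:

// Hypothetical declaration; a System's `vfs()` would return this DeclVmod.
fn std_vfs() -> DeclVmod {
  static ITEMS: &[(&str, DeclFs)] = &[
    ("prelude", DeclFs::Lazy(&PRELUDE_FS)),
    ("text", DeclFs::Lazy(&TEXT_FS)),
  ];
  DeclVmod::new(ITEMS)
}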
@@ -141,7 +141,7 @@ mod expr_func_derives {
 
   macro_rules! expr_func_derive {
     ($arity: tt, $($t:ident),*) => {
-      paste::paste!{
+      pastey::paste!{
         impl<
           $($t: TryFromExpr, )*
           Out: ToExpr,
@@ -1,3 +1,4 @@
+use std::fmt;
 use std::future::Future;
 use std::ops::RangeInclusive;
 

@@ -49,10 +50,15 @@ impl<'a> LexContext<'a> {
   }
 
   pub fn pos(&self, tail: &'a str) -> u32 { (self.text.len() - tail.len()) as u32 }
-  pub fn tok_ran(&self, len: u32, tail: &'a str) -> SrcRange {
-    SrcRange::new(self.pos(tail) - len..self.pos(tail), &self.src)
+  pub fn pos_tt(&self, tail_with: &'a str, tail_without: &'a str) -> SrcRange {
+    SrcRange::new(self.pos(tail_with)..self.pos(tail_without), &self.src)
   }
 
+  pub fn pos_lt(&self, len: impl TryInto<u32, Error: fmt::Debug>, tail: &'a str) -> SrcRange {
+    SrcRange::new(self.pos(tail) - len.try_into().unwrap()..self.pos(tail), &self.src)
+  }
+
+  pub fn i(&self) -> &Interner { self.ctx.i() }
 }
 
 pub trait Lexer: Send + Sync + Sized + Default + 'static {
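The LexContext additions above compute a token's SrcRange either from the tails before and after the token (pos_tt) or from a length and the remaining tail (pos_lt). A hedged fragment of a lexer using them; the token being lexed is invented:

// Hypothetical: lex a single `;` token. `tail` is the unlexed input slice.
fn lex_semi<'a>(ctx: &LexContext<'a>, tail: &'a str) -> Option<(SrcRange, &'a str)> {
  let rest = tail.strip_prefix(';')?;
  // Range of the consumed character, from the tails with and without it.
  // Equivalently: ctx.pos_lt(1u32, rest)
  Some((ctx.pos_tt(tail, rest), rest))
}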
@@ -1,50 +1,179 @@
|
|||||||
use futures::future::LocalBoxFuture;
|
use std::marker::PhantomData;
|
||||||
|
|
||||||
|
use futures::FutureExt;
|
||||||
|
use futures::future::{LocalBoxFuture, join_all};
|
||||||
|
use itertools::Itertools;
|
||||||
|
use orchid_api::ResolveNames;
|
||||||
use orchid_base::error::OrcRes;
|
use orchid_base::error::OrcRes;
|
||||||
|
use orchid_base::id_store::IdStore;
|
||||||
|
use orchid_base::interner::Tok;
|
||||||
|
use orchid_base::location::SrcRange;
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::Sym;
|
||||||
use orchid_base::parse::{Comment, Snippet};
|
use orchid_base::parse::{Comment, Snippet};
|
||||||
|
use orchid_base::reqnot::{ReqHandlish, Requester};
|
||||||
|
use orchid_base::tree::ttv_into_api;
|
||||||
|
|
||||||
|
use crate::api;
|
||||||
use crate::expr::Expr;
|
use crate::expr::Expr;
|
||||||
use crate::gen_expr::GExpr;
|
use crate::gen_expr::GExpr;
|
||||||
use crate::system::SysCtx;
|
use crate::system::{SysCtx, SysCtxEntry};
|
||||||
use crate::tree::GenTokTree;
|
use crate::tree::GenTokTree;
|
||||||
|
|
||||||
pub type GenSnippet<'a> = Snippet<'a, Expr, GExpr>;
|
pub type GenSnippet<'a> = Snippet<'a, Expr, GExpr>;
|
||||||
|
|
||||||
pub trait Parser: Send + Sync + Sized + Default + 'static {
|
pub trait Parser: Send + Sync + Sized + Default + 'static {
|
||||||
const LINE_HEAD: &'static str;
|
const LINE_HEAD: &'static str;
|
||||||
fn parse(
|
fn parse<'a>(
|
||||||
ctx: SysCtx,
|
ctx: ParsCtx<'a>,
|
||||||
module: Sym,
|
|
||||||
exported: bool,
|
exported: bool,
|
||||||
comments: Vec<Comment>,
|
comments: Vec<Comment>,
|
||||||
line: GenSnippet<'_>,
|
line: GenSnippet<'a>,
|
||||||
) -> impl Future<Output = OrcRes<Vec<GenTokTree>>> + '_;
|
) -> impl Future<Output = OrcRes<Vec<ParsedLine>>> + 'a;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait DynParser: Send + Sync + 'static {
|
pub trait DynParser: Send + Sync + 'static {
|
||||||
fn line_head(&self) -> &'static str;
|
fn line_head(&self) -> &'static str;
|
||||||
fn parse<'a>(
|
fn parse<'a>(
|
||||||
&self,
|
&self,
|
||||||
ctx: SysCtx,
|
ctx: ParsCtx<'a>,
|
||||||
module: Sym,
|
|
||||||
exported: bool,
|
exported: bool,
|
||||||
comments: Vec<Comment>,
|
comments: Vec<Comment>,
|
||||||
line: GenSnippet<'a>,
|
line: GenSnippet<'a>,
|
||||||
) -> LocalBoxFuture<'a, OrcRes<Vec<GenTokTree>>>;
|
) -> LocalBoxFuture<'a, OrcRes<Vec<ParsedLine>>>;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: Parser> DynParser for T {
|
impl<T: Parser> DynParser for T {
|
||||||
fn line_head(&self) -> &'static str { Self::LINE_HEAD }
|
fn line_head(&self) -> &'static str { Self::LINE_HEAD }
|
||||||
fn parse<'a>(
|
fn parse<'a>(
|
||||||
&self,
|
&self,
|
||||||
ctx: SysCtx,
|
ctx: ParsCtx<'a>,
|
||||||
module: Sym,
|
|
||||||
exported: bool,
|
exported: bool,
|
||||||
comments: Vec<Comment>,
|
comments: Vec<Comment>,
|
||||||
line: GenSnippet<'a>,
|
line: GenSnippet<'a>,
|
||||||
) -> LocalBoxFuture<'a, OrcRes<Vec<GenTokTree>>> {
|
) -> LocalBoxFuture<'a, OrcRes<Vec<ParsedLine>>> {
|
||||||
Box::pin(async move { Self::parse(ctx, module, exported, comments, line).await })
|
Box::pin(async move { Self::parse(ctx, exported, comments, line).await })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub type ParserObj = &'static dyn DynParser;
|
pub type ParserObj = &'static dyn DynParser;
|
||||||
|
|
||||||
|
pub struct ParsCtx<'a> {
|
||||||
|
_parse: PhantomData<&'a mut ()>,
|
||||||
|
ctx: SysCtx,
|
||||||
|
module: Sym,
|
||||||
|
}
|
||||||
|
impl ParsCtx<'_> {
|
||||||
|
pub(crate) fn new(ctx: SysCtx, module: Sym) -> Self { Self { _parse: PhantomData, ctx, module } }
|
||||||
|
pub fn ctx(&self) -> &SysCtx { &self.ctx }
|
||||||
|
pub fn module(&self) -> Sym { self.module.clone() }
|
||||||
|
}
|
||||||
|
|
||||||
|
type BoxConstCallback = Box<dyn FnOnce(ConstCtx) -> LocalBoxFuture<'static, GExpr>>;
|
||||||
|
|
||||||
|
#[derive(Default)]
|
||||||
|
struct ParsedConstCtxEntry {
|
||||||
|
consts: IdStore<BoxConstCallback>,
|
||||||
|
}
|
||||||
|
impl SysCtxEntry for ParsedConstCtxEntry {}
|
||||||
|
|
||||||
|
pub struct ParsedLine {
|
||||||
|
pub sr: SrcRange,
|
||||||
|
pub comments: Vec<Comment>,
|
||||||
|
pub kind: ParsedLineKind,
|
||||||
|
}
|
||||||
|
impl ParsedLine {
|
||||||
|
pub async fn into_api(self, ctx: SysCtx, hand: &dyn ReqHandlish) -> api::ParsedLine {
|
||||||
|
api::ParsedLine {
|
||||||
|
comments: self.comments.into_iter().map(|c| c.to_api()).collect(),
|
||||||
|
source_range: self.sr.to_api(),
|
||||||
|
kind: match self.kind {
|
||||||
|
ParsedLineKind::Mem(mem) => api::ParsedLineKind::Member(api::ParsedMember {
|
||||||
|
name: mem.name.to_api(),
|
||||||
|
exported: mem.exported,
|
||||||
|
kind: match mem.kind {
|
||||||
|
ParsedMemKind::Const(cb) => api::ParsedMemberKind::Constant(api::ParsedConstId(
|
||||||
|
ctx.get_or_default::<ParsedConstCtxEntry>().consts.add(cb).id(),
|
||||||
|
)),
|
||||||
|
ParsedMemKind::Mod(plv) =>
|
||||||
|
api::ParsedMemberKind::Module(linev_into_api(plv, ctx, hand).boxed_local().await),
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
ParsedLineKind::Rec(tv) =>
|
||||||
|
api::ParsedLineKind::Recursive(ttv_into_api(tv, &mut (), &mut (ctx, hand)).await),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn linev_into_api(
|
||||||
|
v: Vec<ParsedLine>,
|
||||||
|
ctx: SysCtx,
|
||||||
|
hand: &dyn ReqHandlish,
|
||||||
|
) -> Vec<api::ParsedLine> {
|
||||||
|
join_all(v.into_iter().map(|l| l.into_api(ctx.clone(), hand))).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub enum ParsedLineKind {
|
||||||
|
Mem(ParsedMem),
|
||||||
|
Rec(Vec<GenTokTree>),
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct ParsedMem {
|
||||||
|
name: Tok<String>,
|
||||||
|
exported: bool,
|
||||||
|
kind: ParsedMemKind,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub enum ParsedMemKind {
|
||||||
|
Const(BoxConstCallback),
|
||||||
|
Mod(Vec<ParsedLine>),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ParsedMemKind {
|
||||||
|
pub fn cnst<F: AsyncFnOnce(ConstCtx) -> GExpr + 'static>(f: F) -> Self {
|
||||||
|
Self::Const(Box::new(|ctx| Box::pin(f(ctx))))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
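A minimal sketch (not in the commit) of packaging a constant with the types above. const_line is a hypothetical helper; it assumes crate-internal access to ParsedMem's private fields and takes the already-built GExpr and SrcRange from its caller.

fn const_line(name: Tok<String>, value: GExpr, comments: Vec<Comment>, sr: SrcRange) -> ParsedLine {
  ParsedLine {
    sr,
    comments,
    kind: ParsedLineKind::Mem(ParsedMem {
      name,
      exported: true,
      // Stored in ParsedConstCtxEntry; only runs when the host later asks for
      // this constant through get_const.
      kind: ParsedMemKind::cnst(move |_cctx: ConstCtx| async move { value }),
    }),
  }
}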
|
/* TODO: how the macro runner uses the multi-stage loader
|
||||||
|
|
||||||
|
Since the macro runner actually has to invoke the interpreter,
|
||||||
|
it'll run at const-time and not at postparse-time anyway.
|
||||||
|
|
||||||
|
parsing stage establishes the role of every constant as a macro keyword
|
||||||
|
postparse / const load links up constants with every macro they can directly invoke
|
||||||
|
the constants representing the keywords might not actually be postparsed,
|
||||||
|
\ the connection is instead made by detecting in the macro system that the
|
||||||
|
\ resolved name is owned by a macro
|
||||||
|
the returned constant from this call is always an entrypoint call to
|
||||||
|
\ the macro system
|
||||||
|
the constants representing the keywords resolve to panic
|
||||||
|
execute relies on these links detected in the extension to dispatch relevant macros
|
||||||
|
*/
|
||||||
|
|
||||||
|
pub struct ConstCtx {
|
||||||
|
ctx: SysCtx,
|
||||||
|
constid: api::ParsedConstId,
|
||||||
|
}
|
||||||
|
impl ConstCtx {
|
||||||
|
pub async fn names<const N: usize>(&self, names: [&Sym; N]) -> [Option<Sym>; N] {
|
||||||
|
let resolve_names = ResolveNames {
|
||||||
|
constid: self.constid,
|
||||||
|
sys: self.ctx.sys_id(),
|
||||||
|
names: names.into_iter().map(|n| n.to_api()).collect_vec(),
|
||||||
|
};
|
||||||
|
let names = self.ctx.reqnot().request(resolve_names).await;
|
||||||
|
let mut results = [const { None }; N];
|
||||||
|
for (i, name) in names.into_iter().enumerate().filter_map(|(i, n)| Some((i, n?))) {
|
||||||
|
results[i] = Some(Sym::from_api(name, self.ctx.i()).await);
|
||||||
|
}
|
||||||
|
results
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn get_const(id: api::ParsedConstId, ctx: SysCtx) -> GExpr {
|
||||||
|
let ent = ctx.get::<ParsedConstCtxEntry>();
|
||||||
|
let rec = ent.consts.get(id.0).expect("Bad ID or double read of parsed const");
|
||||||
|
let ctx = ConstCtx { constid: id, ctx: ctx.clone() };
|
||||||
|
rec.remove()(ctx).await
|
||||||
|
}
|
||||||
|
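An illustrative const callback (not from the commit) showing ConstCtx::names; target stands for a Sym the parser captured earlier, and sym_ref is the gen_expr helper also touched later in this commit.

ParsedMemKind::cnst(move |cctx: ConstCtx| async move {
  // Ask the host to resolve the captured name as seen from this constant's module.
  let [resolved] = cctx.names([&target]).await;
  match resolved {
    Some(path) => sym_ref(path),
    None => todo!("report the unresolved name as an error expression"),
  }
})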
|||||||
@@ -18,7 +18,7 @@ use orchid_base::reqnot::{Receipt, ReqNot};
|
|||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId, AtomicFeatures, ForeignAtom, TypAtom, get_info};
|
use crate::atom::{AtomCtx, AtomDynfo, AtomTypeId, AtomicFeatures, ForeignAtom, TypAtom, get_info};
|
||||||
use crate::entrypoint::ExtReq;
|
use crate::entrypoint::ExtReq;
|
||||||
use crate::fs::DeclFs;
|
use crate::fs::DeclVmod;
|
||||||
use crate::func_atom::Fun;
|
use crate::func_atom::Fun;
|
||||||
use crate::lexer::LexerObj;
|
use crate::lexer::LexerObj;
|
||||||
use crate::parser::ParserObj;
|
use crate::parser::ParserObj;
|
||||||
@@ -83,7 +83,7 @@ impl<T: SystemCard> DynSystemCard for T {
|
|||||||
/// System as defined by author
|
/// System as defined by author
|
||||||
pub trait System: Send + Sync + SystemCard + 'static {
|
pub trait System: Send + Sync + SystemCard + 'static {
|
||||||
fn env() -> Vec<GenMember>;
|
fn env() -> Vec<GenMember>;
|
||||||
fn vfs() -> DeclFs;
|
fn vfs() -> DeclVmod;
|
||||||
fn lexers() -> Vec<LexerObj>;
|
fn lexers() -> Vec<LexerObj>;
|
||||||
fn parsers() -> Vec<ParserObj>;
|
fn parsers() -> Vec<ParserObj>;
|
||||||
fn request(hand: ExtReq<'_>, req: Self::Req) -> impl Future<Output = Receipt<'_>>;
|
fn request(hand: ExtReq<'_>, req: Self::Req) -> impl Future<Output = Receipt<'_>>;
|
||||||
@@ -91,7 +91,7 @@ pub trait System: Send + Sync + SystemCard + 'static {
|
|||||||
|
|
||||||
pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
|
pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
|
||||||
fn dyn_env(&self) -> Vec<GenMember>;
|
fn dyn_env(&self) -> Vec<GenMember>;
|
||||||
fn dyn_vfs(&self) -> DeclFs;
|
fn dyn_vfs(&self) -> DeclVmod;
|
||||||
fn dyn_lexers(&self) -> Vec<LexerObj>;
|
fn dyn_lexers(&self) -> Vec<LexerObj>;
|
||||||
fn dyn_parsers(&self) -> Vec<ParserObj>;
|
fn dyn_parsers(&self) -> Vec<ParserObj>;
|
||||||
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>>;
|
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>>;
|
||||||
@@ -100,7 +100,7 @@ pub trait DynSystem: Send + Sync + DynSystemCard + 'static {
|
|||||||
|
|
||||||
impl<T: System> DynSystem for T {
|
impl<T: System> DynSystem for T {
|
||||||
fn dyn_env(&self) -> Vec<GenMember> { Self::env() }
|
fn dyn_env(&self) -> Vec<GenMember> { Self::env() }
|
||||||
fn dyn_vfs(&self) -> DeclFs { Self::vfs() }
|
fn dyn_vfs(&self) -> DeclVmod { Self::vfs() }
|
||||||
fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() }
|
fn dyn_lexers(&self) -> Vec<LexerObj> { Self::lexers() }
|
||||||
fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() }
|
fn dyn_parsers(&self) -> Vec<ParserObj> { Self::parsers() }
|
||||||
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>> {
|
fn dyn_request<'a>(&self, hand: ExtReq<'a>, req: Vec<u8>) -> LocalBoxFuture<'a, Receipt<'a>> {
|
||||||
|
|||||||
@@ -90,7 +90,7 @@ impl<T: SystemCtor> DynSystemCtor for T {
|
|||||||
mod dep_set_tuple_impls {
|
mod dep_set_tuple_impls {
|
||||||
use orchid_base::box_chain;
|
use orchid_base::box_chain;
|
||||||
use orchid_base::boxed_iter::BoxedIter;
|
use orchid_base::boxed_iter::BoxedIter;
|
||||||
use paste::paste;
|
use pastey::paste;
|
||||||
|
|
||||||
use super::{DepDef, DepSat};
|
use super::{DepDef, DepSat};
|
||||||
use crate::api;
|
use crate::api;
|
||||||
|
|||||||
@@ -20,7 +20,7 @@ use crate::conv::ToExpr;
|
|||||||
use crate::entrypoint::MemberRecord;
|
use crate::entrypoint::MemberRecord;
|
||||||
use crate::expr::{Expr, ExprHandle};
|
use crate::expr::{Expr, ExprHandle};
|
||||||
use crate::func_atom::{ExprFunc, Fun};
|
use crate::func_atom::{ExprFunc, Fun};
|
||||||
use crate::gen_expr::{GExpr, arg, call, lambda, seq};
|
use crate::gen_expr::{GExpr, arg, call, lambda, seq, sym_ref};
|
||||||
use crate::system::SysCtx;
|
use crate::system::SysCtx;
|
||||||
|
|
||||||
pub type GenTokTree = TokTree<Expr, GExpr>;
|
pub type GenTokTree = TokTree<Expr, GExpr>;
|
||||||
@@ -65,6 +65,9 @@ impl TokenVariant<api::ExprTicket> for Expr {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn x_tok(x: impl ToExpr) -> GenTok { GenTok::NewExpr(x.to_expr()) }
|
||||||
|
pub fn ref_tok(path: Sym) -> GenTok { GenTok::NewExpr(sym_ref(path)) }
|
||||||
|
|
||||||
pub fn cnst(public: bool, name: &str, value: impl ToExpr) -> Vec<GenMember> {
|
pub fn cnst(public: bool, name: &str, value: impl ToExpr) -> Vec<GenMember> {
|
||||||
vec![GenMember {
|
vec![GenMember {
|
||||||
name: name.to_string(),
|
name: name.to_string(),
|
||||||
|
|||||||
@@ -16,6 +16,7 @@ futures = "0.3.31"
|
|||||||
hashbrown = "0.15.2"
|
hashbrown = "0.15.2"
|
||||||
itertools = "0.14.0"
|
itertools = "0.14.0"
|
||||||
lazy_static = "1.5.0"
|
lazy_static = "1.5.0"
|
||||||
|
memo-map = "0.3.3"
|
||||||
never = "0.1.0"
|
never = "0.1.0"
|
||||||
num-traits = "0.2.19"
|
num-traits = "0.2.19"
|
||||||
orchid-api = { version = "0.1.0", path = "../orchid-api" }
|
orchid-api = { version = "0.1.0", path = "../orchid-api" }
|
||||||
|
|||||||
@@ -20,10 +20,12 @@ pub struct AtomData {
|
|||||||
data: Vec<u8>,
|
data: Vec<u8>,
|
||||||
}
|
}
|
||||||
impl AtomData {
|
impl AtomData {
|
||||||
|
#[must_use]
|
||||||
fn api(self) -> api::Atom {
|
fn api(self) -> api::Atom {
|
||||||
let (owner, drop, data) = self.destructure();
|
let (owner, drop, data) = self.destructure();
|
||||||
api::Atom { data, drop, owner: owner.id() }
|
api::Atom { data, drop, owner: owner.id() }
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
fn api_ref(&self) -> api::Atom {
|
fn api_ref(&self) -> api::Atom {
|
||||||
api::Atom { data: self.data.clone(), drop: self.drop, owner: self.owner.id() }
|
api::Atom { data: self.data.clone(), drop: self.drop, owner: self.owner.id() }
|
||||||
}
|
}
|
||||||
@@ -48,6 +50,7 @@ impl fmt::Debug for AtomData {
|
|||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct AtomHand(Rc<AtomData>);
|
pub struct AtomHand(Rc<AtomData>);
|
||||||
impl AtomHand {
|
impl AtomHand {
|
||||||
|
#[must_use]
|
||||||
pub(crate) async fn new(api::Atom { data, drop, owner }: api::Atom, ctx: &Ctx) -> Self {
|
pub(crate) async fn new(api::Atom { data, drop, owner }: api::Atom, ctx: &Ctx) -> Self {
|
||||||
let create = || async {
|
let create = || async {
|
||||||
let owner = ctx.system_inst(owner).await.expect("Dropped system created atom");
|
let owner = ctx.system_inst(owner).await.expect("Dropped system created atom");
|
||||||
@@ -67,6 +70,7 @@ impl AtomHand {
|
|||||||
create().await
|
create().await
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub async fn call(self, arg: Expr) -> api::Expression {
|
pub async fn call(self, arg: Expr) -> api::Expression {
|
||||||
let owner_sys = self.0.owner.clone();
|
let owner_sys = self.0.owner.clone();
|
||||||
let reqnot = owner_sys.reqnot();
|
let reqnot = owner_sys.reqnot();
|
||||||
@@ -76,13 +80,18 @@ impl AtomHand {
|
|||||||
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), arg.id())).await,
|
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), arg.id())).await,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn sys(&self) -> &System { &self.0.owner }
|
pub fn sys(&self) -> &System { &self.0.owner }
|
||||||
|
#[must_use]
|
||||||
pub fn ext(&self) -> &Extension { self.sys().ext() }
|
pub fn ext(&self) -> &Extension { self.sys().ext() }
|
||||||
pub async fn req(&self, key: api::TStrv, req: Vec<u8>) -> Option<Vec<u8>> {
|
pub async fn req(&self, key: api::TStrv, req: Vec<u8>) -> Option<Vec<u8>> {
|
||||||
self.0.owner.reqnot().request(api::Fwded(self.0.api_ref(), key, req)).await
|
self.0.owner.reqnot().request(api::Fwded(self.0.api_ref(), key, req)).await
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn api_ref(&self) -> api::Atom { self.0.api_ref() }
|
pub fn api_ref(&self) -> api::Atom { self.0.api_ref() }
|
||||||
|
#[must_use]
|
||||||
pub async fn to_string(&self) -> String { take_first_fmt(self, &self.0.owner.ctx().i).await }
|
pub async fn to_string(&self) -> String { take_first_fmt(self, &self.0.owner.ctx().i).await }
|
||||||
|
#[must_use]
|
||||||
pub fn downgrade(&self) -> WeakAtomHand { WeakAtomHand(Rc::downgrade(&self.0)) }
|
pub fn downgrade(&self) -> WeakAtomHand { WeakAtomHand(Rc::downgrade(&self.0)) }
|
||||||
}
|
}
|
||||||
impl Format for AtomHand {
|
impl Format for AtomHand {
|
||||||
@@ -100,5 +109,6 @@ impl AtomRepr for AtomHand {
|
|||||||
|
|
||||||
pub struct WeakAtomHand(Weak<AtomData>);
|
pub struct WeakAtomHand(Weak<AtomData>);
|
||||||
impl WeakAtomHand {
|
impl WeakAtomHand {
|
||||||
|
#[must_use]
|
||||||
pub fn upgrade(&self) -> Option<AtomHand> { self.0.upgrade().map(AtomHand) }
|
pub fn upgrade(&self) -> Option<AtomHand> { self.0.upgrade().map(AtomHand) }
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,7 +3,6 @@ use std::num::{NonZero, NonZeroU16};
|
|||||||
use std::rc::{Rc, Weak};
|
use std::rc::{Rc, Weak};
|
||||||
use std::{fmt, ops};
|
use std::{fmt, ops};
|
||||||
|
|
||||||
use async_once_cell::OnceCell;
|
|
||||||
use async_std::sync::RwLock;
|
use async_std::sync::RwLock;
|
||||||
use hashbrown::HashMap;
|
use hashbrown::HashMap;
|
||||||
use orchid_api::SysId;
|
use orchid_api::SysId;
|
||||||
@@ -13,17 +12,17 @@ use orchid_base::interner::Interner;
|
|||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::atom::WeakAtomHand;
|
use crate::atom::WeakAtomHand;
|
||||||
use crate::expr_store::ExprStore;
|
use crate::expr_store::ExprStore;
|
||||||
use crate::parsed::Root;
|
|
||||||
use crate::system::{System, WeakSystem};
|
use crate::system::{System, WeakSystem};
|
||||||
|
use crate::tree::WeakRoot;
|
||||||
|
|
||||||
pub struct CtxData {
|
pub struct CtxData {
|
||||||
pub i: Rc<Interner>,
|
pub i: Interner,
|
||||||
pub spawn: Spawner,
|
pub spawn: Spawner,
|
||||||
pub systems: RwLock<HashMap<api::SysId, WeakSystem>>,
|
pub systems: RwLock<HashMap<api::SysId, WeakSystem>>,
|
||||||
pub system_id: RefCell<NonZeroU16>,
|
pub system_id: RefCell<NonZeroU16>,
|
||||||
pub owned_atoms: RwLock<HashMap<api::AtomId, WeakAtomHand>>,
|
pub owned_atoms: RwLock<HashMap<api::AtomId, WeakAtomHand>>,
|
||||||
pub common_exprs: ExprStore,
|
pub common_exprs: ExprStore,
|
||||||
pub root: OnceCell<Weak<Root>>,
|
pub root: RwLock<WeakRoot>,
|
||||||
}
|
}
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct Ctx(Rc<CtxData>);
|
pub struct Ctx(Rc<CtxData>);
|
||||||
@@ -31,30 +30,39 @@ impl ops::Deref for Ctx {
|
|||||||
type Target = CtxData;
|
type Target = CtxData;
|
||||||
fn deref(&self) -> &Self::Target { &self.0 }
|
fn deref(&self) -> &Self::Target { &self.0 }
|
||||||
}
|
}
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct WeakCtx(Weak<CtxData>);
|
||||||
|
impl WeakCtx {
|
||||||
|
#[must_use]
|
||||||
|
pub fn try_upgrade(&self) -> Option<Ctx> { Some(Ctx(self.0.upgrade()?)) }
|
||||||
|
#[must_use]
|
||||||
|
pub fn upgrade(&self) -> Ctx { self.try_upgrade().expect("Ctx manually kept alive until exit") }
|
||||||
|
}
|
||||||
impl Ctx {
|
impl Ctx {
|
||||||
|
#[must_use]
|
||||||
pub fn new(spawn: Spawner) -> Self {
|
pub fn new(spawn: Spawner) -> Self {
|
||||||
Self(Rc::new(CtxData {
|
Self(Rc::new(CtxData {
|
||||||
spawn,
|
spawn,
|
||||||
i: Rc::default(),
|
i: Interner::default(),
|
||||||
systems: RwLock::default(),
|
systems: RwLock::default(),
|
||||||
system_id: RefCell::new(NonZero::new(1).unwrap()),
|
system_id: RefCell::new(NonZero::new(1).unwrap()),
|
||||||
owned_atoms: RwLock::default(),
|
owned_atoms: RwLock::default(),
|
||||||
common_exprs: ExprStore::default(),
|
common_exprs: ExprStore::default(),
|
||||||
root: OnceCell::default(),
|
root: RwLock::default(),
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub(crate) async fn system_inst(&self, id: api::SysId) -> Option<System> {
|
pub(crate) async fn system_inst(&self, id: api::SysId) -> Option<System> {
|
||||||
self.systems.read().await.get(&id).and_then(WeakSystem::upgrade)
|
self.systems.read().await.get(&id).and_then(WeakSystem::upgrade)
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub(crate) fn next_sys_id(&self) -> api::SysId {
|
pub(crate) fn next_sys_id(&self) -> api::SysId {
|
||||||
let mut g = self.system_id.borrow_mut();
|
let mut g = self.system_id.borrow_mut();
|
||||||
*g = g.checked_add(1).unwrap_or(NonZeroU16::new(1).unwrap());
|
*g = g.checked_add(1).unwrap_or(NonZeroU16::new(1).unwrap());
|
||||||
SysId(*g)
|
SysId(*g)
|
||||||
}
|
}
|
||||||
pub async fn set_root(&self, root: Weak<Root>) {
|
#[must_use]
|
||||||
assert!(self.root.get().is_none(), "Root already assigned");
|
pub fn downgrade(&self) -> WeakCtx { WeakCtx(Rc::downgrade(&self.0)) }
|
||||||
self.root.get_or_init(async { root }).await;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
impl fmt::Debug for Ctx {
|
impl fmt::Debug for Ctx {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
|||||||
@@ -1,16 +1,9 @@
|
|||||||
use std::collections::VecDeque;
|
use hashbrown::HashSet;
|
||||||
|
use itertools::Itertools;
|
||||||
use futures::FutureExt;
|
|
||||||
use hashbrown::{HashMap, HashSet};
|
|
||||||
use itertools::{Either, Itertools};
|
|
||||||
use orchid_base::error::{OrcErr, OrcRes, Reporter, mk_err, mk_errv};
|
use orchid_base::error::{OrcErr, OrcRes, Reporter, mk_err, mk_errv};
|
||||||
use orchid_base::interner::{Interner, Tok};
|
use orchid_base::interner::{Interner, Tok};
|
||||||
use orchid_base::location::Pos;
|
use orchid_base::location::Pos;
|
||||||
use orchid_base::name::{NameLike, Sym, VName};
|
use orchid_base::name::VName;
|
||||||
use substack::Substack;
|
|
||||||
|
|
||||||
use crate::expr::Expr;
|
|
||||||
use crate::parsed::{ItemKind, ParsedMemberKind, ParsedModule};
|
|
||||||
|
|
||||||
/// Errors produced by absolute_path
|
/// Errors produced by absolute_path
|
||||||
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
||||||
@@ -48,29 +41,24 @@ impl AbsPathError {
|
|||||||
///
|
///
|
||||||
/// if the relative path contains as many or more `super` segments than the
|
/// if the relative path contains as many or more `super` segments than the
|
||||||
/// length of the absolute path.
|
/// length of the absolute path.
|
||||||
pub fn absolute_path(
|
pub async fn absolute_path(
|
||||||
mut cwd: &[Tok<String>],
|
mut cwd: &[Tok<String>],
|
||||||
mut rel: &[Tok<String>],
|
mut rel: &[Tok<String>],
|
||||||
|
i: &Interner,
|
||||||
) -> Result<VName, AbsPathError> {
|
) -> Result<VName, AbsPathError> {
|
||||||
let mut relative = false;
|
let i_self = i.i("self").await;
|
||||||
if rel.first().map(|t| t.as_str()) == Some("self") {
|
let i_super = i.i("super").await;
|
||||||
relative = true;
|
let relative = rel.first().is_some_and(|s| *s == i_self || *s == i_super);
|
||||||
rel = rel.split_first().expect("checked above").1;
|
if let Some((_, tail)) = rel.split_first().filter(|(h, _)| **h == i_self) {
|
||||||
|
rel = tail;
|
||||||
} else {
|
} else {
|
||||||
while rel.first().map(|t| t.as_str()) == Some("super") {
|
while let Some((_, tail)) = rel.split_first().filter(|(h, _)| **h == i_super) {
|
||||||
match cwd.split_last() {
|
cwd = cwd.split_last().ok_or(AbsPathError::TooManySupers)?.1;
|
||||||
Some((_, torso)) => cwd = torso,
|
rel = tail;
|
||||||
None => return Err(AbsPathError::TooManySupers),
|
|
||||||
};
|
|
||||||
rel = rel.split_first().expect("checked above").1;
|
|
||||||
relative = true;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
match relative {
|
if relative { VName::new(cwd.iter().chain(rel).cloned()) } else { VName::new(rel.to_vec()) }
|
||||||
true => VName::new(cwd.iter().chain(rel).cloned()),
|
.map_err(|_| AbsPathError::RootPath)
|
||||||
false => VName::new(rel.to_vec()),
|
|
||||||
}
|
|
||||||
.map_err(|_| AbsPathError::RootPath)
|
|
||||||
}
|
}
|
||||||
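Worked examples for the rewritten resolution, added for clarity rather than taken from the commit:

// cwd = ["app", "main"], rel = ["self", "util", "sum"]
//   -> "self" is stripped, cwd is kept: Ok(["app", "main", "util", "sum"])
// cwd = ["app", "main"], rel = ["super", "util", "sum"]
//   -> each "super" pops one cwd segment: Ok(["app", "util", "sum"])
// cwd = ["app"], rel = ["super", "super", "x"]
//   -> the second "super" has nothing left to pop: Err(AbsPathError::TooManySupers)
// rel = ["std", "number"] (no "self"/"super" prefix)
//   -> taken as already absolute: Ok(["std", "number"])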
|
|
||||||
pub struct DealiasCtx<'a> {
|
pub struct DealiasCtx<'a> {
|
||||||
@@ -84,8 +72,7 @@ pub async fn resolv_glob<Mod: Tree>(
|
|||||||
abs_path: &[Tok<String>],
|
abs_path: &[Tok<String>],
|
||||||
pos: Pos,
|
pos: Pos,
|
||||||
i: &Interner,
|
i: &Interner,
|
||||||
rep: &Reporter,
|
ctx: &mut Mod::Ctx<'_>,
|
||||||
ctx: &mut Mod::Ctx,
|
|
||||||
) -> OrcRes<HashSet<Tok<String>>> {
|
) -> OrcRes<HashSet<Tok<String>>> {
|
||||||
let coprefix_len = cwd.iter().zip(abs_path).take_while(|(a, b)| a == b).count();
|
let coprefix_len = cwd.iter().zip(abs_path).take_while(|(a, b)| a == b).count();
|
||||||
let (co_prefix, diff_path) = abs_path.split_at(abs_path.len().min(coprefix_len + 1));
|
let (co_prefix, diff_path) = abs_path.split_at(abs_path.len().min(coprefix_len + 1));
|
||||||
@@ -96,35 +83,34 @@ pub async fn resolv_glob<Mod: Tree>(
|
|||||||
Err(e) => {
|
Err(e) => {
|
||||||
let path = abs_path[..=coprefix_len + e.pos].iter().join("::");
|
let path = abs_path[..=coprefix_len + e.pos].iter().join("::");
|
||||||
let (tk, msg) = match e.kind {
|
let (tk, msg) = match e.kind {
|
||||||
ChildErrorKind::Constant =>
|
ChildErrorKind::Constant => ("Invalid import path", format!("{path} is a const")),
|
||||||
(i.i("Invalid import path").await, format!("{path} is a const")),
|
ChildErrorKind::Missing => ("Invalid import path", format!("{path} not found")),
|
||||||
ChildErrorKind::Missing => (i.i("Invalid import path").await, format!("{path} not found")),
|
ChildErrorKind::Private => ("Import inaccessible", format!("{path} is private")),
|
||||||
ChildErrorKind::Private => (i.i("Import inaccessible").await, format!("{path} is private")),
|
|
||||||
};
|
};
|
||||||
return Err(mk_errv(tk, msg, [pos.into()]));
|
return Err(mk_errv(i.i(tk).await, msg, [pos]));
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
Ok(target_module.children(coprefix_len < abs_path.len()))
|
Ok(target_module.children(coprefix_len < abs_path.len()))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub enum ChildResult<'a, T: Tree + ?Sized> {
|
pub type ChildResult<'a, T> = Result<&'a T, ChildErrorKind>;
|
||||||
Value(&'a T),
|
|
||||||
Err(ChildErrorKind),
|
|
||||||
Alias(&'a [Tok<String>]),
|
|
||||||
}
|
|
||||||
pub trait Tree {
|
pub trait Tree {
|
||||||
type Ctx;
|
type Ctx<'a>;
|
||||||
|
#[must_use]
|
||||||
fn children(&self, public_only: bool) -> HashSet<Tok<String>>;
|
fn children(&self, public_only: bool) -> HashSet<Tok<String>>;
|
||||||
|
#[must_use]
|
||||||
fn child(
|
fn child(
|
||||||
&self,
|
&self,
|
||||||
key: Tok<String>,
|
key: Tok<String>,
|
||||||
public_only: bool,
|
public_only: bool,
|
||||||
ctx: &mut Self::Ctx,
|
ctx: &mut Self::Ctx<'_>,
|
||||||
) -> impl Future<Output = ChildResult<'_, Self>>;
|
) -> impl Future<Output = ChildResult<'_, Self>>;
|
||||||
}
|
}
|
||||||
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
|
||||||
pub enum ChildErrorKind {
|
pub enum ChildErrorKind {
|
||||||
Missing,
|
Missing,
|
||||||
|
/// Only thrown if public_only is true
|
||||||
Private,
|
Private,
|
||||||
Constant,
|
Constant,
|
||||||
}
|
}
|
||||||
@@ -144,42 +130,17 @@ pub struct ChildError {
|
|||||||
//
|
//
|
||||||
// caveat: we need to check EVERY IMPORT to ensure that all
|
// caveat: we need to check EVERY IMPORT to ensure that all
|
||||||
// errors are raised
|
// errors are raised
|
||||||
|
pub async fn walk<'a, T: Tree>(
|
||||||
async fn walk_no_access_chk<'a, T: Tree>(
|
|
||||||
root: &'a T,
|
|
||||||
cur: &mut &'a T,
|
|
||||||
path: impl IntoIterator<Item = Tok<String>, IntoIter: DoubleEndedIterator>,
|
|
||||||
ctx: &mut T::Ctx,
|
|
||||||
) -> Result<(), ChildErrorKind> {
|
|
||||||
// this VecDeque is used like a stack to leverage its Extend implementation.
|
|
||||||
let mut path: VecDeque<Tok<String>> = path.into_iter().rev().collect();
|
|
||||||
while let Some(step) = path.pop_back() {
|
|
||||||
match cur.child(step, false, ctx).await {
|
|
||||||
ChildResult::Alias(target) => {
|
|
||||||
path.extend(target.iter().cloned().rev());
|
|
||||||
*cur = root;
|
|
||||||
},
|
|
||||||
ChildResult::Err(e) => return Err(e),
|
|
||||||
ChildResult::Value(v) => *cur = v,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn walk<'a, T: Tree>(
|
|
||||||
root: &'a T,
|
root: &'a T,
|
||||||
public_only: bool,
|
public_only: bool,
|
||||||
path: impl IntoIterator<Item = Tok<String>>,
|
path: impl IntoIterator<Item = Tok<String>>,
|
||||||
ctx: &mut T::Ctx,
|
ctx: &mut T::Ctx<'_>,
|
||||||
) -> Result<&'a T, ChildError> {
|
) -> Result<&'a T, ChildError> {
|
||||||
let mut cur = root;
|
let mut cur = root;
|
||||||
for (i, item) in path.into_iter().enumerate() {
|
for (i, item) in path.into_iter().enumerate() {
|
||||||
match cur.child(item, public_only, ctx).await {
|
match cur.child(item, public_only, ctx).await {
|
||||||
ChildResult::Value(v) => cur = v,
|
ChildResult::Ok(v) => cur = v,
|
||||||
ChildResult::Err(kind) => return Err(ChildError { pos: i, kind }),
|
ChildResult::Err(kind) => return Err(ChildError { pos: i, kind }),
|
||||||
ChildResult::Alias(path) => (walk_no_access_chk(root, &mut cur, path.iter().cloned(), ctx)
|
|
||||||
.await)
|
|
||||||
.map_err(|kind| ChildError { kind, pos: i })?,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Ok(cur)
|
Ok(cur)
|
||||||
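A hypothetical in-memory Tree impl, only to illustrate the new lifetime-carrying Ctx GAT and the Result-based ChildResult; Node is not a type from this commit, and a real impl would thread whatever per-walk state it needs through Ctx<'a>.

struct Node {
  public: HashSet<Tok<String>>,
  children: hashbrown::HashMap<Tok<String>, Node>,
}
impl Tree for Node {
  // This toy tree needs no per-walk state.
  type Ctx<'a> = ();
  fn children(&self, public_only: bool) -> HashSet<Tok<String>> {
    if public_only { self.public.clone() } else { self.children.keys().cloned().collect() }
  }
  async fn child(
    &self,
    key: Tok<String>,
    public_only: bool,
    _ctx: &mut Self::Ctx<'_>,
  ) -> ChildResult<'_, Self> {
    if public_only && !self.public.contains(&key) {
      return Err(ChildErrorKind::Private);
    }
    self.children.get(&key).ok_or(ChildErrorKind::Missing)
  }
}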
|
|||||||
@@ -10,6 +10,7 @@ use orchid_base::logging::Logger;
|
|||||||
|
|
||||||
use crate::ctx::Ctx;
|
use crate::ctx::Ctx;
|
||||||
use crate::expr::{Expr, ExprKind, ExprParseCtx, PathSet, PathSetBuilder, Step};
|
use crate::expr::{Expr, ExprKind, ExprParseCtx, PathSet, PathSetBuilder, Step};
|
||||||
|
use crate::tree::Root;
|
||||||
|
|
||||||
type ExprGuard = Bound<RwLockWriteGuard<'static, ExprKind>, Expr>;
|
type ExprGuard = Bound<RwLockWriteGuard<'static, ExprKind>, Expr>;
|
||||||
|
|
||||||
@@ -36,16 +37,21 @@ pub struct ExecCtx {
|
|||||||
cur_pos: Pos,
|
cur_pos: Pos,
|
||||||
did_pop: bool,
|
did_pop: bool,
|
||||||
logger: Logger,
|
logger: Logger,
|
||||||
|
root: Root,
|
||||||
}
|
}
|
||||||
impl ExecCtx {
|
impl ExecCtx {
|
||||||
pub async fn new(ctx: Ctx, logger: Logger, init: Expr) -> Self {
|
#[must_use]
|
||||||
|
pub async fn new(ctx: Ctx, logger: Logger, root: Root, init: Expr) -> Self {
|
||||||
let cur_pos = init.pos();
|
let cur_pos = init.pos();
|
||||||
let cur = Bound::async_new(init, |init| init.kind().write()).await;
|
let cur = Bound::async_new(init, |init| init.kind().write()).await;
|
||||||
Self { ctx, gas: None, stack: vec![], cur, cur_pos, did_pop: false, logger }
|
Self { ctx, gas: None, stack: vec![], cur, cur_pos, did_pop: false, logger, root }
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn remaining_gas(&self) -> u64 { self.gas.expect("queried remaining_gas but no gas was set") }
|
pub fn remaining_gas(&self) -> u64 { self.gas.expect("queried remaining_gas but no gas was set") }
|
||||||
pub fn set_gas(&mut self, gas: Option<u64>) { self.gas = gas }
|
pub fn set_gas(&mut self, gas: Option<u64>) { self.gas = gas }
|
||||||
|
#[must_use]
|
||||||
pub fn idle(&self) -> bool { self.did_pop }
|
pub fn idle(&self) -> bool { self.did_pop }
|
||||||
|
#[must_use]
|
||||||
pub fn result(self) -> ExecResult {
|
pub fn result(self) -> ExecResult {
|
||||||
if self.idle() {
|
if self.idle() {
|
||||||
match &*self.cur {
|
match &*self.cur {
|
||||||
@@ -56,15 +62,18 @@ impl ExecCtx {
|
|||||||
ExecResult::Gas(self)
|
ExecResult::Gas(self)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn use_gas(&mut self, amount: u64) -> bool {
|
pub fn use_gas(&mut self, amount: u64) -> bool {
|
||||||
if let Some(gas) = &mut self.gas {
|
if let Some(gas) = &mut self.gas {
|
||||||
*gas -= amount;
|
*gas -= amount;
|
||||||
}
|
}
|
||||||
self.gas != Some(0)
|
self.gas != Some(0)
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub async fn try_lock(&self, ex: &Expr) -> ExprGuard {
|
pub async fn try_lock(&self, ex: &Expr) -> ExprGuard {
|
||||||
Bound::async_new(ex.clone(), |ex| ex.kind().write()).await
|
Bound::async_new(ex.clone(), |ex| ex.kind().write()).await
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub async fn unpack_ident(&self, ex: &Expr) -> Expr {
|
pub async fn unpack_ident(&self, ex: &Expr) -> Expr {
|
||||||
match ex.kind().try_write().as_deref_mut() {
|
match ex.kind().try_write().as_deref_mut() {
|
||||||
Some(ExprKind::Identity(ex)) => {
|
Some(ExprKind::Identity(ex)) => {
|
||||||
@@ -89,14 +98,11 @@ impl ExecCtx {
|
|||||||
},
|
},
|
||||||
ExprKind::Seq(a, b) if !self.did_pop => (ExprKind::Seq(a.clone(), b), StackOp::Push(a)),
|
ExprKind::Seq(a, b) if !self.did_pop => (ExprKind::Seq(a.clone(), b), StackOp::Push(a)),
|
||||||
ExprKind::Seq(_, b) => (ExprKind::Identity(b), StackOp::Nop),
|
ExprKind::Seq(_, b) => (ExprKind::Identity(b), StackOp::Nop),
|
||||||
ExprKind::Const(name) => {
|
ExprKind::Const(name) =>
|
||||||
let root = (self.ctx.root.get().and_then(|v| v.upgrade()))
|
match self.root.get_const_value(name, self.cur_pos.clone()).await {
|
||||||
.expect("Root not assigned before execute call");
|
|
||||||
match root.get_const_value(name, self.cur_pos.clone(), self.ctx.clone()).await {
|
|
||||||
Err(e) => (ExprKind::Bottom(e), StackOp::Pop),
|
Err(e) => (ExprKind::Bottom(e), StackOp::Pop),
|
||||||
Ok(v) => (ExprKind::Identity(v), StackOp::Nop),
|
Ok(v) => (ExprKind::Identity(v), StackOp::Nop),
|
||||||
}
|
},
|
||||||
},
|
|
||||||
ExprKind::Arg => panic!("This should not appear outside function bodies"),
|
ExprKind::Arg => panic!("This should not appear outside function bodies"),
|
||||||
ek @ ExprKind::Atom(_) => (ek, StackOp::Pop),
|
ek @ ExprKind::Atom(_) => (ek, StackOp::Pop),
|
||||||
ExprKind::Bottom(bot) => (ExprKind::Bottom(bot.clone()), StackOp::Unwind(bot)),
|
ExprKind::Bottom(bot) => (ExprKind::Bottom(bot.clone()), StackOp::Unwind(bot)),
|
||||||
@@ -105,7 +111,7 @@ impl ExecCtx {
|
|||||||
Ok(atom) => {
|
Ok(atom) => {
|
||||||
let ext = atom.sys().ext().clone();
|
let ext = atom.sys().ext().clone();
|
||||||
let x_norm = self.unpack_ident(&x).await;
|
let x_norm = self.unpack_ident(&x).await;
|
||||||
let mut parse_ctx = ExprParseCtx { ctx: self.ctx.clone(), exprs: ext.exprs().clone() };
|
let mut parse_ctx = ExprParseCtx { ctx: &self.ctx, exprs: ext.exprs() };
|
||||||
let val =
|
let val =
|
||||||
Expr::from_api(&atom.call(x_norm).await, PathSetBuilder::new(), &mut parse_ctx).await;
|
Expr::from_api(&atom.call(x_norm).await, PathSetBuilder::new(), &mut parse_ctx).await;
|
||||||
(ExprKind::Identity(val.clone()), StackOp::Swap(val))
|
(ExprKind::Identity(val.clone()), StackOp::Swap(val))
|
||||||
@@ -117,12 +123,10 @@ impl ExecCtx {
|
|||||||
ExprKind::Atom(a) => {
|
ExprKind::Atom(a) => {
|
||||||
let ext = a.sys().ext().clone();
|
let ext = a.sys().ext().clone();
|
||||||
let x_norm = self.unpack_ident(&x).await;
|
let x_norm = self.unpack_ident(&x).await;
|
||||||
let mut parse_ctx =
|
|
||||||
ExprParseCtx { ctx: ext.ctx().clone(), exprs: ext.exprs().clone() };
|
|
||||||
let val = Expr::from_api(
|
let val = Expr::from_api(
|
||||||
&a.clone().call(x_norm).await,
|
&a.clone().call(x_norm).await,
|
||||||
PathSetBuilder::new(),
|
PathSetBuilder::new(),
|
||||||
&mut parse_ctx,
|
&mut ExprParseCtx { ctx: ext.ctx(), exprs: ext.exprs() },
|
||||||
)
|
)
|
||||||
.await;
|
.await;
|
||||||
(ExprKind::Identity(val.clone()), StackOp::Swap(val))
|
(ExprKind::Identity(val.clone()), StackOp::Swap(val))
|
||||||
@@ -168,6 +172,7 @@ impl ExecCtx {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
async fn substitute(
|
async fn substitute(
|
||||||
src: &Expr,
|
src: &Expr,
|
||||||
path: &[Step],
|
path: &[Step],
|
||||||
|
|||||||
@@ -10,10 +10,11 @@ use hashbrown::HashSet;
|
|||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use orchid_base::error::OrcErrv;
|
use orchid_base::error::OrcErrv;
|
||||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
||||||
use orchid_base::location::Pos;
|
use orchid_base::interner::Interner;
|
||||||
|
use orchid_base::location::{Pos, SrcRange};
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::Sym;
|
||||||
use orchid_base::tl_cache;
|
use orchid_base::tl_cache;
|
||||||
use orchid_base::tree::{AtomRepr, indent};
|
use orchid_base::tree::{AtomRepr, TokenVariant, indent};
|
||||||
use substack::Substack;
|
use substack::Substack;
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
@@ -22,9 +23,9 @@ use crate::ctx::Ctx;
|
|||||||
use crate::expr_store::ExprStore;
|
use crate::expr_store::ExprStore;
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct ExprParseCtx {
|
pub struct ExprParseCtx<'a> {
|
||||||
pub ctx: Ctx,
|
pub ctx: &'a Ctx,
|
||||||
pub exprs: ExprStore,
|
pub exprs: &'a ExprStore,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
@@ -36,6 +37,7 @@ pub struct ExprData {
|
|||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct Expr(Rc<ExprData>);
|
pub struct Expr(Rc<ExprData>);
|
||||||
impl Expr {
|
impl Expr {
|
||||||
|
#[must_use]
|
||||||
pub fn pos(&self) -> Pos { self.0.pos.clone() }
|
pub fn pos(&self) -> Pos { self.0.pos.clone() }
|
||||||
pub async fn try_into_owned_atom(self) -> Result<AtomHand, Self> {
|
pub async fn try_into_owned_atom(self) -> Result<AtomHand, Self> {
|
||||||
match Rc::try_unwrap(self.0) {
|
match Rc::try_unwrap(self.0) {
|
||||||
@@ -46,25 +48,29 @@ impl Expr {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub async fn as_atom(&self) -> Option<AtomHand> {
|
pub async fn as_atom(&self) -> Option<AtomHand> {
|
||||||
if let ExprKind::Atom(a) = &*self.kind().read().await { Some(a.clone()) } else { None }
|
if let ExprKind::Atom(a) = &*self.kind().read().await { Some(a.clone()) } else { None }
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn strong_count(&self) -> usize { Rc::strong_count(&self.0) }
|
pub fn strong_count(&self) -> usize { Rc::strong_count(&self.0) }
|
||||||
|
#[must_use]
|
||||||
pub fn id(&self) -> api::ExprTicket {
|
pub fn id(&self) -> api::ExprTicket {
|
||||||
api::ExprTicket(
|
api::ExprTicket(
|
||||||
NonZeroU64::new(self.0.as_ref() as *const ExprData as usize as u64)
|
NonZeroU64::new(self.0.as_ref() as *const ExprData as usize as u64)
|
||||||
.expect("this is a ref, it cannot be null"),
|
.expect("this is a ref, it cannot be null"),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub async fn from_api(
|
pub async fn from_api(
|
||||||
api: &api::Expression,
|
api: &api::Expression,
|
||||||
psb: PathSetBuilder<'_, u64>,
|
psb: PathSetBuilder<'_, u64>,
|
||||||
ctx: &mut ExprParseCtx,
|
ctx: &mut ExprParseCtx<'_>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
let pos = Pos::from_api(&api.location, &ctx.ctx.i).await;
|
let pos = Pos::from_api(&api.location, &ctx.ctx.i).await;
|
||||||
let kind = match &api.kind {
|
let kind = match &api.kind {
|
||||||
api::ExpressionKind::Arg(n) => {
|
api::ExpressionKind::Arg(n) => {
|
||||||
assert!(psb.register_arg(&n), "Arguments must be enclosed in a matching lambda");
|
assert!(psb.register_arg(n), "Arguments must be enclosed in a matching lambda");
|
||||||
ExprKind::Arg
|
ExprKind::Arg
|
||||||
},
|
},
|
||||||
api::ExpressionKind::Bottom(bot) =>
|
api::ExpressionKind::Bottom(bot) =>
|
||||||
@@ -72,14 +78,14 @@ impl Expr {
|
|||||||
api::ExpressionKind::Call(f, x) => {
|
api::ExpressionKind::Call(f, x) => {
|
||||||
let (lpsb, rpsb) = psb.split();
|
let (lpsb, rpsb) = psb.split();
|
||||||
ExprKind::Call(
|
ExprKind::Call(
|
||||||
Expr::from_api(&f, lpsb, ctx).boxed_local().await,
|
Expr::from_api(f, lpsb, ctx).boxed_local().await,
|
||||||
Expr::from_api(&x, rpsb, ctx).boxed_local().await,
|
Expr::from_api(x, rpsb, ctx).boxed_local().await,
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
api::ExpressionKind::Const(name) => ExprKind::Const(Sym::from_api(*name, &ctx.ctx.i).await),
|
api::ExpressionKind::Const(name) => ExprKind::Const(Sym::from_api(*name, &ctx.ctx.i).await),
|
||||||
api::ExpressionKind::Lambda(x, body) => {
|
api::ExpressionKind::Lambda(x, body) => {
|
||||||
let lbuilder = psb.lambda(&x);
|
let lbuilder = psb.lambda(x);
|
||||||
let body = Expr::from_api(&body, lbuilder.stack(), ctx).boxed_local().await;
|
let body = Expr::from_api(body, lbuilder.stack(), ctx).boxed_local().await;
|
||||||
ExprKind::Lambda(lbuilder.collect(), body)
|
ExprKind::Lambda(lbuilder.collect(), body)
|
||||||
},
|
},
|
||||||
api::ExpressionKind::NewAtom(a) =>
|
api::ExpressionKind::NewAtom(a) =>
|
||||||
@@ -88,13 +94,14 @@ impl Expr {
|
|||||||
api::ExpressionKind::Seq(a, b) => {
|
api::ExpressionKind::Seq(a, b) => {
|
||||||
let (apsb, bpsb) = psb.split();
|
let (apsb, bpsb) = psb.split();
|
||||||
ExprKind::Seq(
|
ExprKind::Seq(
|
||||||
Expr::from_api(&a, apsb, ctx).boxed_local().await,
|
Expr::from_api(a, apsb, ctx).boxed_local().await,
|
||||||
Expr::from_api(&b, bpsb, ctx).boxed_local().await,
|
Expr::from_api(b, bpsb, ctx).boxed_local().await,
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
Self(Rc::new(ExprData { pos, kind: RwLock::new(kind) }))
|
Self(Rc::new(ExprData { pos, kind: RwLock::new(kind) }))
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub async fn to_api(&self) -> api::InspectedKind {
|
pub async fn to_api(&self) -> api::InspectedKind {
|
||||||
use api::InspectedKind as K;
|
use api::InspectedKind as K;
|
||||||
match &*self.0.kind.read().await {
|
match &*self.0.kind.read().await {
|
||||||
@@ -104,6 +111,7 @@ impl Expr {
|
|||||||
_ => K::Opaque,
|
_ => K::Opaque,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn kind(&self) -> &RwLock<ExprKind> { &self.0.kind }
|
pub fn kind(&self) -> &RwLock<ExprKind> { &self.0.kind }
|
||||||
}
|
}
|
||||||
impl Format for Expr {
|
impl Format for Expr {
|
||||||
@@ -139,6 +147,7 @@ pub enum ExprKind {
|
|||||||
Missing,
|
Missing,
|
||||||
}
|
}
|
||||||
impl ExprKind {
|
impl ExprKind {
|
||||||
|
#[must_use]
|
||||||
pub fn at(self, pos: Pos) -> Expr { Expr(Rc::new(ExprData { pos, kind: RwLock::new(self) })) }
|
pub fn at(self, pos: Pos) -> Expr { Expr(Rc::new(ExprData { pos, kind: RwLock::new(self) })) }
|
||||||
}
|
}
|
||||||
impl Format for ExprKind {
|
impl Format for ExprKind {
|
||||||
@@ -198,13 +207,16 @@ pub enum PathSetFrame<'a, T: PartialEq> {
|
|||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct PathSetBuilder<'a, T: PartialEq>(Substack<'a, PathSetFrame<'a, T>>);
|
pub struct PathSetBuilder<'a, T: PartialEq>(Substack<'a, PathSetFrame<'a, T>>);
|
||||||
impl<'a, T: PartialEq> PathSetBuilder<'a, T> {
|
impl<'a, T: PartialEq> PathSetBuilder<'a, T> {
|
||||||
|
#[must_use]
|
||||||
pub fn new() -> Self { Self(Substack::Bottom) }
|
pub fn new() -> Self { Self(Substack::Bottom) }
|
||||||
|
#[must_use]
|
||||||
pub fn split(&'a self) -> (Self, Self) {
|
pub fn split(&'a self) -> (Self, Self) {
|
||||||
(
|
(
|
||||||
Self(self.0.push(PathSetFrame::Step(Step::Left))),
|
Self(self.0.push(PathSetFrame::Step(Step::Left))),
|
||||||
Self(self.0.push(PathSetFrame::Step(Step::Right))),
|
Self(self.0.push(PathSetFrame::Step(Step::Right))),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn lambda<'b>(self, arg: &'b T) -> LambdaBuilder<'b, T>
|
pub fn lambda<'b>(self, arg: &'b T) -> LambdaBuilder<'b, T>
|
||||||
where 'a: 'b {
|
where 'a: 'b {
|
||||||
LambdaBuilder { arg, path: RefCell::default(), stack: self }
|
LambdaBuilder { arg, path: RefCell::default(), stack: self }
|
||||||
@@ -264,15 +276,21 @@ impl<'a, T: PartialEq> PathSetBuilder<'a, T> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<'a, T: PartialEq> Default for PathSetBuilder<'a, T> {
|
||||||
|
fn default() -> Self { Self::new() }
|
||||||
|
}
|
||||||
|
|
||||||
pub struct LambdaBuilder<'a, T: PartialEq> {
|
pub struct LambdaBuilder<'a, T: PartialEq> {
|
||||||
arg: &'a T,
|
arg: &'a T,
|
||||||
path: RefCell<Option<PathSet>>,
|
path: RefCell<Option<PathSet>>,
|
||||||
stack: PathSetBuilder<'a, T>,
|
stack: PathSetBuilder<'a, T>,
|
||||||
}
|
}
|
||||||
impl<'a, T: PartialEq> LambdaBuilder<'a, T> {
|
impl<'a, T: PartialEq> LambdaBuilder<'a, T> {
|
||||||
|
#[must_use]
|
||||||
pub fn stack(&'a self) -> PathSetBuilder<'a, T> {
|
pub fn stack(&'a self) -> PathSetBuilder<'a, T> {
|
||||||
PathSetBuilder(self.stack.0.push(PathSetFrame::Lambda(self.arg, &self.path)))
|
PathSetBuilder(self.stack.0.push(PathSetFrame::Lambda(self.arg, &self.path)))
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn collect(self) -> Option<PathSet> { self.path.into_inner() }
|
pub fn collect(self) -> Option<PathSet> { self.path.into_inner() }
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -285,6 +303,7 @@ pub struct PathSet {
|
|||||||
pub next: Option<(Box<PathSet>, Box<PathSet>)>,
|
pub next: Option<(Box<PathSet>, Box<PathSet>)>,
|
||||||
}
|
}
|
||||||
impl PathSet {
|
impl PathSet {
|
||||||
|
#[must_use]
|
||||||
pub fn next(&self) -> Option<(&PathSet, &PathSet)> {
|
pub fn next(&self) -> Option<(&PathSet, &PathSet)> {
|
||||||
self.next.as_ref().map(|(l, r)| (&**l, &**r))
|
self.next.as_ref().map(|(l, r)| (&**l, &**r))
|
||||||
}
|
}
|
||||||
@@ -305,6 +324,7 @@ impl fmt::Display for PathSet {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
pub fn bot_expr(err: impl Into<OrcErrv>) -> Expr {
|
pub fn bot_expr(err: impl Into<OrcErrv>) -> Expr {
|
||||||
let errv: OrcErrv = err.into();
|
let errv: OrcErrv = err.into();
|
||||||
let pos = errv.pos_iter().next().map_or(Pos::None, |ep| ep.position.clone());
|
let pos = errv.pos_iter().next().map_or(Pos::None, |ep| ep.position.clone());
|
||||||
@@ -313,5 +333,44 @@ pub fn bot_expr(err: impl Into<OrcErrv>) -> Expr {
|
|||||||
|
|
||||||
pub struct WeakExpr(Weak<ExprData>);
|
pub struct WeakExpr(Weak<ExprData>);
|
||||||
impl WeakExpr {
|
impl WeakExpr {
|
||||||
|
#[must_use]
|
||||||
pub fn upgrade(&self) -> Option<Expr> { self.0.upgrade().map(Expr) }
|
pub fn upgrade(&self) -> Option<Expr> { self.0.upgrade().map(Expr) }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl TokenVariant<api::ExprTicket> for Expr {
|
||||||
|
type FromApiCtx<'a> = ExprStore;
|
||||||
|
async fn from_api(
|
||||||
|
api: &api::ExprTicket,
|
||||||
|
ctx: &mut Self::FromApiCtx<'_>,
|
||||||
|
_: SrcRange,
|
||||||
|
_: &Interner,
|
||||||
|
) -> Self {
|
||||||
|
ctx.get_expr(*api).expect("Invalid ticket")
|
||||||
|
}
|
||||||
|
type ToApiCtx<'a> = ExprStore;
|
||||||
|
async fn into_api(self, ctx: &mut Self::ToApiCtx<'_>) -> api::ExprTicket {
|
||||||
|
let id = self.id();
|
||||||
|
ctx.give_expr(self);
|
||||||
|
id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Acknowledgment that expr serialization is impossible and thus will panic.
|
||||||
|
#[derive(Debug, Clone, Copy, Default)]
|
||||||
|
pub struct ExprWillPanic;
|
||||||
|
|
||||||
|
impl TokenVariant<api::Expression> for Expr {
|
||||||
|
type FromApiCtx<'a> = ExprParseCtx<'a>;
|
||||||
|
async fn from_api(
|
||||||
|
api: &api::Expression,
|
||||||
|
ctx: &mut Self::FromApiCtx<'_>,
|
||||||
|
_: SrcRange,
|
||||||
|
_: &Interner,
|
||||||
|
) -> Self {
|
||||||
|
Self::from_api(api, PathSetBuilder::new(), ctx).await
|
||||||
|
}
|
||||||
|
type ToApiCtx<'a> = ExprWillPanic;
|
||||||
|
async fn into_api(self, ExprWillPanic: &mut Self::ToApiCtx<'_>) -> api::Expression {
|
||||||
|
panic!("Cannot serialize expr!")
|
||||||
|
}
|
||||||
|
}
|
||||||
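A usage note rather than part of the diff: ExprWillPanic is only sound once every NewExpr token has been rewritten to a Handle, which is what LexCtx::ser_subtree does later in this commit; tree_without_new_expr and exprs stand for that rewritten tree and the shared ExprStore.

// Safe because the Expression-direction into_api can no longer be reached.
let api_tree = tree_without_new_expr.into_api(&mut exprs, &mut ExprWillPanic).await;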
|
|||||||
@@ -16,6 +16,7 @@ pub struct ExprStoreData {
|
|||||||
#[derive(Clone, Default)]
|
#[derive(Clone, Default)]
|
||||||
pub struct ExprStore(Rc<ExprStoreData>);
|
pub struct ExprStore(Rc<ExprStoreData>);
|
||||||
impl ExprStore {
|
impl ExprStore {
|
||||||
|
#[must_use]
|
||||||
pub fn derive(&self) -> Self {
|
pub fn derive(&self) -> Self {
|
||||||
Self(Rc::new(ExprStoreData { exprs: RefCell::default(), parent: Some(self.clone()) }))
|
Self(Rc::new(ExprStoreData { exprs: RefCell::default(), parent: Some(self.clone()) }))
|
||||||
}
|
}
|
||||||
@@ -31,6 +32,7 @@ impl ExprStore {
|
|||||||
(self.0.exprs.borrow_mut().entry(ticket))
|
(self.0.exprs.borrow_mut().entry(ticket))
|
||||||
.and_replace_entry_with(|_, (rc, rt)| (1 < rc).then_some((rc - 1, rt)));
|
.and_replace_entry_with(|_, (rc, rt)| (1 < rc).then_some((rc - 1, rt)));
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn get_expr(&self, ticket: api::ExprTicket) -> Option<Expr> {
|
pub fn get_expr(&self, ticket: api::ExprTicket) -> Option<Expr> {
|
||||||
(self.0.exprs.borrow().get(&ticket).map(|(_, expr)| expr.clone()))
|
(self.0.exprs.borrow().get(&ticket).map(|(_, expr)| expr.clone()))
|
||||||
.or_else(|| self.0.parent.as_ref()?.get_expr(ticket))
|
.or_else(|| self.0.parent.as_ref()?.get_expr(ticket))
|
||||||
|
|||||||
@@ -13,7 +13,6 @@ use futures::future::{join, join_all};
|
|||||||
use futures::{StreamExt, stream};
|
use futures::{StreamExt, stream};
|
||||||
use hashbrown::HashMap;
|
use hashbrown::HashMap;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use orchid_api::{HostMsgSet, LsModule};
|
|
||||||
use orchid_api_traits::Request;
|
use orchid_api_traits::Request;
|
||||||
use orchid_base::builtin::ExtInit;
|
use orchid_base::builtin::ExtInit;
|
||||||
use orchid_base::clone;
|
use orchid_base::clone;
|
||||||
@@ -26,8 +25,10 @@ use orchid_base::reqnot::{DynRequester, ReqNot, Requester as _};
|
|||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::atom::AtomHand;
|
use crate::atom::AtomHand;
|
||||||
use crate::ctx::Ctx;
|
use crate::ctx::Ctx;
|
||||||
|
use crate::dealias::{ChildError, ChildErrorKind, walk};
|
||||||
use crate::expr_store::ExprStore;
|
use crate::expr_store::ExprStore;
|
||||||
use crate::system::SystemCtor;
|
use crate::system::SystemCtor;
|
||||||
|
use crate::tree::MemberKind;
|
||||||
|
|
||||||
pub struct ReqPair<R: Request>(R, Sender<R::Response>);
|
pub struct ReqPair<R: Request>(R, Sender<R::Response>);
|
||||||
|
|
||||||
@@ -38,7 +39,6 @@ pub struct ReqPair<R: Request>(R, Sender<R::Response>);
|
|||||||
#[derive(destructure)]
|
#[derive(destructure)]
|
||||||
pub struct ExtensionData {
|
pub struct ExtensionData {
|
||||||
ctx: Ctx,
|
ctx: Ctx,
|
||||||
init: Rc<ExtInit>,
|
|
||||||
reqnot: ReqNot<api::HostMsgSet>,
|
reqnot: ReqNot<api::HostMsgSet>,
|
||||||
systems: Vec<SystemCtor>,
|
systems: Vec<SystemCtor>,
|
||||||
logger: Logger,
|
logger: Logger,
|
||||||
@@ -85,7 +85,6 @@ impl Extension {
|
|||||||
.map(|decl| SystemCtor { decl, ext: WeakExtension(weak.clone()) })
|
.map(|decl| SystemCtor { decl, ext: WeakExtension(weak.clone()) })
|
||||||
.collect(),
|
.collect(),
|
||||||
logger: logger.clone(),
|
logger: logger.clone(),
|
||||||
init: init.clone(),
|
|
||||||
next_pars: RefCell::new(NonZeroU64::new(1).unwrap()),
|
next_pars: RefCell::new(NonZeroU64::new(1).unwrap()),
|
||||||
lex_recur: Mutex::default(),
|
lex_recur: Mutex::default(),
|
||||||
reqnot: ReqNot::new(
|
reqnot: ReqNot::new(
|
||||||
@@ -168,8 +167,52 @@ impl Extension {
|
|||||||
})
|
})
|
||||||
.await
|
.await
|
||||||
},
|
},
|
||||||
api::ExtHostReq::LsModule(ref ls @ LsModule(ref sys, ref path)) => {
|
api::ExtHostReq::LsModule(ref ls @ api::LsModule(_sys, path)) => {
|
||||||
todo!() // TODO
|
let reply: <api::LsModule as Request>::Response = 'reply: {
|
||||||
|
let path = i.ex(path).await;
|
||||||
|
let root = (ctx.root.read().await.upgrade())
|
||||||
|
.expect("LSModule called when root isn't in context");
|
||||||
|
let root_data = &mut *root.0.write().await;
|
||||||
|
let mut walk_ctx = (ctx.clone(), &mut root_data.consts);
|
||||||
|
let module =
|
||||||
|
match walk(&root_data.root, false, path.iter().cloned(), &mut walk_ctx)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(module) => module,
|
||||||
|
Err(ChildError { kind, .. }) =>
|
||||||
|
break 'reply Err(match kind {
|
||||||
|
ChildErrorKind::Private => panic!("Access checking was disabled"),
|
||||||
|
ChildErrorKind::Constant => api::LsModuleError::IsConstant,
|
||||||
|
ChildErrorKind::Missing => api::LsModuleError::InvalidPath,
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
let mut members = std::collections::HashMap::new();
|
||||||
|
for (k, v) in &module.members {
|
||||||
|
let kind = match v.kind(ctx.clone(), &mut root_data.consts).await {
|
||||||
|
MemberKind::Const => api::MemberInfoKind::Constant,
|
||||||
|
MemberKind::Module(_) => api::MemberInfoKind::Module,
|
||||||
|
};
|
||||||
|
members.insert(k.to_api(), api::MemberInfo { public: v.public, kind });
|
||||||
|
}
|
||||||
|
Ok(api::ModuleInfo { members })
|
||||||
|
};
|
||||||
|
hand.handle(ls, &reply).await
|
||||||
|
},
|
||||||
|
api::ExtHostReq::ResolveNames(ref rn) => {
|
||||||
|
let api::ResolveNames { constid, names, sys } = rn;
|
||||||
|
let mut resolver = {
|
||||||
|
let systems = ctx.systems.read().await;
|
||||||
|
let weak_sys = systems.get(sys).expect("ResolveNames for invalid sys");
|
||||||
|
let sys = weak_sys.upgrade().expect("ResolveNames after sys drop");
|
||||||
|
sys.name_resolver(*constid).await
|
||||||
|
};
|
||||||
|
let mut responses = vec![const { None }; names.len()];
|
||||||
|
for (i, name) in names.iter().enumerate() {
|
||||||
|
if let Some(abs) = resolver(&ctx.i.ex(*name).await[..]).await {
|
||||||
|
responses[i] = Some(abs.to_sym(&ctx.i).await.to_api())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
hand.handle(rn, &responses).await
|
||||||
},
|
},
|
||||||
api::ExtHostReq::ExtAtomPrint(ref eap @ api::ExtAtomPrint(ref atom)) => {
|
api::ExtHostReq::ExtAtomPrint(ref eap @ api::ExtAtomPrint(ref atom)) => {
|
||||||
let atom = AtomHand::new(atom.clone(), &ctx).await;
|
let atom = AtomHand::new(atom.clone(), &ctx).await;
|
||||||
@@ -184,11 +227,16 @@ impl Extension {
|
|||||||
}
|
}
|
||||||
})))
|
})))
|
||||||
}
|
}
|
||||||
pub(crate) fn reqnot(&self) -> &ReqNot<HostMsgSet> { &self.0.reqnot }
|
#[must_use]
|
||||||
|
pub(crate) fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { &self.0.reqnot }
|
||||||
|
#[must_use]
|
||||||
pub fn ctx(&self) -> &Ctx { &self.0.ctx }
|
pub fn ctx(&self) -> &Ctx { &self.0.ctx }
|
||||||
|
#[must_use]
|
||||||
pub fn logger(&self) -> &Logger { &self.0.logger }
|
pub fn logger(&self) -> &Logger { &self.0.logger }
|
||||||
pub fn system_ctors(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() }
|
pub fn system_ctors(&self) -> impl Iterator<Item = &SystemCtor> { self.0.systems.iter() }
|
||||||
|
#[must_use]
|
||||||
pub fn exprs(&self) -> &ExprStore { &self.0.exprs }
|
pub fn exprs(&self) -> &ExprStore { &self.0.exprs }
|
||||||
|
#[must_use]
|
||||||
pub async fn is_own_sys(&self, id: api::SysId) -> bool {
|
pub async fn is_own_sys(&self, id: api::SysId) -> bool {
|
||||||
let sys = self.ctx().system_inst(id).await.expect("invalid sender sys id");
|
let sys = self.ctx().system_inst(id).await.expect("invalid sender sys id");
|
||||||
Rc::ptr_eq(&self.0, &sys.ext().0)
|
Rc::ptr_eq(&self.0, &sys.ext().0)
|
||||||
@@ -196,6 +244,7 @@ impl Extension {
|
|||||||
pub async fn assert_own_sys(&self, id: api::SysId) {
|
pub async fn assert_own_sys(&self, id: api::SysId) {
|
||||||
assert!(self.is_own_sys(id).await, "Incoming message impersonates separate system");
|
assert!(self.is_own_sys(id).await, "Incoming message impersonates separate system");
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn next_pars(&self) -> NonZeroU64 {
|
pub fn next_pars(&self) -> NonZeroU64 {
|
||||||
let mut next_pars = self.0.next_pars.borrow_mut();
|
let mut next_pars = self.0.next_pars.borrow_mut();
|
||||||
*next_pars = next_pars.checked_add(1).unwrap_or(NonZeroU64::new(1).unwrap());
|
*next_pars = next_pars.checked_add(1).unwrap_or(NonZeroU64::new(1).unwrap());
|
||||||
@@ -240,10 +289,12 @@ impl Extension {
|
|||||||
rc.ctx().systems.write().await.remove(&id);
|
rc.ctx().systems.write().await.remove(&id);
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn downgrade(&self) -> WeakExtension { WeakExtension(Rc::downgrade(&self.0)) }
|
pub fn downgrade(&self) -> WeakExtension { WeakExtension(Rc::downgrade(&self.0)) }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct WeakExtension(Weak<ExtensionData>);
|
pub struct WeakExtension(Weak<ExtensionData>);
|
||||||
impl WeakExtension {
|
impl WeakExtension {
|
||||||
|
#[must_use]
|
||||||
pub fn upgrade(&self) -> Option<Extension> { self.0.upgrade().map(Extension) }
|
pub fn upgrade(&self) -> Option<Extension> { self.0.upgrade().map(Extension) }
|
||||||
}
|
}
|
||||||
|
|||||||
93
orchid-host/src/fs.rs
Normal file
93
orchid-host/src/fs.rs
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
use std::rc::Rc;
|
||||||
|
|
||||||
|
use async_std::path::PathBuf;
|
||||||
|
use async_stream::stream;
|
||||||
|
use futures::{FutureExt, StreamExt};
|
||||||
|
use hashbrown::HashMap;
|
||||||
|
use orchid_base::interner::Tok;
|
||||||
|
use orchid_base::name::Sym;
|
||||||
|
use orchid_base::pure_seq::pushed;
|
||||||
|
|
||||||
|
use crate::api;
|
||||||
|
use crate::system::System;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct DeclMod(Rc<HashMap<Tok<String>, DeclFS>>);
|
||||||
|
impl DeclMod {
|
||||||
|
pub async fn from_api(
|
||||||
|
api: &std::collections::HashMap<api::TStr, api::EagerVfs>,
|
||||||
|
owner: System,
|
||||||
|
) -> Self {
|
||||||
|
let item_stream = stream! {
|
||||||
|
for (key, value) in api {
|
||||||
|
yield (
|
||||||
|
owner.i().ex(*key).await,
|
||||||
|
DeclFS::from_api(value, owner.clone()).boxed_local().await
|
||||||
|
)
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Self(Rc::new(item_stream.collect().await))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub enum DeclFS {
|
||||||
|
Lazy(System, api::VfsId),
|
||||||
|
Eager(DeclMod),
|
||||||
|
Path(PathBuf),
|
||||||
|
}
|
||||||
|
impl DeclFS {
|
||||||
|
pub async fn from_api(api: &api::EagerVfs, owner: System) -> Self {
|
||||||
|
match api {
|
||||||
|
api::EagerVfs::Eager(items) => Self::Eager(DeclMod::from_api(items, owner.clone()).await),
|
||||||
|
api::EagerVfs::Lazy(id) => Self::Lazy(owner.clone(), *id),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn merge(&self, other: &Self) -> Result<Self, Vec<Tok<String>>> {
|
||||||
|
let (Self::Eager(m1), Self::Eager(m2)) = (self, other) else { return Err(Vec::new()) };
|
||||||
|
let mut mix = m1.0.iter().map(|(k, v)| (k.clone(), v.clone())).collect::<HashMap<_, _>>();
|
||||||
|
for (key, value) in m2.0.iter() {
|
||||||
|
match mix.entry(key.clone()) {
|
||||||
|
hashbrown::hash_map::Entry::Vacant(ent) => {
|
||||||
|
ent.insert(value.clone());
|
||||||
|
},
|
||||||
|
hashbrown::hash_map::Entry::Occupied(mut ent) => match ent.get().merge(value) {
|
||||||
|
Err(e) => return Err(pushed(e, key.clone())),
|
||||||
|
Ok(new) => {
|
||||||
|
ent.insert(new);
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(Self::Eager(DeclMod(Rc::new(mix))))
|
||||||
|
}
|
||||||
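A note on the error shape, not present in the commit: merge reports the path to the first non-mergeable entry innermost-first, and gather_fs below reverses it when building the Sym.

// Hypothetical conflict: both trees declare std::number as a Lazy (or Path) node.
// m1.merge(&m2) recurses into "std", fails on "number", and pushed() yields
//   Err(["number", "std"])            // innermost key first
// gather_fs then reverses this into the symbol std::number for the error report.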
|
/// Walk through the eager fs. Returns when the path ends, when it hits anything other
|
||||||
|
/// than an eager node, or when the path is invalid.
|
||||||
|
pub fn walk<'a, 'b>(&'a self, path: &'b [Tok<String>]) -> (&'a DeclFS, &'b [Tok<String>]) {
|
||||||
|
let mut cur = self;
|
||||||
|
for (i, step) in path.iter().enumerate() {
|
||||||
|
match cur {
|
||||||
|
fs @ (DeclFS::Path(_) | DeclFS::Lazy(..)) => return (fs, &path[i..]),
|
||||||
|
fs @ DeclFS::Eager(m) => match m.0.get(step) {
|
||||||
|
None => return (fs, &path[i..]),
|
||||||
|
Some(next) => cur = next,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(cur, &[])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn load_code(fs: &DeclFS, systems: &[System], path: &[Tok<String>]) -> OrcRes<Parsed
|
||||||
|
|
||||||
|
pub async fn gather_fs(systems: &[System]) -> Result<DeclFS, Sym> {
|
||||||
|
let (head, tail) = systems.split_first().expect("Empty system list");
|
||||||
|
let mut vfs = head.vfs().await;
|
||||||
|
for sys in tail {
|
||||||
|
match vfs.merge(&sys.vfs().await) {
|
||||||
|
Err(e) => return Err(Sym::new(e.iter().rev().cloned(), head.i()).await.unwrap()),
|
||||||
|
Ok(next) => vfs = next,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(vfs)
|
||||||
|
}
|
||||||
@@ -12,7 +12,7 @@ use orchid_base::tree::recur;
|
|||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::ctx::Ctx;
|
use crate::ctx::Ctx;
|
||||||
use crate::expr::{Expr, ExprParseCtx};
|
use crate::expr::{Expr, ExprParseCtx, ExprWillPanic};
|
||||||
use crate::parsed::{ParsTok, ParsTokTree};
|
use crate::parsed::{ParsTok, ParsTokTree};
|
||||||
use crate::system::System;
|
use crate::system::System;
|
||||||
|
|
||||||
@@ -25,6 +25,7 @@ pub struct LexCtx<'a> {
|
|||||||
pub ctx: &'a Ctx,
|
pub ctx: &'a Ctx,
|
||||||
}
|
}
|
||||||
impl<'a> LexCtx<'a> {
|
impl<'a> LexCtx<'a> {
|
||||||
|
#[must_use]
|
||||||
pub fn push<'b>(&'b mut self, pos: u32) -> LexCtx<'b>
|
pub fn push<'b>(&'b mut self, pos: u32) -> LexCtx<'b>
|
||||||
where 'a: 'b {
|
where 'a: 'b {
|
||||||
LexCtx {
|
LexCtx {
|
||||||
@@ -36,11 +37,14 @@ impl<'a> LexCtx<'a> {
|
|||||||
ctx: self.ctx,
|
ctx: self.ctx,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn get_pos(&self) -> u32 { self.end_pos() - self.tail.len() as u32 }
|
pub fn get_pos(&self) -> u32 { self.end_pos() - self.tail.len() as u32 }
|
||||||
|
#[must_use]
|
||||||
pub fn end_pos(&self) -> u32 { self.source.len() as u32 }
|
pub fn end_pos(&self) -> u32 { self.source.len() as u32 }
|
||||||
pub fn set_pos(&mut self, pos: u32) { self.tail = &self.source[pos as usize..] }
|
pub fn set_pos(&mut self, pos: u32) { self.tail = &self.source[pos as usize..] }
|
||||||
pub fn push_pos(&mut self, delta: u32) { self.set_pos(self.get_pos() + delta) }
|
pub fn push_pos(&mut self, delta: u32) { self.set_pos(self.get_pos() + delta) }
|
||||||
pub fn set_tail(&mut self, tail: &'a str) { self.tail = tail }
|
pub fn set_tail(&mut self, tail: &'a str) { self.tail = tail }
|
||||||
|
#[must_use]
|
||||||
pub fn strip_prefix(&mut self, tgt: &str) -> bool {
|
pub fn strip_prefix(&mut self, tgt: &str) -> bool {
|
||||||
if let Some(src) = self.tail.strip_prefix(tgt) {
|
if let Some(src) = self.tail.strip_prefix(tgt) {
|
||||||
self.tail = src;
|
self.tail = src;
|
||||||
@@ -48,26 +52,29 @@ impl<'a> LexCtx<'a> {
|
|||||||
}
|
}
|
||||||
false
|
false
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub async fn ser_subtree(&mut self, subtree: ParsTokTree) -> api::TokenTree {
|
pub async fn ser_subtree(&mut self, subtree: ParsTokTree) -> api::TokenTree {
|
||||||
let mut exprs = self.ctx.common_exprs.clone();
|
let mut exprs = self.ctx.common_exprs.clone();
|
||||||
let foo = recur(subtree, &|tt, r| {
|
let without_new_expr = recur(subtree, &|tt, r| {
|
||||||
if let ParsTok::NewExpr(expr) = tt.tok {
|
if let ParsTok::NewExpr(expr) = tt.tok {
|
||||||
return ParsTok::Handle(expr).at(tt.sr);
|
return ParsTok::Handle(expr).at(tt.sr);
|
||||||
}
|
}
|
||||||
r(tt)
|
r(tt)
|
||||||
});
|
});
|
||||||
foo.into_api(&mut exprs, &mut ()).await
|
without_new_expr.into_api(&mut exprs, &mut ExprWillPanic).await
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub async fn des_subtree(&mut self, tree: &api::TokenTree) -> ParsTokTree {
|
pub async fn des_subtree(&mut self, tree: &api::TokenTree) -> ParsTokTree {
|
||||||
ParsTokTree::from_api(
|
ParsTokTree::from_api(
|
||||||
&tree,
|
tree,
|
||||||
&mut self.ctx.common_exprs.clone(),
|
&mut self.ctx.common_exprs.clone(),
|
||||||
&mut ExprParseCtx { ctx: self.ctx.clone(), exprs: self.ctx.common_exprs.clone() },
|
&mut ExprParseCtx { ctx: self.ctx, exprs: &self.ctx.common_exprs },
|
||||||
self.path,
|
self.path,
|
||||||
&self.ctx.i,
|
&self.ctx.i,
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn strip_char(&mut self, tgt: char) -> bool {
|
pub fn strip_char(&mut self, tgt: char) -> bool {
|
||||||
if let Some(src) = self.tail.strip_prefix(tgt) {
|
if let Some(src) = self.tail.strip_prefix(tgt) {
|
||||||
self.tail = src;
|
self.tail = src;
|
||||||
@@ -79,6 +86,7 @@ impl<'a> LexCtx<'a> {
|
|||||||
self.tail = self.tail.trim_start_matches(filter);
|
self.tail = self.tail.trim_start_matches(filter);
|
||||||
}
|
}
|
||||||
pub fn trim_ws(&mut self) { self.trim(|c| c.is_whitespace() && !"\r\n".contains(c)) }
|
pub fn trim_ws(&mut self) { self.trim(|c| c.is_whitespace() && !"\r\n".contains(c)) }
|
||||||
|
#[must_use]
|
||||||
pub fn get_start_matches(&mut self, filter: impl Fn(char) -> bool) -> &'a str {
|
pub fn get_start_matches(&mut self, filter: impl Fn(char) -> bool) -> &'a str {
|
||||||
let rest = self.tail.trim_start_matches(filter);
|
let rest = self.tail.trim_start_matches(filter);
|
||||||
let matches = &self.tail[..self.tail.len() - rest.len()];
|
let matches = &self.tail[..self.tail.len() - rest.len()];
|
||||||
@@ -100,6 +108,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
|||||||
.and_then(|t| t.trim_start_matches(name_char).strip_prefix("::"))
|
.and_then(|t| t.trim_start_matches(name_char).strip_prefix("::"))
|
||||||
{
|
{
|
||||||
let name = &ctx.tail[..ctx.tail.len() - tail.len() - "::".len()];
|
let name = &ctx.tail[..ctx.tail.len() - tail.len() - "::".len()];
|
||||||
|
ctx.set_tail(tail);
|
||||||
let body = lex_once(ctx).boxed_local().await?;
|
let body = lex_once(ctx).boxed_local().await?;
|
||||||
ParsTok::NS(ctx.ctx.i.i(name).await, Box::new(body))
|
ParsTok::NS(ctx.ctx.i.i(name).await, Box::new(body))
|
||||||
} else if ctx.strip_prefix("--[") {
|
} else if ctx.strip_prefix("--[") {
|
||||||
@@ -107,7 +116,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
|||||||
return Err(mk_errv(
|
return Err(mk_errv(
|
||||||
ctx.ctx.i.i("Unterminated block comment").await,
|
ctx.ctx.i.i("Unterminated block comment").await,
|
||||||
"This block comment has no ending ]--",
|
"This block comment has no ending ]--",
|
||||||
[SrcRange::new(start..start + 3, ctx.path).pos().into()],
|
[SrcRange::new(start..start + 3, ctx.path)],
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
ctx.set_tail(tail);
|
ctx.set_tail(tail);
|
||||||
@@ -124,7 +133,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
|||||||
return Err(mk_errv(
|
return Err(mk_errv(
|
||||||
ctx.ctx.i.i("Unclosed lambda").await,
|
ctx.ctx.i.i("Unclosed lambda").await,
|
||||||
"Lambdae started with \\ should separate arguments from body with .",
|
"Lambdae started with \\ should separate arguments from body with .",
|
||||||
[SrcRange::new(start..start + 1, ctx.path).pos().into()],
|
[SrcRange::new(start..start + 1, ctx.path)],
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
arg.push(lex_once(ctx).boxed_local().await?);
|
arg.push(lex_once(ctx).boxed_local().await?);
|
||||||
@@ -139,7 +148,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
|||||||
return Err(mk_errv(
|
return Err(mk_errv(
|
||||||
ctx.ctx.i.i("unclosed paren").await,
|
ctx.ctx.i.i("unclosed paren").await,
|
||||||
format!("this {lp} has no matching {rp}"),
|
format!("this {lp} has no matching {rp}"),
|
||||||
[SrcRange::new(start..start + 1, ctx.path).pos().into()],
|
[SrcRange::new(start..start + 1, ctx.path)],
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
body.push(lex_once(ctx).boxed_local().await?);
|
body.push(lex_once(ctx).boxed_local().await?);
|
||||||
@@ -189,7 +198,7 @@ pub async fn lex_once(ctx: &mut LexCtx<'_>) -> OrcRes<ParsTokTree> {
|
|||||||
return Err(mk_errv(
|
return Err(mk_errv(
|
||||||
ctx.ctx.i.i("Unrecognized character").await,
|
ctx.ctx.i.i("Unrecognized character").await,
|
||||||
"The following syntax is meaningless.",
|
"The following syntax is meaningless.",
|
||||||
[SrcRange::new(start..start + 1, ctx.path).pos().into()],
|
[SrcRange::new(start..start + 1, ctx.path)],
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ pub mod execute;
 pub mod expr;
 pub mod expr_store;
 pub mod extension;
+pub mod fs;
 pub mod lex;
 pub mod parse;
 pub mod parsed;
|
|||||||
@@ -1,13 +1,10 @@
|
|||||||
use std::cell::RefCell;
|
|
||||||
|
|
||||||
use futures::FutureExt;
|
use futures::FutureExt;
|
||||||
use futures::future::join_all;
|
use futures::future::join_all;
|
||||||
use hashbrown::HashMap;
|
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use orchid_base::error::{OrcRes, Reporter, mk_err, mk_errv};
|
use orchid_base::error::{OrcRes, Reporter, mk_errv};
|
||||||
use orchid_base::format::fmt;
|
use orchid_base::format::fmt;
|
||||||
use orchid_base::interner::{Interner, Tok};
|
use orchid_base::interner::{Interner, Tok};
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::{Sym, VPath};
|
||||||
use orchid_base::parse::{
|
use orchid_base::parse::{
|
||||||
Comment, Import, ParseCtx, Parsed, Snippet, expect_end, line_items, parse_multiname,
|
Comment, Import, ParseCtx, Parsed, Snippet, expect_end, line_items, parse_multiname,
|
||||||
try_pop_no_fluff,
|
try_pop_no_fluff,
|
||||||
@@ -17,7 +14,7 @@ use substack::Substack;
|
|||||||
|
|
||||||
use crate::ctx::Ctx;
|
use crate::ctx::Ctx;
|
||||||
use crate::expr::{Expr, ExprKind, PathSetBuilder};
|
use crate::expr::{Expr, ExprKind, PathSetBuilder};
|
||||||
use crate::parsed::{Item, ItemKind, ParsTokTree, ParsedMember, ParsedMemberKind, ParsedModule};
|
use crate::parsed::{Item, ItemKind, ParsedMember, ParsedMemberKind, ParsedModule};
|
||||||
use crate::system::System;
|
use crate::system::System;
|
||||||
|
|
||||||
type ParsSnippet<'a> = Snippet<'a, Expr, Expr>;
|
type ParsSnippet<'a> = Snippet<'a, Expr, Expr>;
|
||||||
@@ -27,28 +24,23 @@ pub struct HostParseCtxImpl<'a> {
|
|||||||
pub src: Sym,
|
pub src: Sym,
|
||||||
pub systems: &'a [System],
|
pub systems: &'a [System],
|
||||||
pub reporter: &'a Reporter,
|
pub reporter: &'a Reporter,
|
||||||
pub interner: &'a Interner,
|
|
||||||
pub consts: RefCell<HashMap<Sym, Vec<ParsTokTree>>>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ParseCtx for HostParseCtxImpl<'_> {
|
impl ParseCtx for HostParseCtxImpl<'_> {
|
||||||
fn reporter(&self) -> &Reporter { self.reporter }
|
fn reporter(&self) -> &Reporter { self.reporter }
|
||||||
fn i(&self) -> &Interner { self.interner }
|
fn i(&self) -> &Interner { &self.ctx.i }
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HostParseCtx for HostParseCtxImpl<'_> {
|
impl HostParseCtx for HostParseCtxImpl<'_> {
|
||||||
fn ctx(&self) -> &Ctx { &self.ctx }
|
fn ctx(&self) -> &Ctx { &self.ctx }
|
||||||
fn systems(&self) -> impl Iterator<Item = &System> { self.systems.iter() }
|
fn systems(&self) -> impl Iterator<Item = &System> { self.systems.iter() }
|
||||||
async fn save_const(&self, path: Substack<'_, Tok<String>>, value: Vec<ParsTokTree>) {
|
|
||||||
let name = Sym::new(path.unreverse(), self.interner).await.unwrap();
|
|
||||||
self.consts.borrow_mut().insert(name, value);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait HostParseCtx: ParseCtx {
|
pub trait HostParseCtx: ParseCtx {
|
||||||
|
#[must_use]
|
||||||
fn ctx(&self) -> &Ctx;
|
fn ctx(&self) -> &Ctx;
|
||||||
|
#[must_use]
|
||||||
fn systems(&self) -> impl Iterator<Item = &System>;
|
fn systems(&self) -> impl Iterator<Item = &System>;
|
||||||
async fn save_const(&self, path: Substack<'_, Tok<String>>, value: Vec<ParsTokTree>);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn parse_items(
|
pub async fn parse_items(
|
||||||
@@ -73,33 +65,10 @@ pub async fn parse_item(
|
|||||||
n if *n == ctx.i().i("export").await => match try_pop_no_fluff(ctx, postdisc).await? {
|
n if *n == ctx.i().i("export").await => match try_pop_no_fluff(ctx, postdisc).await? {
|
||||||
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } =>
|
Parsed { output: TokTree { tok: Token::Name(n), .. }, tail } =>
|
||||||
parse_exportable_item(ctx, path, comments, true, n.clone(), tail).await,
|
parse_exportable_item(ctx, path, comments, true, n.clone(), tail).await,
|
||||||
Parsed { output: TokTree { tok: Token::S(Paren::Round, body), .. }, tail } => {
|
|
||||||
expect_end(ctx, tail).await?;
|
|
||||||
let mut ok = Vec::new();
|
|
||||||
for tt in body {
|
|
||||||
let sr = tt.sr.clone();
|
|
||||||
match &tt.tok {
|
|
||||||
Token::Name(n) =>
|
|
||||||
ok.push(Item { comments: comments.clone(), sr, kind: ItemKind::Export(n.clone()) }),
|
|
||||||
Token::NS(..) => ctx.reporter().report(mk_err(
|
|
||||||
ctx.i().i("Compound export").await,
|
|
||||||
"Cannot export compound names (names containing the :: separator)",
|
|
||||||
[sr.pos().into()],
|
|
||||||
)),
|
|
||||||
t => ctx.reporter().report(mk_err(
|
|
||||||
ctx.i().i("Invalid export").await,
|
|
||||||
format!("Invalid export target {}", fmt(t, ctx.i()).await),
|
|
||||||
[sr.pos().into()],
|
|
||||||
)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
expect_end(ctx, tail).await?;
|
|
||||||
Ok(ok)
|
|
||||||
},
|
|
||||||
Parsed { output, tail: _ } => Err(mk_errv(
|
Parsed { output, tail: _ } => Err(mk_errv(
|
||||||
ctx.i().i("Malformed export").await,
|
ctx.i().i("Malformed export").await,
|
||||||
"`export` can either prefix other lines or list names inside ( )",
|
"`export` can either prefix other lines or list names inside ( )",
|
||||||
[output.sr.pos().into()],
|
[output.sr()],
|
||||||
)),
|
)),
|
||||||
},
|
},
|
||||||
n if *n == ctx.i().i("import").await => {
|
n if *n == ctx.i().i("import").await => {
|
||||||
@@ -115,7 +84,7 @@ pub async fn parse_item(
|
|||||||
Some(_) => Err(mk_errv(
|
Some(_) => Err(mk_errv(
|
||||||
ctx.i().i("Expected a line type").await,
|
ctx.i().i("Expected a line type").await,
|
||||||
"All lines must begin with a keyword",
|
"All lines must begin with a keyword",
|
||||||
[item.sr().pos().into()],
|
[item.sr()],
|
||||||
)),
|
)),
|
||||||
None => unreachable!("These lines are filtered and aggregated in earlier stages"),
|
None => unreachable!("These lines are filtered and aggregated in earlier stages"),
|
||||||
}
|
}
|
||||||
@@ -138,22 +107,25 @@ pub async fn parse_exportable_item<'a>(
|
|||||||
discr: Tok<String>,
|
discr: Tok<String>,
|
||||||
tail: ParsSnippet<'a>,
|
tail: ParsSnippet<'a>,
|
||||||
) -> OrcRes<Vec<Item>> {
|
) -> OrcRes<Vec<Item>> {
|
||||||
let path_sym = Sym::new(path.unreverse(), ctx.i()).await.expect("Files should have a namespace");
|
|
||||||
let kind = if discr == ctx.i().i("mod").await {
|
let kind = if discr == ctx.i().i("mod").await {
|
||||||
let (name, body) = parse_module(ctx, path, tail).await?;
|
let (name, body) = parse_module(ctx, path, tail).await?;
|
||||||
ItemKind::Member(ParsedMember { name, full_name: path_sym, kind: ParsedMemberKind::Mod(body) })
|
ItemKind::Member(ParsedMember { name, exported, kind: ParsedMemberKind::Mod(body) })
|
||||||
} else if discr == ctx.i().i("const").await {
|
} else if discr == ctx.i().i("const").await {
|
||||||
let name = parse_const(ctx, tail, path.clone()).await?;
|
let (name, expr) = parse_const(ctx, tail, path.clone()).await?;
|
||||||
ItemKind::Member(ParsedMember { name, full_name: path_sym, kind: ParsedMemberKind::Const })
|
ItemKind::Member(ParsedMember { name, exported, kind: ParsedMemberKind::ParsedConst(expr) })
|
||||||
} else if let Some(sys) = ctx.systems().find(|s| s.can_parse(discr.clone())) {
|
} else if let Some(sys) = ctx.systems().find(|s| s.can_parse(discr.clone())) {
|
||||||
let line = sys.parse(path_sym, tail.to_vec(), exported, comments).await?;
|
return sys
|
||||||
return parse_items(ctx, path, Snippet::new(tail.prev(), &line)).await;
|
.parse(path, tail.to_vec(), exported, comments, &mut async |stack, lines| {
|
||||||
|
let source = Snippet::new(lines.first().unwrap(), &lines);
|
||||||
|
parse_items(ctx, stack, source).await
|
||||||
|
})
|
||||||
|
.await;
|
||||||
} else {
|
} else {
|
||||||
let ext_lines = ctx.systems().flat_map(System::line_types).join(", ");
|
let ext_lines = ctx.systems().flat_map(System::line_types).join(", ");
|
||||||
return Err(mk_errv(
|
return Err(mk_errv(
|
||||||
ctx.i().i("Unrecognized line type").await,
|
ctx.i().i("Unrecognized line type").await,
|
||||||
format!("Line types are: const, mod, macro, grammar, {ext_lines}"),
|
format!("Line types are: const, mod, macro, grammar, {ext_lines}"),
|
||||||
[tail.prev().sr.pos().into()],
|
[tail.prev().sr()],
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
Ok(vec![Item { comments, sr: tail.sr(), kind }])
|
Ok(vec![Item { comments, sr: tail.sr(), kind }])
|
||||||
@@ -170,7 +142,7 @@ pub async fn parse_module<'a>(
|
|||||||
return Err(mk_errv(
|
return Err(mk_errv(
|
||||||
ctx.i().i("Missing module name").await,
|
ctx.i().i("Missing module name").await,
|
||||||
format!("A name was expected, {} was found", fmt(output, ctx.i()).await),
|
format!("A name was expected, {} was found", fmt(output, ctx.i()).await),
|
||||||
[output.sr.pos().into()],
|
[output.sr()],
|
||||||
));
|
));
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
@@ -180,7 +152,7 @@ pub async fn parse_module<'a>(
|
|||||||
return Err(mk_errv(
|
return Err(mk_errv(
|
||||||
ctx.i().i("Expected module body").await,
|
ctx.i().i("Expected module body").await,
|
||||||
format!("A ( block ) was expected, {} was found", fmt(output, ctx.i()).await),
|
format!("A ( block ) was expected, {} was found", fmt(output, ctx.i()).await),
|
||||||
[output.sr.pos().into()],
|
[output.sr()],
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let path = path.push(name.clone());
|
let path = path.push(name.clone());
|
||||||
@@ -191,13 +163,13 @@ pub async fn parse_const<'a>(
|
|||||||
ctx: &impl HostParseCtx,
|
ctx: &impl HostParseCtx,
|
||||||
tail: ParsSnippet<'a>,
|
tail: ParsSnippet<'a>,
|
||||||
path: Substack<'_, Tok<String>>,
|
path: Substack<'_, Tok<String>>,
|
||||||
) -> OrcRes<Tok<String>> {
|
) -> OrcRes<(Tok<String>, Expr)> {
|
||||||
let Parsed { output, tail } = try_pop_no_fluff(ctx, tail).await?;
|
let Parsed { output, tail } = try_pop_no_fluff(ctx, tail).await?;
|
||||||
let Some(name) = output.as_name() else {
|
let Some(name) = output.as_name() else {
|
||||||
return Err(mk_errv(
|
return Err(mk_errv(
|
||||||
ctx.i().i("Missing module name").await,
|
ctx.i().i("Missing module name").await,
|
||||||
format!("A name was expected, {} was found", fmt(output, ctx.i()).await),
|
format!("A name was expected, {} was found", fmt(output, ctx.i()).await),
|
||||||
[output.sr.pos().into()],
|
[output.sr()],
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let Parsed { output, tail } = try_pop_no_fluff(ctx, tail).await?;
|
let Parsed { output, tail } = try_pop_no_fluff(ctx, tail).await?;
|
||||||
@@ -205,12 +177,15 @@ pub async fn parse_const<'a>(
|
|||||||
return Err(mk_errv(
|
return Err(mk_errv(
|
||||||
ctx.i().i("Missing = separator").await,
|
ctx.i().i("Missing = separator").await,
|
||||||
format!("Expected = , found {}", fmt(output, ctx.i()).await),
|
format!("Expected = , found {}", fmt(output, ctx.i()).await),
|
||||||
[output.sr.pos().into()],
|
[output.sr()],
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
try_pop_no_fluff(ctx, tail).await?;
|
try_pop_no_fluff(ctx, tail).await?;
|
||||||
ctx.save_const(path, tail[..].to_vec()).await;
|
// ctx.save_const(path, tail[..].to_vec()).await;
|
||||||
Ok(name)
|
let final_path =
|
||||||
|
VPath::new(path.unreverse()).name_with_suffix(name.clone()).to_sym(ctx.i()).await;
|
||||||
|
let val = parse_expr(ctx, final_path, PathSetBuilder::new(), tail).await?;
|
||||||
|
Ok((name, val))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn parse_expr(
|
pub async fn parse_expr(
|
||||||
@@ -223,7 +198,7 @@ pub async fn parse_expr(
|
|||||||
.or_else(|| tail.iter().enumerate().rev().find(|(_, tt)| !tt.is_fluff()))
|
.or_else(|| tail.iter().enumerate().rev().find(|(_, tt)| !tt.is_fluff()))
|
||||||
else {
|
else {
|
||||||
return Err(mk_errv(ctx.i().i("Empty expression").await, "Expression ends abruptly here", [
|
return Err(mk_errv(ctx.i().i("Empty expression").await, "Expression ends abruptly here", [
|
||||||
tail.sr().pos().into(),
|
tail.sr(),
|
||||||
]));
|
]));
|
||||||
};
|
};
|
||||||
let (function, value) = tail.split_at(last_idx as u32);
|
let (function, value) = tail.split_at(last_idx as u32);
|
||||||
@@ -241,7 +216,7 @@ pub async fn parse_expr(
|
|||||||
Token::Handle(expr) => Ok(expr.clone()),
|
Token::Handle(expr) => Ok(expr.clone()),
|
||||||
Token::NS(n, nametail) => {
|
Token::NS(n, nametail) => {
|
||||||
let mut nametail = nametail;
|
let mut nametail = nametail;
|
||||||
let mut segments = path.iter().chain([n]).cloned().collect_vec();
|
let mut segments = vec![n.clone()];
|
||||||
while let Token::NS(n, newtail) = &nametail.tok {
|
while let Token::NS(n, newtail) = &nametail.tok {
|
||||||
segments.push(n.clone());
|
segments.push(n.clone());
|
||||||
nametail = newtail;
|
nametail = newtail;
|
||||||
@@ -250,7 +225,7 @@ pub async fn parse_expr(
|
|||||||
return Err(mk_errv(
|
return Err(mk_errv(
|
||||||
ctx.i().i("Loose namespace prefix in constant").await,
|
ctx.i().i("Loose namespace prefix in constant").await,
|
||||||
"Namespace prefixes in constants must be followed by names",
|
"Namespace prefixes in constants must be followed by names",
|
||||||
[pos.into()],
|
[pos],
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
segments.push(n.clone());
|
segments.push(n.clone());
|
||||||
@@ -261,13 +236,27 @@ pub async fn parse_expr(
|
|||||||
return Err(mk_errv(
|
return Err(mk_errv(
|
||||||
ctx.i().i("Complex lambda binding in constant").await,
|
ctx.i().i("Complex lambda binding in constant").await,
|
||||||
"Lambda args in constants must be identified by a single name",
|
"Lambda args in constants must be identified by a single name",
|
||||||
[pos.into()],
|
[pos],
|
||||||
));
|
));
|
||||||
};
|
};
|
||||||
let lambda_builder = psb.lambda(arg);
|
let lambda_builder = psb.lambda(arg);
|
||||||
let body = parse_expr(ctx, path.clone(), lambda_builder.stack(), tail).boxed_local().await?;
|
let body = parse_expr(ctx, path.clone(), lambda_builder.stack(), tail).boxed_local().await?;
|
||||||
Ok(ExprKind::Lambda(lambda_builder.collect(), body).at(pos.clone()))
|
Ok(ExprKind::Lambda(lambda_builder.collect(), body).at(pos.clone()))
|
||||||
},
|
},
|
||||||
_ => todo!("AAAAAA"), // TODO: todo
|
Token::S(Paren::Round, body) =>
|
||||||
|
parse_expr(ctx, path, psb, Snippet::new(head, body)).boxed_local().await,
|
||||||
|
Token::S(..) =>
|
||||||
|
return Err(mk_errv(
|
||||||
|
ctx.i().i("Constants may only contain (), not [] or {}").await,
|
||||||
|
"It seems like you are trying to call a macro. Consider a 'let' line",
|
||||||
|
[pos],
|
||||||
|
)),
|
||||||
|
Token::Name(n) =>
|
||||||
|
if psb.register_arg(n) {
|
||||||
|
Ok(ExprKind::Arg.at(pos))
|
||||||
|
} else {
|
||||||
|
Ok(ExprKind::Const(Sym::new([n.clone()], ctx.i()).await.unwrap()).at(pos))
|
||||||
|
},
|
||||||
|
Token::NewExpr(ex) => Ok(ex.clone()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,67 +1,25 @@
|
|||||||
use std::fmt::Debug;
|
use std::fmt::Debug;
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use async_once_cell::OnceCell;
|
use futures::FutureExt;
|
||||||
use async_std::sync::{Mutex, RwLock};
|
|
||||||
use async_stream::stream;
|
|
||||||
use futures::future::join_all;
|
use futures::future::join_all;
|
||||||
use futures::{FutureExt, StreamExt};
|
use hashbrown::HashSet;
|
||||||
use hashbrown::{HashMap, HashSet};
|
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use orchid_base::error::{OrcRes, mk_errv};
|
|
||||||
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
|
||||||
use orchid_base::interner::Tok;
|
use orchid_base::interner::Tok;
|
||||||
use orchid_base::location::{Pos, SrcRange};
|
use orchid_base::location::SrcRange;
|
||||||
use orchid_base::name::{NameLike, Sym};
|
|
||||||
use orchid_base::parse::{Comment, Import};
|
use orchid_base::parse::{Comment, Import};
|
||||||
use orchid_base::tl_cache;
|
use orchid_base::tl_cache;
|
||||||
use orchid_base::tree::{TokTree, Token, TokenVariant};
|
use orchid_base::tree::{TokTree, Token};
|
||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::ctx::Ctx;
|
|
||||||
use crate::dealias::{ChildErrorKind, ChildResult, Tree};
|
use crate::dealias::{ChildErrorKind, ChildResult, Tree};
|
||||||
use crate::expr::{Expr, ExprParseCtx, PathSetBuilder};
|
use crate::expr::Expr;
|
||||||
use crate::expr_store::ExprStore;
|
|
||||||
use crate::system::System;
|
use crate::system::System;
|
||||||
|
|
||||||
pub type ParsTokTree = TokTree<Expr, Expr>;
|
pub type ParsTokTree = TokTree<Expr, Expr>;
|
||||||
pub type ParsTok = Token<Expr, Expr>;
|
pub type ParsTok = Token<Expr, Expr>;
|
||||||
|
|
||||||
impl TokenVariant<api::ExprTicket> for Expr {
|
|
||||||
type ToApiCtx<'a> = ExprStore;
|
|
||||||
async fn into_api(self, ctx: &mut Self::ToApiCtx<'_>) -> api::ExprTicket {
|
|
||||||
ctx.give_expr(self.clone());
|
|
||||||
self.id()
|
|
||||||
}
|
|
||||||
type FromApiCtx<'a> = ExprStore;
|
|
||||||
async fn from_api(
|
|
||||||
api: &api::ExprTicket,
|
|
||||||
ctx: &mut Self::FromApiCtx<'_>,
|
|
||||||
_: SrcRange,
|
|
||||||
_: &orchid_base::interner::Interner,
|
|
||||||
) -> Self {
|
|
||||||
let expr = ctx.get_expr(*api).expect("Dangling expr");
|
|
||||||
ctx.take_expr(*api);
|
|
||||||
expr
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TokenVariant<api::Expression> for Expr {
|
|
||||||
type FromApiCtx<'a> = ExprParseCtx;
|
|
||||||
async fn from_api(
|
|
||||||
api: &api::Expression,
|
|
||||||
ctx: &mut Self::FromApiCtx<'_>,
|
|
||||||
_: SrcRange,
|
|
||||||
_: &orchid_base::interner::Interner,
|
|
||||||
) -> Self {
|
|
||||||
Expr::from_api(api, PathSetBuilder::new(), ctx).await
|
|
||||||
}
|
|
||||||
type ToApiCtx<'a> = ();
|
|
||||||
async fn into_api(self, (): &mut Self::ToApiCtx<'_>) -> api::Expression {
|
|
||||||
panic!("Failed to replace NewExpr before returning sublexer value")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct Item {
|
pub struct Item {
|
||||||
pub sr: SrcRange,
|
pub sr: SrcRange,
|
||||||
@@ -72,10 +30,10 @@ pub struct Item {
|
|||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub enum ItemKind {
|
pub enum ItemKind {
|
||||||
Member(ParsedMember),
|
Member(ParsedMember),
|
||||||
Export(Tok<String>),
|
|
||||||
Import(Import),
|
Import(Import),
|
||||||
}
|
}
|
||||||
impl ItemKind {
|
impl ItemKind {
|
||||||
|
#[must_use]
|
||||||
pub fn at(self, sr: SrcRange) -> Item { Item { comments: vec![], sr, kind: self } }
|
pub fn at(self, sr: SrcRange) -> Item { Item { comments: vec![], sr, kind: self } }
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -84,11 +42,13 @@ impl Format for Item {
|
|||||||
let comment_text = self.comments.iter().join("\n");
|
let comment_text = self.comments.iter().join("\n");
|
||||||
let item_text = match &self.kind {
|
let item_text = match &self.kind {
|
||||||
ItemKind::Import(i) => format!("import {i}").into(),
|
ItemKind::Import(i) => format!("import {i}").into(),
|
||||||
ItemKind::Export(e) => format!("export {e}").into(),
|
|
||||||
ItemKind::Member(mem) => match &mem.kind {
|
ItemKind::Member(mem) => match &mem.kind {
|
||||||
ParsedMemberKind::Const =>
|
ParsedMemberKind::ParsedConst(expr) =>
|
||||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("const {0}")))
|
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("const {0} = {1l}")))
|
||||||
.units([mem.name.rc().into()]),
|
.units([mem.name.rc().into(), expr.print(c).await]),
|
||||||
|
ParsedMemberKind::DeferredConst(_, sys) =>
|
||||||
|
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("const {0} via {1}")))
|
||||||
|
.units([mem.name.rc().into(), sys.print(c).await]),
|
||||||
ParsedMemberKind::Mod(module) =>
|
ParsedMemberKind::Mod(module) =>
|
||||||
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("module {0} {{\n\t{1}\n}}")))
|
tl_cache!(Rc<Variants>: Rc::new(Variants::default().bounded("module {0} {{\n\t{1}\n}}")))
|
||||||
.units([mem.name.rc().into(), module.print(c).boxed_local().await]),
|
.units([mem.name.rc().into(), module.print(c).boxed_local().await]),
|
||||||
@@ -101,10 +61,11 @@ impl Format for Item {
|
|||||||
|
|
||||||
pub struct ParsedMember {
|
pub struct ParsedMember {
|
||||||
pub name: Tok<String>,
|
pub name: Tok<String>,
|
||||||
pub full_name: Sym,
|
pub exported: bool,
|
||||||
pub kind: ParsedMemberKind,
|
pub kind: ParsedMemberKind,
|
||||||
}
|
}
|
||||||
impl ParsedMember {
|
impl ParsedMember {
|
||||||
|
#[must_use]
|
||||||
pub fn name(&self) -> Tok<String> { self.name.clone() }
|
pub fn name(&self) -> Tok<String> { self.name.clone() }
|
||||||
}
|
}
|
||||||
impl Debug for ParsedMember {
|
impl Debug for ParsedMember {
|
||||||
@@ -118,7 +79,8 @@ impl Debug for ParsedMember {
|
|||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub enum ParsedMemberKind {
|
pub enum ParsedMemberKind {
|
||||||
Const,
|
DeferredConst(api::ParsedConstId, System),
|
||||||
|
ParsedConst(Expr),
|
||||||
Mod(ParsedModule),
|
Mod(ParsedModule),
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -131,13 +93,13 @@ pub struct ParsedModule {
|
|||||||
pub items: Vec<Item>,
|
pub items: Vec<Item>,
|
||||||
}
|
}
|
||||||
impl ParsedModule {
|
impl ParsedModule {
|
||||||
|
#[must_use]
|
||||||
pub fn new(items: impl IntoIterator<Item = Item>) -> Self {
|
pub fn new(items: impl IntoIterator<Item = Item>) -> Self {
|
||||||
let items = items.into_iter().collect_vec();
|
let items = items.into_iter().collect_vec();
|
||||||
let exports = (items.iter())
|
let exports = (items.iter())
|
||||||
.filter_map(|i| match &i.kind {
|
.filter_map(|i| if let ItemKind::Member(m) = &i.kind { Some(m) } else { None })
|
||||||
ItemKind::Export(e) => Some(e.clone()),
|
.filter(|m| m.exported)
|
||||||
_ => None,
|
.map(|m| m.name.clone())
|
||||||
})
|
|
||||||
.collect_vec();
|
.collect_vec();
|
||||||
Self { exports, items }
|
Self { exports, items }
|
||||||
}
|
}
|
||||||
@@ -146,32 +108,34 @@ impl ParsedModule {
|
|||||||
std::mem::swap(self, &mut swap);
|
std::mem::swap(self, &mut swap);
|
||||||
*self = ParsedModule::new(swap.items.into_iter().chain(other.items))
|
*self = ParsedModule::new(swap.items.into_iter().chain(other.items))
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn get_imports(&self) -> impl IntoIterator<Item = &Import> {
|
pub fn get_imports(&self) -> impl IntoIterator<Item = &Import> {
|
||||||
(self.items.iter())
|
(self.items.iter())
|
||||||
.filter_map(|it| if let ItemKind::Import(i) = &it.kind { Some(i) } else { None })
|
.filter_map(|it| if let ItemKind::Import(i) = &it.kind { Some(i) } else { None })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl Tree for ParsedModule {
|
impl Tree for ParsedModule {
|
||||||
type Ctx = ();
|
type Ctx<'a> = ();
|
||||||
async fn child(
|
async fn child(
|
||||||
&self,
|
&self,
|
||||||
key: Tok<String>,
|
key: Tok<String>,
|
||||||
public_only: bool,
|
public_only: bool,
|
||||||
ctx: &mut Self::Ctx,
|
(): &mut Self::Ctx<'_>,
|
||||||
) -> ChildResult<'_, Self> {
|
) -> ChildResult<'_, Self> {
|
||||||
let Some(member) = (self.items.iter())
|
|
||||||
.filter_map(|it| if let ItemKind::Member(m) = &it.kind { Some(m) } else { None })
|
|
||||||
.find(|m| m.name == key)
|
|
||||||
else {
|
|
||||||
return ChildResult::Err(ChildErrorKind::Missing);
|
|
||||||
};
|
|
||||||
if public_only && !self.exports.contains(&key) {
|
if public_only && !self.exports.contains(&key) {
|
||||||
return ChildResult::Err(ChildErrorKind::Private);
|
return ChildResult::Err(ChildErrorKind::Private);
|
||||||
}
|
}
|
||||||
match &member.kind {
|
if let Some(member) = (self.items.iter())
|
||||||
ParsedMemberKind::Const => return ChildResult::Err(ChildErrorKind::Constant),
|
.filter_map(|it| if let ItemKind::Member(m) = &it.kind { Some(m) } else { None })
|
||||||
ParsedMemberKind::Mod(m) => return ChildResult::Value(m),
|
.find(|m| m.name == key)
|
||||||
|
{
|
||||||
|
match &member.kind {
|
||||||
|
ParsedMemberKind::DeferredConst(..) | ParsedMemberKind::ParsedConst(_) =>
|
||||||
|
return ChildResult::Err(ChildErrorKind::Constant),
|
||||||
|
ParsedMemberKind::Mod(m) => return ChildResult::Ok(m),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
ChildResult::Err(ChildErrorKind::Missing)
|
||||||
}
|
}
|
||||||
fn children(&self, public_only: bool) -> HashSet<Tok<String>> {
|
fn children(&self, public_only: bool) -> HashSet<Tok<String>> {
|
||||||
let mut public: HashSet<_> = self.exports.iter().cloned().collect();
|
let mut public: HashSet<_> = self.exports.iter().cloned().collect();
|
||||||
@@ -214,44 +178,6 @@ pub struct ConstPath {
|
|||||||
steps: Tok<Vec<Tok<String>>>,
|
steps: Tok<Vec<Tok<String>>>,
|
||||||
}
|
}
|
||||||
impl ConstPath {
|
impl ConstPath {
|
||||||
|
#[must_use]
|
||||||
pub fn to_const(steps: Tok<Vec<Tok<String>>>) -> Self { Self { steps } }
|
pub fn to_const(steps: Tok<Vec<Tok<String>>>) -> Self { Self { steps } }
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct Root {
|
|
||||||
tree: Rc<ParsedModule>,
|
|
||||||
consts: Rc<RwLock<HashMap<Sym, Expr>>>,
|
|
||||||
}
|
|
||||||
impl Root {
|
|
||||||
pub fn new(module: ParsedModule, consts: HashMap<Sym, Expr>) -> Self {
|
|
||||||
Self { tree: Rc::new(module), consts: Rc::new(RwLock::new(consts)) }
|
|
||||||
}
|
|
||||||
pub async fn get_const_value(&self, name: Sym, pos: Pos, ctx: Ctx) -> OrcRes<Expr> {
|
|
||||||
if let Some(val) = self.consts.read().await.get(&name) {
|
|
||||||
return Ok(val.clone());
|
|
||||||
}
|
|
||||||
let (cn, mp) = name.split_last();
|
|
||||||
let module = self.tree.walk(false, mp.iter().cloned(), &mut ()).await.unwrap();
|
|
||||||
let member = (module.items.iter())
|
|
||||||
.filter_map(|it| if let ItemKind::Member(m) = &it.kind { Some(m) } else { None })
|
|
||||||
.find(|m| m.name() == cn);
|
|
||||||
match member {
|
|
||||||
None => Err(mk_errv(
|
|
||||||
ctx.i.i("Constant does not exist").await,
|
|
||||||
format!("{name} does not refer to a constant"),
|
|
||||||
[pos.clone().into()],
|
|
||||||
)),
|
|
||||||
Some(mem) => match &mem.kind {
|
|
||||||
ParsedMemberKind::Mod(_) => Err(mk_errv(
|
|
||||||
ctx.i.i("module used as constant").await,
|
|
||||||
format!("{name} is a module, not a constant"),
|
|
||||||
[pos.clone().into()],
|
|
||||||
)),
|
|
||||||
ParsedMemberKind::Const => Ok(
|
|
||||||
(self.consts.read().await.get(&name).cloned())
|
|
||||||
.expect("Tree says the path is correct but no value was found"),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
 use std::cell::RefCell;
+use std::io::Write;
 use std::pin::Pin;
 
 use async_process::{self, Child, ChildStdin, ChildStdout};
@@ -43,17 +44,19 @@ pub async fn ext_command(
     }
   }));
   Ok(ExtInit {
-    header,
     port: Box::new(Subprocess {
+      name: header.name.clone(),
       child: RefCell::new(Some(child)),
       stdin: Some(Mutex::new(Box::pin(stdin))),
       stdout: Mutex::new(Box::pin(stdout)),
       ctx,
     }),
+    header,
   })
 }
 
 pub struct Subprocess {
+  name: String,
   child: RefCell<Option<Child>>,
   stdin: Option<Mutex<Pin<Box<ChildStdin>>>>,
   stdout: Mutex<Pin<Box<ChildStdout>>>,
@@ -62,11 +65,20 @@ pub struct Subprocess {
 impl Drop for Subprocess {
   fn drop(&mut self) {
     let mut child = self.child.borrow_mut().take().unwrap();
+    let name = self.name.clone();
+    if std::thread::panicking() {
+      eprintln!("Killing extension {name}");
+      // we don't really care to handle errors here
+      let _: Result<_, _> = std::io::stderr().flush();
+      let _: Result<_, _> = child.kill();
+      return;
+    }
     let stdin = self.stdin.take().unwrap();
     (self.ctx.spawn)(Box::pin(async move {
       stdin.lock().await.close().await.unwrap();
-      let status = child.status().await.expect("Extension exited with error");
-      assert!(status.success(), "Extension exited with error {status}");
+      let status = (child.status().await)
+        .unwrap_or_else(|e| panic!("{e}, extension {name} exited with error"));
+      assert!(status.success(), "Extension {name} exited with error {status}");
     }))
   }
 }
|
|||||||
@@ -3,19 +3,18 @@ use std::fmt;
|
|||||||
use std::future::Future;
|
use std::future::Future;
|
||||||
use std::rc::{Rc, Weak};
|
use std::rc::{Rc, Weak};
|
||||||
|
|
||||||
use async_stream::stream;
|
|
||||||
use derive_destructure::destructure;
|
use derive_destructure::destructure;
|
||||||
use futures::StreamExt;
|
use futures::FutureExt;
|
||||||
use futures::future::join_all;
|
use futures::future::join_all;
|
||||||
use hashbrown::HashMap;
|
use hashbrown::HashMap;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
|
use memo_map::MemoMap;
|
||||||
use orchid_base::char_filter::char_filter_match;
|
use orchid_base::char_filter::char_filter_match;
|
||||||
use orchid_base::clone;
|
|
||||||
use orchid_base::error::{OrcErrv, OrcRes};
|
use orchid_base::error::{OrcErrv, OrcRes};
|
||||||
use orchid_base::format::{FmtCtx, FmtUnit, Format};
|
use orchid_base::format::{FmtCtx, FmtUnit, Format};
|
||||||
use orchid_base::interner::{Interner, Tok};
|
use orchid_base::interner::{Interner, Tok};
|
||||||
use orchid_base::location::Pos;
|
use orchid_base::location::SrcRange;
|
||||||
use orchid_base::name::Sym;
|
use orchid_base::name::{NameLike, Sym, VName};
|
||||||
use orchid_base::parse::Comment;
|
use orchid_base::parse::Comment;
|
||||||
use orchid_base::reqnot::{ReqNot, Requester};
|
use orchid_base::reqnot::{ReqNot, Requester};
|
||||||
use orchid_base::tree::ttv_from_api;
|
use orchid_base::tree::ttv_from_api;
|
||||||
@@ -24,19 +23,25 @@ use substack::{Stackframe, Substack};
|
|||||||
|
|
||||||
use crate::api;
|
use crate::api;
|
||||||
use crate::ctx::Ctx;
|
use crate::ctx::Ctx;
|
||||||
use crate::expr::{Expr, ExprParseCtx};
|
use crate::dealias::{absolute_path, walk};
|
||||||
|
use crate::expr::{ExprParseCtx, ExprWillPanic};
|
||||||
|
use crate::expr_store::ExprStore;
|
||||||
use crate::extension::{Extension, WeakExtension};
|
use crate::extension::{Extension, WeakExtension};
|
||||||
use crate::parsed::{ItemKind, ParsTokTree, ParsedModule, Root};
|
use crate::fs::{DeclFS, DeclMod};
|
||||||
use crate::tree::Module;
|
use crate::parsed::{Item, ItemKind, ParsTokTree, ParsedMember, ParsedMemberKind, ParsedModule};
|
||||||
|
use crate::tree::Root;
|
||||||
|
|
||||||
#[derive(destructure)]
|
#[derive(destructure)]
|
||||||
struct SystemInstData {
|
struct SystemInstData {
|
||||||
|
deps: Vec<System>,
|
||||||
ctx: Ctx,
|
ctx: Ctx,
|
||||||
ext: Extension,
|
ext: Extension,
|
||||||
decl_id: api::SysDeclId,
|
decl_id: api::SysDeclId,
|
||||||
lex_filter: api::CharFilter,
|
lex_filter: api::CharFilter,
|
||||||
id: api::SysId,
|
id: api::SysId,
|
||||||
line_types: Vec<Tok<String>>,
|
line_types: Vec<Tok<String>>,
|
||||||
|
vfs: std::collections::HashMap<api::TStr, api::EagerVfs>,
|
||||||
|
pub(crate) const_paths: MemoMap<api::ParsedConstId, Sym>,
|
||||||
}
|
}
|
||||||
impl Drop for SystemInstData {
|
impl Drop for SystemInstData {
|
||||||
fn drop(&mut self) { self.ext.system_drop(self.id); }
|
fn drop(&mut self) { self.ext.system_drop(self.id); }
|
||||||
@@ -55,15 +60,29 @@ impl fmt::Debug for SystemInstData {
|
|||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct System(Rc<SystemInstData>);
|
pub struct System(Rc<SystemInstData>);
|
||||||
impl System {
|
impl System {
|
||||||
|
#[must_use]
|
||||||
pub fn id(&self) -> api::SysId { self.0.id }
|
pub fn id(&self) -> api::SysId { self.0.id }
|
||||||
|
#[must_use]
|
||||||
pub fn ext(&self) -> &Extension { &self.0.ext }
|
pub fn ext(&self) -> &Extension { &self.0.ext }
|
||||||
|
#[must_use]
|
||||||
pub fn ctx(&self) -> &Ctx { &self.0.ctx }
|
pub fn ctx(&self) -> &Ctx { &self.0.ctx }
|
||||||
|
#[must_use]
|
||||||
pub fn i(&self) -> &Interner { &self.0.ctx.i }
|
pub fn i(&self) -> &Interner { &self.0.ctx.i }
|
||||||
|
#[must_use]
|
||||||
|
pub fn deps(&self) -> &[System] { &self.0.deps }
|
||||||
|
#[must_use]
|
||||||
pub(crate) fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { self.0.ext.reqnot() }
|
pub(crate) fn reqnot(&self) -> &ReqNot<api::HostMsgSet> { self.0.ext.reqnot() }
|
||||||
|
#[must_use]
|
||||||
pub async fn get_tree(&self, id: api::TreeId) -> api::MemberKind {
|
pub async fn get_tree(&self, id: api::TreeId) -> api::MemberKind {
|
||||||
self.reqnot().request(api::GetMember(self.0.id, id)).await
|
self.reqnot().request(api::GetMember(self.0.id, id)).await
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn has_lexer(&self) -> bool { !self.0.lex_filter.0.is_empty() }
|
pub fn has_lexer(&self) -> bool { !self.0.lex_filter.0.is_empty() }
|
||||||
|
#[must_use]
|
||||||
|
pub async fn vfs(&self) -> DeclFS {
|
||||||
|
DeclFS::Eager(DeclMod::from_api(&self.0.vfs, self.clone()).await)
|
||||||
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn can_lex(&self, c: char) -> bool { char_filter_match(&self.0.lex_filter, c) }
|
pub fn can_lex(&self, c: char) -> bool { char_filter_match(&self.0.lex_filter, c) }
|
||||||
/// Have this system lex a part of the source. It is assumed that
|
/// Have this system lex a part of the source. It is assumed that
|
||||||
/// [Self::can_lex] was called and returned true.
|
/// [Self::can_lex] was called and returned true.
|
||||||
@@ -76,24 +95,26 @@ impl System {
|
|||||||
) -> api::OrcResult<Option<api::LexedExpr>> {
|
) -> api::OrcResult<Option<api::LexedExpr>> {
|
||||||
self.0.ext.lex_req(source, src, pos, self.id(), r).await
|
self.0.ext.lex_req(source, src, pos, self.id(), r).await
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn can_parse(&self, ltyp: Tok<String>) -> bool { self.0.line_types.contains(&ltyp) }
|
pub fn can_parse(&self, ltyp: Tok<String>) -> bool { self.0.line_types.contains(&ltyp) }
|
||||||
pub fn line_types(&self) -> impl Iterator<Item = &Tok<String>> + '_ { self.0.line_types.iter() }
|
pub fn line_types(&self) -> impl Iterator<Item = &Tok<String>> + '_ { self.0.line_types.iter() }
|
||||||
pub async fn parse(
|
pub async fn parse(
|
||||||
&self,
|
&self,
|
||||||
module: Sym,
|
path: Substack<'_, Tok<String>>,
|
||||||
line: Vec<ParsTokTree>,
|
line: Vec<ParsTokTree>,
|
||||||
exported: bool,
|
exported: bool,
|
||||||
comments: Vec<Comment>,
|
comments: Vec<Comment>,
|
||||||
) -> OrcRes<Vec<ParsTokTree>> {
|
callback: &mut impl AsyncFnMut(Substack<'_, Tok<String>>, Vec<ParsTokTree>) -> OrcRes<Vec<Item>>,
|
||||||
|
) -> OrcRes<Vec<Item>> {
|
||||||
let src_path = line.first().expect("cannot be empty").sr.path();
|
let src_path = line.first().expect("cannot be empty").sr.path();
|
||||||
let line = join_all(line.into_iter().map(|t| async {
|
let line = join_all(line.into_iter().map(|t| async {
|
||||||
let mut expr_store = self.0.ext.exprs().clone();
|
let mut expr_store = self.0.ext.exprs().clone();
|
||||||
t.into_api(&mut expr_store, &mut ()).await
|
t.into_api(&mut expr_store, &mut ExprWillPanic).await
|
||||||
}))
|
}))
|
||||||
.await;
|
.await;
|
||||||
let comments = comments.iter().map(Comment::to_api).collect_vec();
|
let comments = comments.iter().map(Comment::to_api).collect_vec();
|
||||||
let req = api::ParseLine {
|
let req = api::ParseLine {
|
||||||
module: module.to_api(),
|
module: self.i().i(&path.unreverse()).await.to_api(),
|
||||||
src: src_path.to_api(),
|
src: src_path.to_api(),
|
||||||
exported,
|
exported,
|
||||||
sys: self.id(),
|
sys: self.id(),
|
||||||
@@ -101,14 +122,69 @@ impl System {
|
|||||||
line,
|
line,
|
||||||
};
|
};
|
||||||
match self.reqnot().request(req).await {
|
match self.reqnot().request(req).await {
|
||||||
Ok(parsed) => {
|
Ok(parsed_v) => {
|
||||||
let mut pctx = ExprParseCtx { ctx: self.ctx().clone(), exprs: self.ext().exprs().clone() };
|
|
||||||
let mut ext_exprs = self.ext().exprs().clone();
|
let mut ext_exprs = self.ext().exprs().clone();
|
||||||
Ok(ttv_from_api(parsed, &mut ext_exprs, &mut pctx, &src_path, self.i()).await)
|
struct ConvCtx<'a> {
|
||||||
|
sys: &'a System,
|
||||||
|
src_path: &'a Sym,
|
||||||
|
i: &'a Interner,
|
||||||
|
ext_exprs: &'a mut ExprStore,
|
||||||
|
pctx: &'a mut ExprParseCtx<'a>,
|
||||||
|
}
|
||||||
|
async fn conv(
|
||||||
|
parsed_v: Vec<api::ParsedLine>,
|
||||||
|
module: Substack<'_, Tok<String>>,
|
||||||
|
callback: &'_ mut impl AsyncFnMut(
|
||||||
|
Substack<'_, Tok<String>>,
|
||||||
|
Vec<ParsTokTree>,
|
||||||
|
) -> OrcRes<Vec<Item>>,
|
||||||
|
ctx: &mut ConvCtx<'_>,
|
||||||
|
) -> OrcRes<Vec<Item>> {
|
||||||
|
let mut items = Vec::new();
|
||||||
|
for parsed in parsed_v {
|
||||||
|
let (name, exported, kind) = match parsed.kind {
|
||||||
|
api::ParsedLineKind::Member(api::ParsedMember { name, exported, kind }) =>
|
||||||
|
(name, exported, kind),
|
||||||
|
api::ParsedLineKind::Recursive(rec) => {
|
||||||
|
let tokens = ttv_from_api(rec, ctx.ext_exprs, ctx.pctx, ctx.src_path, ctx.i).await;
|
||||||
|
items.extend(callback(module.clone(), tokens).await?);
|
||||||
|
continue;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
let name = ctx.i.ex(name).await;
|
||||||
|
let mkind = match kind {
|
||||||
|
api::ParsedMemberKind::Module(items) => {
|
||||||
|
let items =
|
||||||
|
conv(items, module.push(name.clone()), callback, ctx).boxed_local().await?;
|
||||||
|
ParsedMemberKind::Mod(ParsedModule::new(items))
|
||||||
|
},
|
||||||
|
api::ParsedMemberKind::Constant(cid) =>
|
||||||
|
ParsedMemberKind::DeferredConst(cid, ctx.sys.clone()),
|
||||||
|
};
|
||||||
|
items.push(Item {
|
||||||
|
comments: join_all(
|
||||||
|
parsed.comments.iter().map(|c| Comment::from_api(c, ctx.src_path.clone(), ctx.i)),
|
||||||
|
)
|
||||||
|
.await,
|
||||||
|
sr: SrcRange::from_api(&parsed.source_range, ctx.i).await,
|
||||||
|
kind: ItemKind::Member(ParsedMember { name, exported, kind: mkind }),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
Ok(items)
|
||||||
|
}
|
||||||
|
conv(parsed_v, path, callback, &mut ConvCtx {
|
||||||
|
i: self.i(),
|
||||||
|
ext_exprs: &mut ext_exprs,
|
||||||
|
pctx: &mut ExprParseCtx { ctx: self.ctx(), exprs: self.ext().exprs() },
|
||||||
|
src_path: &src_path,
|
||||||
|
sys: self,
|
||||||
|
})
|
||||||
|
.await
|
||||||
},
|
},
|
||||||
Err(e) => Err(OrcErrv::from_api(&e, &self.ctx().i).await),
|
Err(e) => Err(OrcErrv::from_api(&e, &self.ctx().i).await),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub async fn request(&self, req: Vec<u8>) -> Vec<u8> {
|
pub async fn request(&self, req: Vec<u8>) -> Vec<u8> {
|
||||||
self.reqnot().request(api::SysFwded(self.id(), req)).await
|
self.reqnot().request(api::SysFwded(self.id(), req)).await
|
||||||
}
|
}
|
||||||
@@ -118,7 +194,24 @@ impl System {
|
|||||||
this.ctx.owned_atoms.write().await.remove(&drop);
|
this.ctx.owned_atoms.write().await.remove(&drop);
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn downgrade(&self) -> WeakSystem { WeakSystem(Rc::downgrade(&self.0)) }
|
pub fn downgrade(&self) -> WeakSystem { WeakSystem(Rc::downgrade(&self.0)) }
|
||||||
|
/// Implementation of [api::ResolveNames]
|
||||||
|
pub(crate) async fn name_resolver(
|
||||||
|
&self,
|
||||||
|
orig: api::ParsedConstId,
|
||||||
|
) -> impl AsyncFnMut(&[Tok<String>]) -> Option<VName> + use<> {
|
||||||
|
let root = self.0.ctx.root.read().await.upgrade().expect("find_names when root not in context");
|
||||||
|
let orig = self.0.const_paths.get(&orig).expect("origin for find_names invalid").clone();
|
||||||
|
let ctx = self.0.ctx.clone();
|
||||||
|
async move |rel| {
|
||||||
|
let cwd = orig.split_last().1;
|
||||||
|
let abs = absolute_path(cwd, rel, &ctx.i).await.ok()?;
|
||||||
|
let root_data = &mut *root.0.write().await;
|
||||||
|
let walk_ctx = &mut (ctx.clone(), &mut root_data.consts);
|
||||||
|
walk(&root_data.root, false, abs.iter(), walk_ctx).await.is_ok().then_some(abs)
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
impl Format for System {
|
impl Format for System {
|
||||||
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
async fn print<'a>(&'a self, _c: &'a (impl FmtCtx + ?Sized + 'a)) -> FmtUnit {
|
||||||
@@ -130,6 +223,7 @@ impl Format for System {
|
|||||||
|
|
||||||
pub struct WeakSystem(Weak<SystemInstData>);
|
pub struct WeakSystem(Weak<SystemInstData>);
|
||||||
impl WeakSystem {
|
impl WeakSystem {
|
||||||
|
#[must_use]
|
||||||
pub fn upgrade(&self) -> Option<System> { self.0.upgrade().map(System) }
|
pub fn upgrade(&self) -> Option<System> { self.0.upgrade().map(System) }
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -138,56 +232,42 @@ pub struct SystemCtor {
|
|||||||
pub(crate) ext: WeakExtension,
|
pub(crate) ext: WeakExtension,
|
||||||
}
|
}
|
||||||
impl SystemCtor {
|
impl SystemCtor {
|
||||||
|
#[must_use]
|
||||||
pub fn name(&self) -> &str { &self.decl.name }
|
pub fn name(&self) -> &str { &self.decl.name }
|
||||||
|
#[must_use]
|
||||||
pub fn priority(&self) -> NotNan<f64> { self.decl.priority }
|
pub fn priority(&self) -> NotNan<f64> { self.decl.priority }
|
||||||
|
#[must_use]
|
||||||
pub fn depends(&self) -> impl ExactSizeIterator<Item = &str> {
|
pub fn depends(&self) -> impl ExactSizeIterator<Item = &str> {
|
||||||
self.decl.depends.iter().map(|s| &**s)
|
self.decl.depends.iter().map(|s| &**s)
|
||||||
}
|
}
|
||||||
|
#[must_use]
|
||||||
pub fn id(&self) -> api::SysDeclId { self.decl.id }
|
pub fn id(&self) -> api::SysDeclId { self.decl.id }
|
||||||
pub async fn run<'a>(
|
#[must_use]
|
||||||
&self,
|
pub async fn run(&self, deps: Vec<System>) -> (Root, System) {
|
||||||
depends: impl IntoIterator<Item = &'a System>,
|
let depends = deps.iter().map(|si| si.id()).collect_vec();
|
||||||
consts: &mut HashMap<Sym, Expr>,
|
|
||||||
) -> (Module, System) {
|
|
||||||
let depends = depends.into_iter().map(|si| si.id()).collect_vec();
|
|
||||||
debug_assert_eq!(depends.len(), self.decl.depends.len(), "Wrong number of deps provided");
|
debug_assert_eq!(depends.len(), self.decl.depends.len(), "Wrong number of deps provided");
|
||||||
let ext = self.ext.upgrade().expect("SystemCtor should be freed before Extension");
|
let ext = self.ext.upgrade().expect("SystemCtor should be freed before Extension");
|
||||||
let id = ext.ctx().next_sys_id();
|
let id = ext.ctx().next_sys_id();
|
||||||
let sys_inst = ext.reqnot().request(api::NewSystem { depends, id, system: self.decl.id }).await;
|
let sys_inst = ext.reqnot().request(api::NewSystem { depends, id, system: self.decl.id }).await;
|
||||||
let data = System(Rc::new(SystemInstData {
|
let data = System(Rc::new(SystemInstData {
|
||||||
|
deps,
|
||||||
decl_id: self.decl.id,
|
decl_id: self.decl.id,
|
||||||
ext: ext.clone(),
|
ext: ext.clone(),
|
||||||
ctx: ext.ctx().clone(),
|
ctx: ext.ctx().clone(),
|
||||||
lex_filter: sys_inst.lex_filter,
|
lex_filter: sys_inst.lex_filter,
|
||||||
|
vfs: sys_inst.vfs,
|
||||||
line_types: join_all(sys_inst.line_types.iter().map(|m| Tok::from_api(*m, &ext.ctx().i)))
|
line_types: join_all(sys_inst.line_types.iter().map(|m| Tok::from_api(*m, &ext.ctx().i)))
|
||||||
.await,
|
.await,
|
||||||
id,
|
id,
|
||||||
|
const_paths: MemoMap::new(),
|
||||||
}));
|
}));
|
||||||
let const_root =
|
let api_module_root = api::Module {
|
||||||
Module::from_api((sys_inst.const_root.into_iter()).map(|(k, v)| api::Member {
|
members: (sys_inst.const_root.into_iter())
|
||||||
name: k,
|
.map(|(k, v)| api::Member { name: k, kind: v, comments: vec![], exported: true })
|
||||||
kind: v,
|
.collect_vec(),
|
||||||
comments: vec![],
|
};
|
||||||
exported: true,
|
let root = Root::from_api(api_module_root, &data).await;
|
||||||
}))
|
|
||||||
.await;
|
|
||||||
let const_root = clone!(data, ext; stream! {
|
|
||||||
for (k, v) in sys_inst.const_root {
|
|
||||||
yield Member::from_api(
|
|
||||||
,
|
|
||||||
&mut ParsedFromApiCx {
|
|
||||||
consts,
|
|
||||||
path: ext.ctx().i.i(&[]).await,
|
|
||||||
sys: &data,
|
|
||||||
}
|
|
||||||
).await;
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.map(|mem| ItemKind::Member(mem).at(Pos::None))
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
.await;
|
|
||||||
ext.ctx().systems.write().await.insert(id, data.downgrade());
|
ext.ctx().systems.write().await.insert(id, data.downgrade());
|
||||||
let root = ParsedModule::new(const_root);
|
|
||||||
(root, data)
|
(root, data)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -219,7 +299,6 @@ pub async fn init_systems(
|
|||||||
fn walk_deps<'a>(
|
fn walk_deps<'a>(
|
||||||
graph: &mut HashMap<&str, &'a SystemCtor>,
|
graph: &mut HashMap<&str, &'a SystemCtor>,
|
||||||
list: &mut Vec<&'a SystemCtor>,
|
list: &mut Vec<&'a SystemCtor>,
|
||||||
consts: &mut HashMap<Sym, Expr>,
|
|
||||||
chain: Stackframe<&str>,
|
chain: Stackframe<&str>,
|
||||||
) -> Result<(), SysResolvErr> {
|
) -> Result<(), SysResolvErr> {
|
||||||
if let Some(ctor) = graph.remove(chain.item) {
|
if let Some(ctor) = graph.remove(chain.item) {
|
||||||
@@ -231,22 +310,21 @@ pub async fn init_systems(
|
|||||||
circle.extend(Substack::Frame(chain).iter().map(|s| s.to_string()));
|
circle.extend(Substack::Frame(chain).iter().map(|s| s.to_string()));
|
||||||
return Err(SysResolvErr::Loop(circle));
|
return Err(SysResolvErr::Loop(circle));
|
||||||
}
|
}
|
||||||
walk_deps(graph, list, consts, Substack::Frame(chain).new_frame(dep))?
|
walk_deps(graph, list, Substack::Frame(chain).new_frame(dep))?
|
||||||
}
|
}
|
||||||
list.push(ctor);
|
list.push(ctor);
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
let mut consts = HashMap::new();
|
|
||||||
for tgt in tgts {
|
for tgt in tgts {
|
||||||
walk_deps(&mut to_load, &mut to_load_ordered, &mut consts, Substack::Bottom.new_frame(tgt))?;
|
walk_deps(&mut to_load, &mut to_load_ordered, Substack::Bottom.new_frame(tgt))?;
|
||||||
}
|
}
|
||||||
let mut systems = HashMap::<&str, System>::new();
|
let mut systems = HashMap::<&str, System>::new();
|
||||||
let mut root_mod = ParsedModule::default();
|
let mut root = Root::new(exts.first().unwrap().ctx().clone());
|
||||||
for ctor in to_load_ordered.iter() {
|
for ctor in to_load_ordered.iter() {
|
||||||
let (sys_root, sys) = ctor.run(ctor.depends().map(|n| &systems[n]), &mut consts).await;
|
let (sys_root, sys) = ctor.run(ctor.depends().map(|n| systems[n].clone()).collect()).await;
|
||||||
systems.insert(ctor.name(), sys);
|
systems.insert(ctor.name(), sys);
|
||||||
root_mod.merge(sys_root);
|
root = root.merge(&sys_root).await.expect("Conflicting roots");
|
||||||
}
|
}
|
||||||
Ok((Root::new(root_mod, consts), systems.into_values().collect_vec()))
|
Ok((root, systems.into_values().collect_vec()))
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,24 +1,142 @@
|
|||||||
|
//! This tree isn't Clone because lazy subtrees are guaranteed to only be loaded
+//! once
use std::cell::RefCell;
use std::rc::{Rc, Weak};

use async_once_cell::OnceCell;
-use futures::FutureExt;
+use async_std::sync::RwLock;
+use futures::{FutureExt, StreamExt, stream};
use hashbrown::HashMap;
+use hashbrown::hash_map::Entry;
use itertools::Itertools;
-use orchid_base::error::{OrcRes, Reporter};
+use orchid_api::FetchParsedConst;
+use orchid_base::clone;
+use orchid_base::error::{OrcRes, Reporter, mk_err, mk_errv};
use orchid_base::interner::Tok;
+use orchid_base::location::{Pos, SrcRange};
use orchid_base::name::{Sym, VPath};
+use orchid_base::reqnot::Requester;

use crate::api;
use crate::ctx::Ctx;
-use crate::dealias::{DealiasCtx, absolute_path, resolv_glob};
+use crate::dealias::{ChildErrorKind, Tree, absolute_path, resolv_glob, walk};
use crate::expr::{Expr, ExprParseCtx, PathSetBuilder};
-use crate::parsed::{ParsedMemberKind, ParsedModule, Tree, WalkError, WalkErrorKind};
+use crate::parsed::{ItemKind, ParsedMemberKind, ParsedModule};
use crate::system::System;

-pub struct Tree(Rc<RefCell<Module>>);
+pub struct RootData {
+pub root: Module,
+pub consts: HashMap<Sym, Expr>,
+pub ctx: Ctx,
+}
+#[derive(Clone)]
+pub struct Root(pub Rc<RwLock<RootData>>);
+impl Root {
+#[must_use]
+pub fn new(ctx: Ctx) -> Self {
+Root(Rc::new(RwLock::new(RootData {
+root: Module::default(),
+consts: HashMap::default(),
+ctx,
+})))
+}
+#[must_use]
+pub async fn from_api(api: api::Module, sys: &System) -> Self {
+let mut consts = HashMap::new();
+let mut tfac = TreeFromApiCtx { consts: &mut consts, path: sys.i().i(&[][..]).await, sys };
+let root = Module::from_api(api, &mut tfac).await;
+Root(Rc::new(RwLock::new(RootData { root, consts, ctx: sys.ctx().clone() })))
+}
+pub async fn merge(&self, new: &Root) -> Result<Self, MergeErr> {
+let this = self.0.read().await;
+let that = new.0.read().await;
+let mut consts =
+this.consts.iter().chain(&that.consts).map(|(k, v)| (k.clone(), v.clone())).collect();
+let root = this.root.merge(&that.root, this.ctx.clone(), &mut consts).await?;
+Ok(Self(Rc::new(RwLock::new(RootData { root, consts, ctx: this.ctx.clone() }))))
+}
+#[must_use]
+pub async fn add_parsed(&self, parsed: &ParsedModule, pars_prefix: Sym, rep: &Reporter) -> Self {
+let mut ref_this = self.0.write().await;
+let this = &mut *ref_this;
+let mut deferred_consts = HashMap::new();
+let mut tfpctx = FromParsedCtx {
+pars_root: parsed,
+deferred_consts: &mut deferred_consts,
+pars_prefix: pars_prefix.clone(),
+consts: &mut this.consts,
+root: &this.root,
+ctx: &this.ctx,
+rep,
+};
+let mut module = Module::from_parsed(parsed, pars_prefix.clone(), &mut tfpctx).await;
+for step in pars_prefix.iter().rev() {
+let kind = OnceCell::from(MemberKind::Module(module));
+let members = HashMap::from([(
+step.clone(),
+Rc::new(Member { public: true, lazy: RefCell::new(None), kind }),
+)]);
+module = Module { imports: HashMap::new(), members }
+}
+let mut consts = this.consts.clone();
+let root = (this.root.merge(&module, this.ctx.clone(), &mut consts).await)
+.expect("Merge conflict between parsed and existing module");
+let new = Root(Rc::new(RwLock::new(RootData { root, consts, ctx: this.ctx.clone() })));
+*this.ctx.root.write().await = new.downgrade();
+for (path, (sys_id, pc_id)) in deferred_consts {
+let sys = this.ctx.system_inst(sys_id).await.expect("System dropped since parsing");
+let api_expr = sys.reqnot().request(FetchParsedConst { id: pc_id, sys: sys.id() }).await;
+let mut xp_ctx = ExprParseCtx { ctx: &this.ctx, exprs: sys.ext().exprs() };
+let expr = Expr::from_api(&api_expr, PathSetBuilder::new(), &mut xp_ctx).await;
+new.0.write().await.consts.insert(path, expr);
+}
+new
+}
+pub async fn get_const_value(&self, name: Sym, pos: Pos) -> OrcRes<Expr> {
+let this = &mut *self.0.write().await;
+// shortcut for previously visited
+if let Some(val) = this.consts.get(&name) {
+return Ok(val.clone());
+}
+// load the node, then check if this "walk" call added it to the map
+let ctx = this.ctx.clone();
+let module =
+walk(&this.root, false, name.iter().cloned(), &mut (ctx.clone(), &mut this.consts)).await;
+if let Some(val) = this.consts.get(&name) {
+return Ok(val.clone());
+}
+match module {
+Ok(_) => Err(mk_errv(
+ctx.i.i("module used as constant").await,
+format!("{name} is a module, not a constant"),
+[pos],
+)),
+Err(e) => match e.kind {
+ChildErrorKind::Private => panic!("public_only is false"),
+ChildErrorKind::Constant => panic!("Tree refers to constant not in table"),
+ChildErrorKind::Missing => Err(mk_errv(
+ctx.i.i("Constant does not exist").await,
+format!("{name} does not refer to a constant"),
+[pos],
+)),
+},
+}
+}
+#[must_use]
+pub fn downgrade(&self) -> WeakRoot { WeakRoot(Rc::downgrade(&self.0)) }
+}

-pub struct WeakTree(Weak<RefCell<Module>>);
+#[derive(Clone)]
+pub struct WeakRoot(Weak<RwLock<RootData>>);
+impl WeakRoot {
+#[must_use]
+pub fn new() -> Self { Self(Weak::new()) }
+#[must_use]
+pub fn upgrade(&self) -> Option<Root> { Some(Root(self.0.upgrade()?)) }
+}
+impl Default for WeakRoot {
+fn default() -> Self { Self::new() }
+}

pub struct TreeFromApiCtx<'a> {
pub sys: &'a System,
@@ -26,16 +144,26 @@ pub struct TreeFromApiCtx<'a> {
pub path: Tok<Vec<Tok<String>>>,
}
impl<'a> TreeFromApiCtx<'a> {
+#[must_use]
pub async fn push<'c>(&'c mut self, name: Tok<String>) -> TreeFromApiCtx<'c> {
let path = self.sys.ctx().i.i(&self.path.iter().cloned().chain([name]).collect_vec()).await;
TreeFromApiCtx { path, consts: &mut *self.consts, sys: self.sys }
}
}

+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct ResolvedImport {
+target: Sym,
+sr: SrcRange,
+}

+#[derive(Clone, Default)]
pub struct Module {
+pub imports: HashMap<Tok<String>, Result<ResolvedImport, Vec<ResolvedImport>>>,
pub members: HashMap<Tok<String>, Rc<Member>>,
}
impl Module {
+#[must_use]
pub async fn from_api(api: api::Module, ctx: &mut TreeFromApiCtx<'_>) -> Self {
let mut members = HashMap::new();
for mem in api.members {
@@ -44,10 +172,9 @@ impl Module {
let name = vname.to_sym(ctx.sys.i()).await;
let (lazy, kind) = match mem.kind {
api::MemberKind::Lazy(id) =>
-(Some(LazyMemberHandle { id, sys: ctx.sys.clone(), path: name.clone() }), None),
+(Some(LazyMemberHandle { id, sys: ctx.sys.id(), path: name.clone() }), None),
api::MemberKind::Const(val) => {
-let mut expr_ctx =
+let mut expr_ctx = ExprParseCtx { ctx: ctx.sys.ctx(), exprs: ctx.sys.ext().exprs() };
-ExprParseCtx { ctx: ctx.sys.ctx().clone(), exprs: ctx.sys.ext().exprs().clone() };
let expr = Expr::from_api(&val, PathSetBuilder::new(), &mut expr_ctx).await;
ctx.consts.insert(name.clone(), expr);
(None, Some(MemberKind::Const))
@@ -56,99 +183,227 @@ impl Module {
let m = Self::from_api(m, &mut ctx.push(mem_name.clone()).await).boxed_local().await;
(None, Some(MemberKind::Module(m)))
},
-api::MemberKind::Import(import_path) =>
-(None, Some(MemberKind::Alias(Sym::from_api(import_path, ctx.sys.i()).await))),
};
members.insert(
mem_name.clone(),
Rc::new(Member {
-path: name.clone(),
public: mem.exported,
lazy: RefCell::new(lazy),
kind: kind.map_or_else(OnceCell::new, OnceCell::from),
}),
);
}
-Self { members }
+Self { members, imports: HashMap::new() }
}
-async fn walk(&self, mut path: impl Iterator<Item = Tok<String>>) -> &Self { todo!() }
+#[must_use]
-async fn from_parsed(
+async fn from_parsed(parsed: &ParsedModule, path: Sym, ctx: &mut FromParsedCtx<'_>) -> Self {
-parsed: &ParsedModule,
+let imports_by_name = (parsed.get_imports().into_iter())
-path: Sym,
+.filter_map(|i| Some((i.name.clone()?, i)))
-pars_root_path: Sym,
+.into_group_map();
-pars_root: &ParsedModule,
+let mut glob_imports_by_name = HashMap::<_, Vec<_>>::new();
-root: &Module,
+for import in parsed.get_imports().into_iter().filter(|i| i.name.is_none()) {
-preload: &mut HashMap<Sym, Module>,
+let pos = import.sr.pos();
-ctx: &Ctx,
+match absolute_path(&path, &import.path, &ctx.ctx.i).await {
-rep: &Reporter,
+Err(e) => ctx.rep.report(e.err_obj(&ctx.ctx.i, pos, &import.path.to_string()).await),
-) -> Self {
+Ok(abs_path) => {
-let mut imported_names = HashMap::new();
+let names_res = match abs_path.strip_prefix(&ctx.pars_prefix[..]) {
-for import in parsed.get_imports() {
+None => {
-if let Some(n) = import.name.clone() {
+let mut tree_ctx = (ctx.ctx.clone(), &mut *ctx.consts);
-imported_names.push(n);
+resolv_glob(&path, ctx.root, &abs_path, pos, &ctx.ctx.i, &mut tree_ctx).await
-continue;
+},
-}
+Some(sub_tgt) => {
-// the path in a wildcard import has to be a module
+let sub_path = (path.strip_prefix(&ctx.pars_prefix[..]))
-if import.path.is_empty() {
+.expect("from_parsed called with path outside pars_prefix");
-panic!("Imported root")
+resolv_glob(sub_path, ctx.pars_root, sub_tgt, pos, &ctx.ctx.i, &mut ()).await
-}
+},
-let abs_path = match absolute_path(&path, &import.path) {
+};
-Ok(p) => p,
+let abs_path = abs_path.to_sym(&ctx.ctx.i).await;
-Err(e) => {
+match names_res {
-rep.report(e.err_obj(&ctx.i, import.sr.pos(), &path.to_string()).await);
+Err(e) => ctx.rep.report(e),
-continue;
+Ok(names) =>
+for name in names {
+match glob_imports_by_name.entry(name) {
+Entry::Occupied(mut o) => o.get_mut().push((abs_path.clone(), import.sr.clone())),
+Entry::Vacant(v) => {
+v.insert_entry(vec![(abs_path.clone(), import.sr.clone())]);
+},
+}
+},
+}
},
-};
-let names = if let Some(subpath) = abs_path.strip_prefix(&pars_root_path[..]) {
-let pars_path = (path.strip_prefix(&pars_root_path[..]))
-.expect("pars path outside pars root");
-resolv_glob(&pars_path, pars_root, &subpath, import.sr.pos(), &ctx.i, rep, &mut ()).await
-} else {
-resolv_glob(&path, root, &abs_path, import.sr.pos(), &ctx.i, rep, &mut ()).await
}
}
-todo!()
+let mut imports = HashMap::new();
+let conflicting_imports_msg = ctx.ctx.i.i("Conflicting imports").await;
+for (key, values) in imports_by_name {
+if values.len() == 1 {
+let import = values.into_iter().next().unwrap();
+let sr = import.sr.clone();
+let abs_path_res = absolute_path(&path, &import.clone().mspath(), &ctx.ctx.i).await;
+match abs_path_res {
+Err(e) => ctx.rep.report(e.err_obj(&ctx.ctx.i, sr.pos(), &import.to_string()).await),
+Ok(abs_path) => {
+imports
+.insert(key, Ok(ResolvedImport { target: abs_path.to_sym(&ctx.ctx.i).await, sr }));
+},
+}
+} else {
+for item in values {
+ctx.rep.report(mk_err(
+conflicting_imports_msg.clone(),
+format!("{key} is imported multiple times from different modules"),
+[item.sr.pos().into()],
+));
+}
+}
+}
+for (key, values) in glob_imports_by_name {
+if !imports.contains_key(&key) {
+let i = &ctx.ctx.i;
+let values = stream::iter(values)
+.then(|(n, sr)| {
+clone!(key; async move {
+ResolvedImport { target: n.to_vname().suffix([key.clone()]).to_sym(i).await, sr }
+})
+})
+.collect::<Vec<_>>()
+.await;
+imports.insert(key, if values.len() == 1 { Ok(values[0].clone()) } else { Err(values) });
+}
+}
+let self_referential_msg = ctx.ctx.i.i("Self-referential import").await;
+for (key, value) in imports.iter() {
+let Ok(import) = value else { continue };
+if import.target.strip_prefix(&path[..]).is_some_and(|t| t.starts_with(&[key.clone()])) {
+ctx.rep.report(mk_err(
+self_referential_msg.clone(),
+format!("import {} points to itself or a path within itself", &import.target),
+[import.sr.pos().into()],
+));
+}
+}
+let mut members = HashMap::new();
+for item in &parsed.items {
+match &item.kind {
+ItemKind::Member(mem) => {
+let path = path.to_vname().suffix([mem.name.clone()]).to_sym(&ctx.ctx.i).await;
+let kind = OnceCell::from(MemberKind::from_parsed(&mem.kind, path.clone(), ctx).await);
+members.insert(
+mem.name.clone(),
+Rc::new(Member { kind, lazy: RefCell::default(), public: mem.exported }),
+);
+},
+ItemKind::Import(_) => (),
+}
+}
+Module { imports, members }
+}
+pub async fn merge(
+&self,
+other: &Module,
+ctx: Ctx,
+consts: &mut HashMap<Sym, Expr>,
+) -> Result<Module, MergeErr> {
+if !self.imports.is_empty() || !other.imports.is_empty() {
+return Err(MergeErr { path: VPath::new([]), kind: MergeErrKind::Imports });
+}
+let mut members = HashMap::new();
+for (key, mem) in &other.members {
+let Some(own) = self.members.get(key) else {
+members.insert(key.clone(), mem.clone());
+continue;
+};
+if own.public != mem.public {
+return Err(MergeErr { path: VPath::new([]), kind: MergeErrKind::Visibility });
+}
+match (own.kind(ctx.clone(), consts).await, mem.kind(ctx.clone(), consts).await) {
+(MemberKind::Module(own_sub), MemberKind::Module(sub)) => {
+match own_sub.merge(sub, ctx.clone(), consts).boxed_local().await {
+Ok(module) => {
+members.insert(
+key.clone(),
+Rc::new(Member {
+lazy: RefCell::new(None),
+public: own.public,
+kind: OnceCell::from(MemberKind::Module(module)),
+}),
+);
+},
+Err(mut e) => {
+e.path = e.path.prefix([key.clone()]);
+return Err(e);
+},
+}
+},
+_ => return Err(MergeErr { path: VPath::new([key.clone()]), kind: MergeErrKind::Const }),
+}
+}
+for (key, mem) in &self.members {
+if let Entry::Vacant(slot) = members.entry(key.clone()) {
+slot.insert(mem.clone());
+}
+}
+Ok(Module { imports: HashMap::new(), members })
}
}

+#[derive(Debug)]
+pub struct MergeErr {
+pub path: VPath,
+pub kind: MergeErrKind,
+}
+#[derive(Debug)]
+pub enum MergeErrKind {
+Imports,
+Visibility,
+Const,
+}

+pub struct FromParsedCtx<'a> {
+pars_prefix: Sym,
+pars_root: &'a ParsedModule,
+root: &'a Module,
+consts: &'a mut HashMap<Sym, Expr>,
+rep: &'a Reporter,
+ctx: &'a Ctx,
+deferred_consts: &'a mut HashMap<Sym, (api::SysId, api::ParsedConstId)>,
+}

impl Tree for Module {
-type Ctx = HashMap<Sym, Expr>;
+type Ctx<'a> = (Ctx, &'a mut HashMap<Sym, Expr>);
-async fn walk<I: IntoIterator<Item = Tok<String>>>(
+async fn child(
&self,
+key: Tok<String>,
public_only: bool,
-path: I,
+(ctx, consts): &mut Self::Ctx<'_>,
-ctx: &'_ mut Self::Ctx,
+) -> crate::dealias::ChildResult<'_, Self> {
-) -> Result<&Self, crate::parsed::WalkError> {
+let Some(member) = self.members.get(&key) else {
-let mut cur = self;
+return Err(ChildErrorKind::Missing);
-for (pos, step) in path.into_iter().enumerate() {
+};
-let Some(member) = self.members.get(&step) else {
+if public_only && !member.public {
-return Err(WalkError{ pos, kind: WalkErrorKind::Missing })
+return Err(ChildErrorKind::Private);
-};
-if public_only && !member.public {
-return Err(WalkError { pos, kind: WalkErrorKind::Private })
-}
-match &member.kind {
-MemberKind::Module(m) => cur = m,
-MemberKind::Alias()
-}
}
+match &member.kind(ctx.clone(), consts).await {
+MemberKind::Module(m) => Ok(m),
+MemberKind::Const => Err(ChildErrorKind::Constant),
+}
+}
+fn children(&self, public_only: bool) -> hashbrown::HashSet<Tok<String>> {
+self.members.iter().filter(|(_, v)| !public_only || v.public).map(|(k, _)| k.clone()).collect()
}
}

pub struct Member {
pub public: bool,
-pub path: Sym,
pub lazy: RefCell<Option<LazyMemberHandle>>,
pub kind: OnceCell<MemberKind>,
}
impl Member {
-pub async fn kind_mut(&mut self, consts: &mut HashMap<Sym, Expr>) -> &mut MemberKind {
+#[must_use]
-self.kind(consts).await;
+pub async fn kind<'a>(&'a self, ctx: Ctx, consts: &mut HashMap<Sym, Expr>) -> &'a MemberKind {
-self.kind.get_mut().expect("Thhe above line should have initialized it")
-}
-pub async fn kind(&self, consts: &mut HashMap<Sym, Expr>) -> &MemberKind {
(self.kind.get_or_init(async {
let handle = self.lazy.borrow_mut().take().expect("If kind is uninit, lazy must be Some");
-handle.run(consts).await
+handle.run(ctx, consts).await
}))
.await
}
@@ -157,65 +412,48 @@ impl Member {
pub enum MemberKind {
Const,
Module(Module),
-/// This must be pointing at the final value, not a second alias.
-Alias(Sym),
}
impl MemberKind {
-async fn from_parsed(parsed: &ParsedMemberKind, root: &ParsedModule) -> Self {
+#[must_use]
+async fn from_parsed(parsed: &ParsedMemberKind, path: Sym, ctx: &mut FromParsedCtx<'_>) -> Self {
match parsed {
-ParsedMemberKind::Const => MemberKind::Const,
+ParsedMemberKind::ParsedConst(expr) => {
-ParsedMemberKind::Mod(m) => MemberKind::Module(Module::from_parsed(m, root).await),
+ctx.consts.insert(path, expr.clone());
+MemberKind::Const
+},
+ParsedMemberKind::DeferredConst(id, sys) => {
+ctx.deferred_consts.insert(path, (sys.id(), *id));
+MemberKind::Const
+},
+ParsedMemberKind::Mod(m) =>
+MemberKind::Module(Module::from_parsed(m, path, ctx).boxed_local().await),
}
}
}

pub struct LazyMemberHandle {
id: api::TreeId,
-sys: System,
+sys: api::SysId,
path: Sym,
}
impl LazyMemberHandle {
-pub async fn run(self, consts: &mut HashMap<Sym, Expr>) -> MemberKind {
+#[must_use]
-match self.sys.get_tree(self.id).await {
+pub async fn run(self, ctx: Ctx, consts: &mut HashMap<Sym, Expr>) -> MemberKind {
+let sys = ctx.system_inst(self.sys).await.expect("Missing system for lazy member");
+match sys.get_tree(self.id).await {
api::MemberKind::Const(c) => {
-let mut pctx =
+let mut pctx = ExprParseCtx { ctx: &ctx, exprs: sys.ext().exprs() };
-ExprParseCtx { ctx: self.sys.ctx().clone(), exprs: self.sys.ext().exprs().clone() };
consts.insert(self.path, Expr::from_api(&c, PathSetBuilder::new(), &mut pctx).await);
MemberKind::Const
},
api::MemberKind::Module(m) => MemberKind::Module(
-Module::from_api(m, &mut TreeFromApiCtx { sys: &self.sys, consts, path: self.path.tok() })
+Module::from_api(m, &mut TreeFromApiCtx { sys: &sys, consts, path: self.path.tok() }).await,
-.await,
),
-api::MemberKind::Lazy(id) => Self { id, ..self }.run(consts).boxed_local().await,
+api::MemberKind::Lazy(id) => Self { id, ..self }.run(ctx, consts).boxed_local().await,
-api::MemberKind::Import(path) => MemberKind::Alias(Sym::from_api(path, self.sys.i()).await),
}
}
-pub async fn into_member(self, public: bool, path: Sym) -> Member {
+#[must_use]
-Member { public, path, kind: OnceCell::new(), lazy: RefCell::new(Some(self)) }
+pub async fn into_member(self, public: bool) -> Member {
+Member { public, kind: OnceCell::new(), lazy: RefCell::new(Some(self)) }
}
}

-// TODO: this one should own but not execute the lazy handle.
-// Lazy handles should run
-// - in the tree converter function as needed to resolve imports
-// - in the tree itself when a constant is loaded
-// - when a different lazy subtree references them in a wildcard import and
-// forces the enumeration.
-//
-// do we actually need to allow wildcard imports in lazy trees? maybe a
-// different kind of import is sufficient. Source code never becomes a lazy
-// tree. What does?
-// - Systems subtrees rarely reference each other at all. They can't use macros
-// and they usually point to constants with an embedded expr.
-// - Compiled libraries on the long run. The code as written may reference
-// constants by indirect path. But this is actually the same as the above,
-// they also wouldn't use regular imports because they are distributed as
-// exprs.
-//
-// Everything is distributed either as source code or as exprs. Line parsers
-// also operate on tokens.
-//
-// TODO: The trees produced by systems can be safely changed
-// to the new kind of tree. This datastructure does not need to support the lazy
-// handle.

@@ -1,4 +1,4 @@
-mod macros;
+// mod macros;
mod std;

pub use std::number::num_atom::{Float, HomoArray, Int, Num};

@@ -7,7 +7,9 @@ use orchid_extension::lexer::LexerObj;
use orchid_extension::parser::ParserObj;
use orchid_extension::system::{System, SystemCard};
use orchid_extension::system_ctor::SystemCtor;
-use orchid_extension::tree::GenItem;
+use orchid_extension::tree::GenMember;

+use crate::macros::mactree_lexer::MacTreeLexer;

#[derive(Default)]
pub struct MacroSystem;
@@ -26,7 +28,7 @@ impl SystemCard for MacroSystem {
impl System for MacroSystem {
async fn request(_: ExtReq<'_>, req: Self::Req) -> Receipt<'_> { match req {} }
fn vfs() -> orchid_extension::fs::DeclFs { DeclFs::Mod(&[]) }
-fn lexers() -> Vec<LexerObj> { vec![] }
+fn lexers() -> Vec<LexerObj> { vec![&MacTreeLexer] }
fn parsers() -> Vec<ParserObj> { vec![] }
-fn env() -> Vec<GenItem> { vec![] }
+fn env() -> Vec<GenMember> { vec![] }
}

@@ -1,15 +1,15 @@
use std::borrow::Cow;
+use std::fmt::Display;
use std::rc::Rc;

use futures::future::join_all;
use orchid_api::Paren;
-use orchid_base::error::OrcErrv;
use orchid_base::format::{FmtCtx, FmtUnit, Format, Variants};
+use orchid_base::interner::Tok;
use orchid_base::location::Pos;
use orchid_base::name::Sym;
use orchid_base::tl_cache;
-use orchid_base::tree::Ph;
+use orchid_extension::atom::Atomic;
-use orchid_extension::atom::{Atomic, MethodSetBuilder};
use orchid_extension::atom_owned::{OwnedAtom, OwnedVariant};
use orchid_extension::expr::Expr;

@@ -62,7 +62,7 @@ impl Format for MacTok {
},
[mtreev_fmt(body, c).await],
),
-Self::Slot => format!("SLOT").into(),
+Self::Slot => "SLOT".into(),
}
}
}
@@ -73,3 +73,26 @@ pub async fn mtreev_fmt<'b>(
) -> FmtUnit {
FmtUnit::sequence(" ", None, join_all(v.into_iter().map(|t| t.print(c))).await)
}

+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+pub struct Ph {
+pub name: Tok<String>,
+pub kind: PhKind,
+}
+impl Display for Ph {
+fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+match self.kind {
+PhKind::Scalar => write!(f, "${}", self.name),
+PhKind::Vector { at_least_one: false, priority: 0 } => write!(f, "..${}", self.name),
+PhKind::Vector { at_least_one: true, priority: 0 } => write!(f, "...${}", self.name),
+PhKind::Vector { at_least_one: false, priority } => write!(f, "..${}:{priority}", self.name),
+PhKind::Vector { at_least_one: true, priority } => write!(f, "...${}:{priority}", self.name),
+}
+}
+}

+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub enum PhKind {
+Scalar,
+Vector { at_least_one: bool, priority: u8 },
+}

@@ -3,13 +3,9 @@ use std::rc::Rc;

use futures::FutureExt;
use orchid_base::error::{OrcRes, mk_errv};
-use orchid_base::location::Pos;
-use orchid_base::parse::name_start;
use orchid_base::tokens::PARENS;
-use orchid_extension::atom::AtomicFeatures;
-use orchid_extension::gen_expr::atom;
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
-use orchid_extension::tree::{GenTok, GenTokTree};
+use orchid_extension::tree::{GenTok, GenTokTree, x_tok};

use crate::macros::mactree::{MacTok, MacTree};

@@ -17,15 +13,14 @@ use crate::macros::mactree::{MacTok, MacTree};
pub struct MacTreeLexer;
impl Lexer for MacTreeLexer {
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['\''..='\''];
-async fn lex<'a>(tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> {
+async fn lex<'a>(tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
let Some(tail2) = tail.strip_prefix('\'') else {
-return Err(err_not_applicable(ctx.i).await.into());
+return Err(err_not_applicable(ctx.i()).await.into());
};
let tail3 = tail2.trim_start();
return match mac_tree(tail3, ctx).await {
-Ok((tail4, mactree)) =>
+Ok((tail4, mactree)) => Ok((tail4, x_tok(mactree).at(ctx.pos_tt(tail, tail4)))),
-Ok((tail4, GenTok::X(mactree.factory()).at(ctx.pos(tail)..ctx.pos(tail4)))),
+Err(e) => Ok((tail2, GenTok::Bottom(e).at(ctx.pos_lt(1, tail2)))),
-Err(e) => Ok((tail2, GenTok::Bottom(e).at(ctx.tok_ran(1, tail2)))),
};
async fn mac_tree<'a>(tail: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, MacTree)> {
for (lp, rp, paren) in PARENS {
@@ -35,14 +30,14 @@ impl Lexer for MacTreeLexer {
let tail2 = body_tail.trim();
if let Some(tail3) = tail2.strip_prefix(*rp) {
break Ok((tail3, MacTree {
-pos: Pos::Range(ctx.pos(tail)..ctx.pos(tail3)),
+pos: ctx.pos_tt(tail, tail3).pos(),
tok: Rc::new(MacTok::S(*paren, items)),
}));
} else if tail2.is_empty() {
return Err(mk_errv(
-ctx.i.i("Unclosed block").await,
+ctx.i().i("Unclosed block").await,
format!("Expected closing {rp}"),
-[Pos::Range(ctx.tok_ran(1, tail)).into()],
+[ctx.pos_lt(1, tail)],
));
}
let (new_tail, new_item) = mac_tree(tail2, ctx).boxed_local().await?;
@@ -53,11 +48,13 @@ impl Lexer for MacTreeLexer {
const INTERPOL: &[&str] = &["$", "..$"];
for pref in INTERPOL {
let Some(code) = tail.strip_prefix(pref) else { continue };
+todo!("Register parameter, and push this onto the argument stack held in the atom")
}
+todo!("recursive lexer call");
return Err(mk_errv(
-ctx.i.i("Expected token after '").await,
+ctx.i().i("Expected token after '").await,
format!("Expected a token after ', found {tail:?}"),
-[Pos::Range(ctx.tok_ran(1, tail)).into()],
+[ctx.pos_lt(1, tail)],
));
}
}

@@ -1,11 +1,10 @@
use itertools::Itertools;
-use orchid_api::PhKind;
use orchid_base::interner::Tok;
use orchid_base::side::Side;
-use orchid_base::tree::Ph;

use super::shared::{AnyMatcher, ScalMatcher, VecMatcher};
use super::vec_attrs::vec_attrs;
+use crate::macros::mactree::{Ph, PhKind};
use crate::macros::{MacTok, MacTree};

pub type MaxVecSplit<'a> = (&'a [MacTree], (Tok<String>, u8, bool), &'a [MacTree]);
@@ -108,24 +107,22 @@ fn mk_scalar(pattern: &MacTree) -> ScalMatcher {
mod test {
use std::rc::Rc;

-use orchid_api::PhKind;
use orchid_base::interner::Interner;
use orchid_base::location::SrcRange;
use orchid_base::sym;
use orchid_base::tokens::Paren;
-use orchid_base::tree::Ph;
use test_executors::spin_on;

use super::mk_any;
+use crate::macros::mactree::{Ph, PhKind};
use crate::macros::{MacTok, MacTree};

#[test]
fn test_scan() {
spin_on(async {
let i = Interner::new_master();
-let ex = |tok: MacTok| async {
+let ex =
-MacTree { tok: Rc::new(tok), pos: SrcRange::mock(&i).await.pos() }
+|tok: MacTok| async { MacTree { tok: Rc::new(tok), pos: SrcRange::mock(&i).await.pos() } };
-};
let pattern = vec![
ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 0, at_least_one: false },

@@ -2,11 +2,9 @@ use std::fmt;
use std::rc::Rc;

use itertools::Itertools;
-use orchid_api::PhKind;
use orchid_base::interner::Interner;
use orchid_base::location::Pos;
use orchid_base::name::Sym;
-use orchid_base::tree::Ph;

use super::any_match::any_match;
use super::build::{mk_any, mk_vec};
@@ -14,6 +12,7 @@ use super::shared::{AnyMatcher, VecMatcher};
use super::state::{MatchState, StateEntry};
use super::vec_attrs::vec_attrs;
use super::vec_match::vec_match;
+use crate::macros::mactree::{Ph, PhKind};
use crate::macros::{MacTok, MacTree};

pub fn first_is_vec(pattern: &[MacTree]) -> bool { vec_attrs(pattern.first().unwrap()).is_some() }

@@ -1,7 +1,6 @@
-use orchid_api::PhKind;
use orchid_base::interner::Tok;
-use orchid_base::tree::Ph;

+use crate::macros::mactree::{Ph, PhKind};
use crate::macros::{MacTok, MacTree};

/// Returns the name, priority and at_least_one of the expression if it is

@@ -1,10 +1,6 @@
-use std::mem;
-use std::rc::Rc;

use orchid_extension::entrypoint::ExtensionData;
use orchid_extension::tokio::tokio_main;
use orchid_std::StdSystem;
-use tokio::task::{LocalSet, spawn_local};

#[tokio::main(flavor = "current_thread")]
pub async fn main() { tokio_main(ExtensionData::new("orchid-std::main", &[&StdSystem])).await }

@@ -2,9 +2,7 @@ use orchid_api_derive::Coding;
use orchid_base::error::OrcRes;
use orchid_base::format::FmtUnit;
use orchid_base::number::Numeric;
-use orchid_extension::atom::{
+use orchid_extension::atom::{AtomFactory, Atomic, AtomicFeatures, ToAtom, TypAtom};
-AtomFactory, Atomic, AtomicFeatures, MethodSetBuilder, ToAtom, TypAtom,
-};
use orchid_extension::atom_thin::{ThinAtom, ThinVariant};
use orchid_extension::conv::TryFromExpr;
use orchid_extension::expr::Expr;

@@ -4,7 +4,7 @@ use orchid_base::error::OrcRes;
use orchid_base::number::{num_to_err, parse_num};
use orchid_extension::atom::ToAtom;
use orchid_extension::lexer::{LexContext, Lexer};
-use orchid_extension::tree::{GenTok, GenTokTree};
+use orchid_extension::tree::{GenTokTree, x_tok};

use super::num_atom::Num;

@@ -12,13 +12,13 @@ use super::num_atom::Num;
pub struct NumLexer;
impl Lexer for NumLexer {
const CHAR_FILTER: &'static [RangeInclusive<char>] = &['0'..='9'];
-async fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> {
+async fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
let ends_at = all.find(|c: char| !c.is_ascii_hexdigit() && !"xX._pP".contains(c));
let (chars, tail) = all.split_at(ends_at.unwrap_or(all.len()));
let fac = match parse_num(chars) {
Ok(numeric) => Num(numeric).to_atom_factory(),
-Err(e) => return Err(num_to_err(e, ctx.pos(all), ctx.i).await.into()),
+Err(e) => return Err(num_to_err(e, ctx.pos(all), &ctx.src, ctx.ctx.i()).await.into()),
};
-Ok((tail, GenTok::X(fac).at(ctx.pos(all)..ctx.pos(tail))))
+Ok((tail, x_tok(fac).at(ctx.pos_lt(chars.len(), tail))))
}
}

@@ -1,10 +1,10 @@
use orchid_base::number::Numeric;
-use orchid_extension::tree::{GenItem, fun, prefix};
+use orchid_extension::tree::{GenMember, fun, prefix};
use ordered_float::NotNan;

use super::num_atom::{Float, HomoArray, Int, Num};

-pub fn gen_num_lib() -> Vec<GenItem> {
+pub fn gen_num_lib() -> Vec<GenMember> {
prefix("std::number", [
fun(true, "add", |a: Num, b: Num| async move {
Num(match HomoArray::new([a.0, b.0]) {

@@ -2,12 +2,12 @@ use never::Never;
use orchid_base::reqnot::Receipt;
use orchid_extension::atom::{AtomDynfo, AtomicFeatures};
use orchid_extension::entrypoint::ExtReq;
-use orchid_extension::fs::DeclFs;
+use orchid_extension::fs::DeclVmod;
use orchid_extension::lexer::LexerObj;
use orchid_extension::parser::ParserObj;
use orchid_extension::system::{System, SystemCard};
use orchid_extension::system_ctor::SystemCtor;
-use orchid_extension::tree::{GenItem, merge_trivial};
+use orchid_extension::tree::{GenMember, merge_trivial};

use super::number::num_lib::gen_num_lib;
use super::string::str_atom::{IntStrAtom, StrAtom};
@@ -36,6 +36,6 @@ impl System for StdSystem {
async fn request(_: ExtReq<'_>, req: Self::Req) -> Receipt<'_> { match req {} }
fn lexers() -> Vec<LexerObj> { vec![&StringLexer, &NumLexer] }
fn parsers() -> Vec<ParserObj> { vec![] }
-fn vfs() -> DeclFs { DeclFs::Mod(&[]) }
+fn vfs() -> DeclVmod { DeclVmod::new(&[]) }
-fn env() -> Vec<GenItem> { merge_trivial([gen_num_lib(), gen_str_lib()]) }
+fn env() -> Vec<GenMember> { merge_trivial([gen_num_lib(), gen_str_lib()]) }
}

@@ -82,17 +82,17 @@ impl OwnedAtom for IntStrAtom {
}

#[derive(Clone)]
-pub struct OrcString<'a> {
+pub struct OrcString {
-kind: OrcStringKind<'a>,
+kind: OrcStringKind,
ctx: SysCtx,
}

#[derive(Clone)]
-pub enum OrcStringKind<'a> {
+pub enum OrcStringKind {
-Val(TypAtom<'a, StrAtom>),
+Val(TypAtom<StrAtom>),
-Int(TypAtom<'a, IntStrAtom>),
+Int(TypAtom<IntStrAtom>),
}
-impl OrcString<'_> {
+impl OrcString {
pub async fn get_string(&self) -> Rc<String> {
match &self.kind {
OrcStringKind::Int(tok) => self.ctx.i().ex(**tok).await.rc(),
@@ -101,8 +101,8 @@ impl OrcString<'_> {
}
}

-impl TryFromExpr for OrcString<'static> {
+impl TryFromExpr for OrcString {
-async fn try_from_expr(expr: Expr) -> OrcRes<OrcString<'static>> {
+async fn try_from_expr(expr: Expr) -> OrcRes<OrcString> {
if let Ok(v) = TypAtom::<StrAtom>::try_from_expr(expr.clone()).await {
return Ok(OrcString { ctx: expr.ctx(), kind: OrcStringKind::Val(v) });
}

@@ -1,12 +1,12 @@
use itertools::Itertools;
use orchid_base::error::{OrcErr, OrcRes, mk_err, mk_errv};
use orchid_base::interner::Interner;
-use orchid_base::location::Pos;
+use orchid_base::location::SrcRange;
+use orchid_base::name::Sym;
use orchid_base::sym;
use orchid_base::tree::wrap_tokv;
-use orchid_extension::atom::AtomicFeatures;
use orchid_extension::lexer::{LexContext, Lexer, err_not_applicable};
-use orchid_extension::tree::{GenTok, GenTokTree};
+use orchid_extension::tree::{GenTokTree, ref_tok, x_tok};

use super::str_atom::IntStrAtom;

@@ -32,7 +32,7 @@ struct StringError {

impl StringError {
/// Convert into project error for reporting
-pub async fn into_proj(self, pos: u32, i: &Interner) -> OrcErr {
+pub async fn into_proj(self, path: &Sym, pos: u32, i: &Interner) -> OrcErr {
let start = pos + self.pos;
mk_err(
i.i("Failed to parse string").await,
@@ -41,7 +41,7 @@ impl StringError {
StringErrorKind::BadCodePoint => "The specified number is not a Unicode code point",
StringErrorKind::BadEscSeq => "Unrecognized escape sequence",
},
-[Pos::Range(start..start + 1).into()],
+[SrcRange::new(start..start + 1, path).pos().into()],
)
}
}
@@ -95,9 +95,9 @@ fn parse_string(str: &str) -> Result<String, StringError> {
pub struct StringLexer;
impl Lexer for StringLexer {
const CHAR_FILTER: &'static [std::ops::RangeInclusive<char>] = &['"'..='"', '`'..='`'];
-async fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree<'a>)> {
+async fn lex<'a>(all: &'a str, ctx: &'a LexContext<'a>) -> OrcRes<(&'a str, GenTokTree)> {
let Some(mut tail) = all.strip_prefix('"') else {
-return Err(err_not_applicable(ctx.i).await.into());
+return Err(err_not_applicable(ctx.ctx.i()).await.into());
};
let mut ret = None;
let mut cur = String::new();
@@ -107,19 +107,19 @@ impl Lexer for StringLexer {
tail: &str,
err: &mut Vec<OrcErr>,
ctx: &'a LexContext<'a>,
-) -> GenTokTree<'a> {
+) -> GenTokTree {
let str_val_res = parse_string(&str.split_off(0));
if let Err(e) = &str_val_res {
-err.push(e.clone().into_proj(ctx.pos(tail) - str.len() as u32, ctx.i).await);
+err.push(e.clone().into_proj(&ctx.src, ctx.pos(tail) - str.len() as u32, ctx.i()).await);
}
let str_val = str_val_res.unwrap_or_default();
-GenTok::X(IntStrAtom::from(ctx.i.i(&*str_val).await).factory())
+x_tok(IntStrAtom::from(ctx.i().i(&*str_val).await)).at(ctx.pos_lt(str.len() as u32, tail))
-.at(ctx.tok_ran(str.len() as u32, tail)) as GenTokTree<'a>
+as GenTokTree
}
-let add_frag = |prev: Option<GenTokTree<'a>>, new: GenTokTree<'a>| async {
+let add_frag = |prev: Option<GenTokTree>, new: GenTokTree| async {
let Some(prev) = prev else { return new };
-let concat_fn =
+let concat_fn = ref_tok(sym!(std::string::concat; ctx.i()).await)
-GenTok::Reference(sym!(std::string::concat; ctx.i).await).at(prev.sr.start..prev.sr.start);
+.at(SrcRange::zw(prev.sr.path(), prev.sr.start()));
wrap_tokv([concat_fn, prev, new])
};
loop {
@@ -141,9 +141,9 @@ impl Lexer for StringLexer {
} else {
let range = ctx.pos(all)..ctx.pos("");
return Err(mk_errv(
-ctx.i.i("No string end").await,
+ctx.i().i("No string end").await,
"String never terminated with \"",
-[Pos::Range(range.clone()).into()],
+[SrcRange::new(range.clone(), &ctx.src)],
));
}
}

@@ -1,14 +1,14 @@
use std::rc::Rc;

-use orchid_extension::tree::{GenItem, comments, fun, prefix};
+use orchid_extension::tree::{GenMember, comments, fun, prefix};

use super::str_atom::StrAtom;
use crate::OrcString;

-pub fn gen_str_lib() -> Vec<GenItem> {
+pub fn gen_str_lib() -> Vec<GenMember> {
prefix("std::string", [comments(
["Concatenate two strings"],
-fun(true, "concat", |left: OrcString<'static>, right: OrcString<'static>| async move {
+fun(true, "concat", |left: OrcString, right: OrcString| async move {
StrAtom::new(Rc::new(left.get_string().await.to_string() + &right.get_string().await))
}),
)])

110
orcx/src/main.rs
110
orcx/src/main.rs
@@ -10,19 +10,18 @@ use async_stream::try_stream;
|
|||||||
use camino::Utf8PathBuf;
|
use camino::Utf8PathBuf;
|
||||||
use clap::{Parser, Subcommand};
|
use clap::{Parser, Subcommand};
|
||||||
use futures::{Stream, TryStreamExt, io};
|
use futures::{Stream, TryStreamExt, io};
|
||||||
use orchid_base::error::ReporterImpl;
|
use orchid_base::error::Reporter;
|
||||||
use orchid_base::format::{FmtCtxImpl, Format, take_first};
|
use orchid_base::format::{FmtCtxImpl, Format, take_first};
|
||||||
use orchid_base::location::Pos;
|
|
||||||
use orchid_base::logging::{LogStrategy, Logger};
|
use orchid_base::logging::{LogStrategy, Logger};
|
||||||
use orchid_base::macros::mtreev_fmt;
|
|
||||||
use orchid_base::parse::Snippet;
|
use orchid_base::parse::Snippet;
|
||||||
|
use orchid_base::sym;
|
||||||
use orchid_base::tree::ttv_fmt;
|
use orchid_base::tree::ttv_fmt;
|
||||||
use orchid_host::ctx::Ctx;
|
use orchid_host::ctx::Ctx;
|
||||||
use orchid_host::execute::{ExecCtx, ExecResult};
|
use orchid_host::execute::{ExecCtx, ExecResult};
|
||||||
use orchid_host::expr::mtreev_to_expr;
|
use orchid_host::expr::PathSetBuilder;
|
||||||
use orchid_host::extension::Extension;
|
use orchid_host::extension::Extension;
|
||||||
use orchid_host::lex::lex;
|
use orchid_host::lex::lex;
|
||||||
use orchid_host::parse::{HostParseCtxImpl, parse_items, parse_mtree};
|
use orchid_host::parse::{HostParseCtxImpl, parse_expr, parse_items};
|
||||||
use orchid_host::subprocess::ext_command;
|
use orchid_host::subprocess::ext_command;
|
||||||
use orchid_host::system::init_systems;
|
use orchid_host::system::init_systems;
|
||||||
use substack::Substack;
|
use substack::Substack;
|
||||||
@@ -83,9 +82,9 @@ fn get_all_extensions<'a>(
|
|||||||
|
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
async fn main() -> io::Result<ExitCode> {
|
async fn main() -> io::Result<ExitCode> {
|
||||||
let code = Rc::new(RefCell::new(ExitCode::SUCCESS));
|
let exit_code = Rc::new(RefCell::new(ExitCode::SUCCESS));
|
||||||
let local_set = LocalSet::new();
|
let local_set = LocalSet::new();
|
||||||
let code1 = code.clone();
|
let exit_code1 = exit_code.clone();
|
||||||
local_set.spawn_local(async move {
|
local_set.spawn_local(async move {
|
||||||
let args = Args::parse();
|
let args = Args::parse();
|
||||||
let ctx = &Ctx::new(Rc::new(|fut| mem::drop(spawn_local(fut))));
|
let ctx = &Ctx::new(Rc::new(|fut| mem::drop(spawn_local(fut))));
|
||||||
@@ -103,7 +102,7 @@ async fn main() -> io::Result<ExitCode> {
|
|||||||
let mut file = File::open(file.as_std_path()).unwrap();
|
let mut file = File::open(file.as_std_path()).unwrap();
|
||||||
let mut buf = String::new();
|
let mut buf = String::new();
|
||||||
file.read_to_string(&mut buf).unwrap();
|
file.read_to_string(&mut buf).unwrap();
|
||||||
let lexemes = lex(i.i(&buf).await, &systems, ctx).await.unwrap();
|
let lexemes = lex(i.i(&buf).await, sym!(usercode; i).await, &systems, ctx).await.unwrap();
|
||||||
println!("{}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl { i }).await, true))
|
println!("{}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl { i }).await, true))
|
||||||
},
|
},
|
||||||
Commands::Parse { file } => {
|
Commands::Parse { file } => {
|
||||||
@@ -111,23 +110,28 @@ async fn main() -> io::Result<ExitCode> {
|
|||||||
let mut file = File::open(file.as_std_path()).unwrap();
|
let mut file = File::open(file.as_std_path()).unwrap();
|
||||||
let mut buf = String::new();
|
let mut buf = String::new();
|
||||||
file.read_to_string(&mut buf).unwrap();
|
file.read_to_string(&mut buf).unwrap();
|
||||||
let lexemes = lex(i.i(&buf).await, &systems, ctx).await.unwrap();
|
let lexemes = lex(i.i(&buf).await, sym!(usercode; i).await, &systems, ctx).await.unwrap();
|
||||||
let Some(first) = lexemes.first() else {
|
let Some(first) = lexemes.first() else {
|
||||||
println!("File empty!");
|
println!("File empty!");
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
let reporter = ReporterImpl::new();
|
let reporter = Reporter::new();
|
||||||
let pctx = HostParseCtxImpl { reporter: &reporter, systems: &systems };
|
let pctx = HostParseCtxImpl {
|
||||||
let snip = Snippet::new(first, &lexemes, i);
|
reporter: &reporter,
|
||||||
|
systems: &systems,
|
||||||
|
ctx: ctx.clone(),
|
||||||
|
src: sym!(usercode; i).await,
|
||||||
|
};
|
||||||
|
let snip = Snippet::new(first, &lexemes);
|
||||||
let ptree = parse_items(&pctx, Substack::Bottom, snip).await.unwrap();
|
let ptree = parse_items(&pctx, Substack::Bottom, snip).await.unwrap();
|
||||||
if let Some(errv) = reporter.errv() {
|
if let Some(errv) = reporter.errv() {
|
||||||
eprintln!("{errv}");
|
eprintln!("{errv}");
|
||||||
*code1.borrow_mut() = ExitCode::FAILURE;
|
*exit_code1.borrow_mut() = ExitCode::FAILURE;
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if ptree.is_empty() {
|
if ptree.is_empty() {
|
||||||
eprintln!("File empty only after parsing, but no errors were reported");
|
eprintln!("File empty only after parsing, but no errors were reported");
|
||||||
*code1.borrow_mut() = ExitCode::FAILURE;
|
*exit_code1.borrow_mut() = ExitCode::FAILURE;
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
for item in ptree {
|
for item in ptree {
|
||||||
@@ -140,23 +144,38 @@ async fn main() -> io::Result<ExitCode> {
 std::io::stdout().flush().unwrap();
 let mut prompt = String::new();
 stdin().read_line(&mut prompt).await.unwrap();
-let lexemes = lex(i.i(prompt.trim()).await, &systems, ctx).await.unwrap();
+eprintln!("lexing");
+let lexemes =
+  lex(i.i(prompt.trim()).await, sym!(usercode; i).await, &systems, ctx).await.unwrap();
+eprintln!("lexed");
 if args.logs {
   println!("lexed: {}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl { i }).await, true));
 }
-let mtreev = parse_mtree(
-  Snippet::new(&lexemes[0], &lexemes, i),
-  Substack::Bottom.push(i.i("orcx").await).push(i.i("input").await),
+let path = sym!(usercode; i).await;
+let reporter = Reporter::new();
+let parse_ctx = HostParseCtxImpl {
+  ctx: ctx.clone(),
+  reporter: &reporter,
+  src: path.clone(),
+  systems: &systems[..],
+};
+let parse_res = parse_expr(
+  &parse_ctx,
+  path.clone(),
+  PathSetBuilder::new(),
+  Snippet::new(&lexemes[0], &lexemes),
 )
-.await
-.unwrap();
-if args.logs {
-  let fmt = mtreev_fmt(&mtreev, &FmtCtxImpl { i }).await;
-  println!("parsed: {}", take_first(&fmt, true));
-}
-let expr = mtreev_to_expr(&mtreev, Substack::Bottom, ctx).await;
-let mut xctx =
-  ExecCtx::new(ctx.clone(), logger.clone(), root.clone(), expr.at(Pos::None)).await;
+.await;
+eprintln!("parsed");
+let expr = match reporter.merge(parse_res) {
+  Ok(expr) => expr,
+  Err(e) => {
+    eprintln!("{e}");
+    continue;
+  },
+};
+let mut xctx = ExecCtx::new(ctx.clone(), logger.clone(), root.clone(), expr).await;
+eprintln!("executed");
 xctx.set_gas(Some(1000));
 xctx.execute().await;
 match xctx.result() {
@@ -168,18 +187,35 @@ async fn main() -> io::Result<ExitCode> {
 },
 Commands::Execute { code } => {
   let (root, systems) = init_systems(&args.system, &extensions).await.unwrap();
-  let lexemes = lex(i.i(code.trim()).await, &systems, ctx).await.unwrap();
+  let lexemes =
+    lex(i.i(code.trim()).await, sym!(usercode; i).await, &systems, ctx).await.unwrap();
   if args.logs {
     println!("lexed: {}", take_first(&ttv_fmt(&lexemes, &FmtCtxImpl { i }).await, true));
   }
-  let mtreev =
-    parse_mtree(Snippet::new(&lexemes[0], &lexemes, i), Substack::Bottom).await.unwrap();
-  if args.logs {
-    let fmt = mtreev_fmt(&mtreev, &FmtCtxImpl { i }).await;
-    println!("parsed: {}", take_first(&fmt, true));
-  }
-  let expr = mtreev_to_expr(&mtreev, Substack::Bottom, ctx).await;
-  let mut xctx = ExecCtx::new(ctx.clone(), logger.clone(), root, expr.at(Pos::None)).await;
+  let path = sym!(usercode; i).await;
+  let reporter = Reporter::new();
+  let parse_ctx = HostParseCtxImpl {
+    ctx: ctx.clone(),
+    reporter: &reporter,
+    src: path.clone(),
+    systems: &systems[..],
+  };
+  let parse_res = parse_expr(
+    &parse_ctx,
+    path.clone(),
+    PathSetBuilder::new(),
+    Snippet::new(&lexemes[0], &lexemes),
+  )
+  .await;
+  let expr = match reporter.merge(parse_res) {
+    Ok(expr) => expr,
+    Err(e) => {
+      eprintln!("{e}");
+      *exit_code1.borrow_mut() = ExitCode::FAILURE;
+      return;
+    },
+  };
+  let mut xctx = ExecCtx::new(ctx.clone(), logger.clone(), root, expr).await;
   xctx.set_gas(Some(1000));
   xctx.execute().await;
   match xctx.result() {
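
In both the REPL and Execute arms above, parse errors accumulate in a Reporter while parse_expr runs, and reporter.merge(parse_res) only yields Ok when nothing was reported; on failure the REPL continues to the next prompt, while Execute records a failing exit code and returns. A minimal sketch of that accumulate-then-merge shape, using a hypothetical Reporter rather than the real orchid-base type:

use std::cell::RefCell;

// Hypothetical stand-in for the reporter used above; only the shape of the API matters here.
struct Reporter {
  errors: RefCell<Vec<String>>,
}

impl Reporter {
  fn new() -> Self { Reporter { errors: RefCell::new(Vec::new()) } }
  fn report(&self, msg: impl Into<String>) { self.errors.borrow_mut().push(msg.into()); }
  // A partial result only survives if the pass that produced it reported nothing.
  fn merge<T>(&self, res: Option<T>) -> Result<T, String> {
    let errors = self.errors.borrow();
    match res {
      Some(value) if errors.is_empty() => Ok(value),
      _ => Err(errors.join("\n")),
    }
  }
}

fn main() {
  let reporter = Reporter::new();
  reporter.report("unexpected token");
  match reporter.merge(Some(42)) {
    Ok(value) => println!("parsed: {value}"),
    Err(e) => eprintln!("{e}"),
  }
}
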

@@ -192,6 +228,6 @@ async fn main() -> io::Result<ExitCode> {
 }
 });
 local_set.await;
-let x = *code.borrow();
+let x = *exit_code.borrow();
 Ok(x)
 }
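
These hunks also show the renamed exit-status plumbing: the command body runs on a tokio LocalSet, a failing path flips a shared cell through the exit_code1 handle, and main reads exit_code once the set has drained. A minimal sketch of that pattern, assuming the cell is an Rc<RefCell<ExitCode>> cloned into the spawned task (the actual type is not visible in this diff):

use std::cell::RefCell;
use std::process::ExitCode;
use std::rc::Rc;

#[tokio::main(flavor = "current_thread")]
async fn main() -> std::io::Result<ExitCode> {
  // Shared slot for the eventual exit status; the spawned task gets its own handle.
  let exit_code = Rc::new(RefCell::new(ExitCode::SUCCESS));
  let exit_code1 = exit_code.clone();
  let local_set = tokio::task::LocalSet::new();
  local_set.spawn_local(async move {
    // Any failing path marks the run as failed instead of returning a value out of the task.
    *exit_code1.borrow_mut() = ExitCode::FAILURE;
  });
  // Drive all spawned local tasks to completion, then surface the recorded status.
  local_set.await;
  let x = *exit_code.borrow();
  Ok(x)
}

Keeping the status in a shared cell rather than returning it from the task avoids threading a value through every early return inside the command handlers.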

@@ -11,7 +11,7 @@ max_width = 100
 error_on_unformatted = true
 format_macro_matchers = true
 newline_style = "Unix"
-normalize_comments = true
+normalize_comments = false
 wrap_comments = true
 comment_width = 80
 doc_comment_code_block_width = 80

@@ -9,6 +9,8 @@ use check_api_refs::check_api_refs;
 use clap::{Parser, Subcommand};
 use orcx::orcx;
+
+use crate::orcx::orcxdb;
 
 #[derive(Parser)]
 pub struct Args {
   #[arg(short, long)]
@@ -22,7 +24,11 @@ pub enum Commands {
 CheckApiRefs,
 Orcx {
   #[arg(trailing_var_arg = true, num_args = 1..)]
-  orcx_argv: Vec<String>,
+  argv: Vec<String>,
+},
+Orcxdb {
+  #[arg(trailing_var_arg = true, num_args = 1..)]
+  argv: Vec<String>,
 },
 }
 
@@ -35,7 +41,8 @@ fn main() -> io::Result<ExitCode> {
 let args = Args::parse();
 match &args.command {
   Commands::CheckApiRefs => check_api_refs(&args)?,
-  Commands::Orcx { orcx_argv } => orcx(&args, orcx_argv)?,
+  Commands::Orcx { argv } => orcx(&args, argv)?,
+  Commands::Orcxdb { argv } => orcxdb(&args, argv)?,
 }
 Ok(if EXIT_OK.load(Ordering::Relaxed) { ExitCode::SUCCESS } else { ExitCode::FAILURE })
 }
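
Both the Orcx and Orcxdb variants collect their remaining arguments with clap's trailing_var_arg, so everything after the subcommand is forwarded verbatim to the child process instead of being parsed as xtask flags. A stripped-down illustration of that pattern (the Cli and Cmd names here are invented for the example):

use clap::{Parser, Subcommand};

#[derive(Parser)]
struct Cli {
  #[command(subcommand)]
  command: Cmd,
}

#[derive(Subcommand)]
enum Cmd {
  Orcxdb {
    // Once the first value is seen, the rest is captured as-is, even tokens that look like flags.
    #[arg(trailing_var_arg = true, num_args = 1..)]
    argv: Vec<String>,
  },
}

fn main() {
  // e.g. `cli orcxdb repl --logs` yields argv == ["repl", "--logs"]
  match Cli::parse().command {
    Cmd::Orcxdb { argv } => println!("forwarding: {argv:?}"),
  }
}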

@@ -14,3 +14,15 @@ pub fn orcx(_args: &Args, argv: &[String]) -> io::Result<()> {
 }
 Ok(())
 }
+
+pub fn orcxdb(_args: &Args, argv: &[String]) -> io::Result<()> {
+  if !Command::new("cargo").args(["build", "-p", "orchid-std"]).status()?.success() {
+    EXIT_OK.store(false, Ordering::Relaxed);
+    return Ok(());
+  }
+  let path = format!("./target/debug/orcx{}", std::env::consts::EXE_SUFFIX);
+  if !Command::new("lldb").args([&path]).args(argv).status()?.success() {
+    EXIT_OK.store(false, Ordering::Relaxed);
+  }
+  Ok(())
+}
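
The new orcxdb helper leans on items defined elsewhere in the xtask crate (Command, EXIT_OK, Ordering). A self-contained sketch of the same build-then-debug flow, where the EXIT_OK declaration is an assumption about how that flag is defined:

use std::io;
use std::process::Command;
use std::sync::atomic::{AtomicBool, Ordering};

// Assumed stand-in for the flag the xtask crate already defines; main() reads it to pick the exit code.
static EXIT_OK: AtomicBool = AtomicBool::new(true);

pub fn orcxdb_sketch(argv: &[String]) -> io::Result<()> {
  // Rebuild the extension first so the debugger sees current symbols.
  if !Command::new("cargo").args(["build", "-p", "orchid-std"]).status()?.success() {
    EXIT_OK.store(false, Ordering::Relaxed);
    return Ok(());
  }
  // Launch the previously built orcx binary under lldb, forwarding the remaining arguments.
  let path = format!("./target/debug/orcx{}", std::env::consts::EXE_SUFFIX);
  if !Command::new("lldb").args([&path]).args(argv).status()?.success() {
    EXIT_OK.store(false, Ordering::Relaxed);
  }
  Ok(())
}

fn main() -> io::Result<()> {
  let argv: Vec<String> = std::env::args().skip(1).collect();
  orcxdb_sketch(&argv)
}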