Gitbutler >:(
I don't understand this piece of software at all
 README.md | 10 ++++++++++
@@ -34,3 +34,13 @@ Orchids and mangrove trees form complex ecosystems; The flowers persuade the tre
## Contribution

All contributions are welcome. For the time being, use the issue tracker to discuss ideas.

## Forks

The code in this repository is available under the GNU GPLv3, but identifying marks stored in the repository are restricted for use with an unmodified copy of this software. If you distribute modified versions of this software, you must either replace these identifying marks or modify them in a way that clearly indicates that what you are distributing is a derivative work and not this official version. You must also replace any contact information in such a way that your derivative work does not suggest that we may be contacted about issues. Your derivative work may use the original identifying marks and contact information to identify this project as its basis, while emphasizing that the authors of the original project are neither in control of, nor liable for, the derivative work.

Identifying marks include the Orchid logo, the ribbon image above, and the names "Orchid" and "Orchidlang", unless they are part of a technical interface.

Contact information includes email addresses and links to the source code and issue tracker.

Words listed as identifying marks are explicitly not considered as such when they appear in technical interfaces or APIs: for example, shell commands or identifiers within the language.

@@ -1,26 +0,0 @@
pub trait ApiEquiv {
  type Api;
}

pub trait ToApi: Sized + ApiEquiv {
  type Ctx;
  fn to_api(&self, ctx: &mut Self::Ctx) -> Self::Api;
  fn into_api(self, ctx: &mut Self::Ctx) -> Self::Api { self.to_api(ctx) }
}

pub trait FromApi: ApiEquiv {
  type Ctx;
  fn from_api(api: &Self::Api, ctx: &mut Self::Ctx) -> Self;
}

/// This is the weakest kind of conversion possible;
/// By holding a reference to the source type, you can provide a reference to the target type.
/// Unlike Into, the target type may hold references into the source,
/// but unlike AsRef, it doesn't have to be fully contained in the source.
/// The resulting object is stackbound so its utility is very limited.
pub trait ProjectionMut<T> {
  fn with_built<R>(&mut self, cb: impl FnOnce(&mut T) -> R) -> R;
}
impl<T> ProjectionMut<T> for T {
  fn with_built<R>(&mut self, cb: impl FnOnce(&mut T) -> R) -> R { cb(self) }
}
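For orientation, here is a minimal sketch of what the removed `ProjectionMut` blanket impl allowed. The `Config` type and `shout` helper are invented for illustration and are not part of this repository:

```rust
pub trait ProjectionMut<T> {
  fn with_built<R>(&mut self, cb: impl FnOnce(&mut T) -> R) -> R;
}
// Blanket impl: every `T` trivially "projects" to itself.
impl<T> ProjectionMut<T> for T {
  fn with_built<R>(&mut self, cb: impl FnOnce(&mut T) -> R) -> R { cb(self) }
}

// Hypothetical caller: thanks to the blanket impl, the closure simply
// receives `&mut Config` when invoked on a plain `Config`.
struct Config { label: String }

fn shout(cfg: &mut impl ProjectionMut<Config>) {
  cfg.with_built(|c| c.label.make_ascii_uppercase());
}

fn main() {
  let mut cfg = Config { label: "hello".to_string() };
  shout(&mut cfg);
  assert_eq!(cfg.label, "HELLO");
}
```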
@@ -2,10 +2,8 @@ mod coding;
mod helpers;
mod hierarchy;
mod relations;
mod api_conv;

pub use coding::*;
pub use helpers::*;
pub use hierarchy::*;
pub use relations::*;
pub use api_conv::*;

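Much of the remaining diff applies one pattern: the generic `ApiEquiv`/`ToApi`/`FromApi` traits above are dropped in favour of plain inherent `to_api`/`from_api` methods on each type, with `marker()` and `deintern` call sites renamed to match. A minimal sketch of the resulting shape, using a `Point` type invented purely for illustration:

```rust
// Invented example types; only the method-naming convention comes from this commit.
mod api {
  #[derive(Clone, Debug, PartialEq)]
  pub struct Point { pub x: i64, pub y: i64 }
}

#[derive(Clone, Debug, PartialEq)]
struct Point { x: i64, y: i64 }

impl Point {
  // No trait bound and no `Ctx` parameter: just inherent conversions.
  fn to_api(&self) -> api::Point { api::Point { x: self.x, y: self.y } }
  fn from_api(api: &api::Point) -> Self { Self { x: api.x, y: api.y } }
}

fn main() {
  let p = Point { x: 1, y: 2 };
  assert_eq!(Point::from_api(&p.to_api()), p);
}
```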
@@ -5,10 +5,10 @@ use orchid_api_derive::{Coding, Hierarchy};
use orchid_api_traits::Request;
use ordered_float::NotNan;

use crate::{Comment, ExtHostReq, HostExtReq, Location, OrcResult, Paren, ParsId, SysId, TStr, TStrv};
use crate::{Atom, Comment, ExtHostReq, HostExtReq, Location, OrcResult, Paren, ParsId, SysId, TStr, TStrv};

#[derive(Clone, Debug, Coding)]
pub struct MacroTreeId(NonZeroU64);
#[derive(Clone, Copy, Debug, Coding, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MacroTreeId(pub NonZeroU64);

#[derive(Clone, Debug, Coding)]
pub struct MacroTree {
@@ -23,6 +23,7 @@ pub enum MacroToken {
  Slot(MacroTreeId),
  Lambda(Vec<MacroTree>, Vec<MacroTree>),
  Ph(Placeholder),
  Atom(Atom),
}

#[derive(Clone, Debug, Coding)]

@@ -5,7 +5,7 @@ use std::sync::Arc;
|
||||
use itertools::Itertools;
|
||||
|
||||
use crate::api;
|
||||
use crate::interner::{deintern, Tok};
|
||||
use crate::interner::Tok;
|
||||
use crate::location::Pos;
|
||||
|
||||
/// A point of interest in resolving the error, such as the point where
|
||||
@@ -18,21 +18,21 @@ pub struct ErrPos {
|
||||
pub message: Option<Arc<String>>,
|
||||
}
|
||||
impl ErrPos {
|
||||
pub fn from_api(pel: &api::ErrLocation) -> Self {
|
||||
pub fn new(msg: &str, position: Pos) -> Self {
|
||||
Self { message: Some(Arc::new(msg.to_string())), position }
|
||||
}
|
||||
fn from_api(api: &api::ErrLocation) -> Self {
|
||||
Self {
|
||||
message: Some(pel.message.clone()).filter(|s| !s.is_empty()),
|
||||
position: Pos::from_api(&pel.location),
|
||||
message: Some(api.message.clone()).filter(|s| !s.is_empty()),
|
||||
position: Pos::from_api(&api.location),
|
||||
}
|
||||
}
|
||||
pub fn to_api(&self) -> api::ErrLocation {
|
||||
fn to_api(&self) -> api::ErrLocation {
|
||||
api::ErrLocation {
|
||||
message: self.message.clone().unwrap_or_default(),
|
||||
location: self.position.to_api(),
|
||||
}
|
||||
}
|
||||
pub fn new(msg: &str, position: Pos) -> Self {
|
||||
Self { message: Some(Arc::new(msg.to_string())), position }
|
||||
}
|
||||
}
|
||||
impl From<Pos> for ErrPos {
|
||||
fn from(origin: Pos) -> Self { Self { position: origin, message: None } }
|
||||
@@ -45,20 +45,20 @@ pub struct OrcErr {
|
||||
pub positions: Vec<ErrPos>,
|
||||
}
|
||||
impl OrcErr {
|
||||
pub fn from_api(err: &api::OrcError) -> Self {
|
||||
Self {
|
||||
description: deintern(err.description),
|
||||
message: err.message.clone(),
|
||||
positions: err.locations.iter().map(ErrPos::from_api).collect(),
|
||||
}
|
||||
}
|
||||
pub fn to_api(&self) -> api::OrcError {
|
||||
fn to_api(&self) -> api::OrcError {
|
||||
api::OrcError {
|
||||
description: self.description.marker(),
|
||||
description: self.description.to_api(),
|
||||
message: self.message.clone(),
|
||||
locations: self.positions.iter().map(ErrPos::to_api).collect(),
|
||||
}
|
||||
}
|
||||
fn from_api(api: &api::OrcError) -> Self {
|
||||
Self {
|
||||
description: Tok::from_api(api.description),
|
||||
message: api.message.clone(),
|
||||
positions: api.locations.iter().map(ErrPos::from_api).collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl Eq for OrcErr {}
|
||||
impl PartialEq for OrcErr {
|
||||
@@ -90,14 +90,6 @@ impl OrcErrv {
|
||||
if v.is_empty() { Err(EmptyErrv) } else { Ok(Self(v)) }
|
||||
}
|
||||
#[must_use]
|
||||
pub fn to_api(&self) -> Vec<api::OrcError> { self.0.iter().map(OrcErr::to_api).collect_vec() }
|
||||
#[must_use]
|
||||
pub fn from_api<'a>(apiv: impl IntoIterator<Item = &'a api::OrcError>) -> Self {
|
||||
let v = apiv.into_iter().map(OrcErr::from_api).collect_vec();
|
||||
assert!(!v.is_empty(), "Error condition with 0 errors");
|
||||
Self(v)
|
||||
}
|
||||
#[must_use]
|
||||
pub fn extended<T>(mut self, errors: impl IntoIterator<Item = T>) -> Self
|
||||
where Self: Extend<T> {
|
||||
self.extend(errors);
|
||||
@@ -119,6 +111,10 @@ impl OrcErrv {
|
||||
pub fn pos_iter(&self) -> impl Iterator<Item = ErrPos> + '_ {
|
||||
self.0.iter().flat_map(|e| e.positions.iter().cloned())
|
||||
}
|
||||
pub fn to_api(&self) -> Vec<api::OrcError> { self.0.iter().map(OrcErr::to_api).collect() }
|
||||
pub fn from_api<'a>(api: impl IntoIterator<Item = &'a api::OrcError>) -> Self {
|
||||
Self(api.into_iter().map(OrcErr::from_api).collect())
|
||||
}
|
||||
}
|
||||
impl From<OrcErr> for OrcErrv {
|
||||
fn from(value: OrcErr) -> Self { Self(vec![value]) }
|
||||
|
||||
@@ -10,7 +10,6 @@ use itertools::Itertools as _;
|
||||
use orchid_api_traits::{Decode, Encode, Request};
|
||||
|
||||
use crate::api;
|
||||
use orchid_api_traits::{ApiEquiv, FromApi, ToApi};
|
||||
use crate::reqnot::{DynRequester, Requester};
|
||||
|
||||
/// Clippy crashes while verifying `Tok: Sized` without this and I cba to create
|
||||
@@ -25,7 +24,10 @@ pub struct Tok<T: Interned> {
|
||||
}
|
||||
impl<T: Interned> Tok<T> {
|
||||
pub fn new(data: Arc<T>, marker: T::Marker) -> Self { Self { data, marker: ForceSized(marker) } }
|
||||
pub fn marker(&self) -> T::Marker { self.marker.0 }
|
||||
pub fn to_api(&self) -> T::Marker { self.marker.0 }
|
||||
pub fn from_api<M>(marker: M) -> Self where M: InternMarker<Interned = T> {
|
||||
deintern(marker)
|
||||
}
|
||||
pub fn arc(&self) -> Arc<T> { self.data.clone() }
|
||||
}
|
||||
impl<T: Interned> Deref for Tok<T> {
|
||||
@@ -34,7 +36,7 @@ impl<T: Interned> Deref for Tok<T> {
|
||||
fn deref(&self) -> &Self::Target { self.data.as_ref() }
|
||||
}
|
||||
impl<T: Interned> Ord for Tok<T> {
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.marker().cmp(&other.marker()) }
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.to_api().cmp(&other.to_api()) }
|
||||
}
|
||||
impl<T: Interned> PartialOrd for Tok<T> {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { Some(self.cmp(other)) }
|
||||
@@ -44,7 +46,7 @@ impl<T: Interned> PartialEq for Tok<T> {
|
||||
fn eq(&self, other: &Self) -> bool { self.cmp(other).is_eq() }
|
||||
}
|
||||
impl<T: Interned> hash::Hash for Tok<T> {
|
||||
fn hash<H: hash::Hasher>(&self, state: &mut H) { self.marker().hash(state) }
|
||||
fn hash<H: hash::Hasher>(&self, state: &mut H) { self.to_api().hash(state) }
|
||||
}
|
||||
impl<T: Interned + fmt::Display> fmt::Display for Tok<T> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
@@ -53,7 +55,7 @@ impl<T: Interned + fmt::Display> fmt::Display for Tok<T> {
|
||||
}
|
||||
impl<T: Interned + fmt::Debug> fmt::Debug for Tok<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "Token({} -> {:?})", self.marker().get_id(), self.data.as_ref())
|
||||
write!(f, "Token({} -> {:?})", self.to_api().get_id(), self.data.as_ref())
|
||||
}
|
||||
}
|
||||
impl<T: Interned + Encode> Encode for Tok<T> {
|
||||
@@ -117,25 +119,13 @@ impl Internable for String {
|
||||
fn get_owned(&self) -> Arc<Self::Interned> { Arc::new(self.to_string()) }
|
||||
}
|
||||
|
||||
impl ApiEquiv for Tok<String> {
|
||||
type Api = api::TStr;
|
||||
}
|
||||
impl ToApi for Tok<String> {
|
||||
type Ctx = ();
|
||||
fn to_api(&self, _: &mut Self::Ctx) -> Self::Api { self.marker() }
|
||||
}
|
||||
impl FromApi for Tok<String> {
|
||||
type Ctx = ();
|
||||
fn from_api(api: &Self::Api, _: &mut Self::Ctx) -> Self { deintern(*api) }
|
||||
}
|
||||
|
||||
impl Interned for Vec<Tok<String>> {
|
||||
type Marker = api::TStrv;
|
||||
fn intern(
|
||||
self: Arc<Self>,
|
||||
req: &(impl DynRequester<Transfer = api::IntReq> + ?Sized),
|
||||
) -> Self::Marker {
|
||||
req.request(api::InternStrv(Arc::new(self.iter().map(|t| t.marker()).collect())))
|
||||
req.request(api::InternStrv(Arc::new(self.iter().map(|t| t.to_api()).collect())))
|
||||
}
|
||||
fn bimap(interners: &mut TypedInterners) -> &mut Bimap<Self> { &mut interners.vecs }
|
||||
}
|
||||
@@ -172,17 +162,6 @@ impl Internable for [api::TStr] {
|
||||
Arc::new(self.iter().map(|ts| deintern(*ts)).collect())
|
||||
}
|
||||
}
|
||||
impl ApiEquiv for Tok<Vec<Tok<String>>> {
|
||||
type Api = api::TStrv;
|
||||
}
|
||||
impl ToApi for Tok<Vec<Tok<String>>> {
|
||||
type Ctx = ();
|
||||
fn to_api(&self, _: &mut Self::Ctx) -> Self::Api { self.marker() }
|
||||
}
|
||||
impl FromApi for Tok<Vec<Tok<String>>> {
|
||||
type Ctx = ();
|
||||
fn from_api(api: &Self::Api, _: &mut Self::Ctx) -> Self { deintern(*api) }
|
||||
}
|
||||
|
||||
/// The number of references held to any token by the interner.
|
||||
const BASE_RC: usize = 3;
|
||||
@@ -202,7 +181,7 @@ pub struct Bimap<T: Interned> {
|
||||
impl<T: Interned> Bimap<T> {
|
||||
pub fn insert(&mut self, token: Tok<T>) {
|
||||
self.intern.insert(token.data.clone(), token.clone());
|
||||
self.by_id.insert(token.marker(), token);
|
||||
self.by_id.insert(token.to_api(), token);
|
||||
}
|
||||
|
||||
pub fn by_marker(&self, marker: T::Marker) -> Option<Tok<T>> { self.by_id.get(&marker).cloned() }
|
||||
@@ -218,14 +197,14 @@ impl<T: Interned> Bimap<T> {
|
||||
(self.intern)
|
||||
.extract_if(|k, _| Arc::strong_count(k) == BASE_RC)
|
||||
.map(|(_, v)| {
|
||||
self.by_id.remove(&v.marker());
|
||||
v.marker()
|
||||
self.by_id.remove(&v.to_api());
|
||||
v.to_api()
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn sweep_master(&mut self, retained: HashSet<T::Marker>) {
|
||||
self.intern.retain(|k, v| BASE_RC < Arc::strong_count(k) || retained.contains(&v.marker()))
|
||||
self.intern.retain(|k, v| BASE_RC < Arc::strong_count(k) || retained.contains(&v.to_api()))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -298,7 +277,7 @@ pub fn intern<T: Interned>(t: &(impl Internable<Interned = T> + ?Sized)) -> Tok<
|
||||
tok
|
||||
}
|
||||
|
||||
pub fn deintern<M: InternMarker>(marker: M) -> Tok<M::Interned> {
|
||||
fn deintern<M: InternMarker>(marker: M) -> Tok<M::Interned> {
|
||||
let mut g = interner();
|
||||
if let Some(tok) = M::Interned::bimap(&mut g.interners).by_marker(marker) {
|
||||
return tok;
|
||||
|
||||
@@ -12,7 +12,11 @@ pub fn join_maps<K: Eq + Hash, V>(
  right: HashMap<K, V>,
  mut merge: impl FnMut(&K, V, V) -> V,
) -> HashMap<K, V> {
  try_join_maps(left, right, |k, l, r| Ok(merge(k, l, r))).unwrap_or_else(|e: Never| match e {})
  let (val, ev) = try_join_maps::<K, V, Never>(left, right, |k, l, r| Ok(merge(k, l, r)));
  if let Some(e) = ev.first() {
    match *e {}
  }
  val
}

/// Combine two hashmaps via a fallible value merger. See also [join_maps]
@@ -20,15 +24,22 @@ pub fn try_join_maps<K: Eq + Hash, V, E>(
  left: HashMap<K, V>,
  mut right: HashMap<K, V>,
  mut merge: impl FnMut(&K, V, V) -> Result<V, E>,
) -> Result<HashMap<K, V>, E> {
) -> (HashMap<K, V>, Vec<E>) {
  let mut mixed = HashMap::with_capacity(left.len() + right.len());
  let mut errors = Vec::new();
  for (key, lval) in left {
    let val = match right.remove(&key) {
      None => lval,
      Some(rval) => merge(&key, lval, rval)?,
      Some(rval) => match merge(&key, lval, rval) {
        Ok(v) => v,
        Err(e) => {
          errors.push(e);
          continue;
        },
      },
    };
    mixed.insert(key, val);
  }
  mixed.extend(right);
  Ok(mixed)
  (mixed, errors)
}

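Restated as a self-contained sketch (using `std::collections::HashMap`; the crate itself may use a different map type): the reworked helper now returns the successfully merged portion of the map together with every merge error, instead of aborting on the first conflict.

```rust
use std::collections::HashMap;
use std::hash::Hash;

// Same shape as the reworked helper above, transcribed for a standalone demo.
pub fn try_join_maps<K: Eq + Hash, V, E>(
  left: HashMap<K, V>,
  mut right: HashMap<K, V>,
  mut merge: impl FnMut(&K, V, V) -> Result<V, E>,
) -> (HashMap<K, V>, Vec<E>) {
  let mut mixed = HashMap::with_capacity(left.len() + right.len());
  let mut errors = Vec::new();
  for (key, lval) in left {
    let val = match right.remove(&key) {
      None => lval,
      Some(rval) => match merge(&key, lval, rval) {
        Ok(v) => v,
        Err(e) => { errors.push(e); continue },
      },
    };
    mixed.insert(key, val);
  }
  mixed.extend(right);
  (mixed, errors)
}

fn main() {
  let a = HashMap::from([("x", 1), ("y", 2)]);
  let b = HashMap::from([("y", 3), ("z", 4)]);
  // Every merge conflict is reported, but non-conflicting entries still land
  // in the result: "x" and "z" survive, while the clash on "y" becomes an error.
  let (merged, errs) = try_join_maps(a, b, |k, _, _| Err::<i32, _>(format!("conflict on {k}")));
  assert_eq!(merged.len(), 2);
  assert_eq!(errs, vec!["conflict on y".to_string()]);
}
```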
@@ -19,6 +19,7 @@ pub mod parse;
pub mod pure_seq;
pub mod reqnot;
pub mod sequence;
pub mod side;
pub mod tokens;
pub mod tree;
pub mod macros;

@@ -7,8 +7,7 @@ use std::ops::Range;
|
||||
|
||||
use trait_set::trait_set;
|
||||
|
||||
use orchid_api_traits::{ApiEquiv, FromApi, ToApi};
|
||||
use crate::interner::{deintern, intern, Tok};
|
||||
use crate::interner::{intern, Tok};
|
||||
use crate::name::Sym;
|
||||
use crate::{api, intern, sym};
|
||||
|
||||
@@ -38,30 +37,20 @@ impl Pos {
|
||||
other => format!("{other:?}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl ApiEquiv for Pos {
|
||||
type Api = api::Location;
|
||||
}
|
||||
impl FromApi for Pos {
|
||||
type Ctx = ();
|
||||
fn from_api(api: &Self::Api, ctx: &mut Self::Ctx) -> Self {
|
||||
pub fn from_api(api: &api::Location) -> Self {
|
||||
match_mapping!(api, api::Location => Pos {
|
||||
None, Inherit, SlotTarget,
|
||||
Range(r.clone()),
|
||||
Gen(cgi => CodeGenInfo::from_api(cgi, &mut ())),
|
||||
SourceRange(sr => CodeGenInfo::from_api(sr, &mut ()))
|
||||
Gen(cgi => CodeGenInfo::from_api(cgi)),
|
||||
SourceRange(sr => SourceRange::from_api(sr))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ToApi for Pos {
|
||||
type Ctx = ();
|
||||
fn to_api(&self, ctx: &mut Self::Ctx) -> Self::Api {
|
||||
match_mapping!(self, Pos => Self::Api {
|
||||
pub fn to_api(&self) -> api::Location {
|
||||
match_mapping!(self, Pos => api::Location {
|
||||
None, Inherit, SlotTarget,
|
||||
Range(r.clone()),
|
||||
Gen(cgi.to_api(ctx)),
|
||||
SourceRange(sr.to_api(ctx)),
|
||||
Gen(cgi.to_api()),
|
||||
SourceRange(sr.to_api()),
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -106,20 +95,11 @@ impl SourceRange {
|
||||
pub fn zw(path: Sym, pos: u32) -> Self {
|
||||
Self { path, range: pos..pos }
|
||||
}
|
||||
}
|
||||
impl ApiEquiv for SourceRange {
|
||||
type Api = api::SourceRange;
|
||||
}
|
||||
impl FromApi for SourceRange {
|
||||
type Ctx = ();
|
||||
fn from_api(api: &Self::Api, ctx: &mut Self::Ctx) -> Self {
|
||||
Self { path: Sym::from_api(&api.path, ctx), range: api.range.clone() }
|
||||
fn from_api(api: &api::SourceRange) -> Self {
|
||||
Self { path: Sym::from_api(api.path), range: api.range.clone() }
|
||||
}
|
||||
}
|
||||
impl ToApi for SourceRange {
|
||||
type Ctx = ();
|
||||
fn to_api(&self, ctx: &mut Self::Ctx) -> Self::Api {
|
||||
api::SourceRange { path: self.path.to_api(ctx), range: self.range.clone() }
|
||||
fn to_api(&self) -> api::SourceRange {
|
||||
api::SourceRange { path: self.path.to_api(), range: self.range.clone() }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -140,6 +120,15 @@ impl CodeGenInfo {
|
||||
}
|
||||
/// Syntactic location
|
||||
pub fn pos(&self) -> Pos { Pos::Gen(self.clone()) }
|
||||
fn from_api(api: &api::CodeGenInfo) -> Self {
|
||||
Self {
|
||||
generator: Sym::from_api(api.generator),
|
||||
details: Tok::from_api(api.details),
|
||||
}
|
||||
}
|
||||
fn to_api(&self) -> api::CodeGenInfo {
|
||||
api::CodeGenInfo { generator: self.generator.to_api(), details: self.details.to_api() }
|
||||
}
|
||||
}
|
||||
impl fmt::Debug for CodeGenInfo {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "CodeGenInfo({self})") }
|
||||
@@ -150,24 +139,6 @@ impl fmt::Display for CodeGenInfo {
|
||||
if !self.details.is_empty() { write!(f, ", details: {}", self.details) } else { write!(f, ".") }
|
||||
}
|
||||
}
|
||||
impl ApiEquiv for CodeGenInfo {
|
||||
type Api = api::CodeGenInfo;
|
||||
}
|
||||
impl FromApi for CodeGenInfo {
|
||||
type Ctx = ();
|
||||
fn from_api(api: &Self::Api, ctx: &mut Self::Ctx) -> Self {
|
||||
Self {
|
||||
generator: Sym::from_api(&api.generator, ctx),
|
||||
details: Tok::from_api(&api.details, ctx),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl ToApi for CodeGenInfo {
|
||||
type Ctx = ();
|
||||
fn to_api(&self, ctx: &mut Self::Ctx) -> Self::Api {
|
||||
api::CodeGenInfo { generator: self.generator.to_api(ctx), details: self.details.to_api(ctx) }
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
fn pos2lc(s: &str, i: u32) -> (u32, u32) {
|
||||
|
||||
@@ -1,17 +1,35 @@
|
||||
use itertools::Itertools;
|
||||
use never::Never;
|
||||
use trait_set::trait_set;
|
||||
|
||||
use crate::{name::Sym, tree::{AtomTok, Paren, Ph, TokTree}};
|
||||
use std::marker::PhantomData;
|
||||
use crate::{match_mapping, name::Sym, tree::{Paren, Ph}};
|
||||
use std::{marker::PhantomData, sync::Arc};
|
||||
|
||||
use crate::{api, location::Pos};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct MacroSlot<'a>(api::MacroTreeId, PhantomData<&'a ()>);
|
||||
impl<'a> MacroSlot<'a> {
|
||||
pub fn id(self) -> api::MacroTreeId { self.0 }
|
||||
}
|
||||
|
||||
trait_set! {
|
||||
pub trait MacroAtomToApi<A> = FnMut(&A) -> api::MacroToken;
|
||||
pub trait MacroAtomFromApi<'a, A> = FnMut(&api::Atom) -> MTok<'a, A>;
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct MTree<'a, A: AtomTok> {
|
||||
pub struct MTree<'a, A> {
|
||||
pub pos: Pos,
|
||||
pub tok: MTok<'a, A>
|
||||
pub tok: Arc<MTok<'a, A>>
|
||||
}
|
||||
impl<'a, A> MTree<'a, A> {
|
||||
pub(crate) fn from_api(api: &api::MacroTree, do_atom: &mut impl MacroAtomFromApi<'a, A>) -> Self {
|
||||
Self { pos: Pos::from_api(&api.location), tok: Arc::new(MTok::from_api(&api.token, do_atom)) }
|
||||
}
|
||||
pub(crate) fn to_api(&self, do_atom: &mut impl MacroAtomToApi<A>) -> api::MacroTree {
|
||||
api::MacroTree { location: self.pos.to_api(), token: self.tok.to_api(do_atom) }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
@@ -21,41 +39,48 @@ pub enum MTok<'a, A> {
|
||||
Slot(MacroSlot<'a>),
|
||||
Lambda(Vec<MTree<'a, A>>, Vec<MTree<'a, A>>),
|
||||
Ph(Ph),
|
||||
Atom(A)
|
||||
Atom(A),
|
||||
Ref(Box<MTok<'a, Never>>),
|
||||
}
|
||||
impl<'a, A> MTree<'a, A> {
|
||||
pub(crate) fn from_api(api: &api::MacroTree) -> Self {
|
||||
use api::MacroToken as MTK;
|
||||
let tok = match &api.token {
|
||||
MTK::Lambda(x, b) => MTok::Lambda(mtreev_from_api(x), mtreev_from_api(b)),
|
||||
MTK::Name(t) => MTok::Name(Sym::deintern(*t)),
|
||||
MTK::Slot(tk) => MTok::Slot(MacroSlot(tk.clone(), PhantomData)),
|
||||
MTK::S(p, b) => MTok::S(p.clone(), mtreev_from_api(b)),
|
||||
MTK::Ph(ph) => MTok::Ph(Ph::from_api(ph)),
|
||||
};
|
||||
Self { pos: Pos::from_api(&api.location), tok }
|
||||
impl<'a, A> MTok<'a, A> {
|
||||
pub(crate) fn from_api(
|
||||
api: &api::MacroToken,
|
||||
do_atom: &mut impl MacroAtomFromApi<'a, A>
|
||||
) -> Self {
|
||||
match_mapping!(&api, api::MacroToken => MTok::<'a, A> {
|
||||
Lambda(x => mtreev_from_api(x, do_atom), b => mtreev_from_api(b, do_atom)),
|
||||
Name(t => Sym::from_api(*t)),
|
||||
Slot(tk => MacroSlot(*tk, PhantomData)),
|
||||
S(p.clone(), b => mtreev_from_api(b, do_atom)),
|
||||
Ph(ph => Ph::from_api(ph)),
|
||||
} {
|
||||
api::MacroToken::Atom(a) => do_atom(a)
|
||||
})
|
||||
}
|
||||
pub(crate) fn to_api(&self) -> api::MacroTree {
|
||||
use api::MacroToken as MTK;
|
||||
let token = match &self.tok {
|
||||
MTok::Lambda(x, b) => MTK::Lambda(mtreev_to_api(x), mtreev_to_api(b)),
|
||||
MTok::Name(t) => MTK::Name(t.tok().marker()),
|
||||
MTok::Ph(ph) => MTK::Ph(ph.to_api()),
|
||||
MTok::S(p, b) => MTK::S(p.clone(), mtreev_to_api(b)),
|
||||
MTok::Slot(tk) => MTK::Slot(tk.0.clone()),
|
||||
};
|
||||
api::MacroTree { location: self.pos.to_api(), token }
|
||||
pub(crate) fn to_api(&self, do_atom: &mut impl MacroAtomToApi<A>) -> api::MacroToken {
|
||||
match_mapping!(&self, MTok => api::MacroToken {
|
||||
Lambda(x => mtreev_to_api(x, do_atom), b => mtreev_to_api(b, do_atom)),
|
||||
Name(t.tok().to_api()),
|
||||
Ph(ph.to_api()),
|
||||
S(p.clone(), b => mtreev_to_api(b, do_atom)),
|
||||
Slot(tk.0.clone()),
|
||||
} {
|
||||
MTok::Ref(r) => r.to_api(&mut |e| match *e {}),
|
||||
MTok::Atom(a) => do_atom(a),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn mtreev_from_api<'a, 'b, A>(
|
||||
api: impl IntoIterator<Item = &'b api::MacroTree>
|
||||
api: impl IntoIterator<Item = &'b api::MacroTree>,
|
||||
do_atom: &mut impl MacroAtomFromApi<'a, A>
|
||||
) -> Vec<MTree<'a, A>> {
|
||||
api.into_iter().map(MTree::from_api).collect_vec()
|
||||
api.into_iter().map(|api| MTree::from_api(api, do_atom)).collect_vec()
|
||||
}
|
||||
|
||||
pub fn mtreev_to_api<'a: 'b, 'b, A: 'b>(
|
||||
v: impl IntoIterator<Item = &'b MTree<'a, A>>
|
||||
v: impl IntoIterator<Item = &'b MTree<'a, A>>,
|
||||
do_atom: &mut impl MacroAtomToApi<A>
|
||||
) -> Vec<api::MacroTree> {
|
||||
v.into_iter().map(MTree::to_api).collect_vec()
|
||||
v.into_iter().map(|t| t.to_api(do_atom)).collect_vec()
|
||||
}
|
||||
@@ -12,84 +12,108 @@
|
||||
/// ```
|
||||
#[macro_export]
|
||||
macro_rules! match_mapping {
|
||||
($input:expr, $src:ty => $tgt:ty {
|
||||
// Entry point
|
||||
($input:expr, $($src:ident)::* => $tgt:ty {
|
||||
$($branches:tt)*
|
||||
}) => {
|
||||
match_mapping!(@BRANCH_MUNCH (($input) ($src) ($tgt)) () $($branches)* ,)
|
||||
} $({
|
||||
$($extra:tt)*
|
||||
})?) => {
|
||||
match_mapping!(@BRANCH_MUNCH
|
||||
(($input) ($($src)*) ($tgt) ($($($extra)*)?))
|
||||
()
|
||||
$($branches)* ,
|
||||
)
|
||||
// note: we're adding a comma to the input so the optional trailing comma becomes
|
||||
// an optional second comma which is easier to match
|
||||
};
|
||||
(@BRANCHES_DONE ( ($input:expr) ($src:ty) ($tgt:ty) )
|
||||
// ======== Process match branches
|
||||
// Can't generate branches individually so gather them into a collection and render them here
|
||||
(@BRANCHES_DONE ( ($input:expr) $src:tt ($tgt:ty) ($($extra:tt)*) )
|
||||
$( ( $variant:ident $($pat:tt)*) )*
|
||||
) => {
|
||||
{
|
||||
use $src as Foo;
|
||||
match $input {
|
||||
$(
|
||||
match_mapping!(@PAT (Foo :: $variant) $($pat)*) =>
|
||||
match_mapping!(@PAT ($src $variant) $($pat)*) =>
|
||||
match_mapping!(@VAL (< $tgt >:: $variant) $($pat)*),
|
||||
)*
|
||||
$($extra)*
|
||||
}
|
||||
}
|
||||
};
|
||||
// End with optional second comma
|
||||
(@BRANCH_MUNCH $ext:tt ( $($branches:tt)* ) $(,)?) => {
|
||||
match_mapping!(@BRANCHES_DONE $ext $($branches)* )
|
||||
};
|
||||
// Unit variant
|
||||
(@BRANCH_MUNCH $ext:tt ( $($branches:tt)* ) $variant:ident , $($tail:tt)*) => {
|
||||
match_mapping!(@BRANCH_MUNCH $ext ( $($branches)* ($variant) ) $($tail)*)
|
||||
};
|
||||
// Variant mapped to same shape pair
|
||||
(@BRANCH_MUNCH $ext:tt ( $($branches:tt)* ) $variant:ident $pat:tt , $($tail:tt)*) => {
|
||||
match_mapping!(@BRANCH_MUNCH $ext
|
||||
( $($branches)* ($variant $pat) )
|
||||
$($tail)*)
|
||||
};
|
||||
(@PAT ($($prefix:tt)*) ( $($fields:tt)* )) => {
|
||||
$($prefix)* ( match_mapping!(@PAT_MUNCH () $($fields)*) )
|
||||
(@PAT (($($prefix:tt)*) $variant:ident)) => { $($prefix ::)* $variant };
|
||||
(@PAT $prefix:tt ( $($fields:tt)* )) => {
|
||||
match_mapping!(@PAT_MUNCH (() $prefix) () $($fields)* ,)
|
||||
};
|
||||
(@PAT ($($prefix:tt)*) { $($fields:tt)* }) => {
|
||||
$($prefix)* { match_mapping!(@PAT_MUNCH () $($fields)*) }
|
||||
(@PAT $prefix:tt { $($fields:tt)* }) => {
|
||||
match_mapping!(@PAT_MUNCH ({} $prefix) () $($fields)* ,)
|
||||
};
|
||||
(@PAT ($($path:tt)*)) => { $($path)* };
|
||||
(@PAT_MUNCH ($($names:ident)*) $name:ident => $value:expr) => { $($names ,)* $name };
|
||||
(@PAT_MUNCH ($($names:ident)*) $name:ident => $value:expr , $($tail:tt)*) => {
|
||||
match_mapping!(@PAT_MUNCH ($($names)* $name) $($tail)*)
|
||||
(@PAT_MUNCH (() (($($prefix:ident)*) $variant:ident)) ($($names:ident)*)) => {
|
||||
$($prefix)::* :: $variant ( $($names),* )
|
||||
};
|
||||
(@PAT_MUNCH ($($names:ident)*) $name:ident . $($tail:tt)*) => {
|
||||
match_mapping!(@PAT_DOT_MUNCH ($($names)* $name) $($tail)*)
|
||||
(@PAT_MUNCH ({} (($($prefix:ident)*) $variant:ident)) ($($names:ident)*)) => {
|
||||
$($prefix)::* :: $variant { $($names),* }
|
||||
};
|
||||
(@PAT_MUNCH ($($names:ident)*)) => { $($names),* };
|
||||
(@PAT_DOT_MUNCH $names:tt , $($tail:tt)*) => {
|
||||
match_mapping!(@PAT_MUNCH $names $($tail)*)
|
||||
(@PAT_MUNCH $ctx:tt $names:tt $(,)? ) => { match_mapping!($ctx $names) };
|
||||
(@PAT_MUNCH $ctx:tt ($($names:ident)*) * $name:ident , $($tail:tt)*) => {
|
||||
match_mapping!(@PAT_MUNCH $ctx ($($names)* $name) $($tail)*)
|
||||
};
|
||||
(@PAT_DOT_MUNCH $names:tt $_:tt $($tail:tt)*) => {
|
||||
match_mapping!(@PAT_DOT_MUNCH $names $($tail)*)
|
||||
(@PAT_MUNCH $ctx:tt ($($names:ident)*) $name:ident => $value:expr , $($tail:tt)*) => {
|
||||
match_mapping!(@PAT_MUNCH $ctx ($($names)* $name) $($tail)*)
|
||||
};
|
||||
(@PAT_MUNCH $ctx:tt ($($names:ident)*) $name:ident . $($tail:tt)*) => {
|
||||
match_mapping!(@PAT_DOT_MUNCH $ctx ($($names)* $name) $($tail)*)
|
||||
};
|
||||
(@PAT_DOT_MUNCH $ctx:tt $names:tt , $($tail:tt)*) => {
|
||||
match_mapping!(@PAT_MUNCH $ctx $names $($tail)*)
|
||||
};
|
||||
(@PAT_DOT_MUNCH $ctx:tt $names:tt $_:tt $($tail:tt)*) => {
|
||||
match_mapping!(@PAT_DOT_MUNCH $ctx $names $($tail)*)
|
||||
};
|
||||
(@PAT_DOT_MUNCH ($($names:tt)*)) => { $($names),* };
|
||||
(@VAL ($($prefix:tt)*)) => { $($prefix)* };
|
||||
(@VAL ($($prefix:tt)*) ( $($fields:tt)* )) => {
|
||||
$($prefix)* ( match_mapping!(@VAL_MUNCH () () $($fields)* ) )
|
||||
(@VAL $prefix:tt ( $($fields:tt)* )) => {
|
||||
match_mapping!(@VAL_MUNCH (() $prefix) () $($fields)* , )
|
||||
};
|
||||
(@VAL ($($prefix:tt)*) { $($fields:tt)* }) => {
|
||||
$($prefix)* { match_mapping!(@VAL_MUNCH {} () $($fields)* ) }
|
||||
(@VAL $prefix:tt { $($fields:tt)* }) => {
|
||||
match_mapping!(@VAL_MUNCH ({} $prefix) () $($fields)* , )
|
||||
};
|
||||
(@VAL_MUNCH () ($($prefix:tt)*) $name:ident => $value:expr) => { $($prefix)* $value };
|
||||
(@VAL_MUNCH () ($($prefix:tt)*) $name:ident => $value:expr , $($tail:tt)*) => {
|
||||
match_mapping!(@VAL_MUNCH () ($($prefix)* $value, ) $($tail)*)
|
||||
(@VAL_MUNCH $ctx:tt ($($prefix:tt)*) * $name:ident , $($tail:tt)*) => {
|
||||
match_mapping!(@VAL_MUNCH $ctx ($($prefix)* ($name (* $name)) ) $($tail)*)
|
||||
};
|
||||
(@VAL_MUNCH {} ($($prefix:tt)*) $name:ident => $value:expr) => { $($prefix)* $name: $value };
|
||||
(@VAL_MUNCH {} ($($prefix:tt)*) $name:ident => $value:expr , $($tail:tt)*) => {
|
||||
match_mapping!(@VAL_MUNCH {} ($($prefix)* $name: $value, ) $($tail)*)
|
||||
(@VAL_MUNCH $ctx:tt ($($prefix:tt)*) $name:ident => $value:expr , $($tail:tt)*) => {
|
||||
match_mapping!(@VAL_MUNCH $ctx ($($prefix)* ($name ($value)) ) $($tail)*)
|
||||
};
|
||||
(@VAL_MUNCH () ($($prefix:tt)*) $name:ident . $member:tt $($tail:tt)*) => {
|
||||
match_mapping!(@VAL_DOT_MUNCH () ($($prefix)* $name . $member ) $($tail)*)
|
||||
(@VAL_MUNCH $ctx:tt $fields:tt $name:ident . $member:tt $($tail:tt)*) => {
|
||||
match_mapping!(@VAL_DOT_MUNCH $ctx $fields $name ($name . $member ) $($tail)*)
|
||||
};
|
||||
(@VAL_MUNCH {} ($($prefix:tt)*) $name:ident . $member:tt $($tail:tt)*) => {
|
||||
match_mapping!(@VAL_DOT_MUNCH {} ($($prefix)* $name: $name . $member) $($tail)*)
|
||||
(@VAL_DOT_MUNCH $ctx:tt ($($fields:tt)*) $name:ident $current:tt , $($tail:tt)*) => {
|
||||
match_mapping!(@VAL_MUNCH $ctx ($($fields)* ($name $current)) $($tail)*)
|
||||
};
|
||||
(@VAL_DOT_MUNCH $ptyp:tt ($($prefix:tt)*) , $($tail:tt)*) => {
|
||||
match_mapping!(@VAL_MUNCH $ptyp ($($prefix)* ,) $($tail)*)
|
||||
(@VAL_DOT_MUNCH $ctx:tt $fields:tt $name:ident ($($current:tt)*) $tt:tt $($tail:tt)*) => {
|
||||
match_mapping!(@VAL_DOT_MUNCH $ctx $fields $name ($($current)* $tt) $($tail)*)
|
||||
};
|
||||
(@VAL_DOT_MUNCH $ptyp:tt ($($prefix:tt)*) $tt:tt $($tail:tt)*) => {
|
||||
match_mapping!(@VAL_DOT_MUNCH $ptyp ($($prefix)* $tt) $($tail)*)
|
||||
(@VAL_DOT_MUNCH $ctx:tt ($($fields:tt)*) $name:ident $current:tt) => {
|
||||
match_mapping!(@VAL_MUNCH $ptyp ($($fields)* ($name $current)))
|
||||
};
|
||||
(@VAL_MUNCH $ctx:tt $fields:tt , ) => { match_mapping!(@VAL_MUNCH $ctx $fields) };
|
||||
(@VAL_MUNCH (() ($($prefix:tt)*)) ($( ( $name:ident $($value:tt)* ) )*) ) => {
|
||||
$($prefix)* ( $( $($value)* ),* )
|
||||
};
|
||||
(@VAL_MUNCH ({} ($($prefix:tt)*)) ($( ( $name:ident $($value:tt)* ) )*) ) => {
|
||||
$($prefix)* { $( $name : $($value)* ),* }
|
||||
};
|
||||
(@VAL_DOT_MUNCH $ptyp:tt ($($prefix:tt)*)) => { $($prefix)* };
|
||||
(@VAL_MUNCH $_ptyp:tt ($($prefix:tt)*)) => { $($prefix)* };
|
||||
}
|
||||
@@ -12,8 +12,7 @@ use itertools::Itertools;
|
||||
use trait_set::trait_set;
|
||||
|
||||
use crate::api;
|
||||
use crate::api_conv::{ApiEquiv, FromApi, ToApi};
|
||||
use crate::interner::{deintern, intern, InternMarker, Tok};
|
||||
use crate::interner::{intern, InternMarker, Tok};
|
||||
|
||||
trait_set! {
|
||||
/// Traits that all name iterators should implement
|
||||
@@ -258,7 +257,7 @@ impl VName {
|
||||
if data.is_empty() { Err(EmptyNameError) } else { Ok(Self(data)) }
|
||||
}
|
||||
pub fn deintern(items: impl IntoIterator<Item = api::TStr>) -> Result<Self, EmptyNameError> {
|
||||
Self::new(items.into_iter().map(deintern))
|
||||
Self::new(items.into_iter().map(Tok::from_api))
|
||||
}
|
||||
/// Unwrap the enclosed vector
|
||||
pub fn into_vec(self) -> Vec<Tok<String>> { self.0 }
|
||||
@@ -354,12 +353,13 @@ impl Sym {
|
||||
/// Grab the interner token
|
||||
pub fn tok(&self) -> Tok<Vec<Tok<String>>> { self.0.clone() }
|
||||
/// Get a number unique to this name suitable for arbitrary ordering.
|
||||
pub fn id(&self) -> NonZeroU64 { self.0.marker().get_id() }
|
||||
pub fn id(&self) -> NonZeroU64 { self.0.to_api().get_id() }
|
||||
/// Extern the sym for editing
|
||||
pub fn to_vname(&self) -> VName { VName(self[..].to_vec()) }
|
||||
pub fn deintern(marker: api::TStrv) -> Sym {
|
||||
Self::from_tok(deintern(marker)).expect("Empty sequence found for serialized Sym")
|
||||
pub fn from_api(marker: api::TStrv) -> Sym {
|
||||
Self::from_tok(Tok::from_api(marker)).expect("Empty sequence found for serialized Sym")
|
||||
}
|
||||
pub fn to_api(&self) -> api::TStrv { self.tok().to_api() }
|
||||
}
|
||||
impl fmt::Debug for Sym {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Sym({self})") }
|
||||
@@ -386,17 +386,6 @@ impl Deref for Sym {
|
||||
type Target = PathSlice;
|
||||
fn deref(&self) -> &Self::Target { self.borrow() }
|
||||
}
|
||||
impl ApiEquiv for Sym {
|
||||
type Api = api::TStrv;
|
||||
}
|
||||
impl<C> ToApi<C> for Sym {
|
||||
fn to_api(&self, ctx: &mut C) -> Self::Api { self.tok().to_api(ctx) }
|
||||
}
|
||||
impl<C> FromApi<C> for Sym {
|
||||
fn from_api(api: &Self::Api, ctx: &mut C) -> Self {
|
||||
Self::from_tok(Tok::from_api(api, ctx)).expect("Empty sequence found for serialized Sym")
|
||||
}
|
||||
}
|
||||
|
||||
/// An abstraction over tokenized vs non-tokenized names so that they can be
|
||||
/// handled together in datastructures. The names can never be empty
|
||||
|
||||
@@ -4,10 +4,10 @@ use std::ops::{Deref, Range};
|
||||
use itertools::Itertools;
|
||||
|
||||
use crate::error::{mk_err, mk_errv, OrcRes, Reporter};
|
||||
use crate::interner::{deintern, intern, Tok};
|
||||
use crate::interner::{intern, Tok};
|
||||
use crate::location::Pos;
|
||||
use crate::name::VPath;
|
||||
use crate::tree::{AtomTok, ExtraTok, Paren, TokTree, Token};
|
||||
use crate::tree::{AtomRepr, ExtraTok, Paren, TokTree, Token};
|
||||
use crate::{api, intern};
|
||||
|
||||
pub fn name_start(c: char) -> bool { c.is_alphabetic() || c == '_' }
|
||||
@@ -16,11 +16,11 @@ pub fn op_char(c: char) -> bool { !name_char(c) && !c.is_whitespace() && !"()[]{
|
||||
pub fn unrep_space(c: char) -> bool { c.is_whitespace() && !"\r\n".contains(c) }
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Snippet<'a, 'b, A: AtomTok, X: ExtraTok> {
|
||||
pub struct Snippet<'a, 'b, A: AtomRepr, X: ExtraTok> {
|
||||
prev: &'a TokTree<'b, A, X>,
|
||||
cur: &'a [TokTree<'b, A, X>],
|
||||
}
|
||||
impl<'a, 'b, A: AtomTok, X: ExtraTok> Snippet<'a, 'b, A, X> {
|
||||
impl<'a, 'b, A: AtomRepr, X: ExtraTok> Snippet<'a, 'b, A, X> {
|
||||
pub fn new(prev: &'a TokTree<'b, A, X>, cur: &'a [TokTree<'b, A, X>]) -> Self {
|
||||
Self { prev, cur }
|
||||
}
|
||||
@@ -67,18 +67,18 @@ impl<'a, 'b, A: AtomTok, X: ExtraTok> Snippet<'a, 'b, A, X> {
|
||||
self.split_at(non_fluff_start.unwrap_or(self.len())).1
|
||||
}
|
||||
}
|
||||
impl<'a, 'b, A: AtomTok, X: ExtraTok> Copy for Snippet<'a, 'b, A, X> {}
|
||||
impl<'a, 'b, A: AtomTok, X: ExtraTok> Clone for Snippet<'a, 'b, A, X> {
|
||||
impl<'a, 'b, A: AtomRepr, X: ExtraTok> Copy for Snippet<'a, 'b, A, X> {}
|
||||
impl<'a, 'b, A: AtomRepr, X: ExtraTok> Clone for Snippet<'a, 'b, A, X> {
|
||||
fn clone(&self) -> Self { *self }
|
||||
}
|
||||
impl<'a, 'b, A: AtomTok, X: ExtraTok> Deref for Snippet<'a, 'b, A, X> {
|
||||
impl<'a, 'b, A: AtomRepr, X: ExtraTok> Deref for Snippet<'a, 'b, A, X> {
|
||||
type Target = [TokTree<'b, A, X>];
|
||||
fn deref(&self) -> &Self::Target { self.cur }
|
||||
}
|
||||
|
||||
/// Remove tokens that aren't meaningful in expression context, such as comments
|
||||
/// or line breaks
|
||||
pub fn strip_fluff<'a, A: AtomTok, X: ExtraTok>(
|
||||
pub fn strip_fluff<'a, A: AtomRepr, X: ExtraTok>(
|
||||
tt: &TokTree<'a, A, X>,
|
||||
) -> Option<TokTree<'a, A, X>> {
|
||||
let tok = match &tt.tok {
|
||||
@@ -97,15 +97,15 @@ pub struct Comment {
|
||||
pub pos: Pos,
|
||||
}
|
||||
impl Comment {
|
||||
pub fn from_api(api: &api::Comment) -> Self {
|
||||
Self { pos: Pos::from_api(&api.location), text: deintern(api.text) }
|
||||
}
|
||||
pub fn to_api(&self) -> api::Comment {
|
||||
api::Comment { location: self.pos.to_api(), text: self.text.marker() }
|
||||
api::Comment { location: self.pos.to_api(), text: self.text.to_api() }
|
||||
}
|
||||
pub fn from_api(api: &api::Comment) -> Self {
|
||||
Self { pos: Pos::from_api(&api.location), text: Tok::from_api(api.text) }
|
||||
}
|
||||
}
|
||||
|
||||
pub fn line_items<'a, 'b, A: AtomTok, X: ExtraTok>(
|
||||
pub fn line_items<'a, 'b, A: AtomRepr, X: ExtraTok>(
|
||||
snip: Snippet<'a, 'b, A, X>,
|
||||
) -> Vec<Parsed<'a, 'b, Vec<Comment>, A, X>> {
|
||||
let mut items = Vec::new();
|
||||
@@ -131,7 +131,7 @@ pub fn line_items<'a, 'b, A: AtomTok, X: ExtraTok>(
|
||||
items
|
||||
}
|
||||
|
||||
pub fn try_pop_no_fluff<'a, 'b, A: AtomTok, X: ExtraTok>(
|
||||
pub fn try_pop_no_fluff<'a, 'b, A: AtomRepr, X: ExtraTok>(
|
||||
snip: Snippet<'a, 'b, A, X>,
|
||||
) -> ParseRes<'a, 'b, &'a TokTree<'b, A, X>, A, X> {
|
||||
snip.skip_fluff().pop_front().map(|(output, tail)| Parsed { output, tail }).ok_or_else(|| {
|
||||
@@ -143,7 +143,7 @@ pub fn try_pop_no_fluff<'a, 'b, A: AtomTok, X: ExtraTok>(
|
||||
})
|
||||
}
|
||||
|
||||
pub fn expect_end(snip: Snippet<'_, '_, impl AtomTok, impl ExtraTok>) -> OrcRes<()> {
|
||||
pub fn expect_end(snip: Snippet<'_, '_, impl AtomRepr, impl ExtraTok>) -> OrcRes<()> {
|
||||
match snip.skip_fluff().get(0) {
|
||||
Some(surplus) => Err(mk_errv(
|
||||
intern!(str: "Extra code after end of line"),
|
||||
@@ -154,7 +154,7 @@ pub fn expect_end(snip: Snippet<'_, '_, impl AtomTok, impl ExtraTok>) -> OrcRes<
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_tok<'a, 'b, A: AtomTok, X: ExtraTok>(
|
||||
pub fn expect_tok<'a, 'b, A: AtomRepr, X: ExtraTok>(
|
||||
snip: Snippet<'a, 'b, A, X>,
|
||||
tok: Tok<String>,
|
||||
) -> ParseRes<'a, 'b, (), A, X> {
|
||||
@@ -169,20 +169,20 @@ pub fn expect_tok<'a, 'b, A: AtomTok, X: ExtraTok>(
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Parsed<'a, 'b, T, A: AtomTok, X: ExtraTok> {
|
||||
pub struct Parsed<'a, 'b, T, A: AtomRepr, X: ExtraTok> {
|
||||
pub output: T,
|
||||
pub tail: Snippet<'a, 'b, A, X>,
|
||||
}
|
||||
|
||||
pub type ParseRes<'a, 'b, T, A, X> = OrcRes<Parsed<'a, 'b, T, A, X>>;
|
||||
|
||||
pub fn parse_multiname<'a, 'b, A: AtomTok, X: ExtraTok>(
|
||||
pub fn parse_multiname<'a, 'b, A: AtomRepr, X: ExtraTok>(
|
||||
ctx: &impl Reporter,
|
||||
tail: Snippet<'a, 'b, A, X>,
|
||||
) -> ParseRes<'a, 'b, Vec<(Import, Pos)>, A, X> {
|
||||
let ret = rec(ctx, tail);
|
||||
#[allow(clippy::type_complexity)] // it's an internal function
|
||||
pub fn rec<'a, 'b, A: AtomTok, X: ExtraTok>(
|
||||
pub fn rec<'a, 'b, A: AtomRepr, X: ExtraTok>(
|
||||
ctx: &impl Reporter,
|
||||
tail: Snippet<'a, 'b, A, X>,
|
||||
) -> ParseRes<'a, 'b, Vec<(Vec<Tok<String>>, Option<Tok<String>>, Pos)>, A, X> {
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
use std::fmt;
|
||||
use std::ops::Not;
|
||||
|
||||
use super::boxed_iter::BoxedIter;
|
||||
use crate::boxed_iter::BoxedIter;
|
||||
|
||||
/// A primitive for encoding the two sides Left and Right. While booleans
|
||||
/// are technically usable for this purpose, they're very easy to confuse
|
||||
@@ -8,13 +8,13 @@ use std::sync::Arc;
|
||||
|
||||
use itertools::Itertools;
|
||||
use never::Never;
|
||||
use orchid_api::Placeholder;
|
||||
use ordered_float::NotNan;
|
||||
use trait_set::trait_set;
|
||||
|
||||
use crate::api;
|
||||
use crate::{api, match_mapping};
|
||||
use crate::error::OrcErrv;
|
||||
use crate::interner::{deintern, Tok};
|
||||
use crate::interner::Tok;
|
||||
use crate::location::Pos;
|
||||
use crate::name::PathSlice;
|
||||
use crate::parse::Snippet;
|
||||
use crate::tokens::PARENS;
|
||||
@@ -22,11 +22,11 @@ use crate::tokens::PARENS;
|
||||
pub use api::PhKind as PhKind;
|
||||
|
||||
trait_set! {
|
||||
pub trait RecurCB<'a, A: AtomTok, X: ExtraTok> = Fn(TokTree<'a, A, X>) -> TokTree<'a, A, X>;
|
||||
pub trait RecurCB<'a, A: AtomRepr, X: ExtraTok> = Fn(TokTree<'a, A, X>) -> TokTree<'a, A, X>;
|
||||
pub trait ExtraTok = Display + Clone + fmt::Debug;
|
||||
}
|
||||
|
||||
pub fn recur<'a, A: AtomTok, X: ExtraTok>(
|
||||
pub fn recur<'a, A: AtomRepr, X: ExtraTok>(
|
||||
tt: TokTree<'a, A, X>,
|
||||
f: &impl Fn(TokTree<'a, A, X>, &dyn RecurCB<'a, A, X>) -> TokTree<'a, A, X>,
|
||||
) -> TokTree<'a, A, X> {
|
||||
@@ -42,14 +42,14 @@ pub fn recur<'a, A: AtomTok, X: ExtraTok>(
|
||||
})
|
||||
}
|
||||
|
||||
pub trait AtomTok: fmt::Display + Clone + fmt::Debug {
|
||||
type Context: ?Sized;
|
||||
fn from_api(atom: &api::Atom, pos: Range<u32>, ctx: &mut Self::Context) -> Self;
|
||||
fn to_api(&self) -> api::Atom;
|
||||
pub trait AtomRepr: fmt::Display + Clone + fmt::Debug {
|
||||
type Ctx: ?Sized;
|
||||
fn from_api(api: &api::Atom, pos: Pos, ctx: &mut Self::Ctx) -> Self;
|
||||
fn to_api(&self) -> orchid_api::Atom;
|
||||
}
|
||||
impl AtomTok for Never {
|
||||
type Context = Never;
|
||||
fn from_api(_: &api::Atom, _: Range<u32>, _: &mut Self::Context) -> Self { panic!() }
|
||||
impl AtomRepr for Never {
|
||||
type Ctx = Never;
|
||||
fn from_api(_: &api::Atom, _: Pos, _: &mut Self::Ctx) -> Self { panic!() }
|
||||
fn to_api(&self) -> orchid_api::Atom { match *self {} }
|
||||
}
|
||||
|
||||
@@ -66,25 +66,24 @@ impl<'a> Display for TokHandle<'a> {
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct TokTree<'a, A: AtomTok, X: ExtraTok> {
|
||||
pub struct TokTree<'a, A: AtomRepr, X: ExtraTok> {
|
||||
pub tok: Token<'a, A, X>,
|
||||
pub range: Range<u32>,
|
||||
}
|
||||
impl<'a, A: AtomTok, X: ExtraTok> TokTree<'a, A, X> {
|
||||
pub fn from_api(tt: &api::TokenTree, ctx: &mut A::Context) -> Self {
|
||||
let tok = match &tt.token {
|
||||
api::Token::Atom(a) => Token::Atom(A::from_api(a, tt.range.clone(), ctx)),
|
||||
api::Token::BR => Token::BR,
|
||||
api::Token::NS => Token::NS,
|
||||
api::Token::Bottom(e) => Token::Bottom(OrcErrv::from_api(e)),
|
||||
api::Token::LambdaHead(arg) => Token::LambdaHead(ttv_from_api(arg, ctx)),
|
||||
api::Token::Name(name) => Token::Name(deintern(*name)),
|
||||
api::Token::S(par, b) => Token::S(*par, ttv_from_api(b, ctx)),
|
||||
api::Token::Comment(c) => Token::Comment(c.clone()),
|
||||
api::Token::Slot(id) => Token::Slot(TokHandle::new(*id)),
|
||||
api::Token::Ph(ph) => Token::Ph(Ph {name: deintern(ph.name), kind: ph.kind }),
|
||||
api::Token::Macro(prio) => Token::Macro(*prio)
|
||||
};
|
||||
impl<'a, A: AtomRepr, X: ExtraTok> TokTree<'a, A, X> {
|
||||
pub fn from_api(tt: &api::TokenTree, ctx: &mut A::Ctx) -> Self {
|
||||
let tok = match_mapping!(&tt.token, api::Token => Token::<'a, A, X> {
|
||||
BR, NS,
|
||||
Atom(a => A::from_api(a, Pos::Range(tt.range.clone()), ctx)),
|
||||
Bottom(e => OrcErrv::from_api(e)),
|
||||
LambdaHead(arg => ttv_from_api(arg, ctx)),
|
||||
Name(n => Tok::from_api(*n)),
|
||||
S(*par, b => ttv_from_api(b, ctx)),
|
||||
Comment(c.clone()),
|
||||
Slot(id => TokHandle::new(*id)),
|
||||
Ph(ph => Ph::from_api(ph)),
|
||||
Macro(*prio)
|
||||
});
|
||||
Self { range: tt.range.clone(), tok }
|
||||
}
|
||||
|
||||
@@ -92,20 +91,21 @@ impl<'a, A: AtomTok, X: ExtraTok> TokTree<'a, A, X> {
|
||||
&self,
|
||||
do_extra: &mut impl FnMut(&X, Range<u32>) -> api::TokenTree,
|
||||
) -> api::TokenTree {
|
||||
let token = match &self.tok {
|
||||
Token::Atom(a) => api::Token::Atom(a.to_api()),
|
||||
Token::BR => api::Token::BR,
|
||||
Token::NS => api::Token::NS,
|
||||
Token::Bottom(e) => api::Token::Bottom(e.to_api()),
|
||||
Token::Comment(c) => api::Token::Comment(c.clone()),
|
||||
Token::LambdaHead(arg) => api::Token::LambdaHead(ttv_to_api(arg, do_extra)),
|
||||
Token::Name(n) => api::Token::Name(n.marker()),
|
||||
Token::Slot(tt) => api::Token::Slot(tt.ticket()),
|
||||
Token::S(p, b) => api::Token::S(*p, ttv_to_api(b, do_extra)),
|
||||
Token::Ph(Ph { name, kind }) => api::Token::Ph(Placeholder { name: name.marker(), kind: *kind }),
|
||||
Token::X(x) => return do_extra(x, self.range.clone()),
|
||||
Token::Macro(prio) => api::Token::Macro(*prio),
|
||||
};
|
||||
let token = match_mapping!(&self.tok, Token => api::Token {
|
||||
Atom(a.to_api()),
|
||||
BR,
|
||||
NS,
|
||||
Bottom(e.to_api()),
|
||||
Comment(c.clone()),
|
||||
LambdaHead(arg => ttv_to_api(arg, do_extra)),
|
||||
Name(n.to_api()),
|
||||
Slot(tt.ticket()),
|
||||
S(*p, b => ttv_to_api(b, do_extra)),
|
||||
Ph(ph.to_api()),
|
||||
Macro(*prio),
|
||||
} {
|
||||
Token::X(x) => return do_extra(x, self.range.clone())
|
||||
});
|
||||
api::TokenTree { range: self.range.clone(), token }
|
||||
}
|
||||
|
||||
@@ -120,10 +120,11 @@ impl<'a, A: AtomTok, X: ExtraTok> TokTree<'a, A, X> {
|
||||
Token::Bottom(e) => api::Token::Bottom(e.to_api()),
|
||||
Token::Comment(c) => api::Token::Comment(c.clone()),
|
||||
Token::LambdaHead(arg) => api::Token::LambdaHead(ttv_into_api(arg, do_extra)),
|
||||
Token::Name(n) => api::Token::Name(n.marker()),
|
||||
Token::Name(n) => api::Token::Name(n.to_api()),
|
||||
Token::Slot(tt) => api::Token::Slot(tt.ticket()),
|
||||
Token::S(p, b) => api::Token::S(p, ttv_into_api(b, do_extra)),
|
||||
Token::Ph(Ph { kind, name }) => api::Token::Ph(Placeholder { name: name.marker(), kind }),
|
||||
Token::Ph(Ph { kind, name }) =>
|
||||
api::Token::Ph(api::Placeholder { name: name.to_api(), kind }),
|
||||
Token::X(x) => return do_extra(x, self.range.clone()),
|
||||
Token::Macro(prio) => api::Token::Macro(prio),
|
||||
};
|
||||
@@ -145,25 +146,25 @@ impl<'a, A: AtomTok, X: ExtraTok> TokTree<'a, A, X> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, A: AtomTok, X: ExtraTok> Display for TokTree<'a, A, X> {
|
||||
impl<'a, A: AtomRepr, X: ExtraTok> Display for TokTree<'a, A, X> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.tok) }
|
||||
}
|
||||
|
||||
pub fn ttv_from_api<A: AtomTok, X: ExtraTok>(
|
||||
pub fn ttv_from_api<A: AtomRepr, X: ExtraTok>(
|
||||
tokv: impl IntoIterator<Item: Borrow<api::TokenTree>>,
|
||||
ctx: &mut A::Context,
|
||||
ctx: &mut A::Ctx,
|
||||
) -> Vec<TokTree<'static, A, X>> {
|
||||
tokv.into_iter().map(|t| TokTree::<A, X>::from_api(t.borrow(), ctx)).collect()
|
||||
}
|
||||
|
||||
pub fn ttv_to_api<'a, A: AtomTok, X: ExtraTok>(
|
||||
pub fn ttv_to_api<'a, A: AtomRepr, X: ExtraTok>(
|
||||
tokv: impl IntoIterator<Item: Borrow<TokTree<'a, A, X>>>,
|
||||
do_extra: &mut impl FnMut(&X, Range<u32>) -> api::TokenTree,
|
||||
) -> Vec<api::TokenTree> {
|
||||
tokv.into_iter().map(|tok| Borrow::<TokTree<A, X>>::borrow(&tok).to_api(do_extra)).collect_vec()
|
||||
}
|
||||
|
||||
pub fn ttv_into_api<'a, A: AtomTok, X: ExtraTok>(
|
||||
pub fn ttv_into_api<'a, A: AtomRepr, X: ExtraTok>(
|
||||
tokv: impl IntoIterator<Item = TokTree<'a, A, X>>,
|
||||
do_extra: &mut impl FnMut(X, Range<u32>) -> api::TokenTree,
|
||||
) -> Vec<api::TokenTree> {
|
||||
@@ -172,7 +173,7 @@ pub fn ttv_into_api<'a, A: AtomTok, X: ExtraTok>(
|
||||
|
||||
/// This takes a position and not a range because it assigns the range to
|
||||
/// multiple leaf tokens, which is only valid if it's a zero-width range
|
||||
pub fn vname_tv<'a: 'b, 'b, A: AtomTok + 'a, X: ExtraTok + 'a>(
|
||||
pub fn vname_tv<'a: 'b, 'b, A: AtomRepr + 'a, X: ExtraTok + 'a>(
|
||||
name: &'b PathSlice,
|
||||
pos: u32,
|
||||
) -> impl Iterator<Item = TokTree<'a, A, X>> + 'b {
|
||||
@@ -182,7 +183,7 @@ pub fn vname_tv<'a: 'b, 'b, A: AtomTok + 'a, X: ExtraTok + 'a>(
|
||||
.map(move |t| t.at(pos..pos))
|
||||
}
|
||||
|
||||
pub fn wrap_tokv<'a, A: AtomTok, X: ExtraTok>(
|
||||
pub fn wrap_tokv<'a, A: AtomRepr, X: ExtraTok>(
|
||||
items: impl IntoIterator<Item = TokTree<'a, A, X>>
|
||||
) -> TokTree<'a, A, X> {
|
||||
let items_v = items.into_iter().collect_vec();
|
||||
@@ -199,7 +200,7 @@ pub fn wrap_tokv<'a, A: AtomTok, X: ExtraTok>(
|
||||
pub use api::Paren;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum Token<'a, A: AtomTok, X: ExtraTok> {
|
||||
pub enum Token<'a, A: AtomRepr, X: ExtraTok> {
|
||||
Comment(Arc<String>),
|
||||
LambdaHead(Vec<TokTree<'a, A, X>>),
|
||||
Name(Tok<String>),
|
||||
@@ -213,7 +214,7 @@ pub enum Token<'a, A: AtomTok, X: ExtraTok> {
|
||||
Ph(Ph),
|
||||
Macro(Option<NotNan<f64>>),
|
||||
}
|
||||
impl<'a, A: AtomTok, X: ExtraTok> Token<'a, A, X> {
|
||||
impl<'a, A: AtomRepr, X: ExtraTok> Token<'a, A, X> {
|
||||
pub fn at(self, range: Range<u32>) -> TokTree<'a, A, X> { TokTree { range, tok: self } }
|
||||
pub fn is_kw(&self, tk: Tok<String>) -> bool {
|
||||
matches!(self, Token::Name(n) if *n == tk)
|
||||
@@ -225,7 +226,7 @@ impl<'a, A: AtomTok, X: ExtraTok> Token<'a, A, X> {
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<'a, A: AtomTok, X: ExtraTok> Display for Token<'a, A, X> {
|
||||
impl<'a, A: AtomRepr, X: ExtraTok> Display for Token<'a, A, X> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
thread_local! {
|
||||
static PAREN_LEVEL: RefCell<usize> = 0.into();
|
||||
@@ -270,13 +271,13 @@ impl<'a, A: AtomTok, X: ExtraTok> Display for Token<'a, A, X> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ttv_range(ttv: &[TokTree<'_, impl AtomTok, impl ExtraTok>]) -> Range<u32> {
|
||||
pub fn ttv_range(ttv: &[TokTree<'_, impl AtomRepr, impl ExtraTok>]) -> Range<u32> {
|
||||
assert!(!ttv.is_empty(), "Empty slice has no range");
|
||||
ttv.first().unwrap().range.start..ttv.last().unwrap().range.end
|
||||
}
|
||||
|
||||
pub fn ttv_fmt<'a: 'b, 'b>(
|
||||
ttv: impl IntoIterator<Item = &'b TokTree<'a, impl AtomTok + 'b, impl ExtraTok + 'b>>,
|
||||
ttv: impl IntoIterator<Item = &'b TokTree<'a, impl AtomRepr + 'b, impl ExtraTok + 'b>>,
|
||||
) -> String {
|
||||
ttv.into_iter().join("")
|
||||
}
|
||||
@@ -297,8 +298,12 @@ pub struct Ph {
|
||||
pub kind: PhKind,
|
||||
}
|
||||
impl Ph {
|
||||
pub fn from_api(api: &Placeholder) -> Self { Self { name: deintern(api.name), kind: api.kind } }
|
||||
pub fn to_api(&self) -> Placeholder { Placeholder { name: self.name.marker(), kind: self.kind } }
|
||||
pub fn from_api(api: &api::Placeholder) -> Self {
|
||||
Self { name: Tok::from_api(api.name), kind: api.kind }
|
||||
}
|
||||
pub fn to_api(&self) -> api::Placeholder {
|
||||
api::Placeholder { name: self.name.to_api(), kind: self.kind }
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
||||
@@ -2,7 +2,7 @@ use std::any::{type_name, Any, TypeId};
|
||||
use std::fmt;
|
||||
use std::io::{Read, Write};
|
||||
use std::marker::PhantomData;
|
||||
use std::ops::{Deref, Range};
|
||||
use std::ops::Deref;
|
||||
use std::sync::{Arc, OnceLock};
|
||||
|
||||
use dyn_clone::{clone_box, DynClone};
|
||||
@@ -12,7 +12,7 @@ use orchid_base::intern;
|
||||
use orchid_base::location::Pos;
|
||||
use orchid_base::name::Sym;
|
||||
use orchid_base::reqnot::Requester;
|
||||
use orchid_base::tree::AtomTok;
|
||||
use orchid_base::tree::AtomRepr;
|
||||
use trait_set::trait_set;
|
||||
|
||||
use crate::api;
|
||||
@@ -92,7 +92,7 @@ impl ForeignAtom<'static> {
|
||||
pub fn request<M: AtomMethod>(&self, m: M) -> Option<M::Response> {
|
||||
let rep = self.ctx.reqnot.request(api::Fwd(
|
||||
self.atom.clone(),
|
||||
Sym::parse(M::NAME).unwrap().tok().marker(),
|
||||
Sym::parse(M::NAME).unwrap().tok().to_api(),
|
||||
enc_vec(&m)
|
||||
))?;
|
||||
Some(M::Response::decode(&mut &rep[..]))
|
||||
@@ -106,16 +106,10 @@ impl<'a> fmt::Display for ForeignAtom<'a> {
|
||||
impl<'a> fmt::Debug for ForeignAtom<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ForeignAtom({self})") }
|
||||
}
|
||||
impl<'a> AtomTok for ForeignAtom<'a> {
|
||||
type Context = SysCtx;
|
||||
fn from_api(atom: &api::Atom, pos: Range<u32>, ctx: &mut Self::Context) -> Self {
|
||||
Self {
|
||||
atom: atom.clone(),
|
||||
_life: PhantomData,
|
||||
ctx: ctx.clone(),
|
||||
expr: None,
|
||||
pos: Pos::Range(pos),
|
||||
}
|
||||
impl<'a> AtomRepr for ForeignAtom<'a> {
|
||||
type Ctx = SysCtx;
|
||||
fn from_api(atom: &api::Atom, pos: Pos, ctx: &mut Self::Ctx) -> Self {
|
||||
Self { atom: atom.clone(), _life: PhantomData, ctx: ctx.clone(), expr: None, pos }
|
||||
}
|
||||
fn to_api(&self) -> orchid_api::Atom { self.atom.clone() }
|
||||
}
|
||||
@@ -208,7 +202,7 @@ impl<'a, A: AtomicFeatures> TypAtom<'a, A> {
|
||||
M::Response::decode(
|
||||
&mut &self.data.ctx.reqnot.request(api::Fwd(
|
||||
self.data.atom.clone(),
|
||||
Sym::parse(M::NAME).unwrap().tok().marker(),
|
||||
Sym::parse(M::NAME).unwrap().tok().to_api(),
|
||||
enc_vec(&req)
|
||||
)).unwrap()[..]
|
||||
)
|
||||
|
||||
@@ -6,11 +6,10 @@ use std::{mem, process, thread};
|
||||
|
||||
use hashbrown::HashMap;
|
||||
use itertools::Itertools;
|
||||
use orchid_api::ExtMsgSet;
|
||||
use orchid_api_traits::{enc_vec, Decode, Encode};
|
||||
use orchid_base::char_filter::{char_filter_match, char_filter_union, mk_char_filter};
|
||||
use orchid_base::clone;
|
||||
use orchid_base::interner::{deintern, init_replica, sweep_replica};
|
||||
use orchid_base::interner::{init_replica, sweep_replica, Tok};
|
||||
use orchid_base::logging::Logger;
|
||||
use orchid_base::macros::{mtreev_from_api, mtreev_to_api};
|
||||
use orchid_base::name::{PathSlice, Sym};
|
||||
@@ -30,8 +29,8 @@ use crate::system::{atom_by_idx, SysCtx};
|
||||
use crate::system_ctor::{CtedObj, DynSystemCtor};
|
||||
use crate::tree::{do_extra, GenTok, GenTokTree, LazyMemberFactory, TIACtxImpl};
|
||||
|
||||
pub type ExtReq = RequestHandle<ExtMsgSet>;
|
||||
pub type ExtReqNot = ReqNot<ExtMsgSet>;
|
||||
pub type ExtReq = RequestHandle<api::ExtMsgSet>;
|
||||
pub type ExtReqNot = ReqNot<api::ExtMsgSet>;
|
||||
|
||||
pub struct ExtensionData {
|
||||
pub name: &'static str,
|
||||
@@ -136,7 +135,7 @@ fn extension_main_logic(data: ExtensionData) {
|
||||
path: Substack::Bottom,
|
||||
};
|
||||
let const_root = (cted.inst().dyn_env().into_iter())
|
||||
.map(|(k, v)| (k.marker(), v.into_api(&mut tia_ctx)))
|
||||
.map(|(k, v)| (k.to_api(), v.into_api(&mut tia_ctx)))
|
||||
.collect();
|
||||
systems.lock().unwrap().insert(new_sys.id, SystemRecord {
|
||||
declfs: cted.inst().dyn_vfs().to_api_rec(&mut vfses),
|
||||
@@ -180,14 +179,14 @@ fn extension_main_logic(data: ExtensionData) {
|
||||
api::HostExtReq::VfsReq(api::VfsReq::VfsRead(vfs_read)) => {
|
||||
let api::VfsRead(sys_id, vfs_id, path) = &vfs_read;
|
||||
let systems_g = systems.lock().unwrap();
|
||||
let path = path.iter().map(|t| deintern(*t)).collect_vec();
|
||||
let path = path.iter().map(|t| Tok::from_api(*t)).collect_vec();
|
||||
hand.handle(&vfs_read, &systems_g[sys_id].vfses[vfs_id].load(PathSlice::new(&path)))
|
||||
}
|
||||
api::HostExtReq::LexExpr(lex @ api::LexExpr{ sys, text, pos, id }) => {
|
||||
let systems_g = systems.lock().unwrap();
|
||||
let lexers = systems_g[&sys].cted.inst().dyn_lexers();
|
||||
mem::drop(systems_g);
|
||||
let text = deintern(text);
|
||||
let text = Tok::from_api(text);
|
||||
let ctx = LexContext { sys, id, pos, reqnot: hand.reqnot(), text: &text };
|
||||
let trigger_char = text.chars().nth(pos as usize).unwrap();
|
||||
for lx in lexers.iter().filter(|l| char_filter_match(l.char_filter(), trigger_char)) {
|
||||
@@ -241,7 +240,7 @@ fn extension_main_logic(data: ExtensionData) {
|
||||
api::AtomReq::Fwded(fwded) => {
|
||||
let api::Fwded(_, key, payload) = &fwded;
|
||||
let mut reply = Vec::new();
|
||||
let some = nfo.handle_req(actx, Sym::deintern(*key), &mut &payload[..], &mut reply);
|
||||
let some = nfo.handle_req(actx, Sym::from_api(*key), &mut &payload[..], &mut reply);
|
||||
hand.handle(fwded, &some.then_some(reply))
|
||||
}
|
||||
api::AtomReq::CallRef(call@api::CallRef(_, arg)) => {
|
||||
@@ -279,13 +278,20 @@ fn extension_main_logic(data: ExtensionData) {
|
||||
let tok = hand.will_handle_as(&am);
|
||||
let sys_ctx = mk_ctx(am.sys, hand.reqnot());
|
||||
let ctx = RuleCtx {
|
||||
args: am.params.into_iter().map(|(k, v)| (deintern(k), mtreev_from_api(&v))).collect(),
|
||||
args: (am.params.into_iter())
|
||||
.map(|(k, v)| (
|
||||
Tok::from_api(k),
|
||||
mtreev_from_api(&v, &mut |_| panic!("No atom in macro prompt!"))
|
||||
))
|
||||
.collect(),
|
||||
run_id: am.run_id,
|
||||
sys: sys_ctx.clone(),
|
||||
};
|
||||
hand.handle_as(tok, &match apply_rule(am.id, ctx) {
|
||||
Err(e) => e.keep_only(|e| *e != err_cascade()).map(|e| Err(e.to_api())),
|
||||
Ok(t) => Some(Ok(mtreev_to_api(&t))),
|
||||
Ok(t) => Some(Ok(mtreev_to_api(&t, &mut |a| {
|
||||
api::MacroToken::Atom(a.clone().build(sys_ctx.clone()))
|
||||
}))),
|
||||
})
|
||||
}
|
||||
}),
|
||||
|
||||
@@ -3,7 +3,6 @@ use std::ops::Deref;
use std::sync::{Arc, OnceLock};

use derive_destructure::destructure;
use orchid_api::InspectedKind;
use orchid_base::error::{OrcErr, OrcErrv};
use orchid_base::interner::Tok;
use orchid_base::location::Pos;
@@ -51,9 +50,10 @@ impl Expr {
let details = handle.ctx.reqnot.request(api::Inspect { target: handle.tk });
let pos = Pos::from_api(&details.location);
let kind = match details.kind {
InspectedKind::Atom(a) => ExprKind::Atom(ForeignAtom::new(handle.clone(), a, pos.clone())),
InspectedKind::Bottom(b) => ExprKind::Bottom(OrcErrv::from_api(&b)),
InspectedKind::Opaque => ExprKind::Opaque,
api::InspectedKind::Atom(a) =>
ExprKind::Atom(ForeignAtom::new(handle.clone(), a, pos.clone())),
api::InspectedKind::Bottom(b) => ExprKind::Bottom(OrcErrv::from_api(&b)),
api::InspectedKind::Opaque => ExprKind::Opaque,
};
ExprData { pos, kind }
})
@@ -124,7 +124,7 @@ impl ExprKind {
K::Seq(Box::new(a.api_return(ctx.clone(), do_slot)), Box::new(b.api_return(ctx, do_slot))),
Self::Lambda(arg, body) => K::Lambda(arg, Box::new(body.api_return(ctx, do_slot))),
Self::Arg(arg) => K::Arg(arg),
Self::Const(name) => K::Const(name.marker()),
Self::Const(name) => K::Const(name.to_api()),
Self::Bottom(err) => K::Bottom(err.to_api()),
Self::NewAtom(fac) => K::NewAtom(fac.clone().build(ctx)),
kind @ (Self::Atom(_) | Self::Opaque) => panic!("{kind:?} should have a token"),

@@ -24,7 +24,7 @@ impl DeclFs {
api::EagerVfs::Lazy(id)
},
DeclFs::Mod(children) => api::EagerVfs::Eager(
children.iter().map(|(k, v)| (intern(*k).marker(), v.to_api_rec(vfses))).collect(),
children.iter().map(|(k, v)| (intern(*k).to_api(), v.to_api_rec(vfses))).collect(),
),
}
}

@@ -17,4 +17,3 @@ pub mod system;
pub mod system_ctor;
pub mod tree;
pub mod macros;
pub mod api_conv;

@@ -1,39 +1,48 @@
use ahash::HashMap;
use lazy_static::lazy_static;
use never::Never;
use orchid_base::{error::OrcRes, interner::{intern, Tok}, location::Pos, macros::{mtreev_from_api, mtreev_to_api, MTree}, parse::Comment, reqnot::Requester};
use trait_set::trait_set;
use crate::{api, lexer::err_cascade, system::SysCtx};
use crate::{api, atom::AtomFactory, lexer::err_cascade, system::SysCtx};
use std::{num::NonZero, sync::RwLock};

pub trait Macro {
fn pattern() -> MTree<'static>;
fn apply(binds: HashMap<Tok<String>, MTree<'_>>) -> MTree<'_>;
fn pattern() -> MTree<'static, Never>;
fn apply(binds: HashMap<Tok<String>, MTree<'_, Never>>) -> MTree<'_, AtomFactory>;
}

pub trait DynMacro {
fn pattern(&self) -> MTree<'static>;
fn apply<'a>(&self, binds: HashMap<Tok<String>, MTree<'a>>) -> MTree<'a>;
fn pattern(&self) -> MTree<'static, Never>;
fn apply<'a>(&self, binds: HashMap<Tok<String>, MTree<'a, Never>>) -> MTree<'a, AtomFactory>;
}

impl<T: Macro> DynMacro for T {
fn pattern(&self) -> MTree<'static> { Self::pattern() }
fn apply<'a>(&self, binds: HashMap<Tok<String>, MTree<'a>>) -> MTree<'a> { Self::apply(binds) }
fn pattern(&self) -> MTree<'static, Never> { Self::pattern() }
fn apply<'a>(&self, binds: HashMap<Tok<String>, MTree<'a, Never>>) -> MTree<'a, AtomFactory> {
Self::apply(binds)
}
}

pub struct RuleCtx<'a> {
pub(crate) args: HashMap<Tok<String>, Vec<MTree<'a>>>,
pub(crate) args: HashMap<Tok<String>, Vec<MTree<'a, Never>>>,
pub(crate) run_id: api::ParsId,
pub(crate) sys: SysCtx,
}
impl<'a> RuleCtx<'a> {
pub fn recurse(&mut self, tree: &[MTree<'a>]) -> OrcRes<Vec<MTree<'a>>> {
let req = api::RunMacros{ run_id: self.run_id, query: mtreev_to_api(tree) };
Ok(mtreev_from_api(&self.sys.reqnot.request(req).ok_or_else(err_cascade)?))
pub fn recurse(&mut self, tree: &[MTree<'a, Never>]) -> OrcRes<Vec<MTree<'a, Never>>> {
let req = api::RunMacros{
run_id: self.run_id,
query: mtreev_to_api(tree, &mut |b| match *b {})
};
Ok(mtreev_from_api(
&self.sys.reqnot.request(req).ok_or_else(err_cascade)?,
&mut |_| panic!("Returned atom from Rule recursion")
))
}
pub fn getv(&mut self, key: &Tok<String>) -> Vec<MTree<'a>> {
pub fn getv(&mut self, key: &Tok<String>) -> Vec<MTree<'a, Never>> {
self.args.remove(key).expect("Key not found")
}
pub fn gets(&mut self, key: &Tok<String>) -> MTree<'a> {
pub fn gets(&mut self, key: &Tok<String>) -> MTree<'a, Never> {
let v = self.getv(key);
assert!(v.len() == 1, "Not a scalar");
v.into_iter().next().unwrap()
@@ -44,7 +53,7 @@ impl<'a> RuleCtx<'a> {
}

trait_set! {
pub trait RuleCB = for<'a> Fn(RuleCtx<'a>) -> OrcRes<Vec<MTree<'a>>> + Send + Sync;
pub trait RuleCB = for<'a> Fn(RuleCtx<'a>) -> OrcRes<Vec<MTree<'a, AtomFactory>>> + Send + Sync;
}

lazy_static!{
@@ -53,7 +62,7 @@ lazy_static!{

pub struct Rule {
pub(crate) comments: Vec<Comment>,
pub(crate) pattern: Vec<MTree<'static>>,
pub(crate) pattern: Vec<MTree<'static, Never>>,
pub(crate) id: api::MacroId,
}
impl Rule {
@@ -61,7 +70,7 @@ impl Rule {
api::MacroRule {
comments: self.comments.iter().map(|c| c.to_api()).collect(),
location: api::Location::Inherit,
pattern: mtreev_to_api(&self.pattern),
pattern: mtreev_to_api(&self.pattern, &mut |b| match *b {}),
id: self.id,
}
}
@@ -69,7 +78,7 @@ impl Rule {

pub fn rule_cmt<'a>(
cmt: impl IntoIterator<Item = &'a str>,
pattern: Vec<MTree<'static>>,
pattern: Vec<MTree<'static, Never>>,
apply: impl RuleCB + 'static
) -> Rule {
let mut rules = RULES.write().unwrap();
@@ -79,11 +88,14 @@ pub fn rule_cmt<'a>(
Rule { comments, pattern, id }
}

pub fn rule(pattern: Vec<MTree<'static>>, apply: impl RuleCB + 'static) -> Rule {
pub fn rule(pattern: Vec<MTree<'static, Never>>, apply: impl RuleCB + 'static) -> Rule {
rule_cmt([], pattern, apply)
}

pub(crate) fn apply_rule(id: api::MacroId, ctx: RuleCtx<'static>) -> OrcRes<Vec<MTree<'static>>> {
pub(crate) fn apply_rule(
id: api::MacroId,
ctx: RuleCtx<'static>
) -> OrcRes<Vec<MTree<'static, AtomFactory>>> {
let rules = RULES.read().unwrap();
rules[&id](ctx)
}

@@ -43,9 +43,9 @@ pub struct GenItem {
impl GenItem {
pub fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::Item {
let kind = match self.kind {
GenItemKind::Export(n) => api::ItemKind::Export(n.marker()),
GenItemKind::Export(n) => api::ItemKind::Export(n.to_api()),
GenItemKind::Member(mem) => api::ItemKind::Member(mem.into_api(ctx)),
GenItemKind::Import(cn) => api::ItemKind::Import(cn.tok().marker()),
GenItemKind::Import(cn) => api::ItemKind::Import(cn.tok().to_api()),
GenItemKind::Macro(prio, rules) => api::ItemKind::Macro(api::MacroBlock {
priority: prio,
rules: rules.into_iter().map(|r| r.to_api() ).collect_vec(),
@@ -134,7 +134,7 @@ pub struct GenMember {
impl GenMember {
pub fn into_api(self, ctx: &mut impl TreeIntoApiCtx) -> api::Member {
api::Member {
name: self.name.marker(),
name: self.name.to_api(),
kind: self.kind.into_api(&mut ctx.push_path(self.name)),
}
}

@@ -6,10 +6,10 @@ use std::sync::{Arc, RwLock};
use hashbrown::HashMap;
use lazy_static::lazy_static;
use orchid_base::error::OrcErrv;
use orchid_base::interner::deintern;
use orchid_base::location::Pos;
use orchid_base::match_mapping;
use orchid_base::name::Sym;
use orchid_base::tree::AtomTok;
use orchid_base::tree::AtomRepr;

use crate::api;
use crate::extension::AtomHand;
@@ -41,12 +41,12 @@ impl Expr {
pub fn resolve(tk: api::ExprTicket) -> Option<Self> {
KNOWN_EXPRS.read().unwrap().get(&tk).cloned()
}
pub fn from_api(api: api::Expression, ctx: &mut ExprParseCtx) -> Self {
pub fn from_api(api: &api::Expression, ctx: &mut ExprParseCtx) -> Self {
if let api::ExpressionKind::Slot(tk) = &api.kind {
return Self::resolve(*tk).expect("Invalid slot");
}
Self {
kind: Arc::new(RwLock::new(ExprKind::from_api(api.kind, ctx))),
kind: Arc::new(RwLock::new(ExprKind::from_api(&api.kind, ctx))),
is_canonical: Arc::default(),
pos: Pos::from_api(&api.location),
}
@@ -81,24 +81,24 @@ pub enum ExprKind {
Seq(Expr, Expr),
Call(Expr, Expr),
Atom(AtomHand),
Argument,
Arg,
Lambda(Option<PathSet>, Expr),
Bottom(OrcErrv),
Const(Sym),
}
impl ExprKind {
pub fn from_api(api: api::ExpressionKind, ctx: &mut ExprParseCtx) -> Self {
use api::ExpressionKind as K;
match api {
K::Slot(_) => panic!("Handled in Expr"),
K::Lambda(id, b) => ExprKind::Lambda(PathSet::from_api(id, &b), Expr::from_api(*b, ctx)),
K::Arg(_) => ExprKind::Argument,
K::Bottom(b) => ExprKind::Bottom(OrcErrv::from_api(&b)),
K::Call(f, x) => ExprKind::Call(Expr::from_api(*f, ctx), Expr::from_api(*x, ctx)),
K::Const(c) => ExprKind::Const(Sym::from_tok(deintern(c)).unwrap()),
K::NewAtom(a) => ExprKind::Atom(AtomHand::from_api(a)),
K::Seq(a, b) => ExprKind::Seq(Expr::from_api(*a, ctx), Expr::from_api(*b, ctx)),
}
pub fn from_api(api: &api::ExpressionKind, ctx: &mut ExprParseCtx) -> Self {
match_mapping!(api, api::ExpressionKind => ExprKind {
Lambda(id => PathSet::from_api(*id, api), b => Expr::from_api(b, ctx)),
Bottom(b => OrcErrv::from_api(b)),
Call(f => Expr::from_api(f, ctx), x => Expr::from_api(x, ctx)),
Const(c => Sym::from_api(*c)),
Seq(a => Expr::from_api(a, ctx), b => Expr::from_api(b, ctx)),
} {
api::ExpressionKind::Arg(_) => ExprKind::Arg,
api::ExpressionKind::NewAtom(a) => ExprKind::Atom(AtomHand::from_api(a.clone())),
api::ExpressionKind::Slot(_) => panic!("Handled in Expr"),
})
}
}

@@ -121,19 +121,20 @@ impl PathSet {
self.steps.push_front(step);
self
}
pub fn from_api(id: u64, b: &api::Expression) -> Option<Self> {
pub fn from_api(id: u64, api: &api::ExpressionKind) -> Option<Self> {
use api::ExpressionKind as K;
match &b.kind {
match &api {
K::Arg(id2) => (id == *id2).then(|| Self { steps: VecDeque::new(), next: None }),
K::Bottom(_) | K::Const(_) | K::NewAtom(_) | K::Slot(_) => None,
K::Lambda(_, b) => Self::from_api(id, b),
K::Call(l, r) | K::Seq(l, r) => match (Self::from_api(id, l), Self::from_api(id, r)) {
(Some(a), Some(b)) =>
Some(Self { steps: VecDeque::new(), next: Some((Box::new(a), Box::new(b))) }),
(Some(l), None) => Some(l.after(Step::Left)),
(None, Some(r)) => Some(r.after(Step::Right)),
(None, None) => None,
},
K::Lambda(_, b) => Self::from_api(id, &b.kind),
K::Call(l, r) | K::Seq(l, r) =>
match (Self::from_api(id, &l.kind), Self::from_api(id, &r.kind)) {
(Some(a), Some(b)) =>
Some(Self { steps: VecDeque::new(), next: Some((Box::new(a), Box::new(b))) }),
(Some(l), None) => Some(l.after(Step::Left)),
(None, Some(r)) => Some(r.after(Step::Right)),
(None, None) => None,
},
}
}
}

@@ -1,6 +1,6 @@
use std::collections::VecDeque;
use std::num::NonZero;
use std::ops::{Deref, Range};
use std::ops::Deref;
use std::sync::atomic::{AtomicU16, AtomicU32, AtomicU64, Ordering};
use std::sync::mpsc::{sync_channel, SyncSender};
use std::sync::{Arc, Mutex, OnceLock, RwLock, Weak};
@@ -11,23 +11,23 @@ use hashbrown::hash_map::Entry;
use hashbrown::HashMap;
use itertools::Itertools;
use lazy_static::lazy_static;
use orchid_api::TStrv;
use orchid_api_traits::Request;
use orchid_base::char_filter::char_filter_match;
use orchid_base::error::{OrcErrv, OrcRes};
use orchid_base::interner::{deintern, intern, Tok};
use orchid_base::interner::{intern, Tok};
use orchid_base::location::Pos;
use orchid_base::logging::Logger;
use orchid_base::macros::{mtreev_from_api, mtreev_to_api};
use orchid_base::macros::mtreev_from_api;
use orchid_base::parse::Comment;
use orchid_base::reqnot::{ReqNot, Requester as _};
use orchid_base::tree::{ttv_from_api, AtomTok};
use orchid_base::tree::{ttv_from_api, AtomRepr};
use orchid_base::clone;
use ordered_float::NotNan;
use substack::{Stackframe, Substack};

use crate::api;
use crate::expr::Expr;
use crate::macros::macro_recur;
use crate::macros::{macro_recur, macro_treev_to_api};
use crate::tree::{Member, ParsTokTree};

#[derive(Debug, destructure)]
@@ -56,11 +56,11 @@ impl Drop for AtomData {
#[derive(Clone, Debug)]
pub struct AtomHand(Arc<AtomData>);
impl AtomHand {
fn create_new(api::Atom { data, drop, owner }: api::Atom) -> Self {
let owner = System::resolve(owner).expect("Atom owned by non-existing system");
Self(Arc::new(AtomData { data, drop, owner }))
}
pub fn from_api(atom: api::Atom) -> Self {
fn create_new(api::Atom { data, drop, owner }: api::Atom) -> AtomHand {
let owner = System::resolve(owner).expect("Atom owned by non-existing system");
AtomHand(Arc::new(AtomData { data, drop, owner }))
}
if let Some(id) = atom.drop {
lazy_static! {
static ref OWNED_ATOMS: Mutex<HashMap<(api::SysId, api::AtomId), Weak<AtomData>>> =
@@ -73,11 +73,11 @@ impl AtomHand {
return Self(atom);
}
}
let new = Self::create_new(atom);
let new = create_new(atom);
owned_g.insert((owner, id), Arc::downgrade(&new.0));
new
} else {
Self::create_new(atom)
create_new(atom)
}
}
pub fn call(self, arg: Expr) -> api::Expression {
@@ -89,15 +89,15 @@ impl AtomHand {
Err(hand) => reqnot.request(api::CallRef(hand.api_ref(), ticket)),
}
}
pub fn req(&self, key: TStrv, req: Vec<u8>) -> Option<Vec<u8>> {
pub fn req(&self, key: api::TStrv, req: Vec<u8>) -> Option<Vec<u8>> {
self.0.owner.reqnot().request(api::Fwded(self.0.api_ref(), key, req))
}
pub fn api_ref(&self) -> api::Atom { self.0.api_ref() }
pub fn print(&self) -> String { self.0.owner.reqnot().request(api::AtomPrint(self.0.api_ref())) }
}
impl AtomTok for AtomHand {
type Context = ();
fn from_api(atom: &orchid_api::Atom, _: Range<u32>, (): &mut Self::Context) -> Self {
impl AtomRepr for AtomHand {
type Ctx = ();
fn from_api(atom: &orchid_api::Atom, _: Pos, (): &mut Self::Ctx) -> Self {
Self::from_api(atom.clone())
}
fn to_api(&self) -> orchid_api::Atom { self.api_ref() }
@@ -177,11 +177,15 @@ impl Extension {
|hand, req| match req {
api::ExtHostReq::Ping(ping) => hand.handle(&ping, &()),
api::ExtHostReq::IntReq(intreq) => match intreq {
api::IntReq::InternStr(s) => hand.handle(&s, &intern(&**s.0).marker()),
api::IntReq::InternStrv(v) => hand.handle(&v, &intern(&*v.0).marker()),
api::IntReq::ExternStr(si) => hand.handle(&si, &deintern(si.0).arc()),
api::IntReq::ExternStrv(vi) =>
hand.handle(&vi, &Arc::new(deintern(vi.0).iter().map(|t| t.marker()).collect_vec())),
api::IntReq::InternStr(s) => hand.handle(&s, &intern(&**s.0).to_api()),
api::IntReq::InternStrv(v) => hand.handle(&v, &intern(&*v.0).to_api()),
api::IntReq::ExternStr(si) => hand.handle(&si, &Tok::<String>::from_api(si.0).arc()),
api::IntReq::ExternStrv(vi) => hand.handle(&vi, &Arc::new(
Tok::<Vec<Tok<String>>>::from_api(vi.0)
.iter()
.map(|t| t.to_api())
.collect_vec()
)),
},
api::ExtHostReq::Fwd(ref fw @ api::Fwd(ref atom, ref key, ref body)) => {
let sys = System::resolve(atom.owner).unwrap();
@@ -207,7 +211,12 @@ impl Extension {
})
},
api::ExtHostReq::RunMacros(ref rm @ api::RunMacros{ ref run_id, ref query }) => {
hand.handle(rm, &macro_recur(*run_id, mtreev_from_api(query)).map(|x| mtreev_to_api(&x)))
hand.handle(rm,
&macro_recur(*run_id,
mtreev_from_api(query, &mut |_| panic!("Recursion never contains atoms"))
)
.map(|x| macro_treev_to_api(*run_id, x))
)
}
},
),
@@ -255,13 +264,13 @@ impl SystemCtor {
exprs: RwLock::default(),
lex_filter: sys_inst.lex_filter,
const_root: OnceLock::new(),
line_types: sys_inst.line_types.into_iter().map(deintern).collect(),
line_types: sys_inst.line_types.into_iter().map(Tok::from_api).collect(),
id,
}));
let root = (sys_inst.const_root.into_iter())
.map(|(k, v)| Member::from_api(
api::Member { name: k, kind: v },
Substack::Bottom.push(deintern(k)),
Substack::Bottom.push(Tok::from_api(k)),
&data
))
.collect_vec();
@@ -342,7 +351,7 @@ impl System {
});
// Pass control to extension
let ret =
self.reqnot().request(api::LexExpr { id, pos, sys: self.id(), text: source.marker() });
self.reqnot().request(api::LexExpr { id, pos, sys: self.id(), text: source.to_api() });
// collect sender to unblock recursion handler thread before returning
LEX_RECUR.lock().unwrap().remove(&id);
ret.transpose()

@@ -3,8 +3,8 @@ use std::sync::Arc;

use hashbrown::HashMap;
use orchid_base::error::{mk_errv, OrcErrv, OrcRes};
use orchid_base::intern;
use orchid_base::interner::{deintern, intern, Tok};
use orchid_base::{intern, match_mapping};
use orchid_base::interner::{intern, Tok};
use orchid_base::location::Pos;
use orchid_base::number::{num_to_err, parse_num};
use orchid_base::parse::{name_char, name_start, op_char, unrep_space};
@@ -122,7 +122,7 @@ pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
body.push(lex_once(ctx)?);
ctx.trim_ws();
}
ParsTok::S(paren.clone(), body)
ParsTok::S(*paren, body)
} else if ctx.strip_prefix("macro") &&
!ctx.tail.chars().next().is_some_and(|x| x.is_ascii_alphabetic())
{
@@ -173,20 +173,19 @@ pub fn lex_once(ctx: &mut LexCtx) -> OrcRes<ParsTokTree> {
}

fn tt_to_owned(api: &api::TokenTree, ctx: &mut LexCtx<'_>) -> ParsTokTree {
let tok = match &api.token {
api::Token::Atom(atom) => ParsTok::Atom(AtomHand::from_api(atom.clone())),
api::Token::Bottom(err) => ParsTok::Bottom(OrcErrv::from_api(err)),
api::Token::LambdaHead(arg) => ParsTok::LambdaHead(ttv_to_owned(arg, ctx)),
api::Token::Lambda(arg, b) => ParsTok::Lambda(ttv_to_owned(arg, ctx), ttv_to_owned(b, ctx)),
api::Token::Name(name) => ParsTok::Name(deintern(*name)),
api::Token::S(p, b) => ParsTok::S(p.clone(), b.iter().map(|t| tt_to_owned(t, ctx)).collect()),
let tok = match_mapping!(&api.token, api::Token => ParsTok {
Atom(atom => AtomHand::from_api(atom.clone())),
Bottom(err => OrcErrv::from_api(err)),
LambdaHead(arg => ttv_to_owned(arg, ctx)),
Name(name => Tok::from_api(*name)),
S(p.clone(), b.iter().map(|t| tt_to_owned(t, ctx)).collect()),
BR, NS,
Comment(c.clone()),
Ph(ph => Ph::from_api(ph)),
Macro(*prio),
} {
api::Token::Slot(id) => return ctx.rm_subtree(*id),
api::Token::BR => ParsTok::BR,
api::Token::NS => ParsTok::NS,
api::Token::Comment(c) => ParsTok::Comment(c.clone()),
api::Token::Ph(ph) => ParsTok::Ph(Ph::from_api(ph)),
api::Token::Macro(prio) => ParsTok::Macro(*prio)
};
});
ParsTokTree { range: api.range.clone(), tok }
}

@@ -8,3 +8,4 @@ pub mod parse;
pub mod subprocess;
pub mod tree;
pub mod macros;
pub mod rule;

@@ -1,20 +1,85 @@
use std::sync::RwLock;
use crate::{api, rule::shared::Matcher, tree::Code};
use std::sync::{Arc, RwLock};

use hashbrown::HashMap;
use hashbrown::{HashMap, HashSet};
use itertools::Itertools;
use lazy_static::lazy_static;
use orchid_base::macros::MTree;
use orchid_base::{macros::{mtreev_from_api, mtreev_to_api, MTok, MTree}, name::Sym};
use ordered_float::NotNan;
use trait_set::trait_set;
use crate::api::ParsId;
use crate::extension::AtomHand;

pub type MacTok = MTok<'static, AtomHand>;
pub type MacTree = MTree<'static, AtomHand>;

trait_set!{
trait MacroCB = Fn(Vec<MTree>) -> Option<Vec<MTree>> + Send + Sync;
trait MacroCB = Fn(Vec<MacTree>) -> Option<Vec<MacTree>> + Send + Sync;
}

lazy_static!{
static ref RECURSION: RwLock<HashMap<ParsId, Box<dyn MacroCB>>> = RwLock::default();
static ref RECURSION: RwLock<HashMap<api::ParsId, Box<dyn MacroCB>>> = RwLock::default();
static ref MACRO_SLOTS: RwLock<HashMap<api::ParsId,
HashMap<api::MacroTreeId, Arc<MacTok>>
>> = RwLock::default();
}

pub fn macro_recur(run_id: ParsId, input: Vec<MTree>) -> Option<Vec<MTree>> {
pub fn macro_recur(run_id: api::ParsId, input: Vec<MacTree>) -> Option<Vec<MacTree>> {
(RECURSION.read().unwrap()[&run_id])(input)
}

pub fn macro_treev_to_api(run_id: api::ParsId, mtree: Vec<MacTree>) -> Vec<api::MacroTree> {
let mut g = MACRO_SLOTS.write().unwrap();
let run_cache = g.get_mut(&run_id).expect("Parser run not found");
mtreev_to_api(&mtree, &mut |a: &AtomHand| {
let id = api::MacroTreeId((run_cache.len() as u64 + 1).try_into().unwrap());
run_cache.insert(id, Arc::new(MacTok::Atom(a.clone())));
api::MacroToken::Slot(id)
})
}

pub fn macro_treev_from_api(api: Vec<api::MacroTree>) -> Vec<MacTree> {
mtreev_from_api(&api, &mut |atom| MacTok::Atom(AtomHand::from_api(atom.clone())))
}

pub fn deslot_macro(run_id: api::ParsId, tree: &[MacTree]) -> Option<Vec<MacTree>> {
let mut slots = (MACRO_SLOTS.write().unwrap())
.remove(&run_id).expect("Run not found");
return work(&mut slots, tree);
fn work(
slots: &mut HashMap<api::MacroTreeId, Arc<MacTok>>,
tree: &[MacTree]
) -> Option<Vec<MacTree>> {
let items = (tree.iter())
.map(|t| Some(MacTree {
tok: match &*t.tok {
MacTok::Atom(_) | MacTok::Name(_) | MacTok::Ph(_) => return None,
MacTok::Ref(_) => panic!("Ref is an extension-local optimization"),
MacTok::Slot(slot) => slots.get(&slot.id()).expect("Slot not found").clone(),
MacTok::S(paren, b) => Arc::new(MacTok::S(*paren, work(slots, b)?)),
MacTok::Lambda(a, b) => Arc::new(match (work(slots, a), work(slots, b)) {
(None, None) => return None,
(Some(a), None) => MacTok::Lambda(a, b.clone()),
(None, Some(b)) => MacTok::Lambda(a.clone(), b),
(Some(a), Some(b)) => MacTok::Lambda(a, b),
}),
},
pos: t.pos.clone()
}))
.collect_vec();
let any_changed = items.iter().any(Option::is_some);
any_changed.then(|| {
(items.into_iter().enumerate())
.map(|(i, opt)| opt.unwrap_or_else(|| tree[i].clone()))
.collect_vec()
})
}
}

pub struct MacroRepo{
no_prio: Vec<(HashSet<Sym>, Matcher, Code)>,
prio: Vec<(HashSet<Sym>, NotNan<f64>, Matcher, Code)>,
}

pub fn match_on_exprv<'a>(target: &'a [MacTree], pattern: &[MacTree]) -> Option<MatchState<'a>> {
  // The body is empty in this commit and the return type was misspelled "MatchhState".
  // Minimal sketch: compile the pattern and apply it without collecting name locations.
  // Assumes `use crate::rule::state::MatchState;` next to the existing Matcher import.
  Matcher::new(pattern).apply(target, &|_: Sym| false)
}

@@ -1,3 +1,4 @@
use std::sync::Arc;
use std::{iter, thread};

use itertools::Itertools;
@@ -16,6 +17,7 @@ use orchid_base::tree::{Paren, TokTree, Token};
use substack::Substack;

use crate::extension::{AtomHand, System};
use crate::macros::MacTree;
use crate::tree::{Code, CodeLocator, Item, ItemKind, Member, MemberKind, Module, ParsTokTree, Rule, RuleKind};

type ParsSnippet<'a> = Snippet<'a, 'static, AtomHand, Never>;
@@ -181,9 +183,7 @@ pub fn parse_const(tail: ParsSnippet) -> OrcRes<(Tok<String>, Vec<ParsTokTree>)>
Ok((name, tail.iter().flat_map(strip_fluff).collect_vec()))
}

pub fn parse_mtree<'a>(
mut snip: ParsSnippet<'a>
) -> OrcRes<Vec<MTree<'static>>> {
pub fn parse_mtree(mut snip: ParsSnippet<'_>) -> OrcRes<Vec<MacTree>> {
let mut mtreev = Vec::new();
while let Some((ttree, tail)) = snip.pop_front() {
let (range, tok, tail) = match &ttree.tok {
@@ -221,21 +221,14 @@ pub fn parse_mtree<'a>(
)),
Token::BR | Token::Comment(_) => continue,
Token::Bottom(e) => return Err(e.clone()),
Token::Lambda(arg, body) => {
let tok = MTok::Lambda(
parse_mtree(Snippet::new(&ttree, &arg))?,
parse_mtree(Snippet::new(&ttree, &body))?,
);
(ttree.range.clone(), tok, tail)
},
Token::LambdaHead(arg) => (
ttree.range.start..snip.pos().end,
MTok::Lambda(parse_mtree(Snippet::new(&ttree, &arg))?, parse_mtree(tail)?),
MTok::Lambda(parse_mtree(Snippet::new(ttree, arg))?, parse_mtree(tail)?),
Snippet::new(ttree, &[]),
),
Token::Slot(_) | Token::X(_) => panic!("Did not expect {} in parsed token tree", &ttree.tok),
};
mtreev.push(MTree { pos: Pos::Range(range.clone()), tok });
mtreev.push(MTree { pos: Pos::Range(range.clone()), tok: Arc::new(tok) });
snip = tail;
}
Ok(mtreev)
@@ -246,14 +239,14 @@ pub fn parse_macro(tail: ParsSnippet, macro_i: u16, path: Substack<Tok<String>>)
Parsed { tail, output: o@TokTree { tok: Token::S(Paren::Round, b), .. } } => (tail, o, b),
Parsed { output, .. } => return Err(mk_errv(
intern!(str: "m"),
format!("Macro blocks must either start with a block or a ..$:number"),
"Macro blocks must either start with a block or a ..$:number",
[Pos::Range(output.range.clone()).into()]
)),
};
expect_end(surplus)?;
let mut errors = Vec::new();
let mut rules = Vec::new();
for (i, item) in line_items(Snippet::new(prev, &block)).into_iter().enumerate() {
for (i, item) in line_items(Snippet::new(prev, block)).into_iter().enumerate() {
let Parsed { tail, output } = try_pop_no_fluff(item.tail)?;
if !output.is_kw(intern!(str: "rule")) {
errors.extend(mk_errv(

29
orchid-host/src/rule/any_match.rs
Normal file
@@ -0,0 +1,29 @@
use super::scal_match::scalv_match;
use super::shared::AnyMatcher;
use super::vec_match::vec_match;
use orchid_base::name::Sym;
use crate::macros::MacTree;
use crate::rule::state::MatchState;

#[must_use]
pub fn any_match<'a>(
matcher: &AnyMatcher,
seq: &'a [MacTree],
save_loc: &impl Fn(Sym) -> bool,
) -> Option<MatchState<'a>> {
match matcher {
AnyMatcher::Scalar(scalv) => scalv_match(scalv, seq, save_loc),
AnyMatcher::Vec { left, mid, right } => {
if seq.len() < left.len() + right.len() {
return None;
};
let left_split = left.len();
let right_split = seq.len() - right.len();
Some(
scalv_match(left, &seq[..left_split], save_loc)?
.combine(scalv_match(right, &seq[right_split..], save_loc)?)
.combine(vec_match(mid, &seq[left_split..right_split], save_loc)?),
)
},
}
}
151
orchid-host/src/rule/build.rs
Normal file
@@ -0,0 +1,151 @@
use orchid_api::PhKind;
use orchid_base::interner::Tok;
use itertools::Itertools;
use orchid_base::side::Side;
use orchid_base::tree::Ph;

use super::shared::{AnyMatcher, ScalMatcher, VecMatcher};
use crate::macros::{MacTok, MacTree};
use crate::rule::vec_attrs::vec_attrs;

pub type MaxVecSplit<'a> = (&'a [MacTree], (Tok<String>, u8, bool), &'a [MacTree]);

/// Derive the details of the central vectorial and the two sides from a
/// slice of Expr's
#[must_use]
fn split_at_max_vec(pattern: &[MacTree]) -> Option<MaxVecSplit> {
let rngidx = pattern
.iter()
.position_max_by_key(|expr| vec_attrs(expr).map(|attrs| attrs.1 as i64).unwrap_or(-1))?;
let (left, not_left) = pattern.split_at(rngidx);
let (placeh, right) =
not_left.split_first().expect("The index of the greatest element must be less than the length");
vec_attrs(placeh).map(|attrs| (left, attrs, right))
}

#[must_use]
fn scal_cnt<'a>(iter: impl Iterator<Item = &'a MacTree>) -> usize {
iter.take_while(|expr| vec_attrs(expr).is_none()).count()
}

#[must_use]
pub fn mk_any(pattern: &[MacTree]) -> AnyMatcher {
let left_split = scal_cnt(pattern.iter());
if pattern.len() <= left_split {
return AnyMatcher::Scalar(mk_scalv(pattern));
}
let (left, not_left) = pattern.split_at(left_split);
let right_split = not_left.len() - scal_cnt(pattern.iter().rev());
let (mid, right) = not_left.split_at(right_split);
AnyMatcher::Vec { left: mk_scalv(left), mid: mk_vec(mid), right: mk_scalv(right) }
}

/// Pattern MUST NOT contain vectorial placeholders
#[must_use]
fn mk_scalv(pattern: &[MacTree]) -> Vec<ScalMatcher> { pattern.iter().map(mk_scalar).collect() }

/// Pattern MUST start and end with a vectorial placeholder
#[must_use]
fn mk_vec(pattern: &[MacTree]) -> VecMatcher {
debug_assert!(!pattern.is_empty(), "pattern cannot be empty");
debug_assert!(pattern.first().map(vec_attrs).is_some(), "pattern must start with a vectorial");
debug_assert!(pattern.last().map(vec_attrs).is_some(), "pattern must end with a vectorial");
let (left, (key, _, nonzero), right) = split_at_max_vec(pattern)
.expect("pattern must have vectorial placeholders at least at either end");
let r_sep_size = scal_cnt(right.iter());
let (r_sep, r_side) = right.split_at(r_sep_size);
let l_sep_size = scal_cnt(left.iter().rev());
let (l_side, l_sep) = left.split_at(left.len() - l_sep_size);
let main = VecMatcher::Placeh { key: key.clone(), nonzero };
match (left, right) {
(&[], &[]) => VecMatcher::Placeh { key, nonzero },
(&[], _) => VecMatcher::Scan {
direction: Side::Left,
left: Box::new(main),
sep: mk_scalv(r_sep),
right: Box::new(mk_vec(r_side)),
},
(_, &[]) => VecMatcher::Scan {
direction: Side::Right,
left: Box::new(mk_vec(l_side)),
sep: mk_scalv(l_sep),
right: Box::new(main),
},
(..) => {
let mut key_order =
l_side.iter().chain(r_side.iter()).filter_map(vec_attrs).collect::<Vec<_>>();
key_order.sort_by_key(|(_, prio, _)| -(*prio as i64));
VecMatcher::Middle {
left: Box::new(mk_vec(l_side)),
left_sep: mk_scalv(l_sep),
mid: Box::new(main),
right_sep: mk_scalv(r_sep),
right: Box::new(mk_vec(r_side)),
key_order: key_order.into_iter().map(|(n, ..)| n).collect(),
}
},
}
}

/// Pattern MUST NOT be a vectorial placeholder
#[must_use]
fn mk_scalar(pattern: &MacTree) -> ScalMatcher {
match &*pattern.tok {
MacTok::Atom(_) => panic!("Atoms aren't supported in matchers"),
MacTok::Name(n) => ScalMatcher::Name(n.clone()),
MacTok::Ph(Ph { name, kind }) => match kind {
PhKind::Vector { .. } => {
panic!("Scalar matcher cannot be built from vector pattern")
},
PhKind::Scalar =>
ScalMatcher::Placeh { key: name.clone() },
},
MacTok::S(c, body) => ScalMatcher::S(*c, Box::new(mk_any(body))),
MacTok::Lambda(arg, body) => ScalMatcher::Lambda(Box::new(mk_any(arg)), Box::new(mk_any(body))),
MacTok::Ref(_) | MacTok::Slot(_) => panic!("Extension-only variants"),
}
}

#[cfg(test)]
mod test {
use std::sync::Arc;

use orchid_api::PhKind;
use orchid_base::{intern, location::SourceRange, sym, tree::Ph, tokens::Paren};

use crate::macros::{MacTok, MacTree};

use super::mk_any;

#[test]
fn test_scan() {
let ex = |tok: MacTok| MacTree{ tok: Arc::new(tok), pos: SourceRange::mock().pos() };
let pattern = vec![
ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 0, at_least_one: false },
name: intern!(str: "::prefix"),
})),
ex(MacTok::Name(sym!(prelude::do))),
ex(MacTok::S(
Paren::Round,
vec![
ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 0, at_least_one: false },
name: intern!(str: "expr"),
})),
ex(MacTok::Name(sym!(prelude::;))),
ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 1, at_least_one: false },
name: intern!(str: "rest"),
})),
],
)),
ex(MacTok::Ph(Ph {
kind: PhKind::Vector { priority: 0, at_least_one: false },
name: intern!(str: "::suffix"),
})),
];
let matcher = mk_any(&pattern);
println!("{matcher}");
}
}
21
orchid-host/src/rule/matcher.rs
Normal file
@@ -0,0 +1,21 @@
//! Abstract definition of a rule matcher, so that the implementation can
//! eventually be swapped out for a different one.

use std::rc::Rc;

use orchid_base::name::Sym;

use super::state::State;
use crate::macros::MacTree;

/// Cacheable optimized structures for matching patterns on slices. This is
/// injected to allow experimentation in the matcher implementation.
pub trait Matcher {
/// Build matcher for a pattern
#[must_use]
fn new(pattern: Rc<Vec<MacTree>>) -> Self;
/// Apply matcher to a token sequence
#[must_use]
fn apply<'a>(&self, source: &'a [MacTree], save_loc: &impl Fn(Sym) -> bool)
-> Option<State<'a>>;
}
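As a hedged sketch (not part of this commit), the Matcher trait above could be backed by the vectree implementation from build.rs and any_match.rs roughly as follows. It assumes the trait's `State` is the `MatchState` defined in state.rs and that `mod matcher` is re-enabled in rule/mod.rs; the wrapper name is hypothetical.

// Sketch only: would live in rule/matcher.rs or a sibling module that imports `Matcher`.
use std::rc::Rc;
use orchid_base::name::Sym;
use super::any_match::any_match;
use super::build::mk_any;
use super::shared::AnyMatcher;
use super::state::MatchState as State; // assumption: the trait's `State` aliases MatchState
use crate::macros::MacTree;

pub struct VecTreeMatcher(AnyMatcher);
impl Matcher for VecTreeMatcher {
  // Compile the pattern into the cached matcher tree once, up front.
  fn new(pattern: Rc<Vec<MacTree>>) -> Self { Self(mk_any(pattern.as_slice())) }
  // Delegate application to the recursive walk in any_match.rs.
  fn apply<'a>(&self, source: &'a [MacTree], save_loc: &impl Fn(Sym) -> bool)
    -> Option<State<'a>>
  { any_match(&self.0, source, save_loc) }
}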
24
orchid-host/src/rule/mod.rs
Normal file
@@ -0,0 +1,24 @@
//! Optimized form of macro pattern that can be quickly tested against the AST.
//!
//! # Construction
//!
//! convert pattern into hierarchy of plain, scan, middle
//! - plain: accept any sequence or any non-empty sequence
//! - scan: a single scalar pattern moves LTR or RTL, submatchers on either
//! side
//! - middle: two scalar patterns walk over all permutations of matches
//! while getting progressively closer to each other
//!
//! # Application
//!
//! walk over the current matcher's valid options and poll the submatchers
//! for each of them

mod any_match;
mod build;
mod scal_match;
pub mod shared;
mod vec_match;
pub mod state;
mod vec_attrs;
// pub mod matcher;
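For illustration only (not in the commit): a small sketch, placed inside the rule module, of how the construction step described in the module doc splits a pattern into the two top-level shapes via this module's own mk_any. The helper name is hypothetical.

use super::build::mk_any;
use super::shared::AnyMatcher;
use crate::macros::MacTree;

// Reports which shape a pattern compiles to.
fn matcher_shape(pattern: &[MacTree]) -> &'static str {
  match mk_any(pattern) {
    // no vectorial placeholders: plain fixed-length scalar matching
    AnyMatcher::Scalar(_) => "scalar-only (plain)",
    // framed by vectorials: scan/middle submatchers absorb variable-length regions
    AnyMatcher::Vec { .. } => "vectorial (scan/middle)",
  }
}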
42
orchid-host/src/rule/scal_match.rs
Normal file
@@ -0,0 +1,42 @@
use orchid_base::name::Sym;

use super::any_match::any_match;
use super::shared::ScalMatcher;
use crate::{macros::{MacTok, MacTree}, rule::state::{MatchState, StateEntry}};

#[must_use]
pub fn scal_match<'a>(
matcher: &ScalMatcher,
expr: &'a MacTree,
save_loc: &impl Fn(Sym) -> bool,
) -> Option<MatchState<'a>> {
match (matcher, &*expr.tok) {
(ScalMatcher::Name(n1), MacTok::Name(n2)) if n1 == n2 => Some(match save_loc(n1.clone()) {
true => MatchState::from_name(n1.clone(), expr.pos.clone()),
false => MatchState::default(),
}),
(ScalMatcher::Placeh { key }, _) =>
Some(MatchState::from_ph(key.clone(), StateEntry::Scalar(expr))),
(ScalMatcher::S(c1, b_mat), MacTok::S(c2, body)) if c1 == c2 =>
any_match(b_mat, &body[..], save_loc),
(ScalMatcher::Lambda(arg_mat, b_mat), MacTok::Lambda(arg, body)) =>
Some(any_match(arg_mat, arg, save_loc)?.combine(any_match(b_mat, body, save_loc)?)),
_ => None,
}
}

#[must_use]
pub fn scalv_match<'a>(
matchers: &[ScalMatcher],
seq: &'a [MacTree],
save_loc: &impl Fn(Sym) -> bool,
) -> Option<MatchState<'a>> {
if seq.len() != matchers.len() {
return None;
}
let mut state = MatchState::default();
for (matcher, expr) in matchers.iter().zip(seq.iter()) {
state = state.combine(scal_match(matcher, expr, save_loc)?);
}
Some(state)
}
121
orchid-host/src/rule/shared.rs
Normal file
@@ -0,0 +1,121 @@
//! Datastructures for cached pattern

use std::fmt;

use itertools::Itertools;
use orchid_base::interner::Tok;

use super::any_match::any_match;
use super::build::mk_any;
use orchid_base::name::Sym;
use crate::macros::MacTree;
use crate::rule::state::MatchState;
use orchid_base::side::Side;
use orchid_base::tokens::{Paren, PARENS};

pub enum ScalMatcher {
Name(Sym),
S(Paren, Box<AnyMatcher>),
Lambda(Box<AnyMatcher>, Box<AnyMatcher>),
Placeh { key: Tok<String> },
}

pub enum VecMatcher {
Placeh {
key: Tok<String>,
nonzero: bool,
},
Scan {
left: Box<VecMatcher>,
sep: Vec<ScalMatcher>,
right: Box<VecMatcher>,
/// The separator traverses the sequence towards this side
direction: Side,
},
Middle {
/// Matches the left outer region
left: Box<VecMatcher>,
/// Matches the left separator
left_sep: Vec<ScalMatcher>,
/// Matches the middle - can only ever be a plain placeholder
mid: Box<VecMatcher>,
/// Matches the right separator
right_sep: Vec<ScalMatcher>,
/// Matches the right outer region
right: Box<VecMatcher>,
/// Order of significance for sorting equally good matches based on
/// the length of matches on either side.
///
/// Vectorial keys that appear on either side, in priority order
key_order: Vec<Tok<String>>,
},
}

pub enum AnyMatcher {
Scalar(Vec<ScalMatcher>),
Vec { left: Vec<ScalMatcher>, mid: VecMatcher, right: Vec<ScalMatcher> },
}

// ################ Display ################

impl fmt::Display for ScalMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Placeh { key } => write!(f, "${key}"),
Self::Name(n) => write!(f, "{n}"),
Self::S(t, body) => {
let (l, r, _) = PARENS.iter().find(|r| r.2 == *t).unwrap();
write!(f, "{l}{body}{r}")
},
Self::Lambda(arg, body) => write!(f, "\\{arg}.{body}"),
}
}
}

impl fmt::Display for VecMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Placeh { key, nonzero: true } => write!(f, "...${key}"),
Self::Placeh { key, nonzero: false } => write!(f, "..${key}"),
Self::Scan { left, sep, right, direction } => {
let arrow = if direction == &Side::Left { "<==" } else { "==>" };
write!(f, "Scan{{{left} {arrow} {} {arrow} {right}}}", sep.iter().join(" "))
},
Self::Middle { left, left_sep, mid, right_sep, right, .. } => {
let left_sep_s = left_sep.iter().join(" ");
let right_sep_s = right_sep.iter().join(" ");
write!(f, "Middle{{{left}|{left_sep_s}|{mid}|{right_sep_s}|{right}}}")
},
}
}
}

impl fmt::Display for AnyMatcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Scalar(s) => {
write!(f, "({})", s.iter().join(" "))
},
Self::Vec { left, mid, right } => {
let lefts = left.iter().join(" ");
let rights = right.iter().join(" ");
write!(f, "[{lefts}|{mid}|{rights}]")
},
}
}
}

// ################ External ################

/// A priority-order tree of the vectorial placeholders with scalars as leaves.
pub struct Matcher(AnyMatcher);
impl Matcher {
pub fn new(pattern: &[MacTree]) -> Self { Self(mk_any(pattern)) }
pub fn apply<'a>(&self, seq: &'a [MacTree], save_loc: &impl Fn(Sym) -> bool) -> Option<MatchState<'a>> {
any_match(&self.0, seq, save_loc)
}
}

impl fmt::Display for Matcher {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(f) }
}
84
orchid-host/src/rule/state.rs
Normal file
@@ -0,0 +1,84 @@
use std::sync::Arc;

use hashbrown::HashMap;
use orchid_api::PhKind;
use orchid_base::tree::Ph;
use orchid_base::{interner::Tok, join::join_maps};
use orchid_base::location::Pos;

use crate::macros::{MacTok, MacTree};
use orchid_base::name::Sym;

#[derive(Clone, Copy, Debug)]
pub enum StateEntry<'a> {
Vec(&'a [MacTree]),
Scalar(&'a MacTree),
}
#[derive(Clone)]
pub struct MatchState<'a> {
placeholders: HashMap<Tok<String>, StateEntry<'a>>,
name_posv: HashMap<Sym, Vec<Pos>>,
}
impl<'a> MatchState<'a> {
pub fn from_ph(key: Tok<String>, entry: StateEntry<'a>) -> Self {
Self { placeholders: HashMap::from([(key, entry)]), name_posv: HashMap::new() }
}
pub fn combine(self, s: Self) -> Self {
Self {
placeholders: self.placeholders.into_iter().chain(s.placeholders).collect(),
name_posv: join_maps(self.name_posv, s.name_posv, |_, l, r| {
l.into_iter().chain(r).collect()
}),
}
}
pub fn ph_len(&self, key: &Tok<String>) -> Option<usize> {
match self.placeholders.get(key)? {
StateEntry::Vec(slc) => Some(slc.len()),
_ => None,
}
}
pub fn from_name(name: Sym, location: Pos) -> Self {
Self { name_posv: HashMap::from([(name, vec![location])]), placeholders: HashMap::new() }
}
}
impl Default for MatchState<'static> {
fn default() -> Self { Self { name_posv: HashMap::new(), placeholders: HashMap::new() } }
}

#[must_use]
pub fn apply_exprv(template: &[MacTree], state: &MatchState) -> Vec<MacTree> {
template.iter().map(|e| apply_expr(e, state)).flat_map(Vec::into_iter).collect()
}

#[must_use]
pub fn apply_expr(template: &MacTree, state: &MatchState) -> Vec<MacTree> {
let MacTree { pos, tok } = template;
match &**tok {
MacTok::Name(n) => match state.name_posv.get(n) {
None => vec![template.clone()],
Some(locs) => vec![MacTree { tok: tok.clone(), pos: locs[0].clone() }],
},
MacTok::Atom(_) => vec![template.clone()],
MacTok::S(c, body) => vec![MacTree {
pos: pos.clone(), tok: Arc::new(MacTok::S(*c, apply_exprv(body.as_slice(), state))),
}],
MacTok::Ph(Ph { name, kind }) => {
let Some(value) = state.placeholders.get(name) else {
panic!("Placeholder does not have a value in state")
};
match (kind, value) {
(PhKind::Scalar, StateEntry::Scalar(item)) => vec![(*item).clone()],
(PhKind::Vector { .. }, StateEntry::Vec(chunk)) => chunk.to_vec(),
_ => panic!("Type mismatch between template and state"),
}
},
MacTok::Lambda(arg, body) => vec![MacTree {
pos: pos.clone(),
tok: Arc::new(MacTok::Lambda(
apply_exprv(arg, state),
apply_exprv(&body[..], state),
)),
}],
MacTok::Slot(_) | MacTok::Ref(_) => panic!("Extension-only variants"),
}
}
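Taken together, shared.rs, any_match.rs and state.rs give the host a complete match-and-substitute pipeline. A hedged end-to-end sketch (the helper and its save_loc policy are illustrative, not part of the commit):

use orchid_base::name::Sym;
use crate::macros::MacTree;
use crate::rule::shared::Matcher;
use crate::rule::state::apply_exprv;

// Match `pattern` against `source` and, on success, instantiate `template`
// with the captured placeholders. Name locations are not collected here.
fn rewrite(pattern: &[MacTree], template: &[MacTree], source: &[MacTree]) -> Option<Vec<MacTree>> {
  let matcher = Matcher::new(pattern);
  let state = matcher.apply(source, &|_: Sym| false)?;
  Some(apply_exprv(template, &state))
}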
16
orchid-host/src/rule/vec_attrs.rs
Normal file
@@ -0,0 +1,16 @@
use orchid_api::PhKind;
use orchid_base::interner::Tok;
use orchid_base::tree::Ph;

use crate::macros::{MacTok, MacTree};

/// Returns the name, priority and at_least_one of the expression if it is
/// a vectorial placeholder
#[must_use]
pub fn vec_attrs(expr: &MacTree) -> Option<(Tok<String>, u8, bool)> {
match (*expr.tok).clone() {
MacTok::Ph(Ph { kind: PhKind::Vector { priority, at_least_one }, name }) =>
Some((name, priority, at_least_one)),
_ => None,
}
}
94
orchid-host/src/rule/vec_match.rs
Normal file
@@ -0,0 +1,94 @@
use std::cmp::Ordering;

use itertools::Itertools;

use super::scal_match::scalv_match;
use super::shared::VecMatcher;
use orchid_base::name::Sym;
use crate::{macros::MacTree, rule::state::{MatchState, StateEntry}};

#[must_use]
pub fn vec_match<'a>(
matcher: &VecMatcher,
seq: &'a [MacTree],
save_loc: &impl Fn(Sym) -> bool,
) -> Option<MatchState<'a>> {
match matcher {
VecMatcher::Placeh { key, nonzero } => {
if *nonzero && seq.is_empty() {
return None;
}
return Some(MatchState::from_ph(key.clone(), StateEntry::Vec(seq)));
},
VecMatcher::Scan { left, sep, right, direction } => {
if seq.len() < sep.len() {
return None;
}
for lpos in direction.walk(0..=seq.len() - sep.len()) {
let rpos = lpos + sep.len();
let state = vec_match(left, &seq[..lpos], save_loc)
.and_then(|s| Some(s.combine(scalv_match(sep, &seq[lpos..rpos], save_loc)?)))
.and_then(|s| Some(s.combine(vec_match(right, &seq[rpos..], save_loc)?)));
if let Some(s) = state {
return Some(s);
}
}
None
},
// XXX predict heap space usage and allocation count
VecMatcher::Middle { left, left_sep, mid, right_sep, right, key_order } => {
if seq.len() < left_sep.len() + right_sep.len() {
return None;
}
// Valid locations for the left separator
let lposv = seq[..seq.len() - right_sep.len()]
.windows(left_sep.len())
.enumerate()
.filter_map(|(i, window)| scalv_match(left_sep, window, save_loc).map(|s| (i, s)))
.collect::<Vec<_>>();
// Valid locations for the right separator
let rposv = seq[left_sep.len()..]
.windows(right_sep.len())
.enumerate()
.filter_map(|(i, window)| scalv_match(right_sep, window, save_loc).map(|s| (i, s)))
.collect::<Vec<_>>();
// Valid combinations of locations for the separators
let mut pos_pairs = lposv
.into_iter()
.cartesian_product(rposv)
.filter(|((lpos, _), (rpos, _))| lpos + left_sep.len() <= *rpos)
.map(|((lpos, lstate), (rpos, rstate))| (lpos, rpos, lstate.combine(rstate)))
.collect::<Vec<_>>();
// In descending order of size
pos_pairs.sort_by_key(|(l, r, _)| -((r - l) as i64));
let eql_clusters = pos_pairs.into_iter().chunk_by(|(al, ar, _)| ar - al);
for (_gap_size, cluster) in eql_clusters.into_iter() {
let best_candidate = cluster
.into_iter()
.filter_map(|(lpos, rpos, state)| {
Some(
state
.combine(vec_match(left, &seq[..lpos], save_loc)?)
.combine(vec_match(mid, &seq[lpos + left_sep.len()..rpos], save_loc)?)
.combine(vec_match(right, &seq[rpos + right_sep.len()..], save_loc)?),
)
})
.max_by(|a, b| {
for key in key_order {
let alen = a.ph_len(key).expect("key_order references scalar or missing");
let blen = b.ph_len(key).expect("key_order references scalar or missing");
match alen.cmp(&blen) {
Ordering::Equal => (),
any => return any,
}
}
Ordering::Equal
});
if let Some(state) = best_candidate {
return Some(state);
}
}
None
},
}
}
@@ -3,7 +3,6 @@ use std::path::PathBuf;
use std::sync::Mutex;
use std::{process, thread};

use orchid_api::ExtensionHeader;
use orchid_api_traits::{Decode, Encode};
use orchid_base::logging::Logger;
use orchid_base::msg::{recv_msg, send_msg};
@@ -15,7 +14,7 @@ pub struct Subprocess {
child: Mutex<process::Child>,
stdin: Mutex<process::ChildStdin>,
stdout: Mutex<process::ChildStdout>,
header: ExtensionHeader,
header: api::ExtensionHeader,
}
impl Subprocess {
pub fn new(mut cmd: process::Command, logger: Logger) -> io::Result<Self> {
@@ -30,7 +29,7 @@ impl Subprocess {
api::HostHeader { log_strategy: logger.strat() }.encode(&mut stdin);
stdin.flush()?;
let mut stdout = child.stdout.take().unwrap();
let header = ExtensionHeader::decode(&mut stdout);
let header = api::ExtensionHeader::decode(&mut stdout);
let child_stderr = child.stderr.take().unwrap();
thread::Builder::new().name(format!("stderr-fwd:{prog}")).spawn(move || {
let mut reader = io::BufReader::new(child_stderr);

@@ -4,9 +4,9 @@ use std::sync::{Mutex, OnceLock};
use itertools::Itertools;
use never::Never;
use orchid_base::error::OrcRes;
use orchid_base::interner::{deintern, intern, Tok};
use orchid_base::interner::{intern, Tok};
use orchid_base::location::Pos;
use orchid_base::macros::{mtreev_from_api, MTree};
use orchid_base::macros::mtreev_from_api;
use orchid_base::name::Sym;
use orchid_base::parse::{Comment, Import};
use orchid_base::tree::{TokTree, Token};
@@ -16,6 +17,7 @@ use substack::{with_iter_stack, Substack};
use crate::api;
use crate::expr::Expr;
use crate::extension::{AtomHand, System};
use crate::macros::{MacTok, MacTree};

pub type ParsTokTree = TokTree<'static, AtomHand, Never>;
pub type ParsTok = Token<'static, AtomHand, Never>;
@@ -36,7 +37,7 @@ pub enum ItemKind {
}

impl Item {
pub fn from_api<'a>(
pub fn from_api(
tree: api::Item,
path: Substack<Tok<String>>,
sys: &System
@@ -44,12 +45,12 @@ impl Item {
let kind = match tree.kind {
api::ItemKind::Member(m) => ItemKind::Member(Member::from_api(m, path, sys)),
api::ItemKind::Import(i) =>
ItemKind::Import(Import{ path: Sym::deintern(i).iter().collect(), name: None }),
api::ItemKind::Export(e) => ItemKind::Export(deintern(e)),
ItemKind::Import(Import{ path: Sym::from_api(i).iter().collect(), name: None }),
api::ItemKind::Export(e) => ItemKind::Export(Tok::from_api(e)),
api::ItemKind::Macro(api::MacroBlock { priority, rules }) => ItemKind::Macro(priority, {
Vec::from_iter(rules.into_iter().map(|api| Rule {
pos: Pos::from_api(&api.location),
pattern: mtreev_from_api(&api.pattern),
pattern: mtreev_from_api(&api.pattern, &mut |a| MacTok::Atom(AtomHand::from_api(a.clone()))),
kind: RuleKind::Remote(sys.clone(), api.id),
comments: api.comments.iter().map(Comment::from_api).collect_vec()
}))
@@ -67,19 +68,19 @@ pub struct Member {
pub lazy: Mutex<Option<LazyMemberHandle>>,
}
impl Member {
pub fn from_api<'a>(
pub fn from_api(
api: api::Member,
path: Substack<Tok<String>>,
sys: &System,
) -> Self {
let name = deintern(api.name);
let name = Tok::from_api(api.name);
let full_path = path.push(name.clone());
let kind = match api.kind {
api::MemberKind::Lazy(id) =>
return LazyMemberHandle(id, sys.clone(), intern(&full_path.unreverse())).to_member(name),
return LazyMemberHandle(id, sys.clone(), intern(&full_path.unreverse())).into_member(name),
api::MemberKind::Const(c) => MemberKind::Const(Code::from_expr(
CodeLocator::to_const(full_path.unreverse()),
Expr::from_api(c, &mut ())
Expr::from_api(&c, &mut ())
)),
api::MemberKind::Module(m) => MemberKind::Mod(Module::from_api(m, full_path, sys)),
};
@@ -129,7 +130,7 @@ impl LazyMemberHandle {
pub fn run(self) -> OrcRes<MemberKind> {
match self.1.get_tree(self.0) {
api::MemberKind::Const(c) => Ok(MemberKind::Const(Code {
bytecode: Expr::from_api(c, &mut ()).into(),
bytecode: Expr::from_api(&c, &mut ()).into(),
locator: CodeLocator { steps: self.2, rule_loc: None },
source: None,
})),
@@ -139,7 +140,7 @@ impl LazyMemberHandle {
api::MemberKind::Lazy(id) => Self(id, self.1, self.2).run(),
}
}
pub fn to_member(self, name: Tok<String>) -> Member {
pub fn into_member(self, name: Tok<String>) -> Member {
Member { name, kind: OnceLock::new(), lazy: Mutex::new(Some(self)) }
}
}
@@ -148,7 +149,7 @@ impl LazyMemberHandle {
pub struct Rule {
pub pos: Pos,
pub comments: Vec<Comment>,
pub pattern: Vec<MTree<'static>>,
pub pattern: Vec<MacTree>,
pub kind: RuleKind,
}

@@ -7,7 +7,7 @@ use orchid_api_derive::Coding;
use orchid_api_traits::{Encode, Request};
use orchid_base::error::{mk_errv, OrcRes};
use orchid_base::intern;
use orchid_base::interner::{deintern, intern, Tok};
use orchid_base::interner::{intern, Tok};
use orchid_extension::atom::{AtomMethod, Atomic, MethodSet, Supports, TypAtom};
use orchid_extension::atom_owned::{DeserializeCtx, OwnedAtom, OwnedVariant};
use orchid_extension::conv::TryFromExpr;
@@ -66,7 +66,7 @@ impl From<Tok<String>> for IntStrAtom {
}
impl OwnedAtom for IntStrAtom {
type Refs = ();
fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(self.0.marker()) }
fn val(&self) -> Cow<'_, Self::Data> { Cow::Owned(self.0.to_api()) }
fn print(&self, _ctx: SysCtx) -> String { format!("{:?}i", *self.0) }
fn serialize(&self, _: SysCtx, write: &mut (impl io::Write + ?Sized)) { self.0.encode(write) }
fn deserialize(ctx: impl DeserializeCtx, _: ()) -> Self { Self(intern(&ctx.decode::<String>())) }
@@ -80,7 +80,7 @@ pub enum OrcString<'a> {
impl<'a> OrcString<'a> {
pub fn get_string(&self) -> Arc<String> {
match &self {
Self::Int(tok) => deintern(tok.value).arc(),
Self::Int(tok) => Tok::from_api(tok.value).arc(),
Self::Val(atom) => atom.request(StringGetVal),
}
}
@@ -108,8 +108,7 @@ impl Lexer for StringLexer {
.at(ctx.tok_ran(str.len() as u32, tail))
};
let add_frag = |prev: GenTokTree<'a>, new: GenTokTree<'a>| {
let range = prev.range.start..new.range.end;
wrap_tokv(vname_tv(&vname!(std::string::concat), new.range.end).chain([prev, new]), range)
wrap_tokv(vname_tv(&vname!(std::string::concat), new.range.end).chain([prev, new]))
};
loop {
if let Some(rest) = tail.strip_prefix('"') {

@@ -1,23 +0,0 @@
/// Utility functions to get rid of tedious explicit casts to
/// BoxedIter
use std::iter;

/// A trait object of [Iterator] to be assigned to variables that may be
/// initialized from multiple iterators of incompatible types
pub type BoxedIter<'a, T> = Box<dyn Iterator<Item = T> + 'a>;
/// creates a [BoxedIter] of a single element
pub fn box_once<'a, T: 'a>(t: T) -> BoxedIter<'a, T> { Box::new(iter::once(t)) }
/// creates an empty [BoxedIter]
pub fn box_empty<'a, T: 'a>() -> BoxedIter<'a, T> { Box::new(iter::empty()) }

/// Chain various iterators into a [BoxedIter]
macro_rules! box_chain {
($curr:expr) => {
Box::new($curr) as BoxedIter<_>
};
($curr:expr, $($rest:expr),*) => {
Box::new($curr$(.chain($rest))*) as $crate::utils::boxed_iter::BoxedIter<_>
};
}

pub(crate) use box_chain;
Block a user