Merge remote-tracking branch 'origin/master'

2023-02-16 20:54:58 +00:00
63 changed files with 3224 additions and 2855 deletions

View File

@@ -1,79 +1,84 @@
use mappable_rc::Mrc;
use crate::utils::collect_to_mrc;
use crate::utils::{collect_to_mrc, to_mrc_slice};
use super::super::representations::typed::{Clause, Expr};
pub fn apply_lambda(id: u64, value: Mrc<Expr>, body: Mrc<Expr>) -> Mrc<Expr> {
apply_lambda_expr_rec(id, value, Mrc::clone(&body))
.unwrap_or(body)
apply_lambda_expr_rec(id, value, Mrc::clone(&body))
.unwrap_or(body)
}
fn apply_lambda_expr_rec(
id: u64, value: Mrc<Expr>, expr: Mrc<Expr>
id: u64, value: Mrc<Expr>, expr: Mrc<Expr>
) -> Option<Mrc<Expr>> {
let Expr(clause, typ) = expr.as_ref();
match clause {
Clause::Argument(arg_id) if *arg_id == id => {
let full_typ = collect_to_mrc(
value.1.iter()
.chain(typ.iter())
.cloned()
);
Some(Mrc::new(Expr(value.0.to_owned(), full_typ)))
}
cl => {
apply_lambda_clause_rec(id, value, clause.clone())
.map(|c| Mrc::new(Expr(c, Mrc::clone(typ))))
}
let Expr(clause, typ) = expr.as_ref();
match clause {
Clause::LambdaArg(arg_id) | Clause::AutoArg(arg_id) if *arg_id == id => {
let full_typ = collect_to_mrc(
value.1.iter()
.chain(typ.iter())
.cloned()
);
Some(Mrc::new(Expr(value.0.to_owned(), full_typ)))
}
cl => {
apply_lambda_clause_rec(id, value, cl.clone())
.map(|c| Mrc::new(Expr(c, Mrc::clone(typ))))
}
}
}
fn apply_lambda_clause_rec(
id: u64, value: Mrc<Expr>, clause: Clause
id: u64, value: Mrc<Expr>, clause: Clause
) -> Option<Clause> {
match clause {
// Only element actually manipulated
Clause::Argument(id) => panic!(
"apply_lambda_expr_rec is supposed to eliminate this case"),
// Traverse, yield Some if either had changed.
Clause::Apply(f, x) => {
let new_f = apply_lambda_expr_rec(
id, Mrc::clone(&value), Mrc::clone(&f)
);
let new_x = apply_lambda_expr_rec(
id, value, Mrc::clone(&x)
);
match (new_f, new_x) { // Mind the shadows
(None, None) => None,
(None, Some(x)) => Some(Clause::Apply(f, x)),
(Some(f), None) => Some(Clause::Apply(f, x)),
(Some(f), Some(x)) => Some(Clause::Apply(f, x))
}
},
Clause::Lambda(own_id, t, b) => apply_lambda__traverse_param(id, value, own_id, t, b, Clause::Lambda),
Clause::Auto(own_id, t, b) => apply_lambda__traverse_param(id, value, own_id, t, b, Clause::Auto),
// Leaf nodes
Clause::Atom(_) | Clause::ExternFn(_) | Clause::Literal(_) => None
}
match clause {
// Only element actually manipulated
Clause::LambdaArg(_) | Clause::AutoArg(_) => Some(clause),
// Traverse, yield Some if either had changed.
Clause::Apply(f, x) => {
let new_f = apply_lambda_expr_rec(
id, Mrc::clone(&value), Mrc::clone(&f)
);
let new_x = apply_lambda_expr_rec(
id, value, Mrc::clone(&x)
);
match (new_f, new_x) { // Mind the shadows
(None, None) => None,
(None, Some(x)) => Some(Clause::Apply(f, x)),
(Some(f), None) => Some(Clause::Apply(f, x)),
(Some(f), Some(x)) => Some(Clause::Apply(f, x))
}
},
Clause::Lambda(own_id, t, b) => apply_lambda__traverse_param(id, value, own_id, t, b, Clause::Lambda),
Clause::Auto(own_id, t, b) => apply_lambda__traverse_param(id, value, own_id, t, b, Clause::Auto),
// Leaf nodes
Clause::Atom(_) | Clause::ExternFn(_) | Clause::Literal(_) => None
}
}
fn apply_lambda__traverse_param(
id: u64, value: Mrc<Expr>,
own_id: u64, t: Option<Mrc<Clause>>, b: Mrc<Expr>,
wrap: impl Fn(u64, Option<Mrc<Clause>>, Mrc<Expr>) -> Clause
id: u64, value: Mrc<Expr>,
own_id: u64, typ: Mrc<[Clause]>, b: Mrc<Expr>,
wrap: impl Fn(u64, Mrc<[Clause]>, Mrc<Expr>) -> Clause
) -> Option<Clause> {
let new_t = t.and_then(|t| apply_lambda_clause_rec(
id, Mrc::clone(&value), t.as_ref().clone()
));
// Respect shadowing
let new_b = if own_id == id {None} else {
apply_lambda_expr_rec(id, value, Mrc::clone(&b))
};
match (new_t, new_b) { // Mind the shadows
(None, None) => None,
(None, Some(b)) => Some(wrap(own_id, t, b)),
(Some(t), None) => Some(wrap(own_id, Some(Mrc::new(t)), b)),
(Some(t), Some(b)) => Some(wrap(own_id, Some(Mrc::new(t)), b))
}
let any_t = false;
let mut t_acc = vec![];
for t in typ.iter() {
let newt = apply_lambda_clause_rec(id, Mrc::clone(&value), t.clone());
any_t |= newt.is_some();
t_acc.push(newt.unwrap_or_else(|| t.clone()))
}
// Respect shadowing
let new_b = if own_id == id {None} else {
apply_lambda_expr_rec(id, value, Mrc::clone(&b))
};
if any_t { // mind the shadows
let typ = to_mrc_slice(t_acc);
if let Some(b) = new_b {
Some(wrap(own_id, typ, b))
} else {Some(wrap(own_id, typ, b))}
} else if let Some(b) = new_b {
Some(wrap(own_id, typ, b))
} else {Some(wrap(own_id, typ, b))}
}
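A convention worth spelling out for the hunk above: every `*_rec` helper returns `None` when its subtree is untouched, so callers can keep sharing the original `Mrc` node and only rebuild the spine that actually changed. A minimal sketch of the same substitution pattern on a simplified, self-contained tree (`Ex` and `subst` are illustrative names, not this crate's API):

```rust
#[derive(Clone, Debug)]
enum Ex {
    Arg(u64),
    Lambda(u64, Box<Ex>),
    Apply(Box<Ex>, Box<Ex>),
}

/// Substitute `value` for argument `id`; `None` means "unchanged, reuse the
/// original node", mirroring how apply_lambda_expr_rec reports its result.
fn subst(id: u64, value: &Ex, ex: &Ex) -> Option<Ex> {
    match ex {
        Ex::Arg(i) if *i == id => Some(value.clone()),
        Ex::Arg(_) => None,
        // A lambda that rebinds the same id shadows it: don't descend.
        Ex::Lambda(own, _) if *own == id => None,
        Ex::Lambda(own, body) =>
            subst(id, value, body).map(|b| Ex::Lambda(*own, Box::new(b))),
        Ex::Apply(f, x) => {
            let nf = subst(id, value, f);
            let nx = subst(id, value, x);
            if nf.is_none() && nx.is_none() { return None; }
            Some(Ex::Apply(
                Box::new(nf.unwrap_or_else(|| (**f).clone())),
                Box::new(nx.unwrap_or_else(|| (**x).clone())),
            ))
        }
    }
}
```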

View File

@@ -1,104 +0,0 @@
use std::any::Any;
use std::fmt::{Display, Debug};
use std::hash::Hash;
use mappable_rc::Mrc;
use crate::representations::typed::{Expr, Clause};
pub trait ExternError: Display {}
/// Represents an externally defined function from the perspective of the executor
/// Since Orchid lacks basic numerical operations, these are also external functions.
pub struct ExternFn {
name: String, param: Mrc<Expr>, rttype: Mrc<Expr>,
function: Mrc<dyn Fn(Clause) -> Result<Clause, Mrc<dyn ExternError>>>
}
impl ExternFn {
pub fn new<F: 'static + Fn(Clause) -> Result<Clause, Mrc<dyn ExternError>>>(
name: String, param: Mrc<Expr>, rttype: Mrc<Expr>, f: F
) -> Self {
Self {
name, param, rttype,
function: Mrc::map(Mrc::new(f), |f| {
f as &dyn Fn(Clause) -> Result<Clause, Mrc<dyn ExternError>>
})
}
}
pub fn name(&self) -> &str {&self.name}
pub fn apply(&self, arg: Clause) -> Result<Clause, Mrc<dyn ExternError>> {(self.function)(arg)}
}
impl Clone for ExternFn { fn clone(&self) -> Self { Self {
name: self.name.clone(),
param: Mrc::clone(&self.param),
rttype: Mrc::clone(&self.rttype),
function: Mrc::clone(&self.function)
}}}
impl Eq for ExternFn {}
impl PartialEq for ExternFn {
fn eq(&self, other: &Self) -> bool { self.name() == other.name() }
}
impl Hash for ExternFn {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.name.hash(state) }
}
impl Debug for ExternFn {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "##EXTERN[{}]:{:?} -> {:?}##", self.name(), self.param, self.rttype)
}
}
pub trait Atomic: Any + Debug where Self: 'static {
fn as_any(&self) -> &dyn Any;
fn definitely_eq(&self, _other: &dyn Any) -> bool;
fn hash(&self, hasher: &mut dyn std::hash::Hasher);
}
/// Represents a unit of information from the perspective of the executor. This may be
/// something like a file descriptor which functions can operate on, but it can also be
/// information in the universe of types or kinds such as the type of signed integers or
/// the kind of types. Ad absurdum it can also be just a number, although Literal is
/// preferable for types it's defined on.
pub struct Atom {
typ: Mrc<Expr>,
data: Mrc<dyn Atomic>
}
impl Atom {
pub fn new<T: 'static + Atomic>(data: T, typ: Mrc<Expr>) -> Self { Self{
typ,
data: Mrc::map(Mrc::new(data), |d| d as &dyn Atomic)
} }
pub fn data(&self) -> &dyn Atomic { self.data.as_ref() as &dyn Atomic }
pub fn try_cast<T: Atomic>(&self) -> Result<&T, ()> {
self.data().as_any().downcast_ref().ok_or(())
}
pub fn is<T: 'static>(&self) -> bool { self.data().as_any().is::<T>() }
pub fn cast<T: 'static>(&self) -> &T {
self.data().as_any().downcast_ref().expect("Type mismatch on Atom::cast")
}
}
impl Clone for Atom {
fn clone(&self) -> Self { Self {
typ: Mrc::clone(&self.typ),
data: Mrc::clone(&self.data)
} }
}
impl Hash for Atom {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.data.hash(state);
self.typ.hash(state)
}
}
impl Debug for Atom {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "##ATOM[{:?}]:{:?}##", self.data(), self.typ)
}
}
impl Eq for Atom {}
impl PartialEq for Atom {
fn eq(&self, other: &Self) -> bool {
self.data().definitely_eq(other.data().as_any())
}
}

View File

@@ -5,26 +5,26 @@ use crate::utils::collect_to_mrc;
use super::super::representations::typed::{Clause, Expr};
fn normalize(Expr(clause, typ): Expr) -> Expr {
todo!()
todo!()
}
fn collect_autos(
Expr(clause, typ): Expr,
arg_types: Vec<Mrc<[Clause]>>,
indirect_argt_trees: Vec<Mrc<[Clause]>>,
sunk_types: &mut dyn Iterator<Item = Clause>
Expr(clause, typ): Expr,
arg_types: Vec<Mrc<[Clause]>>,
indirect_argt_trees: Vec<Mrc<[Clause]>>,
sunk_types: &mut dyn Iterator<Item = Clause>
) -> (Vec<Mrc<[Clause]>>, Expr) {
if let Clause::Auto(argt, body) = clause {
}
else {(
arg_types,
Expr(
clause,
collect_to_mrc(
typ.iter().cloned()
.chain(sunk_types)
)
)
)}
if let Clause::Auto(argt, body) = clause {
}
else {(
arg_types,
Expr(
clause,
collect_to_mrc(
typ.iter().cloned()
.chain(sunk_types)
)
)
)}
}

View File

@@ -8,49 +8,41 @@ use super::super::representations::typed::{Clause, Expr};
use super::super::utils::Stackframe;
const PARAMETRICS_INLINE_COUNT:usize = 5;
type Parametrics<'a> = ProtoMap<'a, u64, bool, PARAMETRICS_INLINE_COUNT>;
// type Parametrics<'a> = ProtoMap<'a, u64, bool, PARAMETRICS_INLINE_COUNT>;
/// Hash the parts of an expression that are required to be equal for syntactic equality.
pub fn partial_hash_rec<H: Hasher>(
Expr(clause, _): &Expr, state: &mut H,
mut parametrics: Parametrics
Expr(clause, _): &Expr, state: &mut H,
parametrics: Option<&Stackframe<u64>>
) {
match clause {
// Skip autos
Clause::Auto(id, _, body) => {
parametrics.set(id, true);
partial_hash_rec(body, state, parametrics)
}
// Annotate everything else with a prefix
// - Recurse into the tree of lambdas and calls - classic lambda calc
Clause::Lambda(id, _, body) => {
state.write_u8(0);
parametrics.set(id, false);
partial_hash_rec(body, state, parametrics)
}
Clause::Apply(f, x) => {
state.write_u8(1);
partial_hash_rec(f, state, parametrics.clone());
partial_hash_rec(x, state, parametrics);
}
// - Only recognize the depth of an argument if it refers to a non-auto parameter
Clause::Argument(own_id) => {
let (pos, is_auto) = parametrics.iter()
.filter_map(|(id, is_auto)| is_auto.map(|is_auto| (*id, is_auto)))
.find_position(|(id, is_auto)| id == own_id)
.map(|(pos, (_, is_auto))| (pos, is_auto))
.unwrap_or((usize::MAX, false));
// If the argument references an auto, acknowledge its existence
if is_auto {
state.write_u8(2)
} else {
state.write_u8(3);
state.write_usize(pos)
}
}
// - Hash leaves like normal
Clause::Literal(lit) => { state.write_u8(4); lit.hash(state) }
Clause::Atom(at) => { state.write_u8(5); at.hash(state) }
Clause::ExternFn(f) => { state.write_u8(6); f.hash(state) }
match clause {
// Skip autos
Clause::Auto(id, _, body) => {
partial_hash_rec(body, state, parametrics)
}
// Annotate everything else with a prefix
// - Recurse into the tree of lambdas and calls - classic lambda calc
Clause::Lambda(id, _, body) => {
state.write_u8(0);
partial_hash_rec(body, state, Some(&Stackframe::opush(parametrics, *id)))
}
Clause::Apply(f, x) => {
state.write_u8(1);
partial_hash_rec(f, state, parametrics.clone());
partial_hash_rec(x, state, parametrics);
}
Clause::AutoArg(..) => state.write_u8(2),
// - Only recognize the depth of an argument if it refers to a non-auto parameter
Clause::LambdaArg(own_id) => {
let pos = parametrics
.and_then(|sf| sf.iter().position(|id| id == own_id))
.unwrap_or(usize::MAX);
state.write_u8(3);
state.write_usize(pos)
}
// - Hash leaves like normal
Clause::Literal(lit) => { state.write_u8(4); lit.hash(state) }
Clause::Atom(at) => { state.write_u8(5); at.hash(state) }
Clause::ExternFn(f) => { state.write_u8(6); f.hash(state) }
}
}
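The point of the `LambdaArg` arm above is that the hash covers the binder's position rather than its unique id, so alpha-equivalent terms hash the same. A self-contained sketch of that idea with a plain `Vec` standing in for `Stackframe` (names are illustrative, and innermost-first iteration order of the real `Stackframe` is an assumption):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;

enum Term {
    Lambda(u64, Box<Term>),
    Arg(u64),
}

fn hash_rec<H: Hasher>(t: &Term, state: &mut H, binders: &mut Vec<u64>) {
    match t {
        Term::Lambda(id, body) => {
            state.write_u8(0);
            binders.push(*id);
            hash_rec(body, state, binders);
            binders.pop();
        }
        Term::Arg(id) => {
            // Distance from the innermost binder; unbound args collapse to MAX.
            let pos = binders.iter().rev()
                .position(|b| b == id)
                .unwrap_or(usize::MAX);
            state.write_u8(3);
            state.write_usize(pos);
        }
    }
}

fn full_hash(t: &Term) -> u64 {
    let mut h = DefaultHasher::new();
    hash_rec(t, &mut h, &mut Vec::new());
    h.finish()
}

// \1.1 and \2.2 differ only in binder ids, yet hash identically:
// full_hash(&Term::Lambda(1, Box::new(Term::Arg(1))))
//   == full_hash(&Term::Lambda(2, Box::new(Term::Arg(2))))
```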

View File

@@ -10,88 +10,88 @@ use super::super::representations::typed::{Clause, Expr};
/// Call the function with the first Expression that isn't an Auto,
/// wrap all elements in the returned iterator back in the original sequence of Autos.
pub fn skip_autos<'a,
F: 'a + FnOnce(Mrc<Expr>) -> I,
I: Iterator<Item = Mrc<Expr>> + 'static
F: 'a + FnOnce(Mrc<Expr>) -> I,
I: Iterator<Item = Mrc<Expr>> + 'static
>(
expr: Mrc<Expr>, function: F
expr: Mrc<Expr>, function: F
) -> BoxedIter<'static, Mrc<Expr>> {
if let Expr(Clause::Auto(id, arg, body), typ) = expr.as_ref() {
return Box::new(skip_autos(Mrc::clone(body), function).map({
let arg = arg.as_ref().map(Mrc::clone);
let typ = Mrc::clone(typ);
move |body| {
Mrc::new(Expr(Clause::Auto(
*id,
arg.as_ref().map(Mrc::clone),
body
), Mrc::clone(&typ)))
}
})) as BoxedIter<'static, Mrc<Expr>>
}
Box::new(function(expr))
if let Expr(Clause::Auto(id, arg, body), typ) = expr.as_ref() {
return Box::new(skip_autos(Mrc::clone(body), function).map({
let arg = Mrc::clone(arg);
let typ = Mrc::clone(typ);
move |body| {
Mrc::new(Expr(Clause::Auto(
*id,
Mrc::clone(&arg),
body
), Mrc::clone(&typ)))
}
})) as BoxedIter<'static, Mrc<Expr>>
}
Box::new(function(expr))
}
/// Produces an iterator of every expression that can be produced from this one through beta-reduction.
fn direct_reductions(ex: Mrc<Expr>) -> impl Iterator<Item = Mrc<Expr>> {
skip_autos(ex, |mexpr| {
let Expr(clause, typ_ref) = mexpr.as_ref();
match clause {
Clause::Apply(f, x) => box_chain!(
skip_autos(Mrc::clone(f), |mexpr| {
let Expr(f, _) = mexpr.as_ref();
match f {
Clause::Lambda(id, _, body) => box_once(
apply_lambda(*id, Mrc::clone(x), Mrc::clone(body))
),
Clause::ExternFn(xfn) => {
let Expr(xval, xtyp) = x.as_ref();
xfn.apply(xval.clone())
.map(|ret| box_once(Mrc::new(Expr(ret, Mrc::clone(xtyp)))))
.unwrap_or(box_empty())
},
// Parametric newtypes are atoms of function type
Clause::Atom(..) | Clause::Argument(..) | Clause::Apply(..) => box_empty(),
Clause::Literal(lit) =>
panic!("Literal expression {lit:?} can't be applied as function"),
Clause::Auto(..) => unreachable!("skip_autos should have filtered this"),
}
}),
direct_reductions(Mrc::clone(f)).map({
let typ = Mrc::clone(typ_ref);
let x = Mrc::clone(x);
move |f| Mrc::new(Expr(Clause::Apply(
f,
Mrc::clone(&x)
), Mrc::clone(&typ)))
}),
direct_reductions(Mrc::clone(x)).map({
let typ = Mrc::clone(typ_ref);
let f = Mrc::clone(f);
move |x| Mrc::new(Expr(Clause::Apply(
Mrc::clone(&f),
x
), Mrc::clone(&typ)))
})
skip_autos(ex, |mexpr| {
let Expr(clause, typ_ref) = mexpr.as_ref();
match clause {
Clause::Apply(f, x) => box_chain!(
skip_autos(Mrc::clone(f), |mexpr| {
let Expr(f, _) = mexpr.as_ref();
match f {
Clause::Lambda(id, _, body) => box_once(
apply_lambda(*id, Mrc::clone(x), Mrc::clone(body))
),
Clause::Lambda(id, argt, body) => {
let id = *id;
let typ = Mrc::clone(typ_ref);
let argt = argt.as_ref().map(Mrc::clone);
let body = Mrc::clone(body);
let body_reductions = direct_reductions(body)
.map(move |body| {
let argt = argt.as_ref().map(Mrc::clone);
Mrc::new(Expr(
Clause::Lambda(id, argt, body),
Mrc::clone(&typ)
))
});
Box::new(body_reductions)
Clause::ExternFn(xfn) => {
let Expr(xval, xtyp) = x.as_ref();
xfn.apply(xval.clone())
.map(|ret| box_once(Mrc::new(Expr(ret, Mrc::clone(xtyp)))))
.unwrap_or(box_empty())
},
Clause::Literal(..) | Clause::ExternFn(..) | Clause::Atom(..) | Clause::Argument(..) =>
box_empty(),
// Parametric newtypes are atoms of function type
Clause::Atom(..) | Clause::LambdaArg(..) | Clause::AutoArg(..) | Clause::Apply(..) => box_empty(),
Clause::Literal(lit) =>
panic!("Literal expression {lit:?} can't be applied as function"),
Clause::Auto(..) => unreachable!("skip_autos should have filtered this"),
}
})
}
}),
direct_reductions(Mrc::clone(f)).map({
let typ = Mrc::clone(typ_ref);
let x = Mrc::clone(x);
move |f| Mrc::new(Expr(Clause::Apply(
f,
Mrc::clone(&x)
), Mrc::clone(&typ)))
}),
direct_reductions(Mrc::clone(x)).map({
let typ = Mrc::clone(typ_ref);
let f = Mrc::clone(f);
move |x| Mrc::new(Expr(Clause::Apply(
Mrc::clone(&f),
x
), Mrc::clone(&typ)))
})
),
Clause::Lambda(id, argt, body) => {
let id = *id;
let typ = Mrc::clone(typ_ref);
let argt = Mrc::clone(argt);
let body = Mrc::clone(body);
let body_reductions = direct_reductions(body)
.map(move |body| {
let argt = Mrc::clone(&argt);
Mrc::new(Expr(
Clause::Lambda(id, argt, body),
Mrc::clone(&typ)
))
});
Box::new(body_reductions)
},
Clause::Auto(..) => unreachable!("skip_autos should have filtered this"),
Clause::Literal(..) | Clause::ExternFn(..) | Clause::Atom(..)
| Clause::LambdaArg(..) | Clause::AutoArg(..) => box_empty(),
}
})
}
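`skip_autos` is a generic "peel the wrappers, run the callback on the core, re-wrap every result" transformation. The same shape on a toy type, without `Mrc` or `BoxedIter` (names and types here are illustrative only):

```rust
#[derive(Debug)]
enum T {
    Auto(Box<T>),
    Leaf(i32),
}

fn skip_wrappers<I>(t: T, f: impl FnOnce(T) -> I) -> Box<dyn Iterator<Item = T>>
where I: Iterator<Item = T> + 'static {
    match t {
        // Peel one wrapper, recurse, then put the wrapper back on every result.
        T::Auto(inner) => Box::new(
            skip_wrappers(*inner, f).map(|out| T::Auto(Box::new(out)))
        ),
        leaf => Box::new(f(leaf)),
    }
}

// skip_wrappers(T::Auto(Box::new(T::Leaf(1))), |core| std::iter::once(core))
// yields T::Auto(Box::new(T::Leaf(1))) again, with the callback having seen
// only the Leaf.
```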

View File

@@ -1,13 +1,11 @@
use std::collections::HashMap;
use std::hash::{Hasher, Hash};
use std::iter;
use itertools::Itertools;
use mappable_rc::Mrc;
use crate::utils::{ProtoMap, Side};
use crate::utils::{ProtoMap, Side, mrc_empty_slice, collect_to_mrc, Stackframe, mrc_concat, Product2};
use super::super::representations::typed::{Clause, Expr};
use super::super::utils::Stackframe;
pub fn swap<T, U>((t, u): (T, U)) -> (U, T) { (u, t) }
@@ -17,24 +15,92 @@ pub fn swap<T, U>((t, u): (T, U)) -> (U, T) { (u, t) }
// - get rid of leftovers from Explicit
// - adapt to new index-based system
// =@= =&= =%= =#= =$= =?= =!= =/=
// <@> <&> <%> <#> <$> <?> <!> </>
// |@| |&| |%| |#| |$| |?| |!| |/|
// {@} {&} {%} {#} {$} {?} {!} {/}
// (@) (&) (%) (#) ($) (?) (!) (/)
// [@] [&] [%] [#] [$] [?] [!] [/]
enum UnifError {
Conflict,
}
type LambdaMap<'a> = Option<&'a Stackframe<'a, (u64, u64)>>;
/// The context associates a given variable (by absolute index) on a given side to
/// an expression on the opposite side rooted at the specified depth.
/// The root depths are used to translate between de Bruijn arguments and absolute indices.
struct Context(HashMap<u64, Mrc<Expr>>);
impl Context {
fn set(&mut self, id: u64, value: Mrc<Expr>) {
// If already defined, then it must be an argument
if let Some(value) = self.0.get(&id) {
if let Clause::Argument(opposite_up) ex.0
}
}
fn set(&mut self, id: u64, value: &Mrc<Expr>, lambdas: LambdaMap) -> Result<Option<Mrc<Expr>>, UnifError> {
Ok(
if let Some(local) = self.0.get(&id) {
Some(
self.unify_expr(local, value, lambdas)?
.pick(Mrc::clone(local), Mrc::clone(value))
)
} else { None }
)
}
fn unify_expr(&mut self,
left: &Mrc<Expr>, right: &Mrc<Expr>, lambdas: LambdaMap
) -> Result<Product2<Mrc<Expr>>, UnifError> {
let Expr(left_val, left_typs) = left.as_ref();
let Expr(right_val, right_typs) = right.as_ref();
let val = match (left_val, right_val) {
(Clause::AutoArg(l), Clause::AutoArg(r)) if l == r => Product2::Either,
(Clause::AutoArg(id), _) => self.set(*id, left, lambdas)?.as_ref()
.map_or(Product2::Left, |e| Product2::New(e.0.clone())),
(_, Clause::AutoArg(id)) => self.set(*id, right, lambdas)?.as_ref()
.map_or(Product2::Right, |e| Product2::New(e.0.clone())),
_ => self.unify_clause(left_val, right_val, lambdas)?
};
Ok(match val {
Product2::Either if right_typs.is_empty() && left_typs.is_empty() => Product2::Either,
Product2::Left | Product2::Either if right_typs.is_empty() => Product2::Left,
Product2::Right | Product2::Either if left_typs.is_empty() => Product2::Right,
product => {
let all_types = mrc_concat(left_typs, right_typs);
Product2::New(Mrc::new(Expr(
product.pick(left_val.clone(), right_val.clone()),
all_types
)))
}
})
}
fn unify_clause(&mut self,
left: &Clause, right: &Clause, lambdas: LambdaMap
) -> Result<Product2<Clause>, UnifError> {
Ok(match (left, right) {
(Clause::Literal(l), Clause::Literal(r)) if l == r => Product2::Either,
(Clause::Atom(l), Clause::Atom(r)) if l == r => Product2::Either,
(Clause::ExternFn(l), Clause::ExternFn(r)) if l == r => Product2::Either,
(Clause::LambdaArg(l), Clause::LambdaArg(r)) => if l == r {Product2::Either} else {
let is_equal = Stackframe::o_into_iter(lambdas)
.first_some(|(l_candidate, r_candidate)| {
if l_candidate == l && r_candidate == r {Some(true)} // match
else if l_candidate == l || r_candidate == r {Some(false)} // shadow
else {None} // irrelevant
}).unwrap_or(false);
// Reference:
if is_equal {Product2::Left} else {return Err(UnifError::Conflict)}
}
(Clause::AutoArg(_), _) | (_, Clause::AutoArg(_)) => {
unreachable!("unify_expr should have handled this")
}
(Clause::Lambda(l_id, l_arg, l_body), Clause::Lambda(r_id, r_arg, r_body)) => {
let lambdas = Stackframe::opush(lambdas, (*l_id, *r_id));
self.unify_expr(l_body, r_body, Some(&lambdas))?
.map(|ex| Clause::Lambda(*l_id, mrc_empty_slice(), ex))
}
(Clause::Apply(l_f, l_x), Clause::Apply(r_f, r_x)) => {
self.unify_expr(l_f, r_f, lambdas)?.join((Mrc::clone(l_f), Mrc::clone(r_f)),
self.unify_expr(l_x, r_x, lambdas)?, (Mrc::clone(l_x), Mrc::clone(r_x))
).map(|(f, x)| Clause::Apply(f, x))
}
(Clause::Auto(l_id, l_arg, l_body), Clause::Auto(r_id, r_arg, r_body)) => {
let typ = self.unify(l_arg, r_arg, lambdas)?;
let body = self.unify_expr(l_body, r_body, lambdas)?;
typ.join((l_arg, r_arg), )
}
})
}
}
const IS_AUTO_INLINE:usize = 5;
@@ -42,22 +108,22 @@ const IS_AUTO_INLINE:usize = 5;
// All data to be forwarded during recursion about one half of a unification task
#[derive(Clone)]
struct UnifHalfTask<'a> {
/// The expression to be unified
expr: &'a Expr,
/// Stores whether a given uid is auto or lambda
is_auto: ProtoMap<'a, usize, bool, IS_AUTO_INLINE>
/// The expression to be unified
expr: &'a Expr,
/// Stores whether a given uid is auto or lambda
is_auto: ProtoMap<'a, usize, bool, IS_AUTO_INLINE>
}
impl<'a> UnifHalfTask<'a> {
fn push_auto(&mut self, body: &Expr, key: usize) {
self.expr = body;
self.is_auto.set(&key, true);
}
fn push_auto(&mut self, body: &Expr, key: usize) {
self.expr = body;
self.is_auto.set(&key, true);
}
fn push_lambda(&mut self, body: &Expr, key: usize) {
self.expr = body;
self.is_auto.set(&key, false);
}
fn push_lambda(&mut self, body: &Expr, key: usize) {
self.expr = body;
self.is_auto.set(&key, false);
}
}
type Ctx = HashMap<usize, Mrc<Expr>>;
@@ -68,63 +134,63 @@ type Ctx = HashMap<usize, Mrc<Expr>>;
///
/// Context associates variables with subtrees resolved on the opposite side
pub fn unify_syntax_rec( // the stacks store true for autos, false for lambdas
ctx: &mut HashMap<(Side, usize), (usize, Mrc<Expr>)>,
ltask@UnifHalfTask{ expr: lexpr@Expr(lclause, _), .. }: UnifHalfTask,
rtask@UnifHalfTask{ expr: rexpr@Expr(rclause, _), .. }: UnifHalfTask
ctx: &mut HashMap<(Side, usize), (usize, Mrc<Expr>)>,
ltask@UnifHalfTask{ expr: lexpr@Expr(lclause, _), .. }: UnifHalfTask,
rtask@UnifHalfTask{ expr: rexpr@Expr(rclause, _), .. }: UnifHalfTask
) -> Option<(UnifResult, UnifResult)> {
// Ensure that ex1 is a value-level construct
match lclause {
Clause::Auto(id, _, body) => {
let res = unify_syntax_rec(ltask.push_auto(body).0, rtask);
return if ltask.explicits.is_some() {
res.map(|(r1, r2)| (r1.useExplicit(), r2))
} else {res}
}
_ => ()
};
// Reduce ex2's auto handling to ex1's. In the optimizer we trust
if let Clause::Auto(..) | Clause::Explicit(..) = rclause {
return unify_syntax_rec(rtask, ltask).map(swap);
// Ensure that ex1 is a value-level construct
match lclause {
Clause::Auto(id, _, body) => {
let res = unify_syntax_rec(ltask.push_auto(body).0, rtask);
return if ltask.explicits.is_some() {
res.map(|(r1, r2)| (r1.useExplicit(), r2))
} else {res}
}
// Neither ex1 nor ex2 can be Auto or Explicit
match (lclause, rclause) {
// recurse into both
(Clause::Lambda(_, lbody), Clause::Lambda(_, rbody)) => unify_syntax_rec(
ltask.push_lambda(lbody),
rtask.push_lambda(rbody)
),
(Clause::Apply(lf, lx), Clause::Apply(rf, rx)) => {
let (lpart, rpart) = unify_syntax_rec(
ltask.push_expr(lf),
rtask.push_expr(rf)
)?;
lpart.dropUsedExplicits(&mut ltask);
rpart.dropUsedExplicits(&mut rtask);
unify_syntax_rec(ltask.push_expr(lx), rtask.push_expr(rx))
}
(Clause::Atom(latom), Clause::Atom(ratom)) => {
if latom != ratom { None }
else { Some((UnifResult::default(), UnifResult::default())) }
}
(Clause::ExternFn(lf), Clause::ExternFn(rf)) => {
if lf != rf { None }
else { Some((UnifResult::default(), UnifResult::default())) }
}
(Clause::Literal(llit), Clause::Literal(rlit)) => {
if llit != rlit { None }
else { Some((UnifResult::default(), UnifResult::default())) }
}
// TODO Select a representative
(Clause::Argument(depth1), Clause::Argument(depth2)) => {
!*stack1.iter().nth(*depth1).unwrap_or(&false)
&& !*stack2.iter().nth(*depth2).unwrap_or(&false)
&& stack1.iter().count() - depth1 == stack2.iter().count() - depth2
}
// TODO Assign a substitute
(Clause::Argument(placeholder), _) => {
_ => ()
};
// Reduce ex2's auto handling to ex1's. In the optimizer we trust
if let Clause::Auto(..) | Clause::Explicit(..) = rclause {
return unify_syntax_rec(rtask, ltask).map(swap);
}
// Neither ex1 nor ex2 can be Auto or Explicit
match (lclause, rclause) {
// recurse into both
(Clause::Lambda(_, lbody), Clause::Lambda(_, rbody)) => unify_syntax_rec(
ltask.push_lambda(lbody),
rtask.push_lambda(rbody)
),
(Clause::Apply(lf, lx), Clause::Apply(rf, rx)) => {
let (lpart, rpart) = unify_syntax_rec(
ltask.push_expr(lf),
rtask.push_expr(rf)
)?;
lpart.dropUsedExplicits(&mut ltask);
rpart.dropUsedExplicits(&mut rtask);
unify_syntax_rec(ltask.push_expr(lx), rtask.push_expr(rx))
}
(Clause::Atom(latom), Clause::Atom(ratom)) => {
if latom != ratom { None }
else { Some((UnifResult::default(), UnifResult::default())) }
}
(Clause::ExternFn(lf), Clause::ExternFn(rf)) => {
if lf != rf { None }
else { Some((UnifResult::default(), UnifResult::default())) }
}
(Clause::Literal(llit), Clause::Literal(rlit)) => {
if llit != rlit { None }
else { Some((UnifResult::default(), UnifResult::default())) }
}
// TODO Select a representative
(Clause::Argument(depth1), Clause::Argument(depth2)) => {
!*stack1.iter().nth(*depth1).unwrap_or(&false)
&& !*stack2.iter().nth(*depth2).unwrap_or(&false)
&& stack1.iter().count() - depth1 == stack2.iter().count() - depth2
}
// TODO Assign a substitute
(Clause::Argument(placeholder), _) => {
}
}
}
}
// Tricky unifications
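The `Product2` type this unifier leans on is not part of the hunk; judging from how `pick`, `map` and `join` are used above, it records whether a merge can reuse the left input, the right input, either one, or had to build a new value. A guess at its core shape, for orientation only (only `pick` is sketched, and none of this is the crate's actual definition):

```rust
enum Product2<T> {
    /// Both inputs were equal; the caller may reuse whichever is cheaper.
    Either,
    /// Only the left input is needed, unchanged.
    Left,
    /// Only the right input is needed, unchanged.
    Right,
    /// A fresh value had to be constructed.
    New(T),
}

impl<T> Product2<T> {
    fn pick(self, left: T, right: T) -> T {
        match self {
            Product2::Either | Product2::Left => left,
            Product2::Right => right,
            Product2::New(t) => t,
        }
    }
}
```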

src/foreign.rs (new file, 104 lines)
View File

@@ -0,0 +1,104 @@
use std::any::Any;
use std::fmt::{Display, Debug};
use std::hash::Hash;
use mappable_rc::Mrc;
use crate::representations::typed::{Expr, Clause};
pub trait ExternError: Display {}
/// Represents an externally defined function from the perspective of the executor
/// Since Orchid lacks basic numerical operations, these are also external functions.
pub struct ExternFn {
name: String, param: Mrc<Expr>, rttype: Mrc<Expr>,
function: Mrc<dyn Fn(Clause) -> Result<Clause, Mrc<dyn ExternError>>>
}
impl ExternFn {
pub fn new<F: 'static + Fn(Clause) -> Result<Clause, Mrc<dyn ExternError>>>(
name: String, param: Mrc<Expr>, rttype: Mrc<Expr>, f: F
) -> Self {
Self {
name, param, rttype,
function: Mrc::map(Mrc::new(f), |f| {
f as &dyn Fn(Clause) -> Result<Clause, Mrc<dyn ExternError>>
})
}
}
pub fn name(&self) -> &str {&self.name}
pub fn apply(&self, arg: Clause) -> Result<Clause, Mrc<dyn ExternError>> {(self.function)(arg)}
}
impl Clone for ExternFn { fn clone(&self) -> Self { Self {
name: self.name.clone(),
param: Mrc::clone(&self.param),
rttype: Mrc::clone(&self.rttype),
function: Mrc::clone(&self.function)
}}}
impl Eq for ExternFn {}
impl PartialEq for ExternFn {
fn eq(&self, other: &Self) -> bool { self.name() == other.name() }
}
impl Hash for ExternFn {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.name.hash(state) }
}
impl Debug for ExternFn {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "##EXTERN[{}]:{:?} -> {:?}##", self.name(), self.param, self.rttype)
}
}
pub trait Atomic: Any + Debug where Self: 'static {
fn as_any(&self) -> &dyn Any;
fn definitely_eq(&self, _other: &dyn Any) -> bool;
fn hash(&self, hasher: &mut dyn std::hash::Hasher);
}
/// Represents a unit of information from the perspective of the executor. This may be
/// something like a file descriptor which functions can operate on, but it can also be
/// information in the universe of types or kinds such as the type of signed integers or
/// the kind of types. Ad absurdum it can also be just a number, although Literal is
/// preferable for types it's defined on.
pub struct Atom {
typ: Mrc<Expr>,
data: Mrc<dyn Atomic>
}
impl Atom {
pub fn new<T: 'static + Atomic>(data: T, typ: Mrc<Expr>) -> Self { Self{
typ,
data: Mrc::map(Mrc::new(data), |d| d as &dyn Atomic)
} }
pub fn data(&self) -> &dyn Atomic { self.data.as_ref() as &dyn Atomic }
pub fn try_cast<T: Atomic>(&self) -> Result<&T, ()> {
self.data().as_any().downcast_ref().ok_or(())
}
pub fn is<T: 'static>(&self) -> bool { self.data().as_any().is::<T>() }
pub fn cast<T: 'static>(&self) -> &T {
self.data().as_any().downcast_ref().expect("Type mismatch on Atom::cast")
}
}
impl Clone for Atom {
fn clone(&self) -> Self { Self {
typ: Mrc::clone(&self.typ),
data: Mrc::clone(&self.data)
} }
}
impl Hash for Atom {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.data.hash(state);
self.typ.hash(state)
}
}
impl Debug for Atom {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "##ATOM[{:?}]:{:?}##", self.data(), self.typ)
}
}
impl Eq for Atom {}
impl PartialEq for Atom {
fn eq(&self, other: &Self) -> bool {
self.data().definitely_eq(other.data().as_any())
}
}
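As a reading aid for the `Atomic` contract above: a concrete atom only has to hand out `&dyn Any` for downcasting, compare itself against an arbitrary `&dyn Any`, and feed a type-erased hasher. A self-contained sketch with a toy `Usize` atom (the trait is copied here so the snippet compiles on its own; `Usize` is not part of the crate):

```rust
use std::any::Any;
use std::fmt::Debug;
use std::hash::Hasher;

trait Atomic: Any + Debug {
    fn as_any(&self) -> &dyn Any;
    fn definitely_eq(&self, other: &dyn Any) -> bool;
    fn hash(&self, hasher: &mut dyn Hasher);
}

#[derive(Debug, PartialEq)]
struct Usize(usize);

impl Atomic for Usize {
    fn as_any(&self) -> &dyn Any { self }
    // Definitely equal only if the other atom is also a Usize with the same value.
    fn definitely_eq(&self, other: &dyn Any) -> bool {
        other.downcast_ref::<Usize>().map_or(false, |o| o == self)
    }
    fn hash(&self, hasher: &mut dyn Hasher) {
        hasher.write_usize(self.0)
    }
}
```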

View File

@@ -2,16 +2,19 @@
#![feature(core_intrinsics)]
#![feature(adt_const_params)]
#![feature(generic_const_exprs)]
#![feature(generators, generator_trait)]
use std::env::current_dir;
mod executor;
// mod executor;
mod parse;
mod project;
mod utils;
mod representations;
mod rule;
mod types;
mod scheduler;
pub(crate) mod foreign;
use file_loader::LoadingError;
pub use representations::ast;
use ast::{Expr, Clause};
@@ -19,14 +22,14 @@ use representations::typed as t;
use mappable_rc::Mrc;
use project::{rule_collector, Loaded, file_loader};
use rule::Repository;
use utils::to_mrc_slice;
use utils::{to_mrc_slice, mrc_empty_slice, one_mrc_slice};
fn literal(orig: &[&str]) -> Mrc<[String]> {
to_mrc_slice(vliteral(orig))
to_mrc_slice(vliteral(orig))
}
fn vliteral(orig: &[&str]) -> Vec<String> {
orig.iter().map(|&s| s.to_owned()).collect()
orig.iter().map(|&s| s.to_owned()).collect()
}
static PRELUDE:&str = r#"
@@ -40,62 +43,62 @@ export (match_sequence $lhs) >>= (match_sequence $rhs) =100=> (bind ($lhs) ($rhs
fn initial_tree() -> Mrc<[Expr]> {
to_mrc_slice(vec![Expr(Clause::Name {
local: None,
qualified: literal(&["main", "main"])
}, to_mrc_slice(vec![]))])
to_mrc_slice(vec![Expr(Clause::Name {
local: None,
qualified: literal(&["main", "main"])
}, to_mrc_slice(vec![]))])
}
#[allow(unused)]
fn typed_notation_debug() {
let true_ex = t::Clause::Auto(0, None,
t::Clause::Lambda(1, Some(Mrc::new(t::Clause::Argument(0))),
t::Clause::Lambda(2, Some(Mrc::new(t::Clause::Argument(0))),
t::Clause::Argument(1).wrap_t(t::Clause::Argument(0))
).wrap()
).wrap()
).wrap();
let false_ex = t::Clause::Auto(0, None,
t::Clause::Lambda(1, Some(Mrc::new(t::Clause::Argument(0))),
t::Clause::Lambda(2, Some(Mrc::new(t::Clause::Argument(0))),
t::Clause::Argument(2).wrap_t(t::Clause::Argument(0))
).wrap()
).wrap()
).wrap();
println!("{:?}", t::Clause::Apply(t::Clause::Apply(Mrc::clone(&true_ex), true_ex).wrap(), false_ex))
let true_ex = t::Clause::Auto(0, mrc_empty_slice(),
t::Clause::Lambda(1, one_mrc_slice(t::Clause::AutoArg(0)),
t::Clause::Lambda(2, one_mrc_slice(t::Clause::AutoArg(0)),
t::Clause::LambdaArg(1).wrap_t(t::Clause::AutoArg(0))
).wrap()
).wrap()
).wrap();
let false_ex = t::Clause::Auto(0, mrc_empty_slice(),
t::Clause::Lambda(1, one_mrc_slice(t::Clause::AutoArg(0)),
t::Clause::Lambda(2, one_mrc_slice(t::Clause::AutoArg(0)),
t::Clause::LambdaArg(2).wrap_t(t::Clause::AutoArg(0))
).wrap()
).wrap()
).wrap();
println!("{:?}", t::Clause::Apply(t::Clause::Apply(Mrc::clone(&true_ex), true_ex).wrap(), false_ex))
}
#[allow(unused)]
fn load_project() {
let cwd = current_dir().unwrap();
let collect_rules = rule_collector(move |n| -> Result<Loaded, LoadingError> {
if n == literal(&["prelude"]) { Ok(Loaded::Module(PRELUDE.to_string())) }
else { file_loader(cwd.clone())(n) }
}, vliteral(&["...", ">>", ">>=", "[", "]", ",", "=", "=>"]));
let rules = match collect_rules.try_find(&literal(&["main"])) {
Ok(rules) => rules,
Err(err) => panic!("{:#?}", err)
};
let mut tree = initial_tree();
println!("Start processing {tree:?}");
let repo = Repository::new(rules.as_ref().to_owned());
println!("Ruleset: {repo:?}");
xloop!(let mut i = 0; i < 10; i += 1; {
match repo.step(Mrc::clone(&tree)) {
Ok(Some(phase)) => {
println!("Step {i}: {phase:?}");
tree = phase;
},
Ok(None) => {
println!("Execution complete");
break
},
Err(e) => panic!("Rule error: {e:?}")
}
}; println!("Macro execution didn't halt"));
let cwd = current_dir().unwrap();
let collect_rules = rule_collector(move |n| -> Result<Loaded, LoadingError> {
if n == literal(&["prelude"]) { Ok(Loaded::Module(PRELUDE.to_string())) }
else { file_loader(cwd.clone())(n) }
}, vliteral(&["...", ">>", ">>=", "[", "]", ",", "=", "=>"]));
let rules = match collect_rules.try_find(&literal(&["main"])) {
Ok(rules) => rules,
Err(err) => panic!("{:#?}", err)
};
let mut tree = initial_tree();
println!("Start processing {tree:?}");
let repo = Repository::new(rules.as_ref().to_owned());
println!("Ruleset: {repo:?}");
xloop!(let mut i = 0; i < 10; i += 1; {
match repo.step(Mrc::clone(&tree)) {
Ok(Some(phase)) => {
println!("Step {i}: {phase:?}");
tree = phase;
},
Ok(None) => {
println!("Execution complete");
break
},
Err(e) => panic!("Rule error: {e:?}")
}
}; println!("Macro execution didn't halt"));
}
fn main() {
// lambda_notation_debug();
load_project();
// lambda_notation_debug();
load_project();
}
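For orientation, the two terms built in `typed_notation_debug` are the Church booleans: `true_ex` ultimately returns its first lambda argument (`LambdaArg(1)`) and `false_ex` its second (`LambdaArg(2)`), with the `Auto`/`AutoArg(0)` parts supplying the shared type parameter. Ignoring the type layer, the plain-Rust analogue is just a pair of selector functions:

```rust
// Church booleans as ordinary selectors; church_true mirrors true_ex,
// church_false mirrors false_ex from typed_notation_debug above.
fn church_true<T>(t: T, _f: T) -> T { t }
fn church_false<T>(_t: T, f: T) -> T { f }

fn main() {
    assert_eq!(church_true("then", "else"), "then");
    assert_eq!(church_false("then", "else"), "else");
}
```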

View File

@@ -2,12 +2,12 @@ pub use chumsky::{self, prelude::*, Parser};
/// Parses Lua-style comments
pub fn comment_parser() -> impl Parser<char, String, Error = Simple<char>> {
choice((
just("--[").ignore_then(take_until(
just("]--").ignored()
)),
just("--").ignore_then(take_until(
just("\n").rewind().ignored().or(end())
))
)).map(|(vc, ())| vc).collect().labelled("comment")
}

View File

@@ -6,27 +6,27 @@
/// ```
#[macro_export]
macro_rules! enum_parser {
($p:path | $m:tt) => {
{
::chumsky::prelude::filter_map(|s, l| {
if let $p(x) = l { Ok(x) }
else { Err(::chumsky::prelude::Simple::custom(s, $m))}
})
}
};
($p:path >> $q:path; $i:ident) => {
{
use $p as srcpath;
use $q as tgtpath;
enum_parser!(srcpath::$i | (concat!("Expected ", stringify!($i)))).map(tgtpath::$i)
}
};
($p:path >> $q:path; $($i:ident),+) => {
{
::chumsky::prelude::choice((
$( enum_parser!($p >> $q; $i) ),+
))
}
};
($p:path) => { enum_parser!($p | (concat!("Expected ", stringify!($p)))) };
}
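The first arm above is the workhorse: it builds a chumsky `filter_map` that accepts a token only when it matches the named variant and yields that variant's payload (the call sites elsewhere in this diff, `enum_parser!(Lexeme::Comment)` and `enum_parser!(Lexeme >> Literal; Int, Num, Char, Str)`, are built on it). Stripped of chumsky, the pattern is just this (toy `Tok` type, not the crate's):

```rust
enum Tok {
    Comment(String),
    Name(String),
}

// What `enum_parser!(Tok::Comment)` boils down to for a single token:
// keep it only if it is the requested variant, and unwrap the payload.
fn filter_comment(tok: Tok) -> Result<String, &'static str> {
    if let Tok::Comment(x) = tok { Ok(x) } else { Err("Expected Comment") }
}

fn main() {
    assert_eq!(filter_comment(Tok::Comment("hi".into())), Ok("hi".into()));
    assert!(filter_comment(Tok::Name("x".into())).is_err());
}
```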

View File

@@ -8,120 +8,120 @@ use super::lexer::Lexeme;
/// Parses any number of expr wrapped in (), [] or {}
fn sexpr_parser<P>(
expr: P
expr: P
) -> impl Parser<Lexeme, Clause, Error = Simple<Lexeme>> + Clone
where P: Parser<Lexeme, Expr, Error = Simple<Lexeme>> + Clone {
Lexeme::paren_parser(expr.repeated()).map(|(del, b)| Clause::S(del, to_mrc_slice(b)))
Lexeme::paren_parser(expr.repeated()).map(|(del, b)| Clause::S(del, to_mrc_slice(b)))
}
/// Parses `\name.body` or `\name:type.body` where name is any valid name and type and body are
/// both expressions. Comments are allowed and ignored everywhere in between the tokens
fn lambda_parser<P>(
expr: P
expr: P
) -> impl Parser<Lexeme, Clause, Error = Simple<Lexeme>> + Clone
where P: Parser<Lexeme, Expr, Error = Simple<Lexeme>> + Clone {
just(Lexeme::BS)
just(Lexeme::BS)
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.ignore_then(enum_parser!(Lexeme::Name))
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.then(
just(Lexeme::Type)
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.ignore_then(enum_parser!(Lexeme::Name))
.ignore_then(expr.clone().repeated())
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.then(
just(Lexeme::Type)
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.ignore_then(expr.clone().repeated())
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.or_not().map(Option::unwrap_or_default)
)
.then_ignore(just(Lexeme::name(".")))
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.then(expr.repeated().at_least(1))
.map(|((name, typ), body): ((String, Vec<Expr>), Vec<Expr>)| {
// for ent in &mut body { ent.bind_parameter(&name) };
Clause::Lambda(name, to_mrc_slice(typ), to_mrc_slice(body))
})
.or_not().map(Option::unwrap_or_default)
)
.then_ignore(just(Lexeme::name(".")))
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.then(expr.repeated().at_least(1))
.map(|((name, typ), body): ((String, Vec<Expr>), Vec<Expr>)| {
// for ent in &mut body { ent.bind_parameter(&name) };
Clause::Lambda(name, to_mrc_slice(typ), to_mrc_slice(body))
})
}
/// see [lambda_parser] but `@` instead of `\` and the name is optional
fn auto_parser<P>(
expr: P
expr: P
) -> impl Parser<Lexeme, Clause, Error = Simple<Lexeme>> + Clone
where P: Parser<Lexeme, Expr, Error = Simple<Lexeme>> + Clone {
just(Lexeme::At)
just(Lexeme::At)
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.ignore_then(enum_parser!(Lexeme::Name).or_not())
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.then(
just(Lexeme::Type)
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.ignore_then(enum_parser!(Lexeme::Name).or_not())
.ignore_then(expr.clone().repeated())
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.then(
just(Lexeme::Type)
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.ignore_then(expr.clone().repeated())
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.or_not().map(Option::unwrap_or_default)
)
.then_ignore(just(Lexeme::name(".")))
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.then(expr.repeated().at_least(1))
.try_map(|((name, typ), body): ((Option<String>, Vec<Expr>), Vec<Expr>), s| {
if name.is_none() && typ.is_empty() {
Err(Simple::custom(s, "Auto without name or type has no effect"))
} else {
Ok(Clause::Auto(name, to_mrc_slice(typ), to_mrc_slice(body)))
}
})
.or_not().map(Option::unwrap_or_default)
)
.then_ignore(just(Lexeme::name(".")))
.then_ignore(enum_parser!(Lexeme::Comment).repeated())
.then(expr.repeated().at_least(1))
.try_map(|((name, typ), body): ((Option<String>, Vec<Expr>), Vec<Expr>), s| {
if name.is_none() && typ.is_empty() {
Err(Simple::custom(s, "Auto without name or type has no effect"))
} else {
Ok(Clause::Auto(name, to_mrc_slice(typ), to_mrc_slice(body)))
}
})
}
/// Parses a sequence of names separated by :: <br/>
/// Comments are allowed and ignored in between
fn name_parser() -> impl Parser<Lexeme, Vec<String>, Error = Simple<Lexeme>> + Clone {
enum_parser!(Lexeme::Name).separated_by(
enum_parser!(Lexeme::Comment).repeated()
.then(just(Lexeme::NS))
.then(enum_parser!(Lexeme::Comment).repeated())
).at_least(1)
enum_parser!(Lexeme::Name).separated_by(
enum_parser!(Lexeme::Comment).repeated()
.then(just(Lexeme::NS))
.then(enum_parser!(Lexeme::Comment).repeated())
).at_least(1)
}
/// Parse any legal argument name starting with a `$`
fn placeholder_parser() -> impl Parser<Lexeme, String, Error = Simple<Lexeme>> + Clone {
enum_parser!(Lexeme::Name).try_map(|name, span| {
name.strip_prefix('$').map(&str::to_string)
.ok_or_else(|| Simple::custom(span, "Not a placeholder"))
})
enum_parser!(Lexeme::Name).try_map(|name, span| {
name.strip_prefix('$').map(&str::to_string)
.ok_or_else(|| Simple::custom(span, "Not a placeholder"))
})
}
/// Parse an expression
pub fn xpr_parser() -> impl Parser<Lexeme, Expr, Error = Simple<Lexeme>> {
recursive(|expr| {
let clause =
enum_parser!(Lexeme::Comment).repeated()
.ignore_then(choice((
enum_parser!(Lexeme >> Literal; Int, Num, Char, Str).map(Clause::Literal),
placeholder_parser().map(|key| Clause::Placeh{key, vec: None}),
just(Lexeme::name("...")).to(true)
.or(just(Lexeme::name("..")).to(false))
.then(placeholder_parser())
.then(
just(Lexeme::Type)
.ignore_then(enum_parser!(Lexeme::Int))
.or_not().map(Option::unwrap_or_default)
)
.map(|((nonzero, key), prio)| Clause::Placeh{key, vec: Some((
prio.try_into().unwrap(),
nonzero
))}),
name_parser().map(|qualified| Clause::Name {
local: if qualified.len() == 1 {Some(qualified[0].clone())} else {None},
qualified: to_mrc_slice(qualified)
}),
sexpr_parser(expr.clone()),
lambda_parser(expr.clone()),
auto_parser(expr.clone()),
just(Lexeme::At).ignore_then(expr.clone()).map(|arg| {
Clause::Explicit(Mrc::new(arg))
})
))).then_ignore(enum_parser!(Lexeme::Comment).repeated());
clause.clone().then(
just(Lexeme::Type)
.ignore_then(clause.clone())
.repeated()
recursive(|expr| {
let clause =
enum_parser!(Lexeme::Comment).repeated()
.ignore_then(choice((
enum_parser!(Lexeme >> Literal; Int, Num, Char, Str).map(Clause::Literal),
placeholder_parser().map(|key| Clause::Placeh{key, vec: None}),
just(Lexeme::name("...")).to(true)
.or(just(Lexeme::name("..")).to(false))
.then(placeholder_parser())
.then(
just(Lexeme::Type)
.ignore_then(enum_parser!(Lexeme::Int))
.or_not().map(Option::unwrap_or_default)
)
.map(|(val, typ)| Expr(val, to_mrc_slice(typ)))
}).labelled("Expression")
.map(|((nonzero, key), prio)| Clause::Placeh{key, vec: Some((
prio.try_into().unwrap(),
nonzero
))}),
name_parser().map(|qualified| Clause::Name {
local: if qualified.len() == 1 {Some(qualified[0].clone())} else {None},
qualified: to_mrc_slice(qualified)
}),
sexpr_parser(expr.clone()),
lambda_parser(expr.clone()),
auto_parser(expr.clone()),
just(Lexeme::At).ignore_then(expr.clone()).map(|arg| {
Clause::Explicit(Mrc::new(arg))
})
))).then_ignore(enum_parser!(Lexeme::Comment).repeated());
clause.clone().then(
just(Lexeme::Type)
.ignore_then(clause.clone())
.repeated()
)
.map(|(val, typ)| Expr(val, to_mrc_slice(typ)))
}).labelled("Expression")
}

View File

@@ -9,15 +9,15 @@ use super::lexer::Lexeme;
#[derive(Debug, Clone)]
pub struct Import {
pub path: Mrc<[String]>,
/// If name is None, this is a wildcard import
pub name: Option<String>
}
/// initialize a BoxedIter<BoxedIter<String>> with a single element.
fn init_table(name: String) -> BoxedIterIter<'static, String> {
// I'm not at all confident that this is a good approach.
box_once(box_once(name))
}
/// Parse an import command
@@ -26,44 +26,44 @@ fn init_table(name: String) -> BoxedIterIter<'static, String> {
cross-platform filename-legal characters but the symbols are explicitly allowed
/// to go wild. There's a blacklist in [name]
pub fn import_parser() -> impl Parser<Lexeme, Vec<Import>, Error = Simple<Lexeme>> {
// TODO: this algorithm isn't cache friendly, copies a lot and is generally pretty bad.
recursive(|expr: Recursive<Lexeme, BoxedIterIter<String>, Simple<Lexeme>>| {
enum_parser!(Lexeme::Name)
.separated_by(just(Lexeme::NS))
.then(
just(Lexeme::NS)
.ignore_then(
choice((
expr.clone()
.separated_by(just(Lexeme::name(",")))
.delimited_by(just(Lexeme::LP('(')), just(Lexeme::RP('(')))
.map(|v| box_flatten(v.into_iter()))
.labelled("import group"),
// Each expr returns a list of imports, flatten those into a common list
just(Lexeme::name("*")).map(|_| init_table("*".to_string()))
.labelled("wildcard import"), // Just a *, wrapped
enum_parser!(Lexeme::Name).map(init_table)
.labelled("import terminal") // Just a name, wrapped
))
).or_not()
)
.map(|(name, opt_post): (Vec<String>, Option<BoxedIterIter<String>>)| -> BoxedIterIter<String> {
if let Some(post) = opt_post {
Box::new(post.map(move |el| {
box_chain!(name.clone().into_iter(), el)
}))
} else {
box_once(into_boxed_iter(name))
}
})
}).map(|paths| {
paths.filter_map(|namespaces| {
let path = to_mrc_slice(namespaces.collect_vec());
let path_prefix = mrc_derive(&path, |p| &p[..p.len() - 1]);
match path.last()?.as_str() {
"*" => Some(Import { path: path_prefix, name: None }),
name => Some(Import { path: path_prefix, name: Some(name.to_owned()) })
}
}).collect()
}).labelled("import")
}

View File

@@ -9,141 +9,141 @@ use super::{number, string, name, comment};
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Entry(pub Lexeme, pub Range<usize>);
impl Debug for Entry {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self.0)
// f.debug_tuple("Entry").field(&self.0).field(&self.1).finish()
}
}
impl From<Entry> for (Lexeme, Range<usize>) {
fn from(ent: Entry) -> Self {
(ent.0, ent.1)
}
}
#[derive(Clone, PartialEq, Eq, Hash)]
pub enum Lexeme {
Num(NotNan<f64>),
Int(u64),
Char(char),
Str(String),
Name(String),
Rule(NotNan<f64>),
NS, // namespace separator
LP(char),
RP(char),
BS, // Backslash
At,
Type, // type operator
Comment(String)
}
impl Debug for Lexeme {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Num(n) => write!(f, "{}", n),
Self::Int(i) => write!(f, "{}", i),
Self::Char(c) => write!(f, "{:?}", c),
Self::Str(s) => write!(f, "{:?}", s),
Self::Name(name) => write!(f, "{}", name),
Self::Rule(prio) => write!(f, "={}=>", prio),
Self::NS => write!(f, "::"),
Self::LP(l) => write!(f, "{}", l),
Self::RP(l) => match l {
'(' => write!(f, ")"),
'[' => write!(f, "]"),
'{' => write!(f, "}}"),
_ => f.debug_tuple("RP").field(l).finish()
},
Self::BS => write!(f, "\\"),
Self::At => write!(f, "@"),
Self::Type => write!(f, ":"),
Self::Comment(text) => write!(f, "--[{}]--", text),
}
}
}
impl Lexeme {
pub fn name<T: ToString>(n: T) -> Self {
Lexeme::Name(n.to_string())
}
pub fn rule<T>(prio: T) -> Self where T: Into<f64> {
Lexeme::Rule(NotNan::new(prio.into()).expect("Rule priority cannot be NaN"))
}
pub fn paren_parser<T, P>(
expr: P
) -> impl Parser<Lexeme, (char, T), Error = Simple<Lexeme>> + Clone
where P: Parser<Lexeme, T, Error = Simple<Lexeme>> + Clone {
choice((
expr.clone().delimited_by(just(Lexeme::LP('(')), just(Lexeme::RP('(')))
.map(|t| ('(', t)),
expr.clone().delimited_by(just(Lexeme::LP('[')), just(Lexeme::RP('[')))
.map(|t| ('[', t)),
expr.delimited_by(just(Lexeme::LP('{')), just(Lexeme::RP('{')))
.map(|t| ('{', t)),
))
}
}
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct LexedText(pub Vec<Vec<Entry>>);
impl Debug for LexedText {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for row in &self.0 {
for tok in row {
tok.fmt(f)?;
f.write_str(" ")?
}
f.write_str("\n")?
}
Ok(())
}
}
type LexSubres<'a> = BoxedIter<'a, Entry>;
fn paren_parser<'a>(
expr: Recursive<'a, char, LexSubres<'a>, Simple<char>>,
lp: char, rp: char
) -> impl Parser<char, LexSubres<'a>, Error=Simple<char>> + 'a {
expr.padded().repeated()
.map(|x| box_flatten(x.into_iter()))
.delimited_by(just(lp), just(rp)).map_with_span(move |b, s| {
box_chain!(
iter::once(Entry(Lexeme::LP(lp), s.start..s.start+1)),
b,
iter::once(Entry(Lexeme::RP(lp), s.end-1..s.end))
)
})
}
pub fn lexer<'a, T: 'a>(ops: &[T]) -> impl Parser<char, LexedText, Error=Simple<char>> + 'a
where T: AsRef<str> + Clone {
let all_ops = ops.iter().map(|o| o.as_ref().to_string())
.chain(iter::once(".".to_string())).collect::<Vec<_>>();
recursive(move |recurse: Recursive<char, LexSubres, Simple<char>>| {
choice((
paren_parser(recurse.clone(), '(', ')'),
paren_parser(recurse.clone(), '[', ']'),
paren_parser(recurse.clone(), '{', '}'),
choice((
just(":=").padded().to(Lexeme::rule(0f64)),
just("=").ignore_then(number::float_parser()).then_ignore(just("=>")).map(Lexeme::rule),
comment::comment_parser().map(Lexeme::Comment),
just("::").padded().to(Lexeme::NS),
just('\\').padded().to(Lexeme::BS),
just('@').padded().to(Lexeme::At),
just(':').to(Lexeme::Type),
number::int_parser().map(Lexeme::Int), // all ints are valid floats so it takes precedence
number::float_parser().map(Lexeme::Num),
string::char_parser().map(Lexeme::Char),
string::str_parser().map(Lexeme::Str),
name::name_parser(&all_ops).map(Lexeme::Name), // includes namespacing
)).map_with_span(|lx, span| box_once(Entry(lx, span)) as LexSubres)
))
}).separated_by(one_of("\t ").repeated())
.flatten().collect()
.separated_by(just('\n').then(text::whitespace()).ignored())
.map(LexedText)
}

View File

@@ -2,13 +2,13 @@ use chumsky::{self, prelude::*, Parser};
/// Matches any one of the passed operators, longest-first
fn op_parser<'a, T: AsRef<str> + Clone>(ops: &[T]) -> BoxedParser<'a, char, String, Simple<char>> {
let mut sorted_ops: Vec<String> = ops.iter().map(|t| t.as_ref().to_string()).collect();
sorted_ops.sort_by_key(|op| -(op.len() as i64));
sorted_ops.into_iter()
.map(|op| just(op).boxed())
.reduce(|a, b| a.or(b).boxed())
.unwrap_or_else(|| empty().map(|()| panic!("Empty isn't meant to match")).boxed())
.labelled("operator").boxed()
}
/// Matches anything that's allowed as an operator
@@ -30,31 +30,31 @@ fn op_parser<'a, T: AsRef<str> + Clone>(ops: &[T]) -> BoxedParser<'a, char, Stri
/// TODO: `.` could possibly be parsed as an operator depending on context. This operator is very
/// common in maths so it's worth a try. Investigate.
pub fn modname_parser<'a>() -> impl Parser<char, String, Error = Simple<char>> + 'a {
let not_name_char: Vec<char> = vec![':', '\\', '@', '"', '\'', '(', ')', '[', ']', '{', '}', ',', '.'];
filter(move |c| !not_name_char.contains(c) && !c.is_whitespace())
.repeated().at_least(1)
.collect()
.labelled("modname")
}
/// Parse an operator or name. Failing both, parse everything up to the next whitespace or
/// blacklisted character as a new operator.
pub fn name_parser<'a, T: AsRef<str> + Clone>(
ops: &[T]
) -> impl Parser<char, String, Error = Simple<char>> + 'a {
choice((
op_parser(ops), // First try to parse a known operator
text::ident().labelled("plain text"), // Failing that, parse plain text
modname_parser() // Finally parse everything until the next terminal as a new operator
))
.labelled("name")
}
/// Decide if a string can be an operator. Operators can include digits and text, just not at the
/// start.
pub fn is_op<T: AsRef<str>>(s: T) -> bool {
return match s.as_ref().chars().next() {
Some(x) => !x.is_alphanumeric(),
None => false
}
}
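The `sort_by_key(|op| -(op.len() as i64))` in `op_parser` is what makes operator matching longest-first, so `>>=` is not split into `>>` plus a stray `=`. The rule in isolation, without chumsky (illustrative helper, not the crate's API):

```rust
fn longest_matching_op<'a>(ops: &mut Vec<&'a str>, input: &str) -> Option<&'a str> {
    // Longest first, exactly like op_parser's descending-length sort.
    ops.sort_by_key(|op| std::cmp::Reverse(op.len()));
    ops.iter().copied().find(|op| input.starts_with(*op))
}

fn main() {
    let mut ops = vec![">>", ">>=", "="];
    // Without the sort, ">>" would win and leave a dangling "=" behind.
    assert_eq!(longest_matching_op(&mut ops, ">>= bind"), Some(">>="));
}
```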

View File

@@ -2,111 +2,111 @@ use chumsky::{self, prelude::*, Parser};
use ordered_float::NotNan;
fn assert_not_digit(base: u32, c: char) {
if base > (10 + (c as u32 - 'a' as u32)) {
panic!("The character '{}' is a digit in base ({})", c, base)
}
}
/// Parse an arbitrarily grouped sequence of digits starting with an underscore.
///
/// TODO: this should use separated_by and parse the leading group too
fn separated_digits_parser(base: u32) -> impl Parser<char, String, Error = Simple<char>> {
just('_')
.ignore_then(text::digits(base))
.repeated()
.map(|sv| sv.iter().flat_map(|s| s.chars()).collect())
}
/// parse a grouped uint
///
/// Not to be confused with [int_parser] which does a lot more
fn uint_parser(base: u32) -> impl Parser<char, u64, Error = Simple<char>> {
text::int(base)
.then(separated_digits_parser(base))
.map(move |(s1, s2): (String, String)| {
u64::from_str_radix(&(s1 + &s2), base).unwrap()
})
}
/// parse exponent notation, or return 0 as the default exponent.
/// The exponent is always in decimal.
fn pow_parser() -> impl Parser<char, i32, Error = Simple<char>> {
choice((
just('p')
.ignore_then(text::int(10))
.map(|s: String| s.parse().unwrap()),
just("p-")
.ignore_then(text::int(10))
.map(|s: String| -s.parse::<i32>().unwrap()),
)).or_else(|_| Ok(0))
choice((
just('p')
.ignore_then(text::int(10))
.map(|s: String| s.parse().unwrap()),
just("p-")
.ignore_then(text::int(10))
.map(|s: String| -s.parse::<i32>().unwrap()),
)).or_else(|_| Ok(0))
}
/// returns a mapper that converts a mantissa and an exponent into a uint
///
/// TODO it panics if it finds a negative exponent
fn nat2u(base: u64) -> impl Fn((u64, i32),) -> u64 {
move |(val, exp)| {
if exp == 0 {val}
else {val * base.checked_pow(exp.try_into().unwrap()).unwrap()}
}
move |(val, exp)| {
if exp == 0 {val}
else {val * base.checked_pow(exp.try_into().unwrap()).unwrap()}
}
}
/// returns a mapper that converts a mantissa and an exponent into a float
fn nat2f(base: u64) -> impl Fn((NotNan<f64>, i32),) -> NotNan<f64> {
move |(val, exp)| {
if exp == 0 {val}
else {val * (base as f64).powf(exp.try_into().unwrap())}
}
move |(val, exp)| {
if exp == 0 {val}
else {val * (base as f64).powf(exp.try_into().unwrap())}
}
}
/// parse a uint from exponential notation (panics if 'p' is a digit in base)
fn pow_uint_parser(base: u32) -> impl Parser<char, u64, Error = Simple<char>> {
assert_not_digit(base, 'p');
uint_parser(base).then(pow_parser()).map(nat2u(base.into()))
assert_not_digit(base, 'p');
uint_parser(base).then(pow_parser()).map(nat2u(base.into()))
}
/// parse a uint from a base determined by its prefix or lack thereof
///
/// Not to be confused with [uint_parser] which is a component of it.
pub fn int_parser() -> impl Parser<char, u64, Error = Simple<char>> {
choice((
just("0b").ignore_then(pow_uint_parser(2)),
just("0x").ignore_then(pow_uint_parser(16)),
just('0').ignore_then(pow_uint_parser(8)),
pow_uint_parser(10), // Dec has no prefix
))
choice((
just("0b").ignore_then(pow_uint_parser(2)),
just("0x").ignore_then(pow_uint_parser(16)),
just('0').ignore_then(pow_uint_parser(8)),
pow_uint_parser(10), // Dec has no prefix
))
}
/// parse a float from dot notation
fn dotted_parser(base: u32) -> impl Parser<char, NotNan<f64>, Error = Simple<char>> {
uint_parser(base)
.then(
just('.').ignore_then(
text::digits(base).then(separated_digits_parser(base))
).map(move |(frac1, frac2)| {
let frac = frac1 + &frac2;
let frac_num = u64::from_str_radix(&frac, base).unwrap() as f64;
let dexp = base.pow(frac.len().try_into().unwrap());
frac_num / dexp as f64
}).or_not().map(|o| o.unwrap_or_default())
).try_map(|(wh, f), s| {
NotNan::new(wh as f64 + f).map_err(|_| Simple::custom(s, "Float literal evaluates to NaN"))
})
uint_parser(base)
.then(
just('.').ignore_then(
text::digits(base).then(separated_digits_parser(base))
).map(move |(frac1, frac2)| {
let frac = frac1 + &frac2;
let frac_num = u64::from_str_radix(&frac, base).unwrap() as f64;
let dexp = base.pow(frac.len().try_into().unwrap());
frac_num / dexp as f64
}).or_not().map(|o| o.unwrap_or_default())
).try_map(|(wh, f), s| {
NotNan::new(wh as f64 + f).map_err(|_| Simple::custom(s, "Float literal evaluates to NaN"))
})
}
/// parse a float from dotted and optionally also exponential notation
fn pow_float_parser(base: u32) -> impl Parser<char, NotNan<f64>, Error = Simple<char>> {
assert_not_digit(base, 'p');
dotted_parser(base).then(pow_parser()).map(nat2f(base.into()))
assert_not_digit(base, 'p');
dotted_parser(base).then(pow_parser()).map(nat2f(base.into()))
}
/// parse a float with dotted and optionally exponential notation from a base determined by its
/// prefix
pub fn float_parser() -> impl Parser<char, NotNan<f64>, Error = Simple<char>> {
choice((
just("0b").ignore_then(pow_float_parser(2)),
just("0x").ignore_then(pow_float_parser(16)),
just('0').ignore_then(pow_float_parser(8)),
pow_float_parser(10),
)).labelled("float")
choice((
just("0b").ignore_then(pow_float_parser(2)),
just("0x").ignore_then(pow_float_parser(16)),
just('0').ignore_then(pow_float_parser(8)),
pow_float_parser(10),
)).labelled("float")
}
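Taken together, a hedged sketch of the literal forms these combinators are written to accept, assuming the parsers above and chumsky's Parser trait are in scope:

use chumsky::Parser;

fn demo_number_literals() {
  assert_eq!(int_parser().parse("0xff").unwrap(), 255);    // prefix selects the base
  assert_eq!(int_parser().parse("1_000").unwrap(), 1000);  // '_' groups digits
  assert_eq!(int_parser().parse("0b101p2").unwrap(), 20);  // 5 * 2^2, 'p' exponent in the chosen base
  assert_eq!(float_parser().parse("0.25").unwrap(), NotNan::new(0.25).unwrap());
}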
@@ -11,58 +11,58 @@ use super::{Lexeme, FileEntry, lexer, line_parser, LexerEntry};
#[derive(Error, Debug, Clone)]
pub enum ParseError {
#[error("Could not tokenize {0:?}")]
Lex(Vec<Simple<char>>),
#[error("Could not parse {0:#?}")]
Ast(Vec<Simple<Lexeme>>)
#[error("Could not tokenize {0:?}")]
Lex(Vec<Simple<char>>),
#[error("Could not parse {0:#?}")]
Ast(Vec<Simple<Lexeme>>)
}
pub fn parse<'a, Iter, S, Op>(ops: &[Op], stream: S) -> Result<Vec<FileEntry>, ParseError>
where
Op: 'a + AsRef<str> + Clone,
Iter: Iterator<Item = (char, Range<usize>)> + 'a,
S: Into<Stream<'a, char, Range<usize>, Iter>> {
let lexed = lexer(ops).parse(stream).map_err(ParseError::Lex)?;
println!("Lexed:\n{:?}", lexed);
let LexedText(token_batchv) = lexed;
let parsr = line_parser().then_ignore(end());
let (parsed_lines, errors_per_line) = token_batchv.into_iter().filter(|v| {
!v.is_empty()
}).map(|v| {
    // Find the first invalid position for Stream::from_iter
let LexerEntry(_, Range{ end, .. }) = v.last().unwrap().clone();
// Stream expects tuples, lexer outputs structs
let tuples = v.into_iter().map_into::<(Lexeme, Range<usize>)>();
parsr.parse(Stream::from_iter(end..end+1, tuples))
// ^^^^^^^^^^
// I haven't the foggiest idea why this is needed, parsers are supposed to be lazy so the
// end of input should make little difference
}).map(|res| match res {
Ok(r) => (Some(r), vec![]),
Err(e) => (None, e)
}).unzip::<_, _, Vec<_>, Vec<_>>();
let total_err = errors_per_line.into_iter()
.flat_map(Vec::into_iter)
.collect::<Vec<_>>();
if !total_err.is_empty() { Err(ParseError::Ast(total_err)) }
else { Ok(parsed_lines.into_iter().map(Option::unwrap).collect()) }
Op: 'a + AsRef<str> + Clone,
Iter: Iterator<Item = (char, Range<usize>)> + 'a,
S: Into<Stream<'a, char, Range<usize>, Iter>> {
let lexed = lexer(ops).parse(stream).map_err(ParseError::Lex)?;
println!("Lexed:\n{:?}", lexed);
let LexedText(token_batchv) = lexed;
let parsr = line_parser().then_ignore(end());
let (parsed_lines, errors_per_line) = token_batchv.into_iter().filter(|v| {
!v.is_empty()
}).map(|v| {
    // Find the first invalid position for Stream::from_iter
let LexerEntry(_, Range{ end, .. }) = v.last().unwrap().clone();
// Stream expects tuples, lexer outputs structs
let tuples = v.into_iter().map_into::<(Lexeme, Range<usize>)>();
parsr.parse(Stream::from_iter(end..end+1, tuples))
// ^^^^^^^^^^
// I haven't the foggiest idea why this is needed, parsers are supposed to be lazy so the
// end of input should make little difference
}).map(|res| match res {
Ok(r) => (Some(r), vec![]),
Err(e) => (None, e)
}).unzip::<_, _, Vec<_>, Vec<_>>();
let total_err = errors_per_line.into_iter()
.flat_map(Vec::into_iter)
.collect::<Vec<_>>();
if !total_err.is_empty() { Err(ParseError::Ast(total_err)) }
else { Ok(parsed_lines.into_iter().map(Option::unwrap).collect()) }
}
pub fn reparse<'a, Iter, S, Op>(ops: &[Op], stream: S, pre: &[FileEntry])
-> Result<Vec<FileEntry>, ParseError>
where
Op: 'a + AsRef<str> + Clone,
Iter: Iterator<Item = (char, Range<usize>)> + 'a,
S: Into<Stream<'a, char, Range<usize>, Iter>> {
let result = parse(ops, stream)?;
Ok(result.into_iter().zip(pre.iter()).map(|(mut output, donor)| {
if let FileEntry::Rule(Rule{source, ..}, _) = &mut output {
if let FileEntry::Rule(Rule{source: s2, ..}, _) = donor {
*source = s2.clone()
} else {
panic!("Preparse and reparse received different row types!")
}
}
output
}).collect())
Op: 'a + AsRef<str> + Clone,
Iter: Iterator<Item = (char, Range<usize>)> + 'a,
S: Into<Stream<'a, char, Range<usize>, Iter>> {
let result = parse(ops, stream)?;
Ok(result.into_iter().zip(pre.iter()).map(|(mut output, donor)| {
if let FileEntry::Rule(Rule{source, ..}, _) = &mut output {
if let FileEntry::Rule(Rule{source: s2, ..}, _) = donor {
*source = s2.clone()
} else {
panic!("Preparse and reparse received different row types!")
}
}
output
}).collect())
}
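For orientation, a hedged sketch of how this entry point is driven (mirroring the call in the rule collector further down); the operator list here is made up:

fn demo_parse(source: &str) -> Result<Vec<FileEntry>, ParseError> {
  // A real caller passes the prelude plus any operators imported by the file.
  let ops = ["+", "-", "=>"];
  parse(&ops, source)
}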
@@ -16,50 +16,50 @@ use ordered_float::NotNan;
/// Anything we might encounter in a file
#[derive(Debug, Clone)]
pub enum FileEntry {
Import(Vec<import::Import>),
Comment(String),
Rule(Rule, bool),
Export(Vec<Vec<String>>)
Import(Vec<import::Import>),
Comment(String),
Rule(Rule, bool),
Export(Vec<Vec<String>>)
}
fn visit_all_names_clause_recur<'a, F>(
clause: &'a Clause,
binds: Stackframe<String>,
cb: &mut F
clause: &'a Clause,
binds: Stackframe<String>,
cb: &mut F
) where F: FnMut(&'a [String]) {
match clause {
Clause::Auto(name, typ, body) => {
for x in typ.iter() {
visit_all_names_expr_recur(x, binds.clone(), cb)
}
let binds_dup = binds.clone();
let new_binds = if let Some(n) = name {
binds_dup.push(n.to_owned())
} else {
binds
};
for x in body.iter() {
visit_all_names_expr_recur(x, new_binds.clone(), cb)
}
},
Clause::Lambda(name, typ, body) => {
for x in typ.iter() {
visit_all_names_expr_recur(x, binds.clone(), cb)
}
for x in body.iter() {
visit_all_names_expr_recur(x, binds.push(name.to_owned()), cb)
}
},
Clause::S(_, body) => for x in body.iter() {
visit_all_names_expr_recur(x, binds.clone(), cb)
},
Clause::Name{ local: Some(name), qualified } => {
if binds.iter().all(|x| x != name) {
cb(qualified)
}
}
_ => (),
match clause {
Clause::Auto(name, typ, body) => {
for x in typ.iter() {
visit_all_names_expr_recur(x, binds.clone(), cb)
}
let binds_dup = binds.clone();
let new_binds = if let Some(n) = name {
binds_dup.push(n.to_owned())
} else {
binds
};
for x in body.iter() {
visit_all_names_expr_recur(x, new_binds.clone(), cb)
}
},
Clause::Lambda(name, typ, body) => {
for x in typ.iter() {
visit_all_names_expr_recur(x, binds.clone(), cb)
}
for x in body.iter() {
visit_all_names_expr_recur(x, binds.push(name.to_owned()), cb)
}
},
Clause::S(_, body) => for x in body.iter() {
visit_all_names_expr_recur(x, binds.clone(), cb)
},
Clause::Name{ local: Some(name), qualified } => {
if binds.iter().all(|x| x != name) {
cb(qualified)
}
}
_ => (),
}
}
/// Recursively iterate through all "names" in an expression. It also finds a lot of things that
@@ -68,88 +68,88 @@ fn visit_all_names_clause_recur<'a, F>(
///
/// TODO: find a way to exclude parameters
fn visit_all_names_expr_recur<'a, F>(
expr: &'a Expr,
binds: Stackframe<String>,
cb: &mut F
expr: &'a Expr,
binds: Stackframe<String>,
cb: &mut F
) where F: FnMut(&'a [String]) {
let Expr(val, typ) = expr;
visit_all_names_clause_recur(val, binds.clone(), cb);
for typ in typ.as_ref() {
visit_all_names_clause_recur(typ, binds.clone(), cb);
}
let Expr(val, typ) = expr;
visit_all_names_clause_recur(val, binds.clone(), cb);
for typ in typ.as_ref() {
visit_all_names_clause_recur(typ, binds.clone(), cb);
}
}
/// Collect all names that occur in an expression
fn find_all_names(expr: &Expr) -> HashSet<&[String]> {
let mut ret = HashSet::new();
visit_all_names_expr_recur(expr, Stackframe::new(String::new()), &mut |n| {
if !n.last().unwrap().starts_with('$') {
ret.insert(n);
}
});
ret
let mut ret = HashSet::new();
visit_all_names_expr_recur(expr, Stackframe::new(String::new()), &mut |n| {
if !n.last().unwrap().starts_with('$') {
ret.insert(n);
}
});
ret
}
fn rule_parser() -> impl Parser<Lexeme, (Vec<Expr>, NotNan<f64>, Vec<Expr>), Error = Simple<Lexeme>> {
xpr_parser().repeated()
.then(enum_parser!(Lexeme::Rule))
.then(xpr_parser().repeated())
// .map(|((lhs, prio), rhs)| )
.map(|((a, b), c)| (a, b, c))
.labelled("Rule")
xpr_parser().repeated()
.then(enum_parser!(Lexeme::Rule))
.then(xpr_parser().repeated())
// .map(|((lhs, prio), rhs)| )
.map(|((a, b), c)| (a, b, c))
.labelled("Rule")
}
pub fn line_parser() -> impl Parser<Lexeme, FileEntry, Error = Simple<Lexeme>> {
choice((
// In case the usercode wants to parse doc
enum_parser!(Lexeme >> FileEntry; Comment),
just(Lexeme::name("import"))
.ignore_then(import_parser().map(FileEntry::Import))
.then_ignore(enum_parser!(Lexeme::Comment)),
just(Lexeme::name("export")).map_err_with_span(|e, s| {
println!("{:?} could not yield an export", s); e
}).ignore_then(
just(Lexeme::NS).ignore_then(
enum_parser!(Lexeme::Name).map(|n| vec![n])
.separated_by(just(Lexeme::name(",")))
.delimited_by(just(Lexeme::LP('(')), just(Lexeme::RP('(')))
).map(FileEntry::Export)
).or(rule_parser().map(|(source, prio, target)| {
FileEntry::Rule(Rule {
source: to_mrc_slice(source),
prio,
target: to_mrc_slice(target)
}, true)
})),
// This could match almost anything so it has to go last
rule_parser().map(|(source, prio, target)| FileEntry::Rule(Rule{
source: to_mrc_slice(source),
prio,
target: to_mrc_slice(target)
}, false)),
))
choice((
// In case the usercode wants to parse doc
enum_parser!(Lexeme >> FileEntry; Comment),
just(Lexeme::name("import"))
.ignore_then(import_parser().map(FileEntry::Import))
.then_ignore(enum_parser!(Lexeme::Comment)),
just(Lexeme::name("export")).map_err_with_span(|e, s| {
println!("{:?} could not yield an export", s); e
}).ignore_then(
just(Lexeme::NS).ignore_then(
enum_parser!(Lexeme::Name).map(|n| vec![n])
.separated_by(just(Lexeme::name(",")))
.delimited_by(just(Lexeme::LP('(')), just(Lexeme::RP('(')))
).map(FileEntry::Export)
).or(rule_parser().map(|(source, prio, target)| {
FileEntry::Rule(Rule {
source: to_mrc_slice(source),
prio,
target: to_mrc_slice(target)
}, true)
})),
// This could match almost anything so it has to go last
rule_parser().map(|(source, prio, target)| FileEntry::Rule(Rule{
source: to_mrc_slice(source),
prio,
target: to_mrc_slice(target)
}, false)),
))
}
/// Collect all exported names (and a lot of other words) from a file
pub fn exported_names(src: &[FileEntry]) -> HashSet<&[String]> {
src.iter().flat_map(|ent| match ent {
FileEntry::Rule(Rule{source, target, ..}, true) =>
box_chain!(source.iter(), target.iter()),
_ => box_empty()
}).flat_map(find_all_names).chain(
src.iter().filter_map(|ent| {
if let FileEntry::Export(names) = ent {Some(names.iter())} else {None}
}).flatten().map(Vec::as_slice)
).collect()
src.iter().flat_map(|ent| match ent {
FileEntry::Rule(Rule{source, target, ..}, true) =>
box_chain!(source.iter(), target.iter()),
_ => box_empty()
}).flat_map(find_all_names).chain(
src.iter().filter_map(|ent| {
if let FileEntry::Export(names) = ent {Some(names.iter())} else {None}
}).flatten().map(Vec::as_slice)
).collect()
}
/// Summarize all imports from a file in a single list of qualified names
pub fn imports<'a, 'b, I>(
src: I
src: I
) -> impl Iterator<Item = &'b import::Import> + 'a
where I: Iterator<Item = &'b FileEntry> + 'a {
src.filter_map(|ent| match ent {
FileEntry::Import(impv) => Some(impv.iter()),
_ => None
}).flatten()
src.filter_map(|ent| match ent {
FileEntry::Import(impv) => Some(impv.iter()),
_ => None
}).flatten()
}
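A small illustrative helper, not in the diff, showing how the two queries above might be consumed:

fn dump_interface(entries: &[FileEntry]) {
  for name in exported_names(entries) {
    println!("exports {}", name.join("::"));
  }
  for import in imports(entries.iter()) {
    println!("imports {:?}", import);
  }
}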
@@ -2,45 +2,45 @@ use chumsky::{self, prelude::*, Parser};
/// Parses a text character that is not the specified delimiter
fn text_parser(delim: char) -> impl Parser<char, char, Error = Simple<char>> {
// Copied directly from Chumsky's JSON example.
let escape = just('\\').ignore_then(
just('\\')
.or(just('/'))
.or(just('"'))
.or(just('b').to('\x08'))
.or(just('f').to('\x0C'))
.or(just('n').to('\n'))
.or(just('r').to('\r'))
.or(just('t').to('\t'))
.or(just('u').ignore_then(
filter(|c: &char| c.is_ascii_hexdigit())
.repeated()
.exactly(4)
.collect::<String>()
.validate(|digits, span, emit| {
char::from_u32(u32::from_str_radix(&digits, 16).unwrap())
.unwrap_or_else(|| {
emit(Simple::custom(span, "invalid unicode character"));
'\u{FFFD}' // unicode replacement character
})
}),
)),
);
filter(move |&c| c != '\\' && c != delim).or(escape)
// Copied directly from Chumsky's JSON example.
let escape = just('\\').ignore_then(
just('\\')
.or(just('/'))
.or(just('"'))
.or(just('b').to('\x08'))
.or(just('f').to('\x0C'))
.or(just('n').to('\n'))
.or(just('r').to('\r'))
.or(just('t').to('\t'))
.or(just('u').ignore_then(
filter(|c: &char| c.is_ascii_hexdigit())
.repeated()
.exactly(4)
.collect::<String>()
.validate(|digits, span, emit| {
char::from_u32(u32::from_str_radix(&digits, 16).unwrap())
.unwrap_or_else(|| {
emit(Simple::custom(span, "invalid unicode character"));
'\u{FFFD}' // unicode replacement character
})
}),
)),
);
filter(move |&c| c != '\\' && c != delim).or(escape)
}
/// Parse a character literal between single quotes
pub fn char_parser() -> impl Parser<char, char, Error = Simple<char>> {
just('\'').ignore_then(text_parser('\'')).then_ignore(just('\''))
just('\'').ignore_then(text_parser('\'')).then_ignore(just('\''))
}
/// Parse a string between double quotes
pub fn str_parser() -> impl Parser<char, String, Error = Simple<char>> {
just('"')
.ignore_then(
text_parser('"').map(Some)
.or(just("\\\n").map(|_| None)) // Newlines preceded by backslashes are ignored.
.repeated()
).then_ignore(just('"'))
.flatten().collect()
just('"')
.ignore_then(
text_parser('"').map(Some)
.or(just("\\\n").map(|_| None)) // Newlines preceded by backslashes are ignored.
.repeated()
).then_ignore(just('"'))
.flatten().collect()
}
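A hedged sketch of the escape behaviour encoded above (illustrative only):

use chumsky::Parser;

fn demo_text_literals() {
  assert_eq!(char_parser().parse("'a'").unwrap(), 'a');
  assert_eq!(char_parser().parse(r"'\n'").unwrap(), '\n');
  // A backslash immediately followed by a newline is dropped from the string.
  assert_eq!(str_parser().parse("\"ab\\\ncd\"").unwrap(), "abcd");
}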
@@ -9,43 +9,43 @@ use super::loaded::Loaded;
#[derive(Clone, Debug)]
pub enum LoadingError {
IOErr(Rc<io::Error>),
UnknownNode(String),
Missing(String)
IOErr(Rc<io::Error>),
UnknownNode(String),
Missing(String)
}
impl From<io::Error> for LoadingError {
fn from(inner: io::Error) -> Self {
LoadingError::IOErr(Rc::new(inner))
}
fn from(inner: io::Error) -> Self {
LoadingError::IOErr(Rc::new(inner))
}
}
pub fn file_loader(proj: PathBuf) -> impl FnMut(Mrc<[String]>) -> Result<Loaded, LoadingError> + 'static {
move |path| {
let dirpath = proj.join(path.join("/"));
if dirpath.is_dir() || dirpath.is_symlink() {
return Ok(Loaded::Namespace(
dirpath.read_dir()?
.filter_map(|entr| {
let ent = entr.ok()?;
let typ = ent.file_type().ok()?;
let path = ent.path();
if typ.is_dir() || typ.is_symlink() {
Some(ent.file_name().to_string_lossy().into_owned())
} else if typ.is_file() && path.extension()? == "orc" {
Some(path.file_stem()?.to_string_lossy().into_owned())
} else { None }
})
.collect()
))
}
let orcfile = dirpath.with_extension("orc");
if orcfile.is_file() {
read_to_string(orcfile).map(Loaded::Module).map_err(LoadingError::from)
} else {
let pathstr = dirpath.to_string_lossy().into_owned();
Err(if dirpath.exists() { LoadingError::UnknownNode(pathstr) }
else { LoadingError::Missing(pathstr) })
}
move |path| {
let dirpath = proj.join(path.join("/"));
if dirpath.is_dir() || dirpath.is_symlink() {
return Ok(Loaded::Namespace(
dirpath.read_dir()?
.filter_map(|entr| {
let ent = entr.ok()?;
let typ = ent.file_type().ok()?;
let path = ent.path();
if typ.is_dir() || typ.is_symlink() {
Some(ent.file_name().to_string_lossy().into_owned())
} else if typ.is_file() && path.extension()? == "orc" {
Some(path.file_stem()?.to_string_lossy().into_owned())
} else { None }
})
.collect()
))
}
let orcfile = dirpath.with_extension("orc");
if orcfile.is_file() {
read_to_string(orcfile).map(Loaded::Module).map_err(LoadingError::from)
} else {
let pathstr = dirpath.to_string_lossy().into_owned();
Err(if dirpath.exists() { LoadingError::UnknownNode(pathstr) }
else { LoadingError::Missing(pathstr) })
}
}
}
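A sketch of intended use with a made-up project root; to_mrc_slice is the crate helper used throughout this diff and is assumed to be in scope:

use std::path::PathBuf;

fn demo_file_loader() -> Result<Loaded, LoadingError> {
  let mut load = file_loader(PathBuf::from("./examples/project")); // hypothetical root
  // Reads ./examples/project/std/io.orc, or lists the directory of the same name.
  load(to_mrc_slice(vec!["std".to_string(), "io".to_string()]))
}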
@@ -1,5 +1,5 @@
#[derive(Debug, Clone)]
pub enum Loaded {
Module(String),
Namespace(Vec<String>),
Module(String),
Namespace(Vec<String>),
}
@@ -6,26 +6,26 @@ use super::name_resolver::ResolutionError;
#[derive(Error, Debug, Clone)]
pub enum ModuleError<ELoad> where ELoad: Clone {
#[error("Resolution cycle")]
ResolutionCycle,
#[error("File not found: {0}")]
Load(ELoad),
#[error("Failed to parse: {0:?}")]
Syntax(ParseError),
#[error("Not a module")]
None
#[error("Resolution cycle")]
ResolutionCycle,
#[error("File not found: {0}")]
Load(ELoad),
#[error("Failed to parse: {0:?}")]
Syntax(ParseError),
#[error("Not a module")]
None
}
impl<T> From<ParseError> for ModuleError<T> where T: Clone {
fn from(pars: ParseError) -> Self { Self::Syntax(pars) }
fn from(pars: ParseError) -> Self { Self::Syntax(pars) }
}
impl<T> From<ResolutionError<ModuleError<T>>> for ModuleError<T> where T: Clone {
fn from(res: ResolutionError<ModuleError<T>>) -> Self {
match res {
ResolutionError::Cycle(_) => ModuleError::ResolutionCycle,
ResolutionError::NoModule(_) => ModuleError::None,
ResolutionError::Delegate(d) => d
}
fn from(res: ResolutionError<ModuleError<T>>) -> Self {
match res {
ResolutionError::Cycle(_) => ModuleError::ResolutionCycle,
ResolutionError::NoModule(_) => ModuleError::None,
ResolutionError::Delegate(d) => d
}
}
}
@@ -10,12 +10,12 @@ type ImportMap = HashMap<String, Mrc<[String]>>;
#[derive(Debug, Clone, Error)]
pub enum ResolutionError<Err> {
#[error("Reference cycle at {0:?}")]
Cycle(Vec<Mrc<[String]>>),
#[error("No module provides {0:?}")]
NoModule(Mrc<[String]>),
#[error(transparent)]
Delegate(#[from] Err)
#[error("Reference cycle at {0:?}")]
Cycle(Vec<Mrc<[String]>>),
#[error("No module provides {0:?}")]
NoModule(Mrc<[String]>),
#[error(transparent)]
Delegate(#[from] Err)
}
type ResolutionResult<E> = Result<Mrc<[String]>, ResolutionError<E>>;
@@ -24,108 +24,108 @@ type ResolutionResult<E> = Result<Mrc<[String]>, ResolutionError<E>>;
/// resolution. This makes the resolution process lightning fast and invalidation completely
/// impossible since the intermediate steps of a resolution aren't stored.
pub struct NameResolver<FSplit, FImps, E> {
cache: HashMap<Mrc<[String]>, ResolutionResult<E>>,
get_modname: FSplit,
get_imports: FImps
cache: HashMap<Mrc<[String]>, ResolutionResult<E>>,
get_modname: FSplit,
get_imports: FImps
}
impl<FSplit, FImps, E> NameResolver<FSplit, FImps, E>
where
FSplit: FnMut(Mrc<[String]>) -> Option<Mrc<[String]>>,
FImps: FnMut(Mrc<[String]>) -> Result<ImportMap, E>,
E: Clone
FSplit: FnMut(Mrc<[String]>) -> Option<Mrc<[String]>>,
FImps: FnMut(Mrc<[String]>) -> Result<ImportMap, E>,
E: Clone
{
pub fn new(get_modname: FSplit, get_imports: FImps) -> Self {
Self {
cache: HashMap::new(),
get_modname,
get_imports
}
pub fn new(get_modname: FSplit, get_imports: FImps) -> Self {
Self {
cache: HashMap::new(),
get_modname,
get_imports
}
}
  /// Obtains a symbol's original name
/// Uses a substack to detect loops
fn find_origin_rec(
&mut self,
symbol: Mrc<[String]>,
import_path: Stackframe<Mrc<[String]>>
) -> Result<Mrc<[String]>, ResolutionError<E>> {
if let Some(cached) = self.cache.get(&symbol) {
return cached.as_ref().map_err(|e| e.clone()).map(Mrc::clone)
}
// The imports and path of the referenced file and the local name
let path = (self.get_modname)(Mrc::clone(&symbol)).ok_or_else(|| {
ResolutionError::NoModule(Mrc::clone(&symbol))
})?;
let name = &symbol[path.len()..];
if name.is_empty() {
panic!("get_modname matched all to module and nothing to name in {:?}", import_path)
}
let imports = (self.get_imports)(Mrc::clone(&path))?;
let result = if let Some(source) = imports.get(&name[0]) {
let new_sym: Vec<String> = source.iter().chain(name.iter()).cloned().collect();
if import_path.iter().any(|el| el.as_ref() == new_sym.as_slice()) {
Err(ResolutionError::Cycle(import_path.iter().map(Mrc::clone).collect()))
} else {
self.find_origin_rec(to_mrc_slice(new_sym), import_path.push(Mrc::clone(&symbol)))
}
} else {
Ok(symbol.clone()) // If not imported, it must be locally defined
};
self.cache.insert(symbol, result.clone());
result
  /// Obtains a symbol's original name
/// Uses a substack to detect loops
fn find_origin_rec(
&mut self,
symbol: Mrc<[String]>,
import_path: Stackframe<Mrc<[String]>>
) -> Result<Mrc<[String]>, ResolutionError<E>> {
if let Some(cached) = self.cache.get(&symbol) {
return cached.as_ref().map_err(|e| e.clone()).map(Mrc::clone)
}
// The imports and path of the referenced file and the local name
let path = (self.get_modname)(Mrc::clone(&symbol)).ok_or_else(|| {
ResolutionError::NoModule(Mrc::clone(&symbol))
})?;
let name = &symbol[path.len()..];
if name.is_empty() {
panic!("get_modname matched all to module and nothing to name in {:?}", import_path)
}
let imports = (self.get_imports)(Mrc::clone(&path))?;
let result = if let Some(source) = imports.get(&name[0]) {
let new_sym: Vec<String> = source.iter().chain(name.iter()).cloned().collect();
if import_path.iter().any(|el| el.as_ref() == new_sym.as_slice()) {
Err(ResolutionError::Cycle(import_path.iter().map(Mrc::clone).collect()))
} else {
self.find_origin_rec(to_mrc_slice(new_sym), import_path.push(Mrc::clone(&symbol)))
}
} else {
Ok(symbol.clone()) // If not imported, it must be locally defined
};
self.cache.insert(symbol, result.clone());
result
}
fn process_exprv_rec(&mut self, exv: &[Expr]) -> Result<Vec<Expr>, ResolutionError<E>> {
exv.iter().map(|ex| self.process_expression_rec(ex)).collect()
}
fn process_exprv_rec(&mut self, exv: &[Expr]) -> Result<Vec<Expr>, ResolutionError<E>> {
exv.iter().map(|ex| self.process_expression_rec(ex)).collect()
}
fn process_exprmrcopt_rec(&mut self,
exbo: &Option<Mrc<Expr>>
) -> Result<Option<Mrc<Expr>>, ResolutionError<E>> {
exbo.iter().map(|exb| Ok(Mrc::new(self.process_expression_rec(exb.as_ref())?)))
.next().transpose()
}
fn process_exprmrcopt_rec(&mut self,
exbo: &Option<Mrc<Expr>>
) -> Result<Option<Mrc<Expr>>, ResolutionError<E>> {
exbo.iter().map(|exb| Ok(Mrc::new(self.process_expression_rec(exb.as_ref())?)))
.next().transpose()
}
fn process_clause_rec(&mut self, tok: &Clause) -> Result<Clause, ResolutionError<E>> {
Ok(match tok {
Clause::S(c, exv) => Clause::S(*c, to_mrc_slice(
exv.as_ref().iter().map(|e| self.process_expression_rec(e))
.collect::<Result<Vec<Expr>, ResolutionError<E>>>()?
)),
Clause::Lambda(name, typ, body) => Clause::Lambda(name.clone(),
to_mrc_slice(self.process_exprv_rec(typ.as_ref())?),
to_mrc_slice(self.process_exprv_rec(body.as_ref())?)
),
Clause::Auto(name, typ, body) => Clause::Auto(name.clone(),
to_mrc_slice(self.process_exprv_rec(typ.as_ref())?),
to_mrc_slice(self.process_exprv_rec(body.as_ref())?)
),
Clause::Name{local, qualified} => Clause::Name{
local: local.clone(),
qualified: self.find_origin(Mrc::clone(qualified))?
},
x => x.clone()
})
}
fn process_clause_rec(&mut self, tok: &Clause) -> Result<Clause, ResolutionError<E>> {
Ok(match tok {
Clause::S(c, exv) => Clause::S(*c, to_mrc_slice(
exv.as_ref().iter().map(|e| self.process_expression_rec(e))
.collect::<Result<Vec<Expr>, ResolutionError<E>>>()?
)),
Clause::Lambda(name, typ, body) => Clause::Lambda(name.clone(),
to_mrc_slice(self.process_exprv_rec(typ.as_ref())?),
to_mrc_slice(self.process_exprv_rec(body.as_ref())?)
),
Clause::Auto(name, typ, body) => Clause::Auto(name.clone(),
to_mrc_slice(self.process_exprv_rec(typ.as_ref())?),
to_mrc_slice(self.process_exprv_rec(body.as_ref())?)
),
Clause::Name{local, qualified} => Clause::Name{
local: local.clone(),
qualified: self.find_origin(Mrc::clone(qualified))?
},
x => x.clone()
})
}
fn process_expression_rec(&mut self, Expr(token, typ): &Expr) -> Result<Expr, ResolutionError<E>> {
Ok(Expr(
self.process_clause_rec(token)?,
typ.iter().map(|t| self.process_clause_rec(t)).collect::<Result<_, _>>()?
))
}
fn process_expression_rec(&mut self, Expr(token, typ): &Expr) -> Result<Expr, ResolutionError<E>> {
Ok(Expr(
self.process_clause_rec(token)?,
typ.iter().map(|t| self.process_clause_rec(t)).collect::<Result<_, _>>()?
))
}
pub fn find_origin(&mut self, symbol: Mrc<[String]>) -> Result<Mrc<[String]>, ResolutionError<E>> {
self.find_origin_rec(Mrc::clone(&symbol), Stackframe::new(symbol))
}
pub fn find_origin(&mut self, symbol: Mrc<[String]>) -> Result<Mrc<[String]>, ResolutionError<E>> {
self.find_origin_rec(Mrc::clone(&symbol), Stackframe::new(symbol))
}
#[allow(dead_code)]
pub fn process_clause(&mut self, clause: &Clause) -> Result<Clause, ResolutionError<E>> {
self.process_clause_rec(clause)
}
#[allow(dead_code)]
pub fn process_clause(&mut self, clause: &Clause) -> Result<Clause, ResolutionError<E>> {
self.process_clause_rec(clause)
}
pub fn process_expression(&mut self, ex: &Expr) -> Result<Expr, ResolutionError<E>> {
self.process_expression_rec(ex)
}
pub fn process_expression(&mut self, ex: &Expr) -> Result<Expr, ResolutionError<E>> {
self.process_expression_rec(ex)
}
}
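A rough construction sketch mirroring how the rule collector further down wires the resolver; the closures here are deliberately trivial, and mrc_derive/to_mrc_slice are assumed to be the crate helpers seen elsewhere in this diff:

fn demo_resolver() {
  // get_modname: everything but the last segment is the module path;
  // get_imports: pretend no module imports anything.
  let mut resolver = NameResolver::new(
    |symbol: Mrc<[String]>| Some(mrc_derive(&symbol, |s| &s[..s.len() - 1])),
    |_path: Mrc<[String]>| -> Result<HashMap<String, Mrc<[String]>>, ()> { Ok(HashMap::new()) }
  );
  // With no imports anywhere, a symbol resolves to itself.
  let sym = to_mrc_slice(vec!["mod".to_string(), "item".to_string()]);
  let origin = resolver.find_origin(Mrc::clone(&sym)).unwrap();
  assert!(origin.iter().eq(sym.iter()));
}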
@@ -7,35 +7,35 @@ use crate::{ast::{Expr, Clause}, utils::{collect_to_mrc, to_mrc_slice}};
/// Produce a Token object for any value of Expr other than Typed.
/// Called by [#prefix] which handles Typed.
fn prefix_clause(
expr: &Clause,
namespace: Mrc<[String]>
expr: &Clause,
namespace: Mrc<[String]>
) -> Clause {
match expr {
Clause::S(c, v) => Clause::S(*c,
collect_to_mrc(v.iter().map(|e| prefix_expr(e, Mrc::clone(&namespace))))
),
Clause::Auto(name, typ, body) => Clause::Auto(
name.clone(),
collect_to_mrc(typ.iter().map(|e| prefix_expr(e, Mrc::clone(&namespace)))),
collect_to_mrc(body.iter().map(|e| prefix_expr(e, Mrc::clone(&namespace)))),
),
Clause::Lambda(name, typ, body) => Clause::Lambda(
name.clone(),
collect_to_mrc(typ.iter().map(|e| prefix_expr(e, Mrc::clone(&namespace)))),
collect_to_mrc(body.iter().map(|e| prefix_expr(e, Mrc::clone(&namespace)))),
),
Clause::Name{local, qualified} => Clause::Name{
local: local.clone(),
qualified: collect_to_mrc(namespace.iter().chain(qualified.iter()).cloned())
},
x => x.clone()
}
match expr {
Clause::S(c, v) => Clause::S(*c,
collect_to_mrc(v.iter().map(|e| prefix_expr(e, Mrc::clone(&namespace))))
),
Clause::Auto(name, typ, body) => Clause::Auto(
name.clone(),
collect_to_mrc(typ.iter().map(|e| prefix_expr(e, Mrc::clone(&namespace)))),
collect_to_mrc(body.iter().map(|e| prefix_expr(e, Mrc::clone(&namespace)))),
),
Clause::Lambda(name, typ, body) => Clause::Lambda(
name.clone(),
collect_to_mrc(typ.iter().map(|e| prefix_expr(e, Mrc::clone(&namespace)))),
collect_to_mrc(body.iter().map(|e| prefix_expr(e, Mrc::clone(&namespace)))),
),
Clause::Name{local, qualified} => Clause::Name{
local: local.clone(),
qualified: collect_to_mrc(namespace.iter().chain(qualified.iter()).cloned())
},
x => x.clone()
}
}
/// Produce an Expr object for any value of Expr
pub fn prefix_expr(Expr(clause, typ): &Expr, namespace: Mrc<[String]>) -> Expr {
Expr(
prefix_clause(clause, Mrc::clone(&namespace)),
to_mrc_slice(typ.iter().map(|e| prefix_clause(e, Mrc::clone(&namespace))).collect())
)
Expr(
prefix_clause(clause, Mrc::clone(&namespace)),
to_mrc_slice(typ.iter().map(|e| prefix_clause(e, Mrc::clone(&namespace))).collect())
)
}
@@ -18,198 +18,198 @@ type ParseResult<T, ELoad> = Result<T, ModuleError<ELoad>>;
#[derive(Debug, Clone)]
pub struct Module {
pub rules: Vec<Rule>,
pub exports: Vec<String>,
pub references: Vec<Mrc<[String]>>
pub rules: Vec<Rule>,
pub exports: Vec<String>,
pub references: Vec<Mrc<[String]>>
}
pub type RuleCollectionResult<ELoad> = Result<Vec<super::Rule>, ModuleError<ELoad>>;
pub fn rule_collector<F: 'static, ELoad>(
load_mod: F,
prelude: Vec<String>
load_mod: F,
prelude: Vec<String>
) -> Cache<'static, Mrc<[String]>, RuleCollectionResult<ELoad>>
where
F: FnMut(Mrc<[String]>) -> Result<Loaded, ELoad>,
ELoad: Clone + Debug
F: FnMut(Mrc<[String]>) -> Result<Loaded, ELoad>,
ELoad: Clone + Debug
{
let load_mod_rc = RefCell::new(load_mod);
// Map paths to a namespace with name list (folder) or module with source text (file)
let loaded = Rc::new(Cache::new(move |path: Mrc<[String]>, _|
-> ParseResult<Loaded, ELoad> {
(load_mod_rc.borrow_mut())(path).map_err(ModuleError::Load)
}));
// Map names to the longest prefix that points to a valid module
// At least one segment must be in the prefix, and the prefix must not be the whole name
let modname = Rc::new(Cache::new({
let loaded = Rc::clone(&loaded);
move |symbol: Mrc<[String]>, _| -> Result<Mrc<[String]>, Vec<ModuleError<ELoad>>> {
let mut errv: Vec<ModuleError<ELoad>> = Vec::new();
let reg_err = |e, errv: &mut Vec<ModuleError<ELoad>>| {
errv.push(e);
if symbol.len() == errv.len() { Err(errv.clone()) }
else { Ok(()) }
};
loop {
let path = mrc_derive(&symbol, |s| &s[..s.len() - errv.len() - 1]);
match loaded.try_find(&path) {
Ok(imports) => match imports.as_ref() {
Loaded::Module(_) => break Ok(path),
_ => reg_err(ModuleError::None, &mut errv)?
},
Err(err) => reg_err(err, &mut errv)?
}
}
let load_mod_rc = RefCell::new(load_mod);
// Map paths to a namespace with name list (folder) or module with source text (file)
let loaded = Rc::new(Cache::new(move |path: Mrc<[String]>, _|
-> ParseResult<Loaded, ELoad> {
(load_mod_rc.borrow_mut())(path).map_err(ModuleError::Load)
}));
// Map names to the longest prefix that points to a valid module
// At least one segment must be in the prefix, and the prefix must not be the whole name
let modname = Rc::new(Cache::new({
let loaded = Rc::clone(&loaded);
move |symbol: Mrc<[String]>, _| -> Result<Mrc<[String]>, Vec<ModuleError<ELoad>>> {
let mut errv: Vec<ModuleError<ELoad>> = Vec::new();
let reg_err = |e, errv: &mut Vec<ModuleError<ELoad>>| {
errv.push(e);
if symbol.len() == errv.len() { Err(errv.clone()) }
else { Ok(()) }
};
loop {
let path = mrc_derive(&symbol, |s| &s[..s.len() - errv.len() - 1]);
match loaded.try_find(&path) {
Ok(imports) => match imports.as_ref() {
Loaded::Module(_) => break Ok(path),
_ => reg_err(ModuleError::None, &mut errv)?
},
Err(err) => reg_err(err, &mut errv)?
}
}));
// Preliminarily parse a file, substitution rules and imports are valid
let preparsed = Rc::new(Cache::new({
let loaded = Rc::clone(&loaded);
let prelude2 = prelude.clone();
move |path: Mrc<[String]>, _| -> ParseResult<Vec<FileEntry>, ELoad> {
let loaded = loaded.try_find(&path)?;
if let Loaded::Module(source) = loaded.as_ref() {
Ok(parse::parse(&prelude2, source.as_str())?)
} else {Err(ModuleError::None)}
}
}
}));
// Preliminarily parse a file, substitution rules and imports are valid
let preparsed = Rc::new(Cache::new({
let loaded = Rc::clone(&loaded);
let prelude2 = prelude.clone();
move |path: Mrc<[String]>, _| -> ParseResult<Vec<FileEntry>, ELoad> {
let loaded = loaded.try_find(&path)?;
if let Loaded::Module(source) = loaded.as_ref() {
Ok(parse::parse(&prelude2, source.as_str())?)
} else {Err(ModuleError::None)}
}
}));
// Collect all toplevel names exported from a given file
let exports = Rc::new(Cache::new({
let loaded = Rc::clone(&loaded);
let preparsed = Rc::clone(&preparsed);
move |path: Mrc<[String]>, _| -> ParseResult<Vec<String>, ELoad> {
let loaded = loaded.try_find(&path)?;
if let Loaded::Namespace(names) = loaded.as_ref() {
return Ok(names.clone());
}
let preparsed = preparsed.try_find(&path)?;
Ok(parse::exported_names(&preparsed)
.into_iter()
.map(|n| n[0].clone())
.collect())
}
}));
// Collect all toplevel names imported by a given file
let imports = Rc::new(Cache::new({
let preparsed = Rc::clone(&preparsed);
let exports = Rc::clone(&exports);
move |path: Mrc<[String]>, _| -> ParseResult<HashMap<String, Mrc<[String]>>, ELoad> {
let entv = preparsed.try_find(&path)?;
let import_entries = parse::imports(entv.iter());
let mut imported_symbols: HashMap<String, Mrc<[String]>> = HashMap::new();
for imp in import_entries {
let export = exports.try_find(&imp.path)?;
if let Some(ref name) = imp.name {
if export.contains(name) {
imported_symbols.insert(name.clone(), Mrc::clone(&imp.path));
}
} else {
for exp in export.as_ref() {
imported_symbols.insert(exp.clone(), Mrc::clone(&imp.path));
}
}
}));
// Collect all toplevel names exported from a given file
let exports = Rc::new(Cache::new({
let loaded = Rc::clone(&loaded);
let preparsed = Rc::clone(&preparsed);
move |path: Mrc<[String]>, _| -> ParseResult<Vec<String>, ELoad> {
let loaded = loaded.try_find(&path)?;
if let Loaded::Namespace(names) = loaded.as_ref() {
return Ok(names.clone());
}
let preparsed = preparsed.try_find(&path)?;
Ok(parse::exported_names(&preparsed)
.into_iter()
.map(|n| n[0].clone())
.collect())
}
}));
// Collect all toplevel names imported by a given file
let imports = Rc::new(Cache::new({
let preparsed = Rc::clone(&preparsed);
let exports = Rc::clone(&exports);
move |path: Mrc<[String]>, _| -> ParseResult<HashMap<String, Mrc<[String]>>, ELoad> {
let entv = preparsed.try_find(&path)?;
let import_entries = parse::imports(entv.iter());
let mut imported_symbols: HashMap<String, Mrc<[String]>> = HashMap::new();
for imp in import_entries {
let export = exports.try_find(&imp.path)?;
if let Some(ref name) = imp.name {
if export.contains(name) {
imported_symbols.insert(name.clone(), Mrc::clone(&imp.path));
}
} else {
for exp in export.as_ref() {
imported_symbols.insert(exp.clone(), Mrc::clone(&imp.path));
}
}
}
Ok(imported_symbols)
}
}));
// Final parse, operators are correctly separated
let parsed = Rc::new(Cache::new({
let preparsed = Rc::clone(&preparsed);
let imports = Rc::clone(&imports);
let loaded = Rc::clone(&loaded);
move |path: Mrc<[String]>, _| -> ParseResult<Vec<FileEntry>, ELoad> {
let imported_ops: Vec<String> =
imports.try_find(&path)?
.keys()
.chain(prelude.iter())
.filter(|s| parse::is_op(s))
.cloned()
.collect();
// let parser = file_parser(&prelude, &imported_ops);
let pre = preparsed.try_find(&path)?;
if let Loaded::Module(source) = loaded.try_find(&path)?.as_ref() {
Ok(parse::reparse(&imported_ops, source.as_str(), &pre)?)
} else { Err(ModuleError::None) }
}
}));
let name_resolver_rc = RefCell::new(NameResolver::new({
let modname = Rc::clone(&modname);
move |path| {
Some(modname.try_find(&path).ok()?.as_ref().clone())
}
}, {
let imports = Rc::clone(&imports);
move |path| {
imports.try_find(&path).map(|f| f.as_ref().clone())
}
}));
// Turn parsed files into a bag of rules and a list of toplevel export names
let resolved = Rc::new(Cache::new({
let parsed = Rc::clone(&parsed);
let exports = Rc::clone(&exports);
let imports = Rc::clone(&imports);
let modname = Rc::clone(&modname);
move |path: Mrc<[String]>, _| -> ParseResult<Module, ELoad> {
let mut name_resolver = name_resolver_rc.borrow_mut();
let module = Module {
rules: parsed.try_find(&path)?
.iter()
.filter_map(|ent| {
if let FileEntry::Rule(Rule{source, prio, target}, _) = ent {
Some(Rule {
source: source.iter()
.map(|ex| {
prefix_expr(ex, Mrc::clone(&path))
}).collect(),
target: target.iter().map(|ex| {
prefix_expr(ex, Mrc::clone(&path))
}).collect(),
prio: *prio,
})
} else { None }
})
.map(|rule| Ok(super::Rule {
source: to_mrc_slice(rule.source.iter()
.map(|ex| name_resolver.process_expression(ex))
.collect::<Result<Vec<_>, _>>()?),
target: to_mrc_slice(rule.target.iter()
.map(|ex| name_resolver.process_expression(ex))
.collect::<Result<Vec<_>, _>>()?),
..rule
}))
.collect::<ParseResult<Vec<super::Rule>, ELoad>>()?,
exports: exports.try_find(&path)?.as_ref().clone(),
references: imports.try_find(&path)?
.values()
.filter_map(|imps| {
modname.try_find(imps).ok().map(|r| r.as_ref().clone())
})
.collect()
};
Ok(module)
}
}));
Cache::new({
let resolved = Rc::clone(&resolved);
move |path: Mrc<[String]>, _| -> ParseResult<Vec<super::Rule>, ELoad> {
// Breadth-first search
let mut processed: HashSet<Mrc<[String]>> = HashSet::new();
let mut rules: Vec<super::Rule> = Vec::new();
let mut pending: VecDeque<Mrc<[String]>> = VecDeque::new();
pending.push_back(path);
while let Some(el) = pending.pop_front() {
let resolved = resolved.try_find(&el)?;
processed.insert(el.clone());
pending.extend(
resolved.references.iter()
.filter(|&v| !processed.contains(v))
.cloned()
);
rules.extend(
resolved.rules.iter().cloned()
)
};
Ok(rules)
}
})
}
Ok(imported_symbols)
}
}));
// Final parse, operators are correctly separated
let parsed = Rc::new(Cache::new({
let preparsed = Rc::clone(&preparsed);
let imports = Rc::clone(&imports);
let loaded = Rc::clone(&loaded);
move |path: Mrc<[String]>, _| -> ParseResult<Vec<FileEntry>, ELoad> {
let imported_ops: Vec<String> =
imports.try_find(&path)?
.keys()
.chain(prelude.iter())
.filter(|s| parse::is_op(s))
.cloned()
.collect();
// let parser = file_parser(&prelude, &imported_ops);
let pre = preparsed.try_find(&path)?;
if let Loaded::Module(source) = loaded.try_find(&path)?.as_ref() {
Ok(parse::reparse(&imported_ops, source.as_str(), &pre)?)
} else { Err(ModuleError::None) }
}
}));
let name_resolver_rc = RefCell::new(NameResolver::new({
let modname = Rc::clone(&modname);
move |path| {
Some(modname.try_find(&path).ok()?.as_ref().clone())
}
}, {
let imports = Rc::clone(&imports);
move |path| {
imports.try_find(&path).map(|f| f.as_ref().clone())
}
}));
// Turn parsed files into a bag of rules and a list of toplevel export names
let resolved = Rc::new(Cache::new({
let parsed = Rc::clone(&parsed);
let exports = Rc::clone(&exports);
let imports = Rc::clone(&imports);
let modname = Rc::clone(&modname);
move |path: Mrc<[String]>, _| -> ParseResult<Module, ELoad> {
let mut name_resolver = name_resolver_rc.borrow_mut();
let module = Module {
rules: parsed.try_find(&path)?
.iter()
.filter_map(|ent| {
if let FileEntry::Rule(Rule{source, prio, target}, _) = ent {
Some(Rule {
source: source.iter()
.map(|ex| {
prefix_expr(ex, Mrc::clone(&path))
}).collect(),
target: target.iter().map(|ex| {
prefix_expr(ex, Mrc::clone(&path))
}).collect(),
prio: *prio,
})
} else { None }
})
.map(|rule| Ok(super::Rule {
source: to_mrc_slice(rule.source.iter()
.map(|ex| name_resolver.process_expression(ex))
.collect::<Result<Vec<_>, _>>()?),
target: to_mrc_slice(rule.target.iter()
.map(|ex| name_resolver.process_expression(ex))
.collect::<Result<Vec<_>, _>>()?),
..rule
}))
.collect::<ParseResult<Vec<super::Rule>, ELoad>>()?,
exports: exports.try_find(&path)?.as_ref().clone(),
references: imports.try_find(&path)?
.values()
.filter_map(|imps| {
modname.try_find(imps).ok().map(|r| r.as_ref().clone())
})
.collect()
};
Ok(module)
}
}));
Cache::new({
let resolved = Rc::clone(&resolved);
move |path: Mrc<[String]>, _| -> ParseResult<Vec<super::Rule>, ELoad> {
// Breadth-first search
let mut processed: HashSet<Mrc<[String]>> = HashSet::new();
let mut rules: Vec<super::Rule> = Vec::new();
let mut pending: VecDeque<Mrc<[String]>> = VecDeque::new();
pending.push_back(path);
while let Some(el) = pending.pop_front() {
let resolved = resolved.try_find(&el)?;
processed.insert(el.clone());
pending.extend(
resolved.references.iter()
.filter(|&v| !processed.contains(v))
.cloned()
);
rules.extend(
resolved.rules.iter().cloned()
)
};
Ok(rules)
}
})
}
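End to end, a hedged sketch of driving the collector with the file loader defined earlier in this diff; the project path and prelude are hypothetical, and Cache::try_find is assumed to yield the cached value or its error, as the call sites above suggest:

use std::path::PathBuf;

fn demo_collect_rules() {
  let collector = rule_collector(
    file_loader(PathBuf::from("./examples/project")), // hypothetical project root
    vec!["+".to_string(), "-".to_string()]            // hypothetical prelude operators
  );
  // Gather every rule reachable from the `main` module, breadth-first.
  match collector.try_find(&to_mrc_slice(vec!["main".to_string()])) {
    Ok(rules) => println!("collected {} rules", rules.len()),
    Err(err) => eprintln!("collection failed: {:?}", err),
  }
}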
@@ -4,7 +4,7 @@ use ordered_float::NotNan;
use std::{hash::Hash, intrinsics::likely};
use std::fmt::Debug;
use crate::utils::mrc_empty_slice;
use crate::{executor::{ExternFn, Atom}, utils::one_mrc_slice};
use crate::{foreign::{ExternFn, Atom}, utils::one_mrc_slice};
use super::Literal;
@@ -12,178 +12,178 @@ use super::Literal;
#[derive(PartialEq, Eq, Hash)]
pub struct Expr(pub Clause, pub Mrc<[Clause]>);
impl Expr {
pub fn into_clause(self) -> Clause {
if likely(self.1.len() == 0) { self.0 }
else { Clause::S('(', one_mrc_slice(self)) }
}
pub fn into_clause(self) -> Clause {
if likely(self.1.len() == 0) { self.0 }
else { Clause::S('(', one_mrc_slice(self)) }
}
}
impl Clone for Expr {
fn clone(&self) -> Self {
Self(self.0.clone(), Mrc::clone(&self.1))
}
fn clone(&self) -> Self {
Self(self.0.clone(), Mrc::clone(&self.1))
}
}
impl Debug for Expr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let Expr(val, typ) = self;
write!(f, "{:?}", val)?;
for typ in typ.as_ref() {
write!(f, ":{:?}", typ)?
}
Ok(())
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let Expr(val, typ) = self;
write!(f, "{:?}", val)?;
for typ in typ.as_ref() {
write!(f, ":{:?}", typ)?
}
Ok(())
}
}
/// An S-expression as read from a source file
#[derive(PartialEq, Eq, Hash)]
pub enum Clause {
/// A literal value, eg. `1`, `"hello"`
Literal(Literal),
/// A c-style name or an operator, eg. `+`, `i`, `foo::bar`
Name{
local: Option<String>,
qualified: Mrc<[String]>
},
/// A parenthesized expression, eg. `(print out "hello")`, `[1, 2, 3]`, `{Some(t) => t}`
S(char, Mrc<[Expr]>),
/// An explicit expression associated with the leftmost, outermost [Clause::Auto], eg. `read @Int`
Explicit(Mrc<Expr>),
/// A function expression, eg. `\x. x + 1`
Lambda(String, Mrc<[Expr]>, Mrc<[Expr]>),
/// A parameterized expression with type inference, eg. `@T. T -> T`
Auto(Option<String>, Mrc<[Expr]>, Mrc<[Expr]>),
/// An opaque function, eg. an effectful function employing CPS.
/// Preferably wrap these in an Orchid monad.
ExternFn(ExternFn),
/// An opaque non-callable value, eg. a file handle.
/// Preferably wrap these in an Orchid structure.
Atom(Atom),
/// A placeholder for macros, eg. `$name`, `...$body`, `...$lhs:1`
Placeh{
key: String,
/// None => matches one token
/// Some((prio, nonzero)) =>
/// prio is the sizing priority for the vectorial (higher prio grows first)
/// nonzero is whether the vectorial matches 1..n or 0..n tokens
vec: Option<(usize, bool)>
},
/// A literal value, eg. `1`, `"hello"`
Literal(Literal),
/// A c-style name or an operator, eg. `+`, `i`, `foo::bar`
Name{
local: Option<String>,
qualified: Mrc<[String]>
},
/// A parenthesized expression, eg. `(print out "hello")`, `[1, 2, 3]`, `{Some(t) => t}`
S(char, Mrc<[Expr]>),
/// An explicit expression associated with the leftmost, outermost [Clause::Auto], eg. `read @Int`
Explicit(Mrc<Expr>),
/// A function expression, eg. `\x. x + 1`
Lambda(String, Mrc<[Expr]>, Mrc<[Expr]>),
/// A parameterized expression with type inference, eg. `@T. T -> T`
Auto(Option<String>, Mrc<[Expr]>, Mrc<[Expr]>),
/// An opaque function, eg. an effectful function employing CPS.
/// Preferably wrap these in an Orchid monad.
ExternFn(ExternFn),
/// An opaque non-callable value, eg. a file handle.
/// Preferably wrap these in an Orchid structure.
Atom(Atom),
/// A placeholder for macros, eg. `$name`, `...$body`, `...$lhs:1`
Placeh{
key: String,
/// None => matches one token
/// Some((prio, nonzero)) =>
/// prio is the sizing priority for the vectorial (higher prio grows first)
/// nonzero is whether the vectorial matches 1..n or 0..n tokens
vec: Option<(usize, bool)>
},
}
impl Clause {
pub fn body(&self) -> Option<Mrc<[Expr]>> {
match self {
Self::Auto(_, _, body) |
Self::Lambda(_, _, body) |
Self::S(_, body) => Some(Mrc::clone(body)),
_ => None
}
pub fn body(&self) -> Option<Mrc<[Expr]>> {
match self {
Self::Auto(_, _, body) |
Self::Lambda(_, _, body) |
Self::S(_, body) => Some(Mrc::clone(body)),
_ => None
}
pub fn typ(&self) -> Option<Mrc<[Expr]>> {
match self {
Self::Auto(_, typ, _) | Self::Lambda(_, typ, _) => Some(Mrc::clone(typ)),
_ => None
}
}
pub fn into_expr(self) -> Expr {
if let Self::S('(', body) = &self {
if body.len() == 1 { body[0].clone() }
else { Expr(self, mrc_empty_slice()) }
} else { Expr(self, mrc_empty_slice()) }
}
pub fn from_exprv(exprv: Mrc<[Expr]>) -> Option<Clause> {
if exprv.len() == 0 { None }
else if exprv.len() == 1 { Some(exprv[0].clone().into_clause()) }
else { Some(Self::S('(', exprv)) }
}
pub fn typ(&self) -> Option<Mrc<[Expr]>> {
match self {
Self::Auto(_, typ, _) | Self::Lambda(_, typ, _) => Some(Mrc::clone(typ)),
_ => None
}
}
pub fn into_expr(self) -> Expr {
if let Self::S('(', body) = &self {
if body.len() == 1 { body[0].clone() }
else { Expr(self, mrc_empty_slice()) }
} else { Expr(self, mrc_empty_slice()) }
}
pub fn from_exprv(exprv: Mrc<[Expr]>) -> Option<Clause> {
if exprv.len() == 0 { None }
else if exprv.len() == 1 { Some(exprv[0].clone().into_clause()) }
else { Some(Self::S('(', exprv)) }
}
}
impl Clone for Clause {
fn clone(&self) -> Self {
match self {
Self::S(c, b) => Self::S(*c, Mrc::clone(b)),
Self::Auto(n, t, b) => Self::Auto(
n.clone(), Mrc::clone(t), Mrc::clone(b)
),
Self::Name { local: l, qualified: q } => Self::Name {
local: l.clone(), qualified: Mrc::clone(q)
},
Self::Lambda(n, t, b) => Self::Lambda(
n.clone(), Mrc::clone(t), Mrc::clone(b)
),
Self::Placeh{key, vec} => Self::Placeh{key: key.clone(), vec: *vec},
Self::Literal(l) => Self::Literal(l.clone()),
Self::ExternFn(nc) => Self::ExternFn(nc.clone()),
Self::Atom(a) => Self::Atom(a.clone()),
Self::Explicit(expr) => Self::Explicit(Mrc::clone(expr))
}
fn clone(&self) -> Self {
match self {
Self::S(c, b) => Self::S(*c, Mrc::clone(b)),
Self::Auto(n, t, b) => Self::Auto(
n.clone(), Mrc::clone(t), Mrc::clone(b)
),
Self::Name { local: l, qualified: q } => Self::Name {
local: l.clone(), qualified: Mrc::clone(q)
},
Self::Lambda(n, t, b) => Self::Lambda(
n.clone(), Mrc::clone(t), Mrc::clone(b)
),
Self::Placeh{key, vec} => Self::Placeh{key: key.clone(), vec: *vec},
Self::Literal(l) => Self::Literal(l.clone()),
Self::ExternFn(nc) => Self::ExternFn(nc.clone()),
Self::Atom(a) => Self::Atom(a.clone()),
Self::Explicit(expr) => Self::Explicit(Mrc::clone(expr))
}
}
}
fn fmt_expr_seq(it: &mut dyn Iterator<Item = &Expr>, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
for item in Itertools::intersperse(it.map(Some), None) { match item {
Some(expr) => write!(f, "{:?}", expr),
None => f.write_str(" "),
}? }
Ok(())
for item in Itertools::intersperse(it.map(Some), None) { match item {
Some(expr) => write!(f, "{:?}", expr),
None => f.write_str(" "),
}? }
Ok(())
}
impl Debug for Clause {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Literal(arg0) => write!(f, "{:?}", arg0),
Self::Name{local, qualified} =>
if let Some(local) = local {write!(f, "{}`{}`", qualified.join("::"), local)}
else {write!(f, "{}", qualified.join("::"))},
Self::S(del, items) => {
f.write_str(&del.to_string())?;
fmt_expr_seq(&mut items.iter(), f)?;
f.write_str(match del {
'(' => ")", '[' => "]", '{' => "}",
_ => "CLOSING_DELIM"
})
},
Self::Lambda(name, argtyp, body) => {
f.write_str("\\")?;
f.write_str(name)?;
f.write_str(":")?; fmt_expr_seq(&mut argtyp.iter(), f)?; f.write_str(".")?;
fmt_expr_seq(&mut body.iter(), f)
},
Self::Auto(name, argtyp, body) => {
f.write_str("@")?;
f.write_str(&name.clone().unwrap_or_default())?;
f.write_str(":")?; fmt_expr_seq(&mut argtyp.iter(), f)?; f.write_str(".")?;
fmt_expr_seq(&mut body.iter(), f)
},
Self::Placeh{key, vec: None} => write!(f, "${key}"),
Self::Placeh{key, vec: Some((prio, true))} => write!(f, "...${key}:{prio}"),
Self::Placeh{key, vec: Some((prio, false))} => write!(f, "..${key}:{prio}"),
Self::ExternFn(nc) => write!(f, "{nc:?}"),
Self::Atom(a) => write!(f, "{a:?}"),
Self::Explicit(expr) => write!(f, "@{:?}", expr.as_ref())
}
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Literal(arg0) => write!(f, "{:?}", arg0),
Self::Name{local, qualified} =>
if let Some(local) = local {write!(f, "{}`{}`", qualified.join("::"), local)}
else {write!(f, "{}", qualified.join("::"))},
Self::S(del, items) => {
f.write_str(&del.to_string())?;
fmt_expr_seq(&mut items.iter(), f)?;
f.write_str(match del {
'(' => ")", '[' => "]", '{' => "}",
_ => "CLOSING_DELIM"
})
},
Self::Lambda(name, argtyp, body) => {
f.write_str("\\")?;
f.write_str(name)?;
f.write_str(":")?; fmt_expr_seq(&mut argtyp.iter(), f)?; f.write_str(".")?;
fmt_expr_seq(&mut body.iter(), f)
},
Self::Auto(name, argtyp, body) => {
f.write_str("@")?;
f.write_str(&name.clone().unwrap_or_default())?;
f.write_str(":")?; fmt_expr_seq(&mut argtyp.iter(), f)?; f.write_str(".")?;
fmt_expr_seq(&mut body.iter(), f)
},
Self::Placeh{key, vec: None} => write!(f, "${key}"),
Self::Placeh{key, vec: Some((prio, true))} => write!(f, "...${key}:{prio}"),
Self::Placeh{key, vec: Some((prio, false))} => write!(f, "..${key}:{prio}"),
Self::ExternFn(nc) => write!(f, "{nc:?}"),
Self::Atom(a) => write!(f, "{a:?}"),
Self::Explicit(expr) => write!(f, "@{:?}", expr.as_ref())
}
}
}
/// A substitution rule as read from the source
#[derive(PartialEq, Eq, Hash)]
pub struct Rule {
pub source: Mrc<[Expr]>,
pub prio: NotNan<f64>,
pub target: Mrc<[Expr]>
pub source: Mrc<[Expr]>,
pub prio: NotNan<f64>,
pub target: Mrc<[Expr]>
}
impl Clone for Rule {
fn clone(&self) -> Self {
Self {
source: Mrc::clone(&self.source),
prio: self.prio,
target: Mrc::clone(&self.target)
}
fn clone(&self) -> Self {
Self {
source: Mrc::clone(&self.source),
prio: self.prio,
target: Mrc::clone(&self.target)
}
}
}
impl Debug for Rule {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?} ={}=> {:?}", self.source, self.prio, self.target)
}
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?} ={}=> {:?}", self.source, self.prio, self.target)
}
}
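A tiny sketch of what the Debug impls above print; one_mrc_slice and mrc_empty_slice are the crate helpers already imported at the top of this file:

fn demo_debug_format() {
  // A lambda with no argument type annotation whose body is the bare name `x`.
  let body = one_mrc_slice(Expr(
    Clause::Name { local: None, qualified: one_mrc_slice("x".to_string()) },
    mrc_empty_slice()
  ));
  let lambda = Clause::Lambda("x".to_string(), mrc_empty_slice(), body);
  assert_eq!(format!("{:?}", lambda), r"\x:.x");
}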
@@ -1,178 +1,171 @@
use mappable_rc::Mrc;
use crate::utils::{Stackframe, to_mrc_slice, mrc_empty_slice, ProtoMap};
use crate::utils::{Stackframe, to_mrc_slice, mrc_empty_slice, ProtoMap, one_mrc_slice};
use super::{ast, typed, get_name::get_name};
#[derive(Clone)]
pub enum Error {
/// `()` as a clause is meaningless in lambda calculus
EmptyS,
/// Only `(...)` may be converted to typed lambdas. `[...]` and `{...}` left in the code are
/// signs of incomplete macro execution
BadGroup(char),
/// `foo:bar:baz` will be parsed as `(foo:bar):baz`, explicitly specifying `foo:(bar:baz)`
/// is forbidden and it's also meaningless since `baz` can only ever be the kind of types
ExplicitBottomKind,
/// Name never bound in an enclosing scope - indicates incomplete macro substitution
Unbound(String),
/// Namespaced names can never occur in the code, these are signs of incomplete macro execution
Symbol,
/// Placeholders shouldn't even occur in the code during macro execution. Something is clearly
/// terribly wrong
Placeholder,
/// It's possible to try and transform the clause `(foo:bar)` into a typed clause,
/// however the correct value of this ast clause is a typed expression (included in the error)
///
/// [expr] handles this case, so it's only really possible to get this
/// error if you're calling [clause] directly
ExprToClause(typed::Expr),
/// @ tokens only ever occur between a function and a parameter
NonInfixAt
/// `()` as a clause is meaningless in lambda calculus
EmptyS,
/// Only `(...)` may be converted to typed lambdas. `[...]` and `{...}` left in the code are
/// signs of incomplete macro execution
BadGroup(char),
/// `foo:bar:baz` will be parsed as `(foo:bar):baz`, explicitly specifying `foo:(bar:baz)`
/// is forbidden and it's also meaningless since `baz` can only ever be the kind of types
ExplicitBottomKind,
/// Name never bound in an enclosing scope - indicates incomplete macro substitution
Unbound(String),
/// Namespaced names can never occur in the code, these are signs of incomplete macro execution
Symbol,
/// Placeholders shouldn't even occur in the code during macro execution. Something is clearly
/// terribly wrong
Placeholder,
/// It's possible to try and transform the clause `(foo:bar)` into a typed clause,
/// however the correct value of this ast clause is a typed expression (included in the error)
///
/// [expr] handles this case, so it's only really possible to get this
/// error if you're calling [clause] directly
ExprToClause(typed::Expr),
/// @ tokens only ever occur between a function and a parameter
NonInfixAt
}
/// Try to convert an expression from AST format to typed lambda
pub fn expr(expr: &ast::Expr) -> Result<typed::Expr, Error> {
Ok(expr_rec(expr, ProtoMap::new(), None)?.0)
Ok(expr_rec(expr, ProtoMap::new(), None)?.0)
}
/// Try and convert a single clause from AST format to typed lambda
pub fn clause(clause: &ast::Clause) -> Result<typed::Clause, Error> {
Ok(clause_rec(clause, ProtoMap::new(), None)?.0)
Ok(clause_rec(clause, ProtoMap::new(), None)?.0)
}
/// Try and convert a sequence of expressions from AST format to typed lambda
pub fn exprv(exprv: &[ast::Expr]) -> Result<typed::Expr, Error> {
Ok(exprv_rec(exprv, ProtoMap::new(), None)?.0)
Ok(exprv_rec(exprv, ProtoMap::new(), None)?.0)
}
const NAMES_INLINE_COUNT:usize = 3;
/// Recursive state of [exprv]
fn exprv_rec<'a>(
v: &'a [ast::Expr],
names: ProtoMap<&'a str, u64, NAMES_INLINE_COUNT>,
explicits: Option<&Stackframe<Mrc<typed::Expr>>>,
fn exprv_rec(
v: &[ast::Expr],
names: ProtoMap<&str, (u64, bool), NAMES_INLINE_COUNT>,
explicits: Option<&Stackframe<Mrc<typed::Expr>>>,
) -> Result<(typed::Expr, usize), Error> {
let (last, rest) = v.split_last().ok_or(Error::EmptyS)?;
if rest.len() == 0 {return expr_rec(&v[0], names, explicits)}
if let ast::Expr(ast::Clause::Explicit(inner), empty_slice) = last {
assert!(empty_slice.len() == 0,
"It is assumed that Explicit nodes can never have type annotations as the \
wrapped expression node matches all trailing colons."
);
let (x, _) = expr_rec(inner.as_ref(), names.clone(), None)?;
let new_explicits = Stackframe::opush(explicits, Mrc::new(x));
let (body, used_expls) = exprv_rec(rest, names, Some(&new_explicits))?;
Ok((body, used_expls.saturating_sub(1)))
} else {
let (f, f_used_expls) = exprv_rec(rest, names.clone(), explicits)?;
let x_explicits = Stackframe::opop(explicits, f_used_expls);
let (x, x_used_expls) = expr_rec(last, names, x_explicits)?;
Ok((typed::Expr(
typed::Clause::Apply(Mrc::new(f), Mrc::new(x)),
mrc_empty_slice()
), x_used_expls + f_used_expls))
}
let (last, rest) = v.split_last().ok_or(Error::EmptyS)?;
if rest.len() == 0 {return expr_rec(&v[0], names, explicits)}
if let ast::Expr(ast::Clause::Explicit(inner), empty_slice) = last {
assert!(empty_slice.len() == 0,
"It is assumed that Explicit nodes can never have type annotations as the \
wrapped expression node matches all trailing colons."
);
let (x, _) = expr_rec(inner.as_ref(), names, None)?;
let new_explicits = Some(&Stackframe::opush(explicits, Mrc::new(x)));
let (body, used_expls) = exprv_rec(rest, names, new_explicits)?;
Ok((body, used_expls.saturating_sub(1)))
} else {
let (f, f_used_expls) = exprv_rec(rest, names, explicits)?;
let x_explicits = Stackframe::opop(explicits, f_used_expls);
let (x, x_used_expls) = expr_rec(last, names, x_explicits)?;
Ok((typed::Expr(
typed::Clause::Apply(Mrc::new(f), Mrc::new(x)),
mrc_empty_slice()
), x_used_expls + f_used_expls))
}
}
/// Recursive state of [expr]
fn expr_rec<'a>(
ast::Expr(val, typ): &'a ast::Expr,
names: ProtoMap<&'a str, u64, NAMES_INLINE_COUNT>,
explicits: Option<&Stackframe<Mrc<typed::Expr>>> // known explicit values
fn expr_rec(
ast::Expr(val, typ): &ast::Expr,
names: ProtoMap<&str, (u64, bool), NAMES_INLINE_COUNT>,
explicits: Option<&Stackframe<Mrc<typed::Expr>>> // known explicit values
) -> Result<(typed::Expr, usize), Error> { // (output, used_explicits)
let typ: Vec<typed::Clause> = typ.iter()
.map(|c| Ok(clause_rec(c, names.clone(), None)?.0))
.collect::<Result<_, _>>()?;
if let ast::Clause::S(paren, body) = val {
if *paren != '(' {return Err(Error::BadGroup(*paren))}
let (typed::Expr(inner, inner_t), used_expls) = exprv_rec(
body.as_ref(), names, explicits
)?;
let new_t = if typ.len() == 0 { inner_t } else {
to_mrc_slice(if inner_t.len() == 0 { typ } else {
inner_t.iter().chain(typ.iter()).cloned().collect()
})
};
Ok((typed::Expr(inner, new_t), used_expls))
} else {
let (cls, used_expls) = clause_rec(&val, names, explicits)?;
Ok((typed::Expr(cls, to_mrc_slice(typ)), used_expls))
}
let typ: Vec<typed::Clause> = typ.iter()
.map(|c| Ok(clause_rec(c, names, None)?.0))
.collect::<Result<_, _>>()?;
if let ast::Clause::S(paren, body) = val {
if *paren != '(' {return Err(Error::BadGroup(*paren))}
let (typed::Expr(inner, inner_t), used_expls) = exprv_rec(body.as_ref(), names, explicits)?;
let new_t = if typ.len() == 0 { inner_t } else {
to_mrc_slice(if inner_t.len() == 0 { typ } else {
inner_t.iter().chain(typ.iter()).cloned().collect()
})
};
Ok((typed::Expr(inner, new_t), used_expls))
} else {
let (cls, used_expls) = clause_rec(&val, names, explicits)?;
Ok((typed::Expr(cls, to_mrc_slice(typ)), used_expls))
}
}
/// Recursive state of [clause]
fn clause_rec<'a>(
cls: &'a ast::Clause,
mut names: ProtoMap<&'a str, u64, NAMES_INLINE_COUNT>,
mut explicits: Option<&Stackframe<Mrc<typed::Expr>>>
fn clause_rec(
cls: &ast::Clause,
names: ProtoMap<&str, (u64, bool), NAMES_INLINE_COUNT>,
mut explicits: Option<&Stackframe<Mrc<typed::Expr>>>
) -> Result<(typed::Clause, usize), Error> {
match cls { // (\t:(@T. Pair T T). t \left.\right. left) @number -- this will fail
ast::Clause::ExternFn(e) => Ok((typed::Clause::ExternFn(e.clone()), 0)),
ast::Clause::Atom(a) => Ok((typed::Clause::Atom(a.clone()), 0)),
ast::Clause::Auto(no, t, b) => {
// Allocate id
let id = get_name();
// Pop an explicit if available
let (value, rest_explicits) = explicits.map(
|Stackframe{ prev, item, .. }| {
(Some(item), *prev)
}
).unwrap_or_default();
explicits = rest_explicits;
// Convert the type
let typ = if t.len() == 0 {None} else {
let (typed::Expr(c, t), _) = exprv_rec(
t.as_ref(), names.clone(), None
)?;
if t.len() > 0 {return Err(Error::ExplicitBottomKind)}
else {Some(Mrc::new(c))}
};
// Traverse body with extended context
if let Some(name) = no {names.set(&&**name, id)}
let (body, used_expls) = exprv_rec(
b.as_ref(), names, explicits
)?;
// Produce a binding instead of an auto if explicit was available
if let Some(known_value) = value {
Ok((typed::Clause::Apply(
typed::Clause::Lambda(id, typ, Mrc::new(body)).wrap(),
Mrc::clone(known_value)
), used_expls + 1))
} else {
Ok((typed::Clause::Auto(id, typ, Mrc::new(body)), 0))
}
match cls { // (\t:(@T. Pair T T). t \left.\right. left) @number -- this will fail
ast::Clause::ExternFn(e) => Ok((typed::Clause::ExternFn(e.clone()), 0)),
ast::Clause::Atom(a) => Ok((typed::Clause::Atom(a.clone()), 0)),
ast::Clause::Auto(no, t, b) => {
// Allocate id
let id = get_name();
// Pop an explicit if available
let (value, rest_explicits) = explicits.map(
|Stackframe{ prev, item, .. }| {
(Some(item), *prev)
}
ast::Clause::Lambda(n, t, b) => {
// Allocate id
let id = get_name();
// Convert the type
let typ = if t.len() == 0 {None} else {
let (typed::Expr(c, t), _) = exprv_rec(t.as_ref(), names.clone(), None)?;
if t.len() > 0 {return Err(Error::ExplicitBottomKind)}
else {Some(Mrc::new(c))}
};
names.set(&&**n, id);
let (body, used_expls) = exprv_rec(b.as_ref(), names, explicits)?;
Ok((typed::Clause::Lambda(id, typ, Mrc::new(body)), used_expls))
}
ast::Clause::Literal(l) => Ok((typed::Clause::Literal(l.clone()), 0)),
ast::Clause::Name { local: Some(arg), .. } => {
let uid = names.get(&&**arg)
.ok_or_else(|| Error::Unbound(arg.clone()))?;
Ok((typed::Clause::Argument(*uid), 0))
}
ast::Clause::S(paren, entries) => {
if *paren != '(' {return Err(Error::BadGroup(*paren))}
let (typed::Expr(val, typ), used_expls) = exprv_rec(
entries.as_ref(), names, explicits
)?;
if typ.len() == 0 {Ok((val, used_expls))}
else {Err(Error::ExprToClause(typed::Expr(val, typ)))}
},
ast::Clause::Name { local: None, .. } => Err(Error::Symbol),
ast::Clause::Placeh { .. } => Err(Error::Placeholder),
ast::Clause::Explicit(..) => Err(Error::NonInfixAt)
).unwrap_or_default();
explicits = rest_explicits;
// Convert the type
let typ = if t.len() == 0 {mrc_empty_slice()} else {
let (typed::Expr(c, t), _) = exprv_rec(t.as_ref(), names, None)?;
if t.len() > 0 {return Err(Error::ExplicitBottomKind)}
else {one_mrc_slice(c)}
};
// Traverse body with extended context
if let Some(name) = no {names.set(&&**name, (id, true))}
let (body, used_expls) = exprv_rec(b.as_ref(), names, explicits)?;
// Produce a binding instead of an auto if explicit was available
if let Some(known_value) = value {
Ok((typed::Clause::Apply(
typed::Clause::Lambda(id, typ, Mrc::new(body)).wrap(),
Mrc::clone(known_value)
), used_expls + 1))
} else {
Ok((typed::Clause::Auto(id, typ, Mrc::new(body)), 0))
}
}
ast::Clause::Lambda(n, t, b) => {
// Allocate id
let id = get_name();
// Convert the type
let typ = if t.len() == 0 {mrc_empty_slice()} else {
let (typed::Expr(c, t), _) = exprv_rec(t.as_ref(), names, None)?;
if t.len() > 0 {return Err(Error::ExplicitBottomKind)}
else {one_mrc_slice(c)}
};
names.set(&&**n, (id, false));
let (body, used_expls) = exprv_rec(b.as_ref(), names, explicits)?;
Ok((typed::Clause::Lambda(id, typ, Mrc::new(body)), used_expls))
}
ast::Clause::Literal(l) => Ok((typed::Clause::Literal(l.clone()), 0)),
ast::Clause::Name { local: Some(arg), .. } => {
let (uid, is_auto) = names.get(&&**arg)
.ok_or_else(|| Error::Unbound(arg.clone()))?;
let label = if *is_auto {typed::Clause::AutoArg} else {typed::Clause::LambdaArg};
Ok((label(*uid), 0))
}
ast::Clause::S(paren, entries) => {
if *paren != '(' {return Err(Error::BadGroup(*paren))}
let (typed::Expr(val, typ), used_expls) = exprv_rec(entries.as_ref(), names, explicits)?;
if typ.len() == 0 {Ok((val, used_expls))}
else {Err(Error::ExprToClause(typed::Expr(val, typ)))}
},
ast::Clause::Name { local: None, .. } => Err(Error::Symbol),
ast::Clause::Placeh { .. } => Err(Error::Placeholder),
ast::Clause::Explicit(..) => Err(Error::NonInfixAt)
}
}
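
Stripped of the type and explicit-argument handling, the conversion above is essentially a name-to-id numbering pass: every binder allocates a fresh `u64`, bound names resolve to the id of the nearest enclosing binder, and anything unbound is an error. The sketch below restates just that idea with throwaway `Ast`/`Typed` types and a `convert` helper that are not part of the crate; the real `clause_rec` additionally threads type annotations, the `(id, is_auto)` flag and the explicit stack.

```rust
use std::collections::HashMap;

// Throwaway mini-AST; the crate's ast::Expr / typed::Expr carry much more (types, autos, literals).
#[derive(Debug)]
enum Ast {
    Name(String),
    Lambda(String, Box<Ast>),
    Apply(Box<Ast>, Box<Ast>),
}

#[derive(Debug)]
enum Typed {
    Arg(u64),
    Lambda(u64, Box<Typed>),
    Apply(Box<Typed>, Box<Typed>),
}

// Every binder allocates a fresh id, names resolve through the environment,
// and an unbound name is an error (the analogue of Error::Unbound above).
fn convert(ast: &Ast, names: &HashMap<String, u64>, next_id: &mut u64) -> Result<Typed, String> {
    match ast {
        Ast::Name(n) => names.get(n).map(|id| Typed::Arg(*id))
            .ok_or_else(|| format!("unbound name {n}")),
        Ast::Lambda(n, body) => {
            let id = *next_id;
            *next_id += 1;
            let mut inner = names.clone();
            inner.insert(n.clone(), id); // shadowing simply overwrites the outer binding
            Ok(Typed::Lambda(id, Box::new(convert(body, &inner, next_id)?)))
        }
        Ast::Apply(f, x) => Ok(Typed::Apply(
            Box::new(convert(f, names, next_id)?),
            Box::new(convert(x, names, next_id)?),
        )),
    }
}

fn main() {
    // \x. \y. x y  ==>  Lambda(0, Lambda(1, Apply(Arg(0), Arg(1))))
    let body = Ast::Apply(
        Box::new(Ast::Name("x".into())),
        Box::new(Ast::Name("y".into())),
    );
    let ast = Ast::Lambda("x".into(), Box::new(Ast::Lambda("y".into(), Box::new(body))));
    println!("{:?}", convert(&ast, &HashMap::new(), &mut 0).unwrap());
    assert!(convert(&Ast::Name("z".into()), &HashMap::new(), &mut 0).is_err());
}
```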


@@ -4,19 +4,19 @@ use std::fmt::Debug;
/// An exact value, read from the AST and unmodified in shape until compilation
#[derive(Clone, PartialEq, Eq, Hash)]
pub enum Literal {
Num(NotNan<f64>),
Int(u64),
Char(char),
Str(String),
Num(NotNan<f64>),
Int(u64),
Char(char),
Str(String),
}
impl Debug for Literal {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Num(arg0) => write!(f, "{:?}", arg0),
Self::Int(arg0) => write!(f, "{:?}", arg0),
Self::Char(arg0) => write!(f, "{:?}", arg0),
Self::Str(arg0) => write!(f, "{:?}", arg0),
}
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Num(arg0) => write!(f, "{:?}", arg0),
Self::Int(arg0) => write!(f, "{:?}", arg0),
Self::Char(arg0) => write!(f, "{:?}", arg0),
Self::Str(arg0) => write!(f, "{:?}", arg0),
}
}
}


@@ -1,7 +1,7 @@
use mappable_rc::Mrc;
use crate::executor::Atom;
use crate::foreign::{Atom, ExternFn};
use crate::utils::{to_mrc_slice, one_mrc_slice};
use crate::{executor::ExternFn, utils::string_from_charset};
use crate::utils::string_from_charset;
use super::{Literal, ast_to_typed};
use super::ast;
@@ -16,121 +16,124 @@ struct Wrap(bool, bool);
#[derive(PartialEq, Eq, Hash)]
pub struct Expr(pub Clause, pub Mrc<[Clause]>);
impl Expr {
fn deep_fmt(&self, f: &mut std::fmt::Formatter<'_>, tr: Wrap) -> std::fmt::Result {
let Expr(val, typ) = self;
if typ.len() > 0 {
val.deep_fmt(f, Wrap(true, true))?;
for typ in typ.as_ref() {
f.write_char(':')?;
typ.deep_fmt(f, Wrap(true, true))?;
}
} else {
val.deep_fmt(f, tr)?;
}
Ok(())
fn deep_fmt(&self, f: &mut std::fmt::Formatter<'_>, tr: Wrap) -> std::fmt::Result {
let Expr(val, typ) = self;
if typ.len() > 0 {
val.deep_fmt(f, Wrap(true, true))?;
for typ in typ.as_ref() {
f.write_char(':')?;
typ.deep_fmt(f, Wrap(true, true))?;
}
} else {
val.deep_fmt(f, tr)?;
}
Ok(())
}
}
impl Clone for Expr {
fn clone(&self) -> Self {
Self(self.0.clone(), Mrc::clone(&self.1))
}
fn clone(&self) -> Self {
Self(self.0.clone(), Mrc::clone(&self.1))
}
}
impl Debug for Expr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.deep_fmt(f, Wrap(false, false))
}
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.deep_fmt(f, Wrap(false, false))
}
}
#[derive(PartialEq, Eq, Hash)]
pub enum Clause {
Literal(Literal),
Apply(Mrc<Expr>, Mrc<Expr>),
Lambda(u64, Option<Mrc<Clause>>, Mrc<Expr>),
Auto(u64, Option<Mrc<Clause>>, Mrc<Expr>),
Argument(u64),
ExternFn(ExternFn),
Atom(Atom)
Literal(Literal),
Apply(Mrc<Expr>, Mrc<Expr>),
Lambda(u64, Mrc<[Clause]>, Mrc<Expr>),
Auto(u64, Mrc<[Clause]>, Mrc<Expr>),
LambdaArg(u64), AutoArg(u64),
ExternFn(ExternFn),
Atom(Atom)
}
const ARGNAME_CHARSET: &str = "abcdefghijklmnopqrstuvwxyz";
fn parametric_fmt(
f: &mut std::fmt::Formatter<'_>,
prefix: &str, argtyp: Option<Mrc<Clause>>, body: Mrc<Expr>, uid: u64, wrap_right: bool
f: &mut std::fmt::Formatter<'_>,
prefix: &str, argtyp: Mrc<[Clause]>, body: Mrc<Expr>, uid: u64, wrap_right: bool
) -> std::fmt::Result {
if wrap_right { f.write_char('(')?; }
f.write_str(prefix)?;
f.write_str(&string_from_charset(uid, ARGNAME_CHARSET))?;
if let Some(typ) = argtyp {
f.write_str(":")?;
typ.deep_fmt(f, Wrap(false, false))?;
}
f.write_str(".")?;
body.deep_fmt(f, Wrap(false, false))?;
if wrap_right { f.write_char(')')?; }
Ok(())
if wrap_right { f.write_char('(')?; }
f.write_str(prefix)?;
f.write_str(&string_from_charset(uid, ARGNAME_CHARSET))?;
for typ in argtyp.iter() {
f.write_str(":")?;
typ.deep_fmt(f, Wrap(false, false))?;
}
f.write_str(".")?;
body.deep_fmt(f, Wrap(false, false))?;
if wrap_right { f.write_char(')')?; }
Ok(())
}
impl Clause {
fn deep_fmt(&self, f: &mut std::fmt::Formatter<'_>, Wrap(wl, wr): Wrap)
-> std::fmt::Result {
match self {
Self::Literal(arg0) => write!(f, "{arg0:?}"),
Self::ExternFn(nc) => write!(f, "{nc:?}"),
Self::Atom(a) => write!(f, "{a:?}"),
Self::Lambda(uid, argtyp, body) => parametric_fmt(f,
"\\", argtyp.as_ref().map(Mrc::clone), Mrc::clone(body), *uid, wr
),
Self::Auto(uid, argtyp, body) => parametric_fmt(f,
"@", argtyp.as_ref().map(Mrc::clone), Mrc::clone(body), *uid, wr
),
Self::Argument(uid) => f.write_str(&string_from_charset(*uid, ARGNAME_CHARSET)),
Self::Apply(func, x) => {
if wl { f.write_char('(')?; }
func.deep_fmt(f, Wrap(false, true) )?;
f.write_char(' ')?;
x.deep_fmt(f, Wrap(true, wr && !wl) )?;
if wl { f.write_char(')')?; }
Ok(())
}
}
fn deep_fmt(&self, f: &mut std::fmt::Formatter<'_>, Wrap(wl, wr): Wrap)
-> std::fmt::Result {
match self {
Self::Literal(arg0) => write!(f, "{arg0:?}"),
Self::ExternFn(nc) => write!(f, "{nc:?}"),
Self::Atom(a) => write!(f, "{a:?}"),
Self::Lambda(uid, argtyp, body) => parametric_fmt(f,
"\\", Mrc::clone(argtyp), Mrc::clone(body), *uid, wr
),
Self::Auto(uid, argtyp, body) => parametric_fmt(f,
"@", Mrc::clone(argtyp), Mrc::clone(body), *uid, wr
),
Self::LambdaArg(uid) | Self::AutoArg(uid) => f.write_str(&
string_from_charset(*uid, ARGNAME_CHARSET)
),
Self::Apply(func, x) => {
if wl { f.write_char('(')?; }
func.deep_fmt(f, Wrap(false, true) )?;
f.write_char(' ')?;
x.deep_fmt(f, Wrap(true, wr && !wl) )?;
if wl { f.write_char(')')?; }
Ok(())
}
}
pub fn wrap(self) -> Mrc<Expr> { Mrc::new(Expr(self, to_mrc_slice(vec![]))) }
pub fn wrap_t(self, t: Clause) -> Mrc<Expr> { Mrc::new(Expr(self, one_mrc_slice(t))) }
}
pub fn wrap(self) -> Mrc<Expr> { Mrc::new(Expr(self, to_mrc_slice(vec![]))) }
pub fn wrap_t(self, t: Clause) -> Mrc<Expr> { Mrc::new(Expr(self, one_mrc_slice(t))) }
}
impl Clone for Clause {
fn clone(&self) -> Self {
match self {
Clause::Auto(uid,t, b) => Clause::Auto(*uid, t.as_ref().map(Mrc::clone), Mrc::clone(b)),
Clause::Lambda(uid, t, b) => Clause::Lambda(*uid, t.as_ref().map(Mrc::clone), Mrc::clone(b)),
Clause::Literal(l) => Clause::Literal(l.clone()),
Clause::ExternFn(nc) => Clause::ExternFn(nc.clone()),
Clause::Atom(a) => Clause::Atom(a.clone()),
Clause::Apply(f, x) => Clause::Apply(Mrc::clone(f), Mrc::clone(x)),
Clause::Argument(lvl) => Clause::Argument(*lvl)
}
fn clone(&self) -> Self {
match self {
Clause::Auto(uid,t, b) => Clause::Auto(*uid, Mrc::clone(t), Mrc::clone(b)),
Clause::Lambda(uid, t, b) => Clause::Lambda(*uid, Mrc::clone(t), Mrc::clone(b)),
Clause::Literal(l) => Clause::Literal(l.clone()),
Clause::ExternFn(nc) => Clause::ExternFn(nc.clone()),
Clause::Atom(a) => Clause::Atom(a.clone()),
Clause::Apply(f, x) => Clause::Apply(Mrc::clone(f), Mrc::clone(x)),
Clause::LambdaArg(id) => Clause::LambdaArg(*id),
Clause::AutoArg(id) => Clause::AutoArg(*id)
}
}
}
impl Debug for Clause {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.deep_fmt(f, Wrap(false, false))
}
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.deep_fmt(f, Wrap(false, false))
}
}
impl TryFrom<&ast::Expr> for Expr {
type Error = ast_to_typed::Error;
fn try_from(value: &ast::Expr) -> Result<Self, Self::Error> {
ast_to_typed::expr(value)
}
type Error = ast_to_typed::Error;
fn try_from(value: &ast::Expr) -> Result<Self, Self::Error> {
ast_to_typed::expr(value)
}
}
impl TryFrom<&ast::Clause> for Clause {
type Error = ast_to_typed::Error;
fn try_from(value: &ast::Clause) -> Result<Self, Self::Error> {
ast_to_typed::clause(value)
}
type Error = ast_to_typed::Error;
fn try_from(value: &ast::Clause) -> Result<Self, Self::Error> {
ast_to_typed::clause(value)
}
}
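
The `Debug` impls above render argument uids as short letter names via `string_from_charset` and the lowercase charset. The crate's exact encoding isn't visible in this diff, so the standalone `argname` helper below assumes a bijective base-26 scheme; it is only meant to show why distinct uids print as distinct, compact names.

```rust
const ARGNAME_CHARSET: &[u8] = b"abcdefghijklmnopqrstuvwxyz";

// Bijective base-26: 0 => "a", 25 => "z", 26 => "aa", 27 => "ab", ...
fn argname(mut uid: u64) -> String {
    let mut out = Vec::new();
    loop {
        out.push(ARGNAME_CHARSET[(uid % 26) as usize]);
        uid /= 26;
        if uid == 0 { break }
        uid -= 1; // bijective step: "z" is followed by "aa", not "ba"
    }
    out.reverse();
    String::from_utf8(out).unwrap()
}

fn main() {
    assert_eq!(argname(0), "a");
    assert_eq!(argname(25), "z");
    assert_eq!(argname(26), "aa");
    // A Lambda(0, ..) whose body applies Arg(0) to Arg(1) would print roughly as "\a. a b".
    println!("\\{}. {} {}", argname(0), argname(0), argname(1));
}
```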


@@ -11,55 +11,55 @@ use super::super::RuleError;
fn verify_scalar_vec(pattern: &Expr, is_vec: &mut HashMap<String, bool>)
-> Result<(), String> {
let verify_clause = |clause: &Clause, is_vec: &mut HashMap<String, bool>| -> Result<(), String> {
match clause {
Clause::Placeh{key, vec} => {
if let Some(known) = is_vec.get(key) {
if known != &vec.is_some() { return Err(key.to_string()) }
} else {
is_vec.insert(key.clone(), vec.is_some());
}
}
Clause::Auto(name, typ, body) => {
if let Some(key) = name.as_ref().and_then(|key| key.strip_prefix('$')) {
if is_vec.get(key) == Some(&true) { return Err(key.to_string()) }
}
typ.iter().try_for_each(|e| verify_scalar_vec(e, is_vec))?;
body.iter().try_for_each(|e| verify_scalar_vec(e, is_vec))?;
}
Clause::Lambda(name, typ, body) => {
if let Some(key) = name.strip_prefix('$') {
if is_vec.get(key) == Some(&true) { return Err(key.to_string()) }
}
typ.iter().try_for_each(|e| verify_scalar_vec(e, is_vec))?;
body.iter().try_for_each(|e| verify_scalar_vec(e, is_vec))?;
}
Clause::S(_, body) => {
body.iter().try_for_each(|e| verify_scalar_vec(e, is_vec))?;
}
_ => ()
};
Ok(())
let verify_clause = |clause: &Clause, is_vec: &mut HashMap<String, bool>| -> Result<(), String> {
match clause {
Clause::Placeh{key, vec} => {
if let Some(known) = is_vec.get(key) {
if known != &vec.is_some() { return Err(key.to_string()) }
} else {
is_vec.insert(key.clone(), vec.is_some());
}
}
Clause::Auto(name, typ, body) => {
if let Some(key) = name.as_ref().and_then(|key| key.strip_prefix('$')) {
if is_vec.get(key) == Some(&true) { return Err(key.to_string()) }
}
typ.iter().try_for_each(|e| verify_scalar_vec(e, is_vec))?;
body.iter().try_for_each(|e| verify_scalar_vec(e, is_vec))?;
}
Clause::Lambda(name, typ, body) => {
if let Some(key) = name.strip_prefix('$') {
if is_vec.get(key) == Some(&true) { return Err(key.to_string()) }
}
typ.iter().try_for_each(|e| verify_scalar_vec(e, is_vec))?;
body.iter().try_for_each(|e| verify_scalar_vec(e, is_vec))?;
}
Clause::S(_, body) => {
body.iter().try_for_each(|e| verify_scalar_vec(e, is_vec))?;
}
_ => ()
};
let Expr(val, typ) = pattern;
verify_clause(val, is_vec)?;
for typ in typ.as_ref() {
verify_clause(typ, is_vec)?;
}
Ok(())
};
let Expr(val, typ) = pattern;
verify_clause(val, is_vec)?;
for typ in typ.as_ref() {
verify_clause(typ, is_vec)?;
}
Ok(())
}
fn slice_to_vec(src: &mut Mrc<[Expr]>, tgt: &mut Mrc<[Expr]>) {
let prefix_expr = Expr(Clause::Placeh{key: "::prefix".to_string(), vec: Some((0, false))}, to_mrc_slice(vec![]));
let postfix_expr = Expr(Clause::Placeh{key: "::postfix".to_string(), vec: Some((0, false))}, to_mrc_slice(vec![]));
// Prefix or postfix to match the full vector
let head_multi = matches!(src.first().expect("Src can never be empty!").0, Clause::Placeh{vec: Some(_), ..});
let tail_multi = matches!(src.last().expect("Impossible branch!").0, Clause::Placeh{vec: Some(_), ..});
let prefix_vec = if head_multi {vec![]} else {vec![prefix_expr]};
let postfix_vec = if tail_multi {vec![]} else {vec![postfix_expr]};
*src = to_mrc_slice(prefix_vec.iter().chain(src.iter()).chain(postfix_vec.iter()).cloned().collect());
*tgt = to_mrc_slice(prefix_vec.iter().chain(tgt.iter()).chain(postfix_vec.iter()).cloned().collect());
let prefix_expr = Expr(Clause::Placeh{key: "::prefix".to_string(), vec: Some((0, false))}, to_mrc_slice(vec![]));
let postfix_expr = Expr(Clause::Placeh{key: "::postfix".to_string(), vec: Some((0, false))}, to_mrc_slice(vec![]));
// Prefix or postfix to match the full vector
let head_multi = matches!(src.first().expect("Src can never be empty!").0, Clause::Placeh{vec: Some(_), ..});
let tail_multi = matches!(src.last().expect("Impossible branch!").0, Clause::Placeh{vec: Some(_), ..});
let prefix_vec = if head_multi {vec![]} else {vec![prefix_expr]};
let postfix_vec = if tail_multi {vec![]} else {vec![postfix_expr]};
*src = to_mrc_slice(prefix_vec.iter().chain(src.iter()).chain(postfix_vec.iter()).cloned().collect());
*tgt = to_mrc_slice(prefix_vec.iter().chain(tgt.iter()).chain(postfix_vec.iter()).cloned().collect());
}
/// Traverse the tree, calling pred on every sibling list until it returns some vec
@@ -67,117 +67,117 @@ fn slice_to_vec(src: &mut Mrc<[Expr]>, tgt: &mut Mrc<[Expr]>) {
/// Returns None if pred never returned Some
fn update_first_seq_rec<F>(input: Mrc<[Expr]>, pred: &mut F) -> Option<Mrc<[Expr]>>
where F: FnMut(Mrc<[Expr]>) -> Option<Mrc<[Expr]>> {
if let o@Some(_) = pred(Mrc::clone(&input)) {o} else {
for Expr(cls, _) in input.iter() {
if let Some(t) = cls.typ() {
if let o@Some(_) = update_first_seq_rec(t, pred) {return o}
}
if let Some(b) = cls.body() {
if let o@Some(_) = update_first_seq_rec(b, pred) {return o}
}
}
None
if let o@Some(_) = pred(Mrc::clone(&input)) {o} else {
for Expr(cls, _) in input.iter() {
if let Some(t) = cls.typ() {
if let o@Some(_) = update_first_seq_rec(t, pred) {return o}
}
if let Some(b) = cls.body() {
if let o@Some(_) = update_first_seq_rec(b, pred) {return o}
}
}
None
}
}
/// keep re-probing the input with pred until it stops matching
fn update_all_seqs<F>(input: Mrc<[Expr]>, pred: &mut F) -> Option<Mrc<[Expr]>>
where F: FnMut(Mrc<[Expr]>) -> Option<Mrc<[Expr]>> {
let mut tmp = update_first_seq_rec(input, pred);
while let Some(xv) = tmp {
tmp = update_first_seq_rec(Mrc::clone(&xv), pred);
if tmp.is_none() {return Some(xv)}
}
None
let mut tmp = update_first_seq_rec(input, pred);
while let Some(xv) = tmp {
tmp = update_first_seq_rec(Mrc::clone(&xv), pred);
if tmp.is_none() {return Some(xv)}
}
None
}
// fn write_clause_rec(state: &State, clause: &Clause) ->
fn write_expr_rec(state: &State, Expr(tpl_clause, tpl_typ): &Expr) -> Box<dyn Iterator<Item = Expr>> {
let out_typ = tpl_typ.iter()
.flat_map(|c| write_expr_rec(state, &c.clone().into_expr()))
.map(Expr::into_clause)
.collect::<Mrc<[Clause]>>();
match tpl_clause {
Clause::Auto(name_opt, typ, body) => box_once(Expr(Clause::Auto(
name_opt.as_ref().and_then(|name| {
if let Some(state_key) = name.strip_prefix('$') {
match &state[state_key] {
Entry::NameOpt(name) => name.as_ref().map(|s| s.as_ref().to_owned()),
Entry::Name(name) => Some(name.as_ref().to_owned()),
_ => panic!("Auto template name may only be derived from Auto or Lambda name")
}
} else {
Some(name.to_owned())
}
}),
write_slice_rec(state, typ),
write_slice_rec(state, body)
), out_typ.to_owned())),
Clause::Lambda(name, typ, body) => box_once(Expr(Clause::Lambda(
if let Some(state_key) = name.strip_prefix('$') {
if let Entry::Name(name) = &state[state_key] {
name.as_ref().to_owned()
} else {panic!("Lambda template name may only be derived from Lambda name")}
} else {
name.to_owned()
},
write_slice_rec(state, typ),
write_slice_rec(state, body)
), out_typ.to_owned())),
Clause::S(c, body) => box_once(Expr(Clause::S(
*c,
write_slice_rec(state, body)
), out_typ.to_owned())),
Clause::Placeh{key, vec: None} => {
let real_key = unwrap_or!(key.strip_prefix('_'); key);
match &state[real_key] {
Entry::Scalar(x) => box_once(x.as_ref().to_owned()),
Entry::Name(n) => box_once(Expr(Clause::Name {
local: Some(n.as_ref().to_owned()),
qualified: one_mrc_slice(n.as_ref().to_owned())
}, mrc_empty_slice())),
_ => panic!("Scalar template may only be derived from scalar placeholder"),
}
},
Clause::Placeh{key, vec: Some(_)} => if let Entry::Vec(v) = &state[key] {
into_boxed_iter(v.as_ref().to_owned())
} else {panic!("Vectorial template may only be derived from vectorial placeholder")},
Clause::Explicit(param) => {
assert!(out_typ.len() == 0, "Explicit should never have a type annotation");
box_once(Clause::Explicit(Mrc::new(
Clause::from_exprv(write_expr_rec(state, param).collect())
.expect("Result shorter than template").into_expr()
)).into_expr())
},
// Explicit base case so that we get an error if Clause gets new values
c@Clause::Literal(_) | c@Clause::Name { .. } | c@Clause::ExternFn(_) | c@Clause::Atom(_) =>
box_once(Expr(c.to_owned(), out_typ.to_owned()))
}
let out_typ = tpl_typ.iter()
.flat_map(|c| write_expr_rec(state, &c.clone().into_expr()))
.map(Expr::into_clause)
.collect::<Mrc<[Clause]>>();
match tpl_clause {
Clause::Auto(name_opt, typ, body) => box_once(Expr(Clause::Auto(
name_opt.as_ref().and_then(|name| {
if let Some(state_key) = name.strip_prefix('$') {
match &state[state_key] {
Entry::NameOpt(name) => name.as_ref().map(|s| s.as_ref().to_owned()),
Entry::Name(name) => Some(name.as_ref().to_owned()),
_ => panic!("Auto template name may only be derived from Auto or Lambda name")
}
} else {
Some(name.to_owned())
}
}),
write_slice_rec(state, typ),
write_slice_rec(state, body)
), out_typ.to_owned())),
Clause::Lambda(name, typ, body) => box_once(Expr(Clause::Lambda(
if let Some(state_key) = name.strip_prefix('$') {
if let Entry::Name(name) = &state[state_key] {
name.as_ref().to_owned()
} else {panic!("Lambda template name may only be derived from Lambda name")}
} else {
name.to_owned()
},
write_slice_rec(state, typ),
write_slice_rec(state, body)
), out_typ.to_owned())),
Clause::S(c, body) => box_once(Expr(Clause::S(
*c,
write_slice_rec(state, body)
), out_typ.to_owned())),
Clause::Placeh{key, vec: None} => {
let real_key = unwrap_or!(key.strip_prefix('_'); key);
match &state[real_key] {
Entry::Scalar(x) => box_once(x.as_ref().to_owned()),
Entry::Name(n) => box_once(Expr(Clause::Name {
local: Some(n.as_ref().to_owned()),
qualified: one_mrc_slice(n.as_ref().to_owned())
}, mrc_empty_slice())),
_ => panic!("Scalar template may only be derived from scalar placeholder"),
}
},
Clause::Placeh{key, vec: Some(_)} => if let Entry::Vec(v) = &state[key] {
into_boxed_iter(v.as_ref().to_owned())
} else {panic!("Vectorial template may only be derived from vectorial placeholder")},
Clause::Explicit(param) => {
assert!(out_typ.len() == 0, "Explicit should never have a type annotation");
box_once(Clause::Explicit(Mrc::new(
Clause::from_exprv(write_expr_rec(state, param).collect())
.expect("Result shorter than template").into_expr()
)).into_expr())
},
// Explicit base case so that we get an error if Clause gets new values
c@Clause::Literal(_) | c@Clause::Name { .. } | c@Clause::ExternFn(_) | c@Clause::Atom(_) =>
box_once(Expr(c.to_owned(), out_typ.to_owned()))
}
}
/// Fill in a template from a state as produced by a pattern
fn write_slice_rec(state: &State, tpl: &Mrc<[Expr]>) -> Mrc<[Expr]> {
eprintln!("Writing {tpl:?} with state {state:?}");
tpl.iter().flat_map(|xpr| write_expr_rec(state, xpr)).collect()
eprintln!("Writing {tpl:?} with state {state:?}");
tpl.iter().flat_map(|xpr| write_expr_rec(state, xpr)).collect()
}
/// Apply a rule (a pair of pattern and template) to an expression
pub fn execute(mut src: Mrc<[Expr]>, mut tgt: Mrc<[Expr]>, input: Mrc<[Expr]>)
-> Result<Option<Mrc<[Expr]>>, RuleError> {
// Dimension check
let mut is_vec_db = HashMap::new();
src.iter().try_for_each(|e| verify_scalar_vec(e, &mut is_vec_db))
.map_err(RuleError::ScalarVecMismatch)?;
tgt.iter().try_for_each(|e| verify_scalar_vec(e, &mut is_vec_db))
.map_err(RuleError::ScalarVecMismatch)?;
// Padding
slice_to_vec(&mut src, &mut tgt);
// Generate matcher
let matcher = SliceMatcherDnC::new(src);
let matcher_cache = SliceMatcherDnC::get_matcher_cache();
Ok(update_all_seqs(Mrc::clone(&input), &mut |p| {
let state = matcher.match_range_cached(p, &matcher_cache)?;
Some(write_slice_rec(&state, &tgt))
}))
// Dimension check
let mut is_vec_db = HashMap::new();
src.iter().try_for_each(|e| verify_scalar_vec(e, &mut is_vec_db))
.map_err(RuleError::ScalarVecMismatch)?;
tgt.iter().try_for_each(|e| verify_scalar_vec(e, &mut is_vec_db))
.map_err(RuleError::ScalarVecMismatch)?;
// Padding
slice_to_vec(&mut src, &mut tgt);
// Generate matcher
let matcher = SliceMatcherDnC::new(src);
let matcher_cache = SliceMatcherDnC::get_matcher_cache();
Ok(update_all_seqs(Mrc::clone(&input), &mut |p| {
let state = matcher.match_range_cached(p, &matcher_cache)?;
Some(write_slice_rec(&state, &tgt))
}))
}


@@ -14,10 +14,10 @@ use super::split_at_max_vec::split_at_max_vec;
#[derive(Debug, Eq, PartialEq, Hash)]
pub struct CacheEntry<'a>(Mrc<[Expr]>, &'a SliceMatcherDnC);
impl<'a> Clone for CacheEntry<'a> {
fn clone(&self) -> Self {
let CacheEntry(mrc, matcher) = self;
CacheEntry(Mrc::clone(mrc), matcher)
}
fn clone(&self) -> Self {
let CacheEntry(mrc, matcher) = self;
CacheEntry(Mrc::clone(mrc), matcher)
}
}
@@ -31,281 +31,281 @@ impl<'a> Clone for CacheEntry<'a> {
/// a pattern on the entire tree.
#[derive(Clone, Eq)]
pub struct SliceMatcherDnC {
/// The entire pattern this will match
pattern: Mrc<[Expr]>,
/// The exact clause this can match
clause: Mrc<Clause>,
/// Matcher for the parts of the pattern right from us
right_subm: Option<Box<SliceMatcherDnC>>,
/// Matcher for the parts of the pattern left from us
left_subm: Option<Box<SliceMatcherDnC>>,
/// Matcher for the body of this clause if it has one.
/// Must be Some if pattern is (Auto, Lambda or S)
body_subm: Option<Box<SliceMatcherDnC>>,
/// Matcher for the type of this expression if it has one (Auto usually does)
/// Optional
typ_subm: Option<Box<SliceMatcherDnC>>,
/// The entire pattern this will match
pattern: Mrc<[Expr]>,
/// The exact clause this can match
clause: Mrc<Clause>,
/// Matcher for the parts of the pattern right from us
right_subm: Option<Box<SliceMatcherDnC>>,
/// Matcher for the parts of the pattern left from us
left_subm: Option<Box<SliceMatcherDnC>>,
/// Matcher for the body of this clause if it has one.
/// Must be Some if pattern is (Auto, Lambda or S)
body_subm: Option<Box<SliceMatcherDnC>>,
/// Matcher for the type of this expression if it has one (Auto usually does)
/// Optional
typ_subm: Option<Box<SliceMatcherDnC>>,
}
impl PartialEq for SliceMatcherDnC {
fn eq(&self, other: &Self) -> bool {
self.pattern == other.pattern
}
fn eq(&self, other: &Self) -> bool {
self.pattern == other.pattern
}
}
impl std::hash::Hash for SliceMatcherDnC {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.pattern.hash(state);
}
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.pattern.hash(state);
}
}
impl SliceMatcherDnC {
/// If this is true, `clause`, `typ_subm`, `body_subm` and `clause_qual_name` are meaningless.
/// If it's false, it's also false for both side matchers.
pub fn clause_is_vectorial(&self) -> bool {
matches!(self.clause.as_ref(), Clause::Placeh{vec: Some(..), ..})
/// If this is true, `clause`, `typ_subm`, `body_subm` and `clause_qual_name` are meaningless.
/// If it's false, it's also false for both side matchers.
pub fn clause_is_vectorial(&self) -> bool {
matches!(self.clause.as_ref(), Clause::Placeh{vec: Some(..), ..})
}
/// If clause is a name, the qualified name this can match
pub fn clause_qual_name(&self) -> Option<Mrc<[String]>> {
if let Clause::Name { qualified, .. } = self.clause.as_ref() {Some(Mrc::clone(qualified))} else {None}
}
/// If clause is a Placeh, the key in the state the match will be stored at
pub fn state_key(&self) -> Option<&String> {
if let Clause::Placeh { key, .. } = self.clause.as_ref() {Some(key)} else {None}
}
pub fn own_max_size(&self, total: usize) -> Option<usize> {
if !self.clause_is_vectorial() {
if total == self.len() {Some(total)} else {None}
} else {
let margin = self.min(Side::Left) + self.min(Side::Right);
if margin + self.own_min_size() <= total {Some(total - margin)} else {None}
}
/// If clause is a name, the qualified name this can match
pub fn clause_qual_name(&self) -> Option<Mrc<[String]>> {
if let Clause::Name { qualified, .. } = self.clause.as_ref() {Some(Mrc::clone(qualified))} else {None}
}
pub fn own_min_size(&self) -> usize {
if let Clause::Placeh { vec: Some((_, nonzero)), .. } = self.clause.as_ref() {
if *nonzero {1} else {0}
} else {self.len()}
}
/// Enumerate all valid subdivisions based on the reported size constraints of self and
/// the two subranges
pub fn valid_subdivisions(&self,
range: Mrc<[Expr]>
) -> impl Iterator<Item = (Mrc<[Expr]>, Mrc<[Expr]>, Mrc<[Expr]>)> {
let own_max = unwrap_or!(self.own_max_size(range.len()); return box_empty());
let own_min = self.own_min_size();
let lmin = self.min(Side::Left);
let _lmax = self.max(Side::Left, range.len());
let rmin = self.min(Side::Right);
let _rmax = self.max(Side::Right, range.len());
let full_len = range.len();
Box::new((own_min..=own_max).rev().flat_map(move |own_len| {
let wiggle = full_len - lmin - rmin - own_len;
let range = Mrc::clone(&range);
(0..=wiggle).map(move |offset| {
let first_break = lmin + offset;
let second_break = first_break + own_len;
let left = mrc_derive(&range, |p| &p[0..first_break]);
let mid = mrc_derive(&range, |p| &p[first_break..second_break]);
let right = mrc_derive(&range, |p| &p[second_break..]);
(left, mid, right)
})
}))
}
pub fn new(pattern: Mrc<[Expr]>) -> Self {
let (clause, left_subm, right_subm) = mrc_try_derive(&pattern, |p| {
if p.len() == 1 {Some(&p[0].0)} else {None}
}).map(|e| (e, None, None))
.or_else(|| split_at_max_vec(Mrc::clone(&pattern)).map(|(left, _, right)| (
mrc_derive(&pattern, |p| &p[left.len()].0),
if !left.is_empty() {Some(Box::new(Self::new(left)))} else {None},
if !right.is_empty() {Some(Box::new(Self::new(right)))} else {None}
)))
.unwrap_or_else(|| (
mrc_derive(&pattern, |p| &p[0].0),
None,
Some(Box::new(Self::new(mrc_derive(&pattern, |p| &p[1..]))))
));
Self {
pattern, right_subm, left_subm,
clause: Mrc::clone(&clause),
body_subm: clause.body().map(|b| Box::new(Self::new(b))),
typ_subm: clause.typ().map(|t| Box::new(Self::new(t)))
}
/// If clause is a Placeh, the key in the state the match will be stored at
pub fn state_key(&self) -> Option<&String> {
if let Clause::Placeh { key, .. } = self.clause.as_ref() {Some(key)} else {None}
}
/// The shortest slice this pattern can match
fn len(&self) -> usize {
if self.clause_is_vectorial() {
self.min(Side::Left) + self.min(Side::Right) + self.own_min_size()
} else {self.pattern.len()}
}
/// Pick a subpattern based on the parameter
fn side(&self, side: Side) -> Option<&SliceMatcherDnC> {
match side {
Side::Left => &self.left_subm,
Side::Right => &self.right_subm
}.as_ref().map(|b| b.as_ref())
}
/// The shortest slice the given side can match
fn min(&self, side: Side) -> usize {self.side(side).map_or(0, |right| right.len())}
/// The longest slice the given side can match
fn max(&self, side: Side, total: usize) -> usize {
self.side(side).map_or(0, |m| if m.clause_is_vectorial() {
total - self.min(side.opposite()) - self.own_min_size()
} else {m.len()})
}
/// Take the smallest possible slice from the given side
fn slice_min<'a>(&self, side: Side, range: &'a [Expr]) -> &'a [Expr] {
side.slice(self.min(side), range)
}
/// Matches the body on a range
/// # Panics
/// when called on an instance that does not have a body (not Auto, Lambda or S)
fn match_body<'a>(&'a self,
range: Mrc<[Expr]>, cache: &Cache<CacheEntry<'a>, Option<State>>
) -> Option<State> {
self.body_subm.as_ref()
.expect("Missing body matcher")
.match_range_cached(range, cache)
}
/// Matches the type and body on respective ranges
/// # Panics
/// when called on an instance that does not have a body (not Auto, Lambda or S)
fn match_parts<'a>(&'a self,
typ_range: Mrc<[Expr]>, body_range: Mrc<[Expr]>,
cache: &Cache<CacheEntry<'a>, Option<State>>
) -> Option<State> {
let typ_state = if let Some(typ) = &self.typ_subm {
typ.match_range_cached(typ_range, cache)?
} else {State::new()};
let body_state = self.match_body(body_range, cache)?;
typ_state + body_state
}
/// Match the specified side-submatcher on the specified range with the cache
/// In absence of a side-submatcher empty ranges are matched to empty state
fn apply_side_with_cache<'a>(&'a self,
side: Side, range: Mrc<[Expr]>,
cache: &Cache<CacheEntry<'a>, Option<State>>
) -> Option<State> {
match &self.side(side) {
None => {
if !range.is_empty() {None}
else {Some(State::new())}
},
Some(m) => cache.try_find(&CacheEntry(range, m)).map(|s| s.as_ref().to_owned())
}
pub fn own_max_size(&self, total: usize) -> Option<usize> {
if !self.clause_is_vectorial() {
if total == self.len() {Some(total)} else {None}
} else {
let margin = self.min(Side::Left) + self.min(Side::Right);
if margin + self.own_min_size() <= total {Some(total - margin)} else {None}
}
fn match_range_scalar_cached<'a>(&'a self,
target: Mrc<[Expr]>,
cache: &Cache<CacheEntry<'a>, Option<State>>
) -> Option<State> {
let pos = self.min(Side::Left);
if target.len() != self.pattern.len() {return None}
let mut own_state = (
self.apply_side_with_cache(Side::Left, mrc_derive(&target, |t| &t[0..pos]), cache)?
+ self.apply_side_with_cache(Side::Right, mrc_derive(&target, |t| &t[pos+1..]), cache)
)?;
match (self.clause.as_ref(), &target.as_ref()[pos].0) {
(Clause::Literal(val), Clause::Literal(tgt)) => {
if val == tgt {Some(own_state)} else {None}
}
(Clause::Placeh{key, vec: None}, tgt_clause) => {
if let Some(real_key) = key.strip_prefix('_') {
if let Clause::Name { local: Some(value), .. } = tgt_clause {
own_state.insert_name(real_key, value)
} else {None}
} else {own_state.insert_scalar(&key, &target[pos])}
}
(Clause::S(c, _), Clause::S(c_tgt, body_range)) => {
if c != c_tgt {return None}
own_state + self.match_parts(to_mrc_slice(vec![]), Mrc::clone(body_range), cache)
}
(Clause::Name{qualified, ..}, Clause::Name{qualified: q_tgt, ..}) => {
if qualified == q_tgt {Some(own_state)} else {None}
}
(Clause::Lambda(name, _, _), Clause::Lambda(name_tgt, typ_tgt, body_tgt)) => {
// Primarily, the name works as a placeholder
if let Some(state_key) = name.strip_prefix('$') {
own_state = own_state.insert_name(state_key, name_tgt)?
} else if name != name_tgt {return None}
// ^ But if you're weird like that, it can also work as a constraint
own_state + self.match_parts(Mrc::clone(typ_tgt), Mrc::clone(body_tgt), cache)
}
(Clause::Auto(name_opt, _, _), Clause::Auto(name_range, typ_range, body_range)) => {
if let Some(name) = name_opt {
// TODO: Enforce this at construction, on a type system level
let state_key = name.strip_prefix('$')
.expect("Auto patterns may only reference, never enforce the name");
own_state = own_state.insert_name_opt(state_key, name_range.as_ref())?
}
own_state + self.match_parts(Mrc::clone(typ_range), Mrc::clone(body_range), cache)
},
_ => None
}
pub fn own_min_size(&self) -> usize {
if let Clause::Placeh { vec: Some((_, nonzero)), .. } = self.clause.as_ref() {
if *nonzero {1} else {0}
} else {self.len()}
}
/// Enumerate all valid subdivisions based on the reported size constraints of self and
/// the two subranges
pub fn valid_subdivisions(&self,
range: Mrc<[Expr]>
) -> impl Iterator<Item = (Mrc<[Expr]>, Mrc<[Expr]>, Mrc<[Expr]>)> {
let own_max = unwrap_or!(self.own_max_size(range.len()); return box_empty());
let own_min = self.own_min_size();
let lmin = self.min(Side::Left);
let _lmax = self.max(Side::Left, range.len());
let rmin = self.min(Side::Right);
let _rmax = self.max(Side::Right, range.len());
let full_len = range.len();
Box::new((own_min..=own_max).rev().flat_map(move |own_len| {
let wiggle = full_len - lmin - rmin - own_len;
let range = Mrc::clone(&range);
(0..=wiggle).map(move |offset| {
let first_break = lmin + offset;
let second_break = first_break + own_len;
let left = mrc_derive(&range, |p| &p[0..first_break]);
let mid = mrc_derive(&range, |p| &p[first_break..second_break]);
let right = mrc_derive(&range, |p| &p[second_break..]);
(left, mid, right)
})
}))
}
}
pub fn new(pattern: Mrc<[Expr]>) -> Self {
let (clause, left_subm, right_subm) = mrc_try_derive(&pattern, |p| {
if p.len() == 1 {Some(&p[0].0)} else {None}
}).map(|e| (e, None, None))
.or_else(|| split_at_max_vec(Mrc::clone(&pattern)).map(|(left, _, right)| (
mrc_derive(&pattern, |p| &p[left.len()].0),
if !left.is_empty() {Some(Box::new(Self::new(left)))} else {None},
if !right.is_empty() {Some(Box::new(Self::new(right)))} else {None}
)))
.unwrap_or_else(|| (
mrc_derive(&pattern, |p| &p[0].0),
None,
Some(Box::new(Self::new(mrc_derive(&pattern, |p| &p[1..]))))
));
Self {
pattern, right_subm, left_subm,
clause: Mrc::clone(&clause),
body_subm: clause.body().map(|b| Box::new(Self::new(b))),
typ_subm: clause.typ().map(|t| Box::new(Self::new(t)))
}
/// Match the range with a vectorial _assuming we are a vectorial_
fn match_range_vectorial_cached<'a>(&'a self,
name: &str,
target: Mrc<[Expr]>,
cache: &Cache<CacheEntry<'a>, Option<State>>
) -> Option<State> {
// Step through valid slicings based on reported size constraints in order
// from longest own section to shortest and from left to right
for (left, own, right) in self.valid_subdivisions(target) {
return Some(unwrap_or!(
self.apply_side_with_cache(Side::Left, left, cache)
.and_then(|lres| lres + self.apply_side_with_cache(Side::Right, right, cache))
.and_then(|side_res| side_res.insert_vec(name, own.as_ref()));
continue
))
}
None
}
/// The shortest slice this pattern can match
fn len(&self) -> usize {
if self.clause_is_vectorial() {
self.min(Side::Left) + self.min(Side::Right) + self.own_min_size()
} else {self.pattern.len()}
}
/// Pick a subpattern based on the parameter
fn side(&self, side: Side) -> Option<&SliceMatcherDnC> {
match side {
Side::Left => &self.left_subm,
Side::Right => &self.right_subm
}.as_ref().map(|b| b.as_ref())
}
/// The shortest slice the given side can match
fn min(&self, side: Side) -> usize {self.side(side).map_or(0, |right| right.len())}
/// The longest slice the given side can match
fn max(&self, side: Side, total: usize) -> usize {
self.side(side).map_or(0, |m| if m.clause_is_vectorial() {
total - self.min(side.opposite()) - self.own_min_size()
} else {m.len()})
}
/// Take the smallest possible slice from the given side
fn slice_min<'a>(&self, side: Side, range: &'a [Expr]) -> &'a [Expr] {
side.slice(self.min(side), range)
/// Try and match the specified range
pub fn match_range_cached<'a>(&'a self,
target: Mrc<[Expr]>,
cache: &Cache<CacheEntry<'a>, Option<State>>
) -> Option<State> {
if self.pattern.is_empty() {
return if target.is_empty() {Some(State::new())} else {None}
}
if self.clause_is_vectorial() {
let key = self.state_key().expect("Vectorial implies key");
self.match_range_vectorial_cached(key, target, cache)
} else {self.match_range_scalar_cached(target, cache)}
}
/// Matches the body on a range
/// # Panics
/// when called on an instance that does not have a body (not Auto, Lambda or S)
fn match_body<'a>(&'a self,
range: Mrc<[Expr]>, cache: &Cache<CacheEntry<'a>, Option<State>>
) -> Option<State> {
self.body_subm.as_ref()
.expect("Missing body matcher")
.match_range_cached(range, cache)
}
/// Matches the type and body on respective ranges
/// # Panics
/// when called on an instance that does not have a body (not Auto, Lambda or S)
fn match_parts<'a>(&'a self,
typ_range: Mrc<[Expr]>, body_range: Mrc<[Expr]>,
cache: &Cache<CacheEntry<'a>, Option<State>>
) -> Option<State> {
let typ_state = if let Some(typ) = &self.typ_subm {
typ.match_range_cached(typ_range, cache)?
} else {State::new()};
let body_state = self.match_body(body_range, cache)?;
typ_state + body_state
}
pub fn get_matcher_cache<'a>()
-> Cache<'a, CacheEntry<'a>, Option<State>> {
Cache::new(
|CacheEntry(tgt, matcher), cache| {
matcher.match_range_cached(tgt, cache)
}
)
}
/// Match the specified side-submatcher on the specified range with the cache
/// In absence of a side-submatcher empty ranges are matched to empty state
fn apply_side_with_cache<'a>(&'a self,
side: Side, range: Mrc<[Expr]>,
cache: &Cache<CacheEntry<'a>, Option<State>>
) -> Option<State> {
match &self.side(side) {
None => {
if !range.is_empty() {None}
else {Some(State::new())}
},
Some(m) => cache.try_find(&CacheEntry(range, m)).map(|s| s.as_ref().to_owned())
}
}
fn match_range_scalar_cached<'a>(&'a self,
target: Mrc<[Expr]>,
cache: &Cache<CacheEntry<'a>, Option<State>>
) -> Option<State> {
let pos = self.min(Side::Left);
if target.len() != self.pattern.len() {return None}
let mut own_state = (
self.apply_side_with_cache(Side::Left, mrc_derive(&target, |t| &t[0..pos]), cache)?
+ self.apply_side_with_cache(Side::Right, mrc_derive(&target, |t| &t[pos+1..]), cache)
)?;
match (self.clause.as_ref(), &target.as_ref()[pos].0) {
(Clause::Literal(val), Clause::Literal(tgt)) => {
if val == tgt {Some(own_state)} else {None}
}
(Clause::Placeh{key, vec: None}, tgt_clause) => {
if let Some(real_key) = key.strip_prefix('_') {
if let Clause::Name { local: Some(value), .. } = tgt_clause {
own_state.insert_name(real_key, value)
} else {None}
} else {own_state.insert_scalar(&key, &target[pos])}
}
(Clause::S(c, _), Clause::S(c_tgt, body_range)) => {
if c != c_tgt {return None}
own_state + self.match_parts(to_mrc_slice(vec![]), Mrc::clone(body_range), cache)
}
(Clause::Name{qualified, ..}, Clause::Name{qualified: q_tgt, ..}) => {
if qualified == q_tgt {Some(own_state)} else {None}
}
(Clause::Lambda(name, _, _), Clause::Lambda(name_tgt, typ_tgt, body_tgt)) => {
// Primarily, the name works as a placeholder
if let Some(state_key) = name.strip_prefix('$') {
own_state = own_state.insert_name(state_key, name_tgt)?
} else if name != name_tgt {return None}
// ^ But if you're weird like that, it can also work as a constraint
own_state + self.match_parts(Mrc::clone(typ_tgt), Mrc::clone(body_tgt), cache)
}
(Clause::Auto(name_opt, _, _), Clause::Auto(name_range, typ_range, body_range)) => {
if let Some(name) = name_opt {
// TODO: Enforce this at construction, on a type system level
let state_key = name.strip_prefix('$')
.expect("Auto patterns may only reference, never enforce the name");
own_state = own_state.insert_name_opt(state_key, name_range.as_ref())?
}
own_state + self.match_parts(Mrc::clone(typ_range), Mrc::clone(body_range), cache)
},
_ => None
}
}
/// Match the range with a vectorial _assuming we are a vectorial_
fn match_range_vectorial_cached<'a>(&'a self,
name: &str,
target: Mrc<[Expr]>,
cache: &Cache<CacheEntry<'a>, Option<State>>
) -> Option<State> {
// Step through valid slicings based on reported size constraints in order
// from longest own section to shortest and from left to right
for (left, own, right) in self.valid_subdivisions(target) {
return Some(unwrap_or!(
self.apply_side_with_cache(Side::Left, left, cache)
.and_then(|lres| lres + self.apply_side_with_cache(Side::Right, right, cache))
.and_then(|side_res| side_res.insert_vec(name, own.as_ref()));
continue
))
}
None
}
/// Try and match the specified range
pub fn match_range_cached<'a>(&'a self,
target: Mrc<[Expr]>,
cache: &Cache<CacheEntry<'a>, Option<State>>
) -> Option<State> {
if self.pattern.is_empty() {
return if target.is_empty() {Some(State::new())} else {None}
}
if self.clause_is_vectorial() {
let key = self.state_key().expect("Vectorial implies key");
self.match_range_vectorial_cached(key, target, cache)
} else {self.match_range_scalar_cached(target, cache)}
}
pub fn get_matcher_cache<'a>()
-> Cache<'a, CacheEntry<'a>, Option<State>> {
Cache::new(
|CacheEntry(tgt, matcher), cache| {
matcher.match_range_cached(tgt, cache)
}
)
}
pub fn match_range(&self, target: Mrc<[Expr]>) -> Option<State> {
self.match_range_cached(target, &Self::get_matcher_cache())
}
pub fn match_range(&self, target: Mrc<[Expr]>) -> Option<State> {
self.match_range_cached(target, &Self::get_matcher_cache())
}
}
impl Debug for SliceMatcherDnC {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Matcher")
.field("clause", &self.clause)
.field("vectorial", &self.clause_is_vectorial())
.field("min", &self.len())
.field("left", &self.left_subm)
.field("right", &self.right_subm)
.field("lmin", &self.min(Side::Left))
.field("rmin", &self.min(Side::Right))
.finish()
}
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Matcher")
.field("clause", &self.clause)
.field("vectorial", &self.clause_is_vectorial())
.field("min", &self.len())
.field("left", &self.left_subm)
.field("right", &self.right_subm)
.field("lmin", &self.min(Side::Left))
.field("rmin", &self.min(Side::Right))
.finish()
}
}
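
The heart of the vectorial case is `valid_subdivisions`: enumerate every (left, own, right) split that respects the minimum lengths of the side matchers, trying the longest own section first and scanning left to right within each length. Restated over plain index ranges with a made-up `subdivisions` helper (the `Mrc` slicing and the clamping done by `own_max_size` are left out):

```rust
use std::ops::Range;

// subdivisions(total, lmin, rmin, own_min, own_max) yields (left, own, right) index ranges,
// longest own section first, scanning left to right within each length. The caller guarantees
// lmin + rmin + own_max <= total, which is the role of own_max_size above.
fn subdivisions(
    total: usize, lmin: usize, rmin: usize, own_min: usize, own_max: usize,
) -> impl Iterator<Item = (Range<usize>, Range<usize>, Range<usize>)> {
    (own_min..=own_max).rev().flat_map(move |own_len| {
        let wiggle = total - lmin - rmin - own_len;
        (0..=wiggle).map(move |offset| {
            let first_break = lmin + offset;
            let second_break = first_break + own_len;
            (0..first_break, first_break..second_break, second_break..total)
        })
    })
}

fn main() {
    // A 5-element target, at least one element reserved on each side, own section 0..=3 long.
    for (left, own, right) in subdivisions(5, 1, 1, 0, 3) {
        println!("left={left:?} own={own:?} right={right:?}");
    }
}
```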


@@ -7,27 +7,27 @@ use crate::utils::{mrc_derive, mrc_try_derive};
pub type MaxVecSplit = (Mrc<[Expr]>, (Mrc<str>, usize, bool), Mrc<[Expr]>);
/// Derive the details of the central vectorial and the two sides from a slice of Expr's
pub fn split_at_max_vec(pattern: Mrc<[Expr]>) -> Option<MaxVecSplit> {
let rngidx = pattern.iter().position_max_by_key(|ex| {
if let Expr(Clause::Placeh{vec: Some((prio, _)), ..}, _) = ex {
*prio as i64
} else { -1 }
})?;
let left = mrc_derive(&pattern, |p| &p[0..rngidx]);
let placeh = mrc_derive(&pattern, |p| &p[rngidx].0);
let right = if rngidx == pattern.len() {
mrc_derive(&pattern, |x| &x[0..1])
} else {
mrc_derive(&pattern, |x| &x[rngidx + 1..])
};
mrc_try_derive(&placeh, |p| {
if let Clause::Placeh{key, vec: Some(_)} = p {
Some(key)
} else {None} // Repeated below on unchanged data
}).map(|key| {
let key = mrc_derive(&key, String::as_str);
if let Clause::Placeh{vec: Some((prio, nonzero)), ..} = placeh.as_ref() {
(left, (key, *prio, *nonzero), right)
}
else {panic!("Impossible branch")} // Duplicate of above
})
let rngidx = pattern.iter().position_max_by_key(|ex| {
if let Expr(Clause::Placeh{vec: Some((prio, _)), ..}, _) = ex {
*prio as i64
} else { -1 }
})?;
let left = mrc_derive(&pattern, |p| &p[0..rngidx]);
let placeh = mrc_derive(&pattern, |p| &p[rngidx].0);
let right = if rngidx == pattern.len() {
mrc_derive(&pattern, |x| &x[0..1])
} else {
mrc_derive(&pattern, |x| &x[rngidx + 1..])
};
mrc_try_derive(&placeh, |p| {
if let Clause::Placeh{key, vec: Some(_)} = p {
Some(key)
} else {None} // Repeated below on unchanged data
}).map(|key| {
let key = mrc_derive(&key, String::as_str);
if let Clause::Placeh{vec: Some((prio, nonzero)), ..} = placeh.as_ref() {
(left, (key, *prio, *nonzero), right)
}
else {panic!("Impossible branch")} // Duplicate of above
})
}
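
In other words: pick the element with the highest vectorial priority, treating non-vectorial elements as -1 exactly as the `position_max_by_key` closure above does, and split the slice around it. A toy version over bare priorities, with `split_at_max` as an invented stand-in:

```rust
// Each pattern element is reduced to its "vectorial priority"; non-vectorial elements count as -1.
fn split_at_max(prios: &[i64]) -> Option<(&[i64], i64, &[i64])> {
    let (idx, max) = prios.iter().copied().enumerate().max_by_key(|(_, p)| *p)?;
    if max < 0 { return None } // no vectorial placeholder in the pattern at all
    Some((&prios[..idx], max, &prios[idx + 1..]))
}

fn main() {
    //              foo  ...$body  bar  ...$rest
    let pattern = [ -1,  0,        -1,  1 ];
    let (left, prio, right) = split_at_max(&pattern).unwrap();
    assert_eq!((prio, left.len(), right.len()), (1, 3, 0));
    println!("splitting around the priority-{prio} placeholder");
}
```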


@@ -6,10 +6,10 @@ use crate::ast::Expr;
#[derive(Debug, PartialEq, Eq)]
pub enum Entry {
Vec(Rc<Vec<Expr>>),
Scalar(Rc<Expr>),
Name(Rc<String>),
NameOpt(Option<Rc<String>>)
Vec(Rc<Vec<Expr>>),
Scalar(Rc<Expr>),
Name(Rc<String>),
NameOpt(Option<Rc<String>>)
}
/// A bucket of indexed expression fragments. Addition may fail if there's a conflict.
@@ -19,129 +19,129 @@ pub struct State(HashMap<String, Entry>);
/// Clone without also cloning arbitrarily heavy Expr objects.
/// Key is expected to be a very short string with an allocator overhead close to zero.
impl Clone for Entry {
fn clone(&self) -> Self {
match self {
Self::Name(n) => Self::Name(Rc::clone(n)),
Self::Scalar(x) => Self::Scalar(Rc::clone(x)),
Self::Vec(v) => Self::Vec(Rc::clone(v)),
Self::NameOpt(o) => Self::NameOpt(o.as_ref().map(Rc::clone))
}
fn clone(&self) -> Self {
match self {
Self::Name(n) => Self::Name(Rc::clone(n)),
Self::Scalar(x) => Self::Scalar(Rc::clone(x)),
Self::Vec(v) => Self::Vec(Rc::clone(v)),
Self::NameOpt(o) => Self::NameOpt(o.as_ref().map(Rc::clone))
}
}
}
impl State {
pub fn new() -> Self {
Self(HashMap::new())
pub fn new() -> Self {
Self(HashMap::new())
}
pub fn insert_vec<S>(mut self, k: &S, v: &[Expr]) -> Option<Self>
where S: AsRef<str> + ToString + ?Sized + Debug {
if let Some(old) = self.0.get(k.as_ref()) {
if let Entry::Vec(val) = old {
if val.as_slice() != v {return None}
} else {return None}
} else {
self.0.insert(k.to_string(), Entry::Vec(Rc::new(v.to_vec())));
}
pub fn insert_vec<S>(mut self, k: &S, v: &[Expr]) -> Option<Self>
where S: AsRef<str> + ToString + ?Sized + Debug {
if let Some(old) = self.0.get(k.as_ref()) {
if let Entry::Vec(val) = old {
if val.as_slice() != v {return None}
} else {return None}
} else {
self.0.insert(k.to_string(), Entry::Vec(Rc::new(v.to_vec())));
Some(self)
}
pub fn insert_scalar<S>(mut self, k: &S, v: &Expr) -> Option<Self>
where S: AsRef<str> + ToString + ?Sized {
if let Some(old) = self.0.get(k.as_ref()) {
if let Entry::Scalar(val) = old {
if val.as_ref() != v {return None}
} else {return None}
} else {
self.0.insert(k.to_string(), Entry::Scalar(Rc::new(v.to_owned())));
}
Some(self)
}
pub fn insert_name<S1, S2>(mut self, k: &S1, v: &S2) -> Option<Self>
where
S1: AsRef<str> + ToString + ?Sized,
S2: AsRef<str> + ToString + ?Sized
{
if let Some(old) = self.0.get(k.as_ref()) {
if let Entry::Name(val) = old {
if val.as_str() != v.as_ref() {return None}
} else {return None}
} else {
self.0.insert(k.to_string(), Entry::Name(Rc::new(v.to_string())));
}
Some(self)
}
pub fn insert_name_opt<S1, S2>(mut self, k: &S1, v: Option<&S2>) -> Option<Self>
where
S1: AsRef<str> + ToString + ?Sized,
S2: AsRef<str> + ToString + ?Sized
{
if let Some(old) = self.0.get(k.as_ref()) {
if let Entry::NameOpt(val) = old {
if val.as_ref().map(|s| s.as_ref().as_str()) != v.map(|s| s.as_ref()) {
return None
}
Some(self)
} else {return None}
} else {
self.0.insert(k.to_string(), Entry::NameOpt(v.map(|s| Rc::new(s.to_string()))));
}
pub fn insert_scalar<S>(mut self, k: &S, v: &Expr) -> Option<Self>
where S: AsRef<str> + ToString + ?Sized {
if let Some(old) = self.0.get(k.as_ref()) {
if let Entry::Scalar(val) = old {
if val.as_ref() != v {return None}
} else {return None}
} else {
self.0.insert(k.to_string(), Entry::Scalar(Rc::new(v.to_owned())));
}
Some(self)
}
pub fn insert_name<S1, S2>(mut self, k: &S1, v: &S2) -> Option<Self>
where
S1: AsRef<str> + ToString + ?Sized,
S2: AsRef<str> + ToString + ?Sized
{
if let Some(old) = self.0.get(k.as_ref()) {
if let Entry::Name(val) = old {
if val.as_str() != v.as_ref() {return None}
} else {return None}
} else {
self.0.insert(k.to_string(), Entry::Name(Rc::new(v.to_string())));
}
Some(self)
}
pub fn insert_name_opt<S1, S2>(mut self, k: &S1, v: Option<&S2>) -> Option<Self>
where
S1: AsRef<str> + ToString + ?Sized,
S2: AsRef<str> + ToString + ?Sized
{
if let Some(old) = self.0.get(k.as_ref()) {
if let Entry::NameOpt(val) = old {
if val.as_ref().map(|s| s.as_ref().as_str()) != v.map(|s| s.as_ref()) {
return None
}
} else {return None}
} else {
self.0.insert(k.to_string(), Entry::NameOpt(v.map(|s| Rc::new(s.to_string()))));
}
Some(self)
}
/// Insert a new entry, return None on conflict
pub fn insert_pair(mut self, (k, v): (String, Entry)) -> Option<State> {
if let Some(old) = self.0.get(&k) {
if old != &v {return None}
} else {
self.0.insert(k, v);
}
Some(self)
}
/// Returns `true` if the state contains no data
pub fn empty(&self) -> bool {
self.0.is_empty()
Some(self)
}
/// Insert a new entry, return None on conflict
pub fn insert_pair(mut self, (k, v): (String, Entry)) -> Option<State> {
if let Some(old) = self.0.get(&k) {
if old != &v {return None}
} else {
self.0.insert(k, v);
}
Some(self)
}
/// Returns `true` if the state contains no data
pub fn empty(&self) -> bool {
self.0.is_empty()
}
}
impl Add for State {
type Output = Option<State>;
type Output = Option<State>;
fn add(mut self, rhs: Self) -> Self::Output {
if self.empty() {
return Some(rhs)
}
for pair in rhs.0 {
self = self.insert_pair(pair)?
}
Some(self)
fn add(mut self, rhs: Self) -> Self::Output {
if self.empty() {
return Some(rhs)
}
for pair in rhs.0 {
self = self.insert_pair(pair)?
}
Some(self)
}
}
impl Add<Option<State>> for State {
type Output = Option<State>;
type Output = Option<State>;
fn add(self, rhs: Option<State>) -> Self::Output {
rhs.and_then(|s| self + s)
}
fn add(self, rhs: Option<State>) -> Self::Output {
rhs.and_then(|s| self + s)
}
}
impl<S> Index<S> for State where S: AsRef<str> {
type Output = Entry;
type Output = Entry;
fn index(&self, index: S) -> &Self::Output {
return &self.0[index.as_ref()]
}
fn index(&self, index: S) -> &Self::Output {
return &self.0[index.as_ref()]
}
}
impl IntoIterator for State {
type Item = (String, Entry);
type Item = (String, Entry);
type IntoIter = hashbrown::hash_map::IntoIter<String, Entry>;
type IntoIter = hashbrown::hash_map::IntoIter<String, Entry>;
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
}
impl Debug for State {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self.0)
}
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self.0)
}
}
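
Every `insert_*` above follows the same contract: re-binding a key succeeds only when the new capture is identical to the stored one, and the `Add` impls merge two states by replaying one into the other, failing on the first mismatch. A minimal standalone model of that contract (the `Bindings` type below is invented for illustration and maps strings to strings instead of `Entry` values):

```rust
use std::collections::HashMap;

#[derive(Clone, Debug, Default)]
struct Bindings(HashMap<String, String>);

impl Bindings {
    // Re-binding a key succeeds only if the new capture equals the stored one.
    fn insert(mut self, key: &str, value: &str) -> Option<Self> {
        match self.0.get(key).cloned() {
            Some(old) if old != value => None, // conflicting capture: the match fails
            Some(_) => Some(self),             // identical capture already recorded
            None => {
                self.0.insert(key.to_string(), value.to_string());
                Some(self)
            }
        }
    }

    // Merging replays one set of bindings into the other and fails on the first conflict,
    // which is what the Add impls above express by returning Option<State>.
    fn merge(self, other: Bindings) -> Option<Self> {
        other.0.into_iter().try_fold(self, |acc, (k, v)| acc.insert(&k, &v))
    }
}

fn main() {
    let a = Bindings::default().insert("x", "1").unwrap();
    let b = Bindings::default().insert("y", "2").unwrap().insert("x", "1").unwrap();
    assert!(a.clone().merge(b).is_some()); // consistent captures merge fine
    let c = Bindings::default().insert("x", "3").unwrap();
    assert!(a.merge(c).is_none());         // "x" captured two different values
    println!("conflict semantics hold");
}
```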


@@ -9,44 +9,44 @@ use super::{super::ast::Rule, executor::execute, RuleError};
/// Manages a priority queue of substitution rules and allows applying them
pub struct Repository(Vec<Rule>);
impl Repository {
pub fn new(mut rules: Vec<Rule>) -> Self {
rules.sort_by_key(|r| r.prio);
Self(rules)
}
pub fn new(mut rules: Vec<Rule>) -> Self {
rules.sort_by_key(|r| r.prio);
Self(rules)
}
/// Attempt to run each rule in priority order once
pub fn step(&self, mut code: Mrc<[Expr]>) -> Result<Option<Mrc<[Expr]>>, RuleError> {
let mut ran_once = false;
for rule in self.0.iter() {
if let Some(tmp) = execute(
Mrc::clone(&rule.source), Mrc::clone(&rule.target),
Mrc::clone(&code)
)? {
ran_once = true;
code = tmp;
}
}
Ok(if ran_once {Some(code)} else {None})
/// Attempt to run each rule in priority order once
pub fn step(&self, mut code: Mrc<[Expr]>) -> Result<Option<Mrc<[Expr]>>, RuleError> {
let mut ran_once = false;
for rule in self.0.iter() {
if let Some(tmp) = execute(
Mrc::clone(&rule.source), Mrc::clone(&rule.target),
Mrc::clone(&code)
)? {
ran_once = true;
code = tmp;
}
}
Ok(if ran_once {Some(code)} else {None})
}
/// Attempt to run each rule in priority order `limit` times. Returns the final
/// tree and the number of iterations left to the limit.
pub fn long_step(&self, mut code: Mrc<[Expr]>, mut limit: usize)
-> Result<(Mrc<[Expr]>, usize), RuleError> {
while let Some(tmp) = self.step(Mrc::clone(&code))? {
if 0 >= limit {break}
limit -= 1;
code = tmp
}
Ok((code, limit))
/// Attempt to run each rule in priority order `limit` times. Returns the final
/// tree and the number of iterations left to the limit.
pub fn long_step(&self, mut code: Mrc<[Expr]>, mut limit: usize)
-> Result<(Mrc<[Expr]>, usize), RuleError> {
while let Some(tmp) = self.step(Mrc::clone(&code))? {
if 0 >= limit {break}
limit -= 1;
code = tmp
}
Ok((code, limit))
}
}
impl Debug for Repository {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
for rule in self.0.iter() {
writeln!(f, "{rule:?}")?
}
Ok(())
}
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
for rule in self.0.iter() {
writeln!(f, "{rule:?}")?
}
Ok(())
}
}
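
`long_step` is a bounded fixpoint loop: keep applying `step` until it reports no change or the iteration budget runs out, then hand back the result together with the unused budget. The same shape detached from rules and `Mrc<[Expr]>`, with a toy `halve` step standing in for the rule engine:

```rust
// step returns Some(new_state) on progress and None at a fixpoint; we stop early once the
// budget is spent, discarding the last computed step exactly like the loop above does.
fn long_step<T>(mut state: T, mut limit: usize, step: impl Fn(&T) -> Option<T>) -> (T, usize) {
    while let Some(next) = step(&state) {
        if limit == 0 { break }
        limit -= 1;
        state = next;
    }
    (state, limit)
}

fn main() {
    // Toy rewrite system: halve even numbers until an odd one is reached.
    let halve = |n: &u64| if n % 2 == 0 { Some(n / 2) } else { None };
    assert_eq!(long_step(40, 10, halve), (5, 7));    // 40 -> 20 -> 10 -> 5, 3 of 10 steps used
    assert_eq!(long_step(1024, 3, halve), (128, 0)); // budget exhausted before the fixpoint
    println!("ok");
}
```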


@@ -2,17 +2,17 @@ use std::{fmt, error::Error};
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum RuleError {
  BadState(String),
  ScalarVecMismatch(String)
}
impl fmt::Display for RuleError {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    match self {
      Self::BadState(key) => write!(f, "Key {:?} not in match pattern", key),
      Self::ScalarVecMismatch(key) =>
        write!(f, "Key {:?} used inconsistently with and without ellipsis", key)
    }
}
}
impl Error for RuleError {}


@@ -0,0 +1,51 @@
use std::{ops::{Generator, GeneratorState}, pin::Pin};
use super::{Task, Nice, TaskState};
pub struct GeneratorTask<G: Generator<(), Yield = ()>> {
nice: Nice,
generator: Pin<Box<G>>
}
impl<G> GeneratorTask<G> where G: Generator<(), Yield = ()> {
fn new(nice: Nice, generator: G) -> Self { Self {
nice,
generator: Box::pin(generator)
} }
}
impl<G> Task for GeneratorTask<G>
where G: Generator<(), Yield = ()> {
type Result = G::Return;
fn run_once(&mut self) -> super::TaskState<Self::Result> {
match self.generator.as_mut().resume(()) {
GeneratorState::Yielded(()) => super::TaskState::Yield,
GeneratorState::Complete(r) => super::TaskState::Complete(r)
}
}
}
impl<T> Task for Pin<Box<T>> where T: Generator<(), Yield = ()> {
type Result = T::Return;
fn run_once(&mut self) -> super::TaskState<Self::Result> {
match self.as_mut().resume(()) {
GeneratorState::Yielded(()) => TaskState::Yield,
GeneratorState::Complete(r) => TaskState::Complete(r)
}
}
}
#[macro_export]
macro_rules! subtask {
($g:tt) => { {
    // Drive the nested task to completion, forwarding its yields to the enclosing generator
    let mut task = $g;
    loop {
      match task.run_once() {
        TaskState::Yield => yield,
        TaskState::Complete(r) => break r
      }
}
} };
}

src/scheduler/mod.rs Normal file

@@ -0,0 +1,47 @@
mod generator_task;
mod task_pair;
mod task_vec;
pub type Nice = u16;
pub type Priority = i32;
pub enum TaskState<R> {
Yield,
Complete(R)
}
pub trait Task {
type Result;
fn run_once(&mut self) -> TaskState<Self::Result>;
fn run_n_times(&mut self, count: u64) -> TaskState<Self::Result> {
for _ in 0..count {
if let r@TaskState::Complete(_) = self.run_once() {
return r
}
}
return TaskState::Yield
}
fn run_to_completion(&mut self) -> Self::Result {
loop { if let TaskState::Complete(r) = self.run_once() {return r} }
}
fn boxed<'a>(self) -> TaskBox<'a, Self::Result> where Self: 'a + Sized { Box::new(self) }
}
pub type TaskBox<'a, T> = Box<dyn Task<Result = T> + 'a>;
impl<'a, R> Task for TaskBox<'a, R> {
type Result = R;
fn run_once(&mut self) -> TaskState<Self::Result> { self.as_mut().run_once() }
fn run_n_times(&mut self, count: u64) -> TaskState<Self::Result> {
self.as_mut().run_n_times(count)
}
fn run_to_completion(&mut self) -> Self::Result {
self.as_mut().run_to_completion()
}
}

src/scheduler/notes.md Normal file

@@ -0,0 +1,3 @@
# Purpose
Type expressions are trees. Any single branch could terminate the solver and any branch may be nonterminating, therefore all of them must be run concurrently. Thread-based concurrency isn't an option because a compiler must be perfectly deterministic. It is also beneficial to have fine-grained control over the relative priority of different tasks.
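A minimal sketch of the intended use, assuming the `Task` trait and `TaskPair` introduced alongside this note; `left_branch` and `right_branch` stand in for real solver tasks:

// Stepping is fully deterministic: which side advances depends only on the nice
// values and the running tally, never on OS scheduling. A task's share of steps
// is inversely proportional to its nice value, so here the left branch gets
// roughly three steps for every step of the right branch.
let mut pair = TaskPair::new(1, left_branch, 3, right_branch);
let (left_result, right_result) = pair.run_to_completion();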


@@ -0,0 +1,67 @@
use crate::utils::translate::process;
use super::{Task, Nice, Priority, TaskState};
enum TaskPairState<T: Task, U: Task> {
Empty,
Left(T, U::Result),
Right(T::Result, U),
Both(T, U)
}
pub struct TaskPair<T: Task, U: Task> {
l_nice: Nice,
r_nice: Nice,
state: TaskPairState<T, U>,
tally: Priority,
}
impl<T: Task, U: Task> TaskPair<T, U> {
pub fn new(l_nice: Nice, left: T, r_nice: Nice, right: U) -> Self {
Self {
l_nice, r_nice,
tally: 0,
state: TaskPairState::Both(left, right)
}
}
}
impl<T: Task, U: Task> Task for TaskPair<T, U> {
type Result = (T::Result, U::Result);
fn run_once(&mut self) -> TaskState<Self::Result> {
let TaskPair{ state, tally, l_nice, r_nice } = self;
let ret = process(state, |s| match s {
TaskPairState::Empty => panic!("Generator completed and empty"),
TaskPairState::Left(mut l_task, r_res) => {
match l_task.run_once() {
TaskState::Complete(r) => (TaskPairState::Empty, TaskState::Complete((r, r_res))),
TaskState::Yield => (TaskPairState::Left(l_task, r_res), TaskState::Yield),
}
}
TaskPairState::Right(l_res, mut r_task) => {
match r_task.run_once() {
TaskState::Complete(r) => (TaskPairState::Empty, TaskState::Complete((l_res, r))),
TaskState::Yield => (TaskPairState::Right(l_res, r_task), TaskState::Yield),
}
}
TaskPairState::Both(mut l_task, mut r_task) => {
let state = if 0 <= *tally {
*tally -= *l_nice as Priority;
match l_task.run_once() {
TaskState::Complete(r) => TaskPairState::Right(r, r_task),
TaskState::Yield => TaskPairState::Both(l_task, r_task),
}
} else {
*tally += *r_nice as Priority;
match r_task.run_once() {
TaskState::Complete(r) => TaskPairState::Left(l_task, r),
TaskState::Yield => TaskPairState::Both(l_task, r_task),
}
};
(state, TaskState::Yield)
}
});
ret
}
}

src/scheduler/task_vec.rs Normal file

@@ -0,0 +1,107 @@
use std::iter;
use itertools::Itertools;
use super::{Task, Nice, TaskState};
const NORMALIZATION_THRESHOLD:Nice = Nice::MAX / 4;
struct TaskEntry<T: Task> {
nice: Nice,
position: usize,
tally: Nice,
task: T
}
struct TaskVec<T: Task> {
results: Vec<Option<T::Result>>,
task_heap: Vec<Option<TaskEntry<T>>>,
}
impl<T: Task> TaskVec<T> {
pub fn new(tasks: Vec<(Nice, T)>) -> Self {
let mut results = Vec::with_capacity(tasks.len());
results.resize_with(tasks.len(), || None);
let task_heap = tasks.into_iter().enumerate()
.map(|(position, (nice, task))| Some(TaskEntry{ nice, task, position, tally: 1 }))
.collect_vec();
Self { results, task_heap }
}
fn entry(&self, i: usize) -> Option<&TaskEntry<T>> {
if self.task_heap.len() <= i {None}
else {self.task_heap[i].as_ref()}
}
fn entry_mut(&mut self, i: usize) -> Option<&mut TaskEntry<T>> {
if self.task_heap.len() <= i {None}
else {self.task_heap[i].as_mut()}
}
fn tally(&self, i: usize) -> Nice {
self.task_heap[i].as_ref().map(|e| e.tally).unwrap_or(0)
}
fn swap(&mut self, a: usize, b: usize) {
self.task_heap.swap(a, b);
}
fn iter_mut(&mut self) -> impl Iterator<Item = &mut TaskEntry<T>> {
self.task_heap.iter_mut().filter_map(|e| e.as_mut())
}
fn normalize(&mut self) {
let shrink_count = self.task_heap.iter().rev().take_while(|e| e.is_none()).count();
let new_len = self.task_heap.len() - shrink_count;
    // Drop the trailing completed (None) slots
    self.task_heap.splice(new_len.., iter::empty());
let head = self.entry_mut(0);
let offset = if let Some(e) = head {
let offset = e.tally - 1;
if offset < NORMALIZATION_THRESHOLD {return}
e.tally = 1;
offset
} else {return};
for entry in self.iter_mut() { entry.tally -= offset }
}
fn sink(&mut self, i: usize) {
let lchi = 2*i + 1;
let rchi = 2*i + 2;
let t = self.tally(i);
let lcht = if let Some(e) = self.entry(lchi) {e.tally} else {
if self.tally(rchi) < t {
self.swap(rchi, i);
self.sink(rchi);
}
return
};
let rcht = if let Some(e) = self.entry(rchi) {e.tally} else {
if self.tally(lchi) < t {
self.swap(lchi, i);
self.sink(lchi);
}
return
};
let mchi = {
if rcht < t && rcht < lcht {rchi}
else if lcht < t && lcht < rcht {lchi}
else {return}
};
self.swap(i, mchi);
self.sink(mchi);
}
}
impl<T: Task> Task for TaskVec<T> {
  type Result = Vec<Option<T::Result>>;
  fn run_once(&mut self) -> super::TaskState<Self::Result> {
    let head = &mut self.task_heap[0];
    let head_entry = head.as_mut().expect("All completed, cannot run further");
    head_entry.tally += head_entry.nice;
    match head_entry.task.run_once() {
      TaskState::Complete(r) => {
        // Store the result at the task's original position, drop the finished
        // task and restore the heap order
        let position = head_entry.position;
        self.results[position] = Some(r);
        *head = None;
        self.sink(0);
        if self.entry(0).is_some() { TaskState::Yield }
        else { TaskState::Complete(std::mem::take(&mut self.results)) }
      }
      TaskState::Yield => {
        // The head's tally grew, so it may need to sink below its siblings
        self.sink(0);
        self.normalize();
        TaskState::Yield
      }
    }
  }
}
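A short usage sketch; `a`, `b` and `c` are placeholder tasks of the same type, and a lower nice value buys a larger share of the steps:

let results = TaskVec::new(vec![(1, a), (2, b), (4, c)]).run_to_completion();
// results[i] holds Some(output) of the i-th task, in the order they were passed in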


@@ -1,52 +0,0 @@
use std::{borrow::Borrow};
use std::hash::Hash;
use hashbrown::HashMap;
use mappable_rc::Mrc;
use crate::{ast::{Expr, Clause}, utils::mrc_to_iter};
pub struct Substitution(HashMap<String, Mrc<Expr>>);
impl Substitution {
fn new() -> Self { Self(HashMap::new()) }
fn apply<Q: ?Sized + Hash + Eq>(&self, q: &Q) -> Option<Mrc<Expr>>
where String: Borrow<Q> {
self.0.get(q).map(Mrc::clone)
}
}
pub fn hindley_milner(a: Mrc<[Expr]>, b: Mrc<[Expr]>) -> Result<Substitution, ()> {
hindley_milner_rec(Substitution::new(), a, b)
}
pub fn hindley_milner_rec(mut s: Substitution, a: Mrc<[Expr]>, b: Mrc<[Expr]>)
-> Result<Substitution, ()> {
if a.len() != b.len() {return Err(())}
for (mut a, mut b) in mrc_to_iter(a).zip(mrc_to_iter(b)) {
if let Clause::Placeh{key, ..} = &a.0 {
if let Some(ex) = s.apply(key) { a = ex }
}
if let Clause::Placeh{key, ..} = &b.0 {
if let Some(ex) = s.apply(key) { b = ex }
}
if !matches!(&a.0, Clause::Placeh{..}) { (a, b) = (b, a) }
match (&a.0, &b.0) {
(Clause::Placeh{key:a_key,..}, Clause::Placeh{key:b_key,..}) =>
if a_key == b_key {return Ok(s)},
_ => return Err(())
}
if let (Clause::Placeh{key: a_key,..}, Clause::Placeh{key: b_key,..}) = (&a.0, &b.0) {
if a_key == b_key {return Ok(s)}
} else if let (Clause::S(_, a_body), Clause::S(_, b_body)) = (&a.0, &b.0) {
s = hindley_milner_rec(s, Mrc::clone(a_body), Mrc::clone(b_body))?
} else if let ()
}
Ok(s)
}
pub fn occurs(key: &str, val: &Expr) -> bool {
match val.0 {
Clause::Auto(_, _, body) => body.
}
}


@@ -1,13 +0,0 @@
// mod hindley_milner;
#[derive(Clone, Hash, PartialEq, Eq)]
pub enum Expression<L, V, O, F> {
Literal(L),
Variable(V),
Operation(O, Vec<Expression<L, V, O, F>>),
Lazy(F)
}
pub struct Rule {
}


@@ -17,27 +17,27 @@ use crate::utils::BoxedIter;
pub fn bfs<T, F, I>(init: T, neighbors: F)
-> impl Iterator<Item = T>
where T: Eq + Hash + Clone + std::fmt::Debug,
F: Fn(T) -> I, I: Iterator<Item = T>
{
let mut visited: HashSet<T> = HashSet::new();
let mut visit_queue: VecDeque<T> = VecDeque::from([init]);
let mut unpack_queue: VecDeque<T> = VecDeque::new();
iter::from_fn(move || {
let next = {loop {
let next = unwrap_or!(visit_queue.pop_front(); break None);
if !visited.contains(&next) { break Some(next) }
}}.or_else(|| loop {
let unpacked = unwrap_or!(unpack_queue.pop_front(); break None);
let mut nbv = neighbors(unpacked).filter(|t| !visited.contains(t));
if let Some(next) = nbv.next() {
visit_queue.extend(nbv);
break Some(next)
}
})?;
visited.insert(next.clone());
unpack_queue.push_back(next.clone());
Some(next)
})
}
/// Same as [bfs] but with a recursion depth limit
@@ -48,66 +48,66 @@ where T: Eq + Hash + Clone + std::fmt::Debug,
pub fn bfs_upto<'a, T: 'a, F: 'a, I: 'a>(init: T, neighbors: F, limit: usize)
-> impl Iterator<Item = T> + 'a
where T: Eq + Hash + Clone + std::fmt::Debug,
F: Fn(T) -> I, I: Iterator<Item = T>
{
/// Newtype to store the recursion depth but exclude it from equality comparisons
/// Because BFS visits nodes in increasing distance order, when a node is visited for the
/// second time it will never override the earlier version of itself. This is not the case
/// with Dijkstra's algorithm, which can be conceptualised as a "weighted BFS".
#[derive(Eq, Clone, Debug)]
struct Wrap<U>(usize, U);
impl<U: PartialEq> PartialEq for Wrap<U> {
fn eq(&self, other: &Self) -> bool { self.1.eq(&other.1) }
}
impl<U: Hash> Hash for Wrap<U> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.1.hash(state) }
}
bfs(Wrap(0, init), move |Wrap(dist, t)| -> BoxedIter<Wrap<T>> { // boxed because we branch
if dist == limit {Box::new(iter::empty())}
else {Box::new(neighbors(t).map(move |t| Wrap(dist + 1, t)))}
}).map(|Wrap(_, t)| t)
}
#[cfg(test)]
mod tests {
use itertools::Itertools;
use super::*;
type Graph = Vec<Vec<usize>>;
fn neighbors(graph: &Graph, pt: usize) -> impl Iterator<Item = usize> + '_ {
graph[pt].iter().copied()
}
fn from_neighborhood_matrix(matrix: Vec<Vec<usize>>) -> Graph {
matrix.into_iter().map(|v| {
v.into_iter().enumerate().filter_map(|(i, ent)| {
if ent > 1 {panic!("Neighborhood matrices must contain binary values")}
else if ent == 1 {Some(i)}
else {None}
}).collect()
}).collect()
}
#[test]
fn test_square() {
let simple_graph = from_neighborhood_matrix(vec![
vec![0,1,0,1,1,0,0,0],
vec![1,0,1,0,0,1,0,0],
vec![0,1,0,1,0,0,1,0],
vec![1,0,1,0,0,0,0,1],
vec![1,0,0,0,0,1,0,1],
vec![0,1,0,0,1,0,1,0],
vec![0,0,1,0,0,1,0,1],
vec![0,0,0,1,1,0,1,0],
]);
let scan = bfs(0, |n| neighbors(&simple_graph, n)).collect_vec();
assert_eq!(scan, vec![0, 1, 3, 4, 2, 5, 7, 6])
}
#[test]
fn test_stringbuilder() {
let scan = bfs("".to_string(), |s| {
vec![s.clone()+";", s.clone()+"a", s+"aaa"].into_iter()
}).take(30).collect_vec();
println!("{scan:?}")
}
}
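A sketch of a further test for `bfs_upto`, using a small path graph that is assumed here rather than taken from the commit:

#[test]
fn test_path_limited() {
  // 0 - 1 - 2 - 3 path; with the depth capped at 2 the walk never reaches node 3
  let path = from_neighborhood_matrix(vec![
    vec![0,1,0,0],
    vec![1,0,1,0],
    vec![0,1,0,1],
    vec![0,0,1,0],
  ]);
  let scan = bfs_upto(0, |n| neighbors(&path, n), 2).collect_vec();
  assert_eq!(scan, vec![0, 1, 2])
}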


@@ -4,93 +4,93 @@ use mappable_rc::Mrc;
/// Convenience trait for overriding Mrc's strange cloning logic
pub trait MyClone {
fn my_clone(&self) -> Self;
}
impl<T> MyClone for T where T: Clone {
default fn my_clone(&self) -> Self { self.clone() }
}
impl<T: ?Sized> MyClone for Rc<T> {
fn my_clone(&self) -> Self { Rc::clone(self) }
}
impl<T: ?Sized> MyClone for Mrc<T> {
fn my_clone(&self) -> Self { Mrc::clone(self) }
}
/// Cache the return values of an effectless closure in a hashmap
/// Inspired by the closure_cacher crate.
pub struct Cache<'a, I, O: 'static> {
store: RefCell<HashMap<I, Mrc<O>>>,
closure: Box<dyn Fn (I, &Self) -> Mrc<O> + 'a>
}
impl<'a, I, O> Cache<'a, I, O> where
I: Eq + Hash + MyClone
{
pub fn new<F: 'a>(closure: F) -> Self where F: Fn(I, &Self) -> O {
Self::new_raw(move |o, s| Mrc::new(closure(o, s)))
}
/// Take an Mrc<O> closure rather than an O closure
/// Used internally to derive caches from other systems working with Mrc-s
pub fn new_raw<F: 'a>(closure: F) -> Self where F: Fn(I, &Self) -> Mrc<O> {
Self {
store: RefCell::new(HashMap::new()),
closure: Box::new(closure)
}
}
/// Produce and cache a result by cloning I if necessary
pub fn find(&self, i: &I) -> Mrc<O> {
let closure = &self.closure;
if let Some(v) = self.store.borrow().get(i) {
return Mrc::clone(v)
}
// In the moment of invocation the refcell is on immutable
// this is important for recursive calculations
let result = closure(i.my_clone(), self);
let mut store = self.store.borrow_mut();
Mrc::clone(store.raw_entry_mut().from_key(i)
.or_insert_with(|| (i.my_clone(), result)).1)
}
#[allow(dead_code)]
/// Return the result if it has already been computed
pub fn known(&self, i: &I) -> Option<Mrc<O>> {
let store = self.store.borrow();
store.get(i).map(Mrc::clone)
}
#[allow(dead_code)]
/// Forget the output for the given input
pub fn drop(&self, i: &I) -> bool {
self.store.borrow_mut().remove(i).is_some()
}
}
impl<'a, I, O, E> Cache<'a, I, Result<O, E>> where
I: Eq + Hash + MyClone,
// O: Clone,
E: Clone
{
/// Sink the ref from a Result into the Ok value, such that cloning only occurs on the sad path
/// but the return value can be short-circuited
pub fn try_find(&self, i: &I) -> Result<Mrc<O>, E> {
let ent = self.find(i);
Mrc::try_map(ent, |t| t.as_ref().ok())
.map_err(|res| Result::as_ref(&res).err().unwrap().to_owned())
}
}
impl<'a, I, O> Cache<'a, I, Option<O>> where
I: Eq + Hash + MyClone,
// O: Clone
{
#[allow(dead_code)]
/// Sink the ref from an Option into the Some value such that the return value can be
/// short-circuited
pub fn try_find(&self, i: &I) -> Option<Mrc<O>> where I: Clone {
let ent = self.find(i);
Mrc::try_map(ent, |o| o.as_ref()).ok()
}
}
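A small usage sketch; the fibonacci closure is purely illustrative. The cache hands itself back to the closure so recursive lookups hit the memo table:

let fib: Cache<u64, u64> = Cache::new(|n, this| {
  if n < 2 {n} else {*this.find(&(n - 1)) + *this.find(&(n - 2))}
});
assert_eq!(*fib.find(&10), 55);   // later calls with the same key reuse the stored value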


@@ -11,10 +11,10 @@
///
/// ```
/// xloop!(for i in 0..10; {
/// connection.try_connect()
/// if connection.ready() {
/// break Some(connection)
/// }
/// }; None)
/// ```
///
@@ -22,17 +22,17 @@
///
/// ```
/// xloop!(while socket.is_open(); {
/// let (data, is_end) = socket.read();
/// all_data.append(data)
/// if is_end { break Ok(all_data) }
/// }; {
/// if let Ok(new_sock) = open_socket(socket.position()) {
/// new_sock.set_position(socket.position());
/// socket = new_sock;
/// continue
/// } else {
/// Err(DownloadError::ConnectionLost)
/// }
/// })
/// ```
///
@@ -40,7 +40,7 @@
///
/// ```
/// xloop!(let mut leap = 1; own_id*2 + leap < batch_size; leap *= 2; {
/// batch[own_id*2] += batch[own_id*2 + leap]
/// })
/// ```
///
@@ -51,41 +51,41 @@
/// **todo** find a valid use case for While let for a demo
#[macro_export]
macro_rules! xloop {
  (for $p:pat in $it:expr; $body:stmt) => {
    xloop!(for $p in $it; $body; ())
  };
  (for $p:pat in $it:expr; $body:stmt; $exit:stmt) => {
    {
      let mut __xloop__ = $it.into_iter();
      xloop!(let Some($p) = __xloop__.next(); $body; $exit)
    }
  };
  (let $p:pat = $e:expr; $body:stmt) => {
    xloop!(let $p = $e; $body; ())
  };
  (let $p:pat = $e:expr; $body:stmt; $exit:stmt) => {
    {
      loop {
        if let $p = $e { $body }
        else { break { $exit } }
      }
    }
  };
  (while $cond:expr; $body:stmt) => {
    xloop!(while $cond; $body; ())
  };
  (while $cond:expr; $body:stmt; $exit:stmt) => {
    {
      // run the body while the condition holds, then evaluate the exit expression
      loop {
        if $cond { $body }
        else { break { $exit } }
      }
    }
  };
  ($init:stmt; $cond:expr; $step:stmt; $body:stmt) => {
    xloop!($init; $cond; $step; $body; ())
  };
  ($init:stmt; $cond:expr; $step:stmt; $body:stmt; $exit:stmt) => {
    { $init; xloop!(while $cond; { $body; $step }; $exit) }
  };
}


@@ -6,31 +6,31 @@ pub type BoxedIter<'a, T> = Box<dyn Iterator<Item = T> + 'a>;
pub type BoxedIterIter<'a, T> = BoxedIter<'a, BoxedIter<'a, T>>;
/// BoxedIter of a single element
pub fn box_once<'a, T: 'a>(t: T) -> BoxedIter<'a, T> {
  Box::new(iter::once(t))
}
/// BoxedIter of no elements
pub fn box_empty<'a, T: 'a>() -> BoxedIter<'a, T> {
  Box::new(iter::empty())
}
#[macro_export]
macro_rules! box_chain {
  ($curr:expr) => {
    Box::new($curr) as BoxedIter<_>
  };
  ($curr:expr, $($rest:expr),*) => {
    Box::new($curr$(.chain($rest))*) as $crate::utils::iter::BoxedIter<_>
  };
}
pub fn box_flatten<'a, T: 'a, I: 'a, J: 'a>(i: I) -> BoxedIter<'a, T>
where
  J: Iterator<Item = T>,
  I: Iterator<Item = J>,
{
  Box::new(i.flatten())
}
pub fn into_boxed_iter<'a, T: 'a>(t: T) -> BoxedIter<'a, <T as IntoIterator>::Item>
where T: IntoIterator {
  Box::new(t.into_iter())
}


@@ -5,23 +5,23 @@ use std::mem;
/// Merge two sorted iterators into a sorted iterator.
pub fn merge_sorted<T, I, J, F, O>(mut i: I, mut j: J, mut f: F) -> impl Iterator<Item = T>
where
I: Iterator<Item = T>, J: Iterator<Item = T>,
F: FnMut(&T) -> O, O: Ord,
{
  // Hold the next pending element of each source, refilled as elements are emitted
  let mut i_item: Option<T> = i.next();
  let mut j_item: Option<T> = j.next();
std::iter::from_fn(move || {
match (&mut i_item, &mut j_item) {
(&mut None, &mut None) => None,
      (&mut None, j_item @ &mut Some(_)) => Some((j_item, j.next())),
(i_item @ &mut Some(_), &mut None) => Some((i_item, i.next())),
(Some(i_val), Some(j_val)) => Some(
if f(i_val) < f(j_val) {
(&mut i_item, i.next())
} else {
(&mut j_item, j.next())
}
)
}.and_then(|(dest, value)| mem::replace(dest, value))
})
}
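A brief sketch of the intended behaviour on two ascending runs, keyed by the values themselves:

let merged: Vec<u32> = merge_sorted(
  [1, 4, 6].into_iter(), [2, 3, 7].into_iter(), |n| *n
).collect();
assert_eq!(merged, vec![1, 2, 3, 4, 6, 7]);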


@@ -1,78 +1,85 @@
mod cache;
pub mod translate;
mod substack;
mod side;
mod merge_sorted;
mod unwrap_or;
pub mod iter;
mod bfs;
mod unless_let;
mod string_from_charset;
mod for_loop;
mod protomap;
mod product2;
pub use cache::Cache;
pub use substack::Stackframe;
pub use side::Side;
pub use merge_sorted::merge_sorted;
pub use iter::BoxedIter;
pub use string_from_charset::string_from_charset;
pub use protomap::ProtoMap;
pub use product2::Product2;
use mappable_rc::Mrc;
pub fn mrc_derive<T: ?Sized, P, U: ?Sized>(m: &Mrc<T>, p: P) -> Mrc<U>
where P: for<'a> FnOnce(&'a T) -> &'a U {
  Mrc::map(Mrc::clone(m), p)
}
pub fn mrc_try_derive<T: ?Sized, P, U: ?Sized>(m: &Mrc<T>, p: P) -> Option<Mrc<U>>
where P: for<'a> FnOnce(&'a T) -> Option<&'a U> {
  Mrc::try_map(Mrc::clone(m), p).ok()
}
pub fn mrc_empty_slice<T>() -> Mrc<[T]> {
  mrc_derive_slice(&Mrc::new(Vec::new()))
}
pub fn to_mrc_slice<T>(v: Vec<T>) -> Mrc<[T]> {
  Mrc::map(Mrc::new(v), |v| v.as_slice())
}
pub fn collect_to_mrc<I>(iter: I) -> Mrc<[I::Item]> where I: Iterator {
  to_mrc_slice(iter.collect())
}
pub fn mrc_derive_slice<T>(mv: &Mrc<Vec<T>>) -> Mrc<[T]> {
  mrc_derive(mv, |v| v.as_slice())
}
pub fn one_mrc_slice<T>(t: T) -> Mrc<[T]> {
  Mrc::map(Mrc::new([t; 1]), |v| v.as_slice())
}
pub fn mrc_to_iter<T>(ms: Mrc<[T]>) -> impl Iterator<Item = Mrc<T>> {
  let mut i = 0;
  std::iter::from_fn(move || if i < ms.len() {
    let out = Some(mrc_derive(&ms, |s| &s[i]));
    i += 1;
    out
  } else {None})
}
pub fn mrc_unnest<T>(m: &Mrc<Mrc<T>>) -> Mrc<T> {
  Mrc::clone(m.as_ref())
}
pub fn mrc_slice_to_only<T>(m: Mrc<[T]>) -> Result<Mrc<T>, ()> {
  Mrc::try_map(m, |slice| {
    if slice.len() != 1 {None}
    else {Some(&slice[0])}
  }).map_err(|_| ())
}
pub fn mrc_slice_to_only_option<T>(m: Mrc<[T]>) -> Result<Option<Mrc<T>>, ()> {
  if m.len() > 1 {return Err(())}
  Ok(Mrc::try_map(m, |slice| {
    if slice.len() == 0 {None}
    else {Some(&slice[0])}
  }).ok())
}
pub fn mrc_concat<T: Clone>(a: &Mrc<[T]>, b: &Mrc<[T]>) -> Mrc<[T]> {
collect_to_mrc(a.iter().chain(b.iter()).cloned())
}
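A short sketch of the derive helpers: a view into a shared slice that keeps the underlying allocation alive:

let xs: Mrc<[u32]> = to_mrc_slice(vec![1, 2, 3]);
let tail = mrc_derive(&xs, |s| &s[1..]);
assert_eq!(&*tail, &[2, 3][..]);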

src/utils/product2.rs Normal file

@@ -0,0 +1,53 @@
use super::Side;
/// The output of a two-part algorithm. The values are
///
/// - [Product2::Left] or [Product2::Right] if one of the arguments is the product
/// - [Product2::Either] if the arguments are identical
/// - [Product2::New] if the product is a different value from either
pub enum Product2<T> {
Left,
Right,
Either,
New(T)
}
impl<T> Product2<T> {
/// Convert the product into a concrete value by providing the original arguments
pub fn pick(self, left: T, right: T) -> T {
match self {
Self::Left | Self::Either => left,
Self::Right => right,
Self::New(t) => t
}
}
/// Combine some subresults into a tuple representing a greater result
pub fn join<U>(
self, (lt, rt): (T, T),
second: Product2<U>, (lu, ru): (U, U)
) -> Product2<(T, U)> {
match (self, second) {
(Self::Either, Product2::Either) => Product2::Either,
(Self::Left | Self::Either, Product2::Left | Product2::Either) => Product2::Left,
(Self::Right | Self::Either, Product2::Right | Product2::Either) => Product2::Right,
(t, u) => Product2::New((t.pick(lt, rt), u.pick(lu, ru)))
}
}
/// Translate results back into the type of the original problem.
pub fn map<A, F: FnOnce(T) -> A>(self, f: F) -> Product2<A> {
match self {
Product2::Left => Product2::Left, Product2::Right => Product2::Right,
Product2::Either => Product2::Either,
Product2::New(t) => Product2::New(f(t))
}
}
}
/// Technically very different but sometimes necessary to translate
impl<T> From<Side> for Product2<T> {
fn from(value: Side) -> Self {match value {
Side::Left => Self::Left,
Side::Right => Self::Right
}}
}


@@ -13,152 +13,152 @@ const INLINE_ENTRIES: usize = 2;
/// plus wasted stack space which is likely wasted L1 as well. The cost of underruns is wasted stack
/// space.
pub struct ProtoMap<'a, K, V, const STACK_COUNT: usize = 2> {
  entries: SmallVec<[(K, Option<V>); STACK_COUNT]>,
  prototype: Option<&'a ProtoMap<'a, K, V, STACK_COUNT>>
}
impl<'a, K, V, const STACK_COUNT: usize> ProtoMap<'a, K, V, STACK_COUNT> {
  pub fn new() -> Self {
    Self {
      entries: SmallVec::new(),
      prototype: None
    }
  }
  /// Mutable reference to entry without checking proto in O(m)
  fn local_entry_mut<'b, Q: ?Sized>(&'b mut self, query: &Q)
  -> Option<(usize, &'b mut K, &'b mut Option<V>)>
  where K: Borrow<Q>, Q: Eq
  {
    self.entries.iter_mut().enumerate().find_map(|(i, (k, v))| {
      if query.eq((*k).borrow()) { Some((i, k, v)) } else { None }
    })
  }
  /// Entry without checking proto in O(m)
  fn local_entry<'b, Q: ?Sized>(&'b self, query: &Q)
  -> Option<(usize, &'b K, &'b Option<V>)>
  where K: Borrow<Q>, Q: Eq
  {
    self.entries.iter().enumerate().find_map(|(i, (k, v))| {
      if query.eq((*k).borrow()) { Some((i, k, v)) } else { None }
    })
  }
  /// Find entry in prototype chain in O(n)
  pub fn get<'b, Q: ?Sized>(&'b self, query: &Q) -> Option<&'b V>
  where K: Borrow<Q>, Q: Eq
  {
    if let Some((_, _, v)) = self.local_entry(query) {
      v.as_ref()
    } else {
      self.prototype?.get(query)
    }
  }
  /// Record a value for the given key in O(m)
  pub fn set(&mut self, key: &K, value: V) where K: Eq + Clone {
    if let Some((_, _, v)) = self.local_entry_mut(key) {
      *v = Some(value);
    } else {
      self.entries.push((key.clone(), Some(value)))
    }
  }
  /// Delete in a memory-efficient way in O(n)
  pub fn delete_small(&mut self, key: &K) where K: Eq + Clone {
    let exists_up = self.prototype.and_then(|p| p.get(key)).is_some();
    let local_entry = self.local_entry_mut(key);
    match (exists_up, local_entry) {
      (false, None) => (), // nothing to do
      (false, Some((i, _, _))) => { self.entries.remove(i); }, // forget locally
      (true, Some((_, _, v))) => *v = None, // update local override to cover
      (true, None) => self.entries.push((key.clone(), None)), // create new
    }
  }
  /// Delete in O(m) without checking the prototype chain
  /// May produce unnecessary cover over previously unknown key
  pub fn delete_fast(&mut self, key: &K) where K: Eq + Clone {
    if let Some((_, _, v)) = self.local_entry_mut(key) {
      *v = None
    } else {
      self.entries.push((key.clone(), None))
    }
  }
  /// Iterate over the values defined herein and on the prototype chain
  /// Note that this will visit keys multiple times
  pub fn iter(&self) -> impl Iterator<Item = &(K, Option<V>)> {
    let mut map = self;
    iter::from_fn(move || {
      let pairs = map.entries.iter();
      map = map.prototype?;
      Some(pairs)
    }).flatten()
  }
  /// Visit the keys in an unsafe random order, repeated arbitrarily many times
  pub fn keys(&self) -> impl Iterator<Item = &K> {
    self.iter().map(|(k, _)| k)
  }
  /// Visit the values in random order
  pub fn values(&self) -> impl Iterator<Item = &V> {
    self.iter().filter_map(|(_, v)| v.as_ref())
  }
  /// Update the prototype, and correspondingly the lifetime of the map
  pub fn set_proto<'b>(self, proto: &'b ProtoMap<'b, K, V, STACK_COUNT>)
  -> ProtoMap<'b, K, V, STACK_COUNT> {
    ProtoMap {
      entries: self.entries,
      prototype: Some(proto)
    }
  }
}
impl<T, K, V, const STACK_COUNT: usize>
From<T> for ProtoMap<'_, K, V, STACK_COUNT>
where T: IntoIterator<Item = (K, V)> {
  fn from(value: T) -> Self {
    Self {
      entries: value.into_iter().map(|(k, v)| (k, Some(v))).collect(),
      prototype: None
    }
  }
}
impl<Q: ?Sized, K, V, const STACK_COUNT: usize>
Index<&Q> for ProtoMap<'_, K, V, STACK_COUNT>
where K: Borrow<Q>, Q: Eq {
  type Output = V;
  fn index(&self, index: &Q) -> &Self::Output {
    self.get(index).expect("Index not found in map")
  }
}
impl<K: Clone, V: Clone, const STACK_COUNT: usize>
Clone for ProtoMap<'_, K, V, STACK_COUNT> {
  fn clone(&self) -> Self {
    Self {
      entries: self.entries.clone(),
      prototype: self.prototype
    }
  }
}
impl<'a, K: 'a, V: 'a, const STACK_COUNT: usize>
Add<(K, V)> for &'a ProtoMap<'a, K, V, STACK_COUNT> {
  type Output = ProtoMap<'a, K, V, STACK_COUNT>;
  fn add(self, rhs: (K, V)) -> Self::Output {
    ProtoMap::from([rhs]).set_proto(self)
  }
}
#[macro_export]
macro_rules! protomap {
  ($($ent:expr),*) => {
    ProtoMap::from([$($ent),*])
  };
}
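A usage sketch of the prototype chain: a child scope overrides one binding and covers another without touching its parent:

let base: ProtoMap<&str, i32> = ProtoMap::from([("x", 1), ("y", 2)]);
let mut child = ProtoMap::new().set_proto(&base);
child.set(&"x", 10);
child.delete_fast(&"y");
assert_eq!(child.get(&"x"), Some(&10));
assert_eq!(child.get(&"y"), None);
assert_eq!(base["y"], 2);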


@@ -4,50 +4,50 @@ use std::fmt::Display;
pub enum Side {Left, Right}
impl Display for Side {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    match self {
      Self::Left => write!(f, "Left"),
      Self::Right => write!(f, "Right"),
    }
  }
}
impl Side {
  pub fn opposite(&self) -> Self {
    match self {
      Self::Left => Self::Right,
      Self::Right => Self::Left
    }
  }
  /// Shorthand for opposite
  pub fn inv(&self) -> Self { self.opposite() }
  /// take N elements from this end of a slice
  pub fn slice<'a, T>(&self, size: usize, slice: &'a [T]) -> &'a [T] {
    match self {
      Side::Left => &slice[..size],
      Side::Right => &slice[slice.len() - size..]
    }
  }
  /// ignore N elements from this end of a slice
  pub fn crop<'a, T>(&self, margin: usize, slice: &'a [T]) -> &'a [T] {
    self.opposite().slice(slice.len() - margin, slice)
  }
  /// ignore N elements from this end and M elements from the other end of a slice
  pub fn crop_both<'a, T>(&self, margin: usize, opposite: usize, slice: &'a [T]) -> &'a [T] {
    self.crop(margin, self.opposite().crop(opposite, slice))
  }
  /// Pick this side from a pair of things
  pub fn pick<T>(&self, pair: (T, T)) -> T {
    match self {
      Side::Left => pair.0,
      Side::Right => pair.1
    }
  }
  /// Make a pair with the first element on this side
  pub fn pair<T>(&self, this: T, opposite: T) -> (T, T) {
    match self {
      Side::Left => (this, opposite),
      Side::Right => (opposite, this)
    }
  }
}
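A quick sketch of the slice helpers:

let xs = [1, 2, 3, 4, 5];
assert_eq!(Side::Right.slice(2, &xs), &[4, 5][..]);     // take two from the right end
assert_eq!(Side::Left.crop(1, &xs), &[2, 3, 4, 5][..]); // drop one from the left end
assert_eq!(Side::Left.pair('a', 'b'), ('a', 'b'));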


@@ -1,14 +1,14 @@
fn string_from_charset_rec(val: u64, digits: &str) -> String {
let radix = digits.len() as u64;
let mut prefix = if val > radix {
    string_from_charset_rec((val - 1) / radix, digits)
} else {String::new()};
  prefix.push(digits.chars().nth(((val - 1) % radix) as usize).unwrap_or_else(
|| panic!("Overindexed digit set \"{}\" with {}", digits, val - 1)
));
prefix
}
pub fn string_from_charset(val: u64, digits: &str) -> String {
string_from_charset_rec(val + 1, digits)
}
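For reference, a sketch of how the zero-based index maps onto the digit set:

assert_eq!(string_from_charset(0, "abc"), "a");
assert_eq!(string_from_charset(2, "abc"), "c");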


@@ -5,70 +5,84 @@ use std::fmt::Debug;
/// deep enough to warrant a heap-allocated set
#[derive(Clone, Copy)]
pub struct Stackframe<'a, T> {
  pub item: T,
  pub prev: Option<&'a Stackframe<'a, T>>,
  pub len: usize
}
impl<'a, T: 'a> Stackframe<'a, T> {
  pub fn new(item: T) -> Self {
    Self {
      item,
      prev: None,
      len: 1
    }
  }
  /// Get the item owned by this listlike, very fast O(1)
  pub fn item(&self) -> &T { &self.item }
  /// Get the next link in the list, very fast O(1)
  pub fn prev(&self) -> Option<&'a Stackframe<T>> { self.prev }
  /// Construct an iterator over the listlike, very fast O(1)
  pub fn iter(&self) -> StackframeIterator<T> {
    StackframeIterator { curr: Some(self) }
  }
  pub fn push(&self, item: T) -> Stackframe<'_, T> {
    Stackframe {
      item,
      prev: Some(self),
      len: self.len + 1
    }
  }
  pub fn opush(prev: Option<&'a Self>, item: T) -> Self {
    Self {
      item,
      prev,
      // one longer than the stack it extends, or 1 if it starts a new stack
      len: prev.map_or(1, |s| s.len + 1)
    }
  }
  pub fn len(&self) -> usize { self.len }
  pub fn pop(&self, count: usize) -> Option<&Self> {
    if count == 0 {Some(self)}
    else {self.prev.expect("Index out of range").pop(count - 1)}
  }
  pub fn opop(cur: Option<&Self>, count: usize) -> Option<&Self> {
    if count == 0 {cur}
    else {Self::opop(cur.expect("Index out of range").prev, count - 1)}
  }
  pub fn o_into_iter(curr: Option<&Self>) -> StackframeIterator<T> {
    StackframeIterator { curr }
  }
}
impl<'a, T> Debug for Stackframe<'a, T> where T: Debug {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    write!(f, "Substack")?;
    f.debug_list().entries(self.iter()).finish()
  }
}
pub struct StackframeIterator<'a, T> {
  curr: Option<&'a Stackframe<'a, T>>
}
impl<'a, T> StackframeIterator<'a, T> {
  pub fn first_some<U, F: Fn(&T) -> Option<U>>(&mut self, f: F) -> Option<U> {
    while let Some(x) = self.next() {
      if let Some(result) = f(x) {
        return Some(result)
      }
    }
    None
  }
}
impl<'a, T> Iterator for StackframeIterator<'a, T> {
  type Item = &'a T;
  fn next(&mut self) -> Option<&'a T> {
    let curr = self.curr?;
    let item = curr.item();
    let prev = curr.prev();
    self.curr = prev;
    Some(item)
  }
}
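A usage sketch: each frame lives on the stack of the function that pushed it, and iteration walks from the newest frame back to the root:

let a = Stackframe::new(1);
let b = a.push(2);
let c = b.push(3);
assert_eq!(c.len(), 3);
assert_eq!(c.iter().copied().collect::<Vec<_>>(), vec![3, 2, 1]);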

src/utils/translate.rs Normal file

@@ -0,0 +1,22 @@
pub fn translate<T, F: FnOnce(T) -> T>(data: &mut T, f: F) {
  unsafe {
    // Move the value out, map it, and move the result back in place.
    // Safety: `data` is logically empty while `f` runs, so `f` must not panic
    // and must not reach `data` through any other path.
    let acc = std::ptr::read(data);
    std::ptr::write(data, f(acc));
  }
}
pub fn process<T, U, F: FnOnce(T) -> (T, U)>(data: &mut T, f: F) -> U {
  unsafe {
    // Same as `translate`, except `f` also returns a by-value result.
    let acc = std::ptr::read(data);
    let (new, ret) = f(acc);
    std::ptr::write(data, new);
    ret
  }
}
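A small sketch of `process`: the closure takes the value out by move and returns the replacement plus a by-value result:

let mut state = vec![1, 2, 3];
let previous_len = process(&mut state, |v| (Vec::new(), v.len()));
assert_eq!(previous_len, 3);
assert!(state.is_empty());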


@@ -1,6 +1,6 @@
#[macro_export]
macro_rules! unless_let {
($m:pat_param = $expr:tt) => {
if let $m = $expr {} else
}
}


@@ -1,6 +1,6 @@
#[macro_export]
macro_rules! unwrap_or {
($m:expr; $fail:expr) => {
{ if let Some(res) = ($m) {res} else {$fail} }
}
}