forked from Orchid/orchid
transfer commit

README.md (96 lines changed)
@@ -15,15 +15,15 @@ Basic command line calculator
import std::io::(readln, printf, in, out)

main := (
  readln in >>= int |> \a.
  readln in >>= \op.
  readln in >>= int |> \b.
  printf out "the result is {}\n", [match op (
    "+" => a + b,
    "-" => a - b,
    "*" => a * b,
    "/" => a / b
  )]
)
```

@@ -32,17 +32,17 @@ Grep
import std::io::(readln, println, in, out, getarg)

main := loop \r. (
  readln in >>= \line.
  if (substring (getarg 1) line)
  then (println out line >>= r)
  else r
)
```

Filter through an arbitrary collection
```orchid
filter := @C:Type -> Type. @:Map C. @T. \f:T -> Bool. \coll:C T. (
  coll >> \el. if (f el) then (Some el) else Nil
):(C T)
```

@@ -76,9 +76,9 @@ it to itself. A naive implementation of `imul` might look like this.

```orchid
\a:int.\b:int. loop \r. (\i.
  ifthenelse (ieq i 0)
  b
  (iadd b (r (isub i 1)))
) a
```

@@ -123,8 +123,8 @@ For a demonstration, here's a sample implementation of the Option monad.
```orchid
--[[ The definition of Monad ]]--
define Monad $M:(Type -> Type) as (Pair
  (@T. @U. (T -> M U) -> M T -> M U) -- bind
  (@T. T -> M T) -- return
)

bind := @M:Type -> Type. @monad:Monad M. fst monad
@@ -134,17 +134,17 @@ return := @M:Type -> Type. @monad:Monad M. snd monad
define Option $T as @U. U -> (T -> U) -> U
--[ Constructors ]--
export Some := @T. \data:T. categorise @(Option T) ( \default. \map. map data )
export None := @T. categorise @(Option T) ( \default. \map. default )
--[ Implement Monad ]--
impl Monad Option via (makePair
  ( @T. @U. \f:T -> U. \opt:Option T. opt None \x. Some f ) -- bind
  Some -- return
)
--[ Sample function that works on unknown monad to demonstrate HKTs.
  Turns (Option (M T)) into (M (Option T)), "raising" the unknown monad
  out of the Option ]--
export raise := @M:Type -> Type. @T. @:Monad M. \opt:Option (M T). (
  opt (return None) (\m. bind m (\x. Some x))
):(M (Option T))
```

@@ -160,7 +160,7 @@ the result:

```orchid
impl @T. Add (List T) (List T) (List T) by concatListAdd over elementwiseAdd via (
  ...
)
```

@@ -168,11 +168,11 @@ For completeness' sake, the original definition might look like this:

```orchid
impl
  @C:Type -> Type. @T. @U. @V. -- variables
  @:(Applicative C). @:(Add T U V). -- conditions
  Add (C T) (C U) (C V) -- target
by elementwiseAdd via (
  ...
)
```

@@ -181,11 +181,11 @@ implementation looks like:

```orchid
impl @T. @:(Add T T T). Multiply T int T by iterativeMultiply via (
  \a:int. \b:T. loop \r. (\i.
    ifthenelse (ieq i 0)
    b
    (add b (r (isub i 1))) -- notice how iadd is now add
  ) a
)
```

@@ -193,7 +193,7 @@ This could then be applied to any type that's closed over addition

```orchid
aroundTheWorldLyrics := (
  mult 18 (add (mult 4 "Around the World\n") "\n")
)
```

@@ -222,8 +222,8 @@ parenthesize subexpressions at the callsite.

```orchid
(..$pre:2 if ...$cond then ...$true else ...$false) =10=> (
  ..$pre
  (ifthenelse (...$cond) (...$true) (...$false))
)
...$a + ...$b =2=> (add (...$a) (...$b))
...$a = ...$b =5=> (eq $a $b)
@@ -234,10 +234,10 @@ The recursive addition function now looks like this

```orchid
impl @T. @:(Add T T T). Multiply T int T by iterativeMultiply via (
  \a:int.\b:T. loop \r. (\i.
    if (i = 0) then b
    else (b + (r (i - 1)))
  ) a
)
```

@@ -302,13 +302,13 @@ This is very far away so I don't want to make promises, but I have some
ideas.

- [ ] early execution of functions on any subset of their arguments where
  it could provide substantial speedup
- [ ] tracking copies of expressions and evaluating them only once
- [ ] Many cases of single recursion converted to loops (a sketch of this transformation follows the list)
  - [ ] tail recursion
  - [ ] 2 distinct loops where the tail doesn't use the arguments
  - [ ] reorder operations to favour this scenario
- [ ] reactive calculation of values that are deemed to be read more often
  than written
- [ ] automatic profiling based on performance metrics generated by debug
  builds
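The "single recursion converted to loops" item is essentially the rewrite the naive `imul` example above would benefit from. A minimal Rust sketch of that transformation (added here for illustration; not code from this commit):

```rust
// Recursive form: one stack frame per step, like the Orchid `loop \r.` example.
fn imul_recursive(a: u64, b: u64) -> u64 {
    if a == 0 { 0 } else { b + imul_recursive(a - 1, b) }
}

// Loop form the optimizer would ideally produce: same result, constant stack depth.
fn imul_loop(a: u64, b: u64) -> u64 {
    let mut acc = 0;
    for _ in 0..a {
        acc += b;
    }
    acc
}

fn main() {
    assert_eq!(imul_recursive(4, 18), imul_loop(4, 18)); // both evaluate to 72
}
```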

@@ -6,7 +6,7 @@ define Add $L:type $R:type $O:type as $L -> $R -> $O
define Mappable $C:(type -> type) as @T. @U. (T -> U) -> $C T -> $C U
-- Dependency on existing typeclass
define Zippable $C:(type -> type) as @:Mappable $C. (
  @T. @U. @V. (T -> U -> V) -> $C T -> $C U -> $C V
)
define Default $T:type as $T
-- Is the intersection of typeclasses an operation we need?
@@ -15,16 +15,16 @@ define Default $T:type as $T
define Cons $elem:type as loop \r. Option (Pair T $elem)
nil := @T. from @(Cons T) none
cons := @T. \el:T. (
  generalise @(Cons T)
  |> (\list. some t[el, into list])
  |> categorise @(Cons T)
)
export map := @T. @U. \f:T -> U. (
  generalise @(Cons T)
  |> loop ( \recurse. \option.
    map option \pair. t[f (fst pair), recurse (snd pair)]
  )
  |> categorise @(Cons U)
)
-- Universal typeclass implementation; no parameters, no overrides, no name for overriding
impl Mappable Cons via map
@@ -34,7 +34,7 @@ impl (@T. Add (Cons T) (Cons T) (Cons T)) by concatenation over elementwiseAddit
-- Scratchpad

filterBadWords := @C:type -> type. @:Mappable C. \strings:C String. (
  map strings \s. if intersects badWords (slice " " s) then none else some s
):(C (Option String))

-- /Scratchpad

@@ -57,16 +57,16 @@ provisional feature set.
A working type system should have the following parts, which I will implement in roughly this order:

\begin{itemize}
  \item \textbf{Type inference engine and type checker} This will be an extension of
  the Hindley-Milner algorithm, which simultaneously unifies and completes partial type
  annotations, and recognizes conflicts.
  \item \textbf{Typeclass solver} At the moment this appears to be a relatively simple piece of
  code, but I'm not entirely confident that complications won't arise as its responsibilities
  become clearer, so I consider it a separate component.
  \item \textbf{Executor} Orchid is a statically typed language, so it should eventually be compiled
  with LLVM, but in order to demonstrate the usability of my type system I will have to write
  an experimental interpreter. Since types are already basically expressions of type type,
  parts of the executor will coincide with parts of the type inference engine.
\end{itemize}

\section{Literature Review}
@@ -99,12 +99,12 @@ in the same group.
At a minimum, the following must be valid reduction steps:

\begin{itemize}
  \item $\beta$-reduction
  \item fixed point normalization, which simply means identifying that a subexpression has
  reduced to an expression that contains the original. When a fixed point is detected, the
  recursive expression is converted to a form that uses the Y-combinator (illustrated below). This operation
  is ordered before $\beta$-reductions of the expression in the BFS tree but otherwise has
  the same precedence.
\end{itemize}

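As a concrete illustration of the fixed-point rule (an example added by the editor, not taken from the plan itself): if a name $f$ reduces to a term that contains $f$, the recursive occurrence is abstracted out as a fresh variable $r$ and the definition is wrapped in the Y-combinator.

\[
  f \;\to_\beta\; \lambda x.\, g\,(f\,x)
  \qquad\Longrightarrow\qquad
  f := Y\,\bigl(\lambda r.\, \lambda x.\, g\,(r\,x)\bigr),
  \qquad Y = \lambda h.\,(\lambda z.\, h\,(z\,z))\,(\lambda z.\, h\,(z\,z))
\]

This is the same shape the unification notes elsewhere in this commit call the recursive normal form.
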

\subsection{Typeclass solver}

@@ -1,20 +1,20 @@
@online{suckerpinch,
  title = {Generalized kerning is undecidable! But anagraphing is possible.},
  author = {suckerpinch},
  date = {dec, 2017},
  organization = {YouTube},
  url = {https://www.youtube.com/watch?v=8\_npHZbe3qM}
}

@phdthesis{tubella,
  author = {Jordi Tubella and Antonio González},
  school = {Universitat Politècnica de Catalunya},
  title = {A Partial Breadth-First Execution Model for Prolog},
  year = {1994}
}

@misc{yallop,
  author = {Jeremy Yallop and Leo White},
  howpublished = {University of Cambridge},
  title = {Lightweight higher-kinded polymorphism}
}

@@ -4,13 +4,13 @@
- enqueue evaluation steps for each of them and put them in a unification group
- evaluation step refers to previous step, complete expression tree
- unification **succeeds** if either
  - the trees are syntactically identical in any two steps between the targets (sketched in code below)
  - unification succeeds for all substeps:
    - try to find an ancestor step that provably produces the same value as any lambda in this
      step (for example, by syntactic equality)
    - if found, substitute it with the recursive normal form of the lambda
      - recursive normal form is `Apply(Y, \r.[body referencing r on point of recursion])`
    - find all `Apply(\x.##, ##)` nodes in the tree and execute them
- unification **fails** if a member of the concrete tree differs (only outermost steps add to
  the concrete tree so it belongs to the group and not the resolution) or no substeps are found
  for a resolution step _(failure: unresolved higher kinded type)_
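The cheap success case above, "the trees are syntactically identical", is plain structural equality of the expression trees. A self-contained sketch of that check, written by the editor against a simplified expression type rather than the crate's real `Expr`/`Clause`:

```rust
// Simplified stand-in for the typed expression tree; illustration only.
#[derive(Debug, Clone)]
enum Term {
    Arg(usize),                  // argument reference by position
    Lambda(Box<Term>),           // \x. body
    Apply(Box<Term>, Box<Term>), // f x
    Literal(i64),
}

/// Two steps unify trivially if their trees match node for node.
fn syntactically_identical(a: &Term, b: &Term) -> bool {
    match (a, b) {
        (Term::Arg(x), Term::Arg(y)) => x == y,
        (Term::Literal(x), Term::Literal(y)) => x == y,
        (Term::Lambda(x), Term::Lambda(y)) => syntactically_identical(x, y),
        (Term::Apply(f1, x1), Term::Apply(f2, x2)) =>
            syntactically_identical(f1, f2) && syntactically_identical(x1, x2),
        _ => false,
    }
}

fn main() {
    let id = Term::Lambda(Box::new(Term::Arg(0)));
    let call = Term::Apply(Box::new(id.clone()), Box::new(Term::Literal(1)));
    assert!(syntactically_identical(&call, &call.clone()));
    assert!(!syntactically_identical(&id, &call));
}
```

The real comparison also has to ignore type annotations and skip autos, which is what `partial_hash_rec` in the Rust sources further down prepares for.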

@@ -1,15 +1,14 @@
{
  "folders": [
    {
-     "path": ".."
+     "path": "."
    }
  ],
  "settings": {},
  "extensions": {
    "recommendations": [
      "tomoki1207.pdf",
-     "James-Yu.latex-workshop",
-     "rust-lang.rust-analyzer",
+     "james-yu.latex-workshop",
      "bungcip.better-toml"
    ]
  }

rust-toolchain.toml (new file, 2 lines)
@@ -0,0 +1,2 @@
+[toolchain]
+channel = "nightly"

@@ -1,79 +1,84 @@
use mappable_rc::Mrc;

-use crate::utils::collect_to_mrc;
+use crate::utils::{collect_to_mrc, to_mrc_slice};

use super::super::representations::typed::{Clause, Expr};

pub fn apply_lambda(id: u64, value: Mrc<Expr>, body: Mrc<Expr>) -> Mrc<Expr> {
  apply_lambda_expr_rec(id, value, Mrc::clone(&body))
    .unwrap_or(body)
}

fn apply_lambda_expr_rec(
  id: u64, value: Mrc<Expr>, expr: Mrc<Expr>
) -> Option<Mrc<Expr>> {
  let Expr(clause, typ) = expr.as_ref();
  match clause {
-   Clause::Argument(arg_id) if *arg_id == id => {
+   Clause::LambdaArg(arg_id) | Clause::AutoArg(arg_id) if *arg_id == id => {
      let full_typ = collect_to_mrc(
        value.1.iter()
          .chain(typ.iter())
          .cloned()
      );
      Some(Mrc::new(Expr(value.0.to_owned(), full_typ)))
    }
    cl => {
-     apply_lambda_clause_rec(id, value, clause.clone())
+     apply_lambda_clause_rec(id, value, cl.clone())
        .map(|c| Mrc::new(Expr(c, Mrc::clone(typ))))
    }
  }
}

fn apply_lambda_clause_rec(
  id: u64, value: Mrc<Expr>, clause: Clause
) -> Option<Clause> {
  match clause {
    // Only element actually manipulated
-   Clause::Argument(id) => panic!(
-     "apply_lambda_expr_rec is supposed to eliminate this case"),
+   Clause::LambdaArg(_) | Clause::AutoArg(_) => Some(clause),
    // Traverse, yield Some if either had changed.
    Clause::Apply(f, x) => {
      let new_f = apply_lambda_expr_rec(
        id, Mrc::clone(&value), Mrc::clone(&f)
      );
      let new_x = apply_lambda_expr_rec(
        id, value, Mrc::clone(&x)
      );
      match (new_f, new_x) { // Mind the shadows
        (None, None) => None,
        (None, Some(x)) => Some(Clause::Apply(f, x)),
        (Some(f), None) => Some(Clause::Apply(f, x)),
        (Some(f), Some(x)) => Some(Clause::Apply(f, x))
      }
    },
    Clause::Lambda(own_id, t, b) => apply_lambda__traverse_param(id, value, own_id, t, b, Clause::Lambda),
    Clause::Auto(own_id, t, b) => apply_lambda__traverse_param(id, value, own_id, t, b, Clause::Auto),
    // Leaf nodes
    Clause::Atom(_) | Clause::ExternFn(_) | Clause::Literal(_) => None
  }
}

fn apply_lambda__traverse_param(
  id: u64, value: Mrc<Expr>,
- own_id: u64, t: Option<Mrc<Clause>>, b: Mrc<Expr>,
- wrap: impl Fn(u64, Option<Mrc<Clause>>, Mrc<Expr>) -> Clause
+ own_id: u64, typ: Mrc<[Clause]>, b: Mrc<Expr>,
+ wrap: impl Fn(u64, Mrc<[Clause]>, Mrc<Expr>) -> Clause
) -> Option<Clause> {
- let new_t = t.and_then(|t| apply_lambda_clause_rec(
-   id, Mrc::clone(&value), t.as_ref().clone()
- ));
+ let mut any_t = false;
+ let mut t_acc = vec![];
+ for t in typ.iter() {
+   let newt = apply_lambda_clause_rec(id, Mrc::clone(&value), t.clone());
+   any_t |= newt.is_some();
+   t_acc.push(newt.unwrap_or_else(|| t.clone()))
+ }
  // Respect shadowing
  let new_b = if own_id == id {None} else {
    apply_lambda_expr_rec(id, value, Mrc::clone(&b))
  };
- match (new_t, new_b) { // Mind the shadows
-   (None, None) => None,
-   (None, Some(b)) => Some(wrap(own_id, t, b)),
-   (Some(t), None) => Some(wrap(own_id, Some(Mrc::new(t)), b)),
-   (Some(t), Some(b)) => Some(wrap(own_id, Some(Mrc::new(t)), b))
- }
+ if any_t { // mind the shadows
+   let typ = to_mrc_slice(t_acc);
+   if let Some(b) = new_b {
+     Some(wrap(own_id, typ, b))
+   } else {Some(wrap(own_id, typ, b))}
+ } else if let Some(b) = new_b {
+   Some(wrap(own_id, typ, b))
+ } else {Some(wrap(own_id, typ, b))}
}
@@ -1,104 +0,0 @@
|
|||||||
use std::any::Any;
|
|
||||||
use std::fmt::{Display, Debug};
|
|
||||||
use std::hash::Hash;
|
|
||||||
|
|
||||||
use mappable_rc::Mrc;
|
|
||||||
|
|
||||||
use crate::representations::typed::{Expr, Clause};
|
|
||||||
|
|
||||||
pub trait ExternError: Display {}
|
|
||||||
|
|
||||||
/// Represents an externally defined function from the perspective of the executor
|
|
||||||
/// Since Orchid lacks basic numerical operations, these are also external functions.
|
|
||||||
pub struct ExternFn {
|
|
||||||
name: String, param: Mrc<Expr>, rttype: Mrc<Expr>,
|
|
||||||
function: Mrc<dyn Fn(Clause) -> Result<Clause, Mrc<dyn ExternError>>>
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ExternFn {
|
|
||||||
pub fn new<F: 'static + Fn(Clause) -> Result<Clause, Mrc<dyn ExternError>>>(
|
|
||||||
name: String, param: Mrc<Expr>, rttype: Mrc<Expr>, f: F
|
|
||||||
) -> Self {
|
|
||||||
Self {
|
|
||||||
name, param, rttype,
|
|
||||||
function: Mrc::map(Mrc::new(f), |f| {
|
|
||||||
f as &dyn Fn(Clause) -> Result<Clause, Mrc<dyn ExternError>>
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pub fn name(&self) -> &str {&self.name}
|
|
||||||
pub fn apply(&self, arg: Clause) -> Result<Clause, Mrc<dyn ExternError>> {(self.function)(arg)}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Clone for ExternFn { fn clone(&self) -> Self { Self {
|
|
||||||
name: self.name.clone(),
|
|
||||||
param: Mrc::clone(&self.param),
|
|
||||||
rttype: Mrc::clone(&self.rttype),
|
|
||||||
function: Mrc::clone(&self.function)
|
|
||||||
}}}
|
|
||||||
impl Eq for ExternFn {}
|
|
||||||
impl PartialEq for ExternFn {
|
|
||||||
fn eq(&self, other: &Self) -> bool { self.name() == other.name() }
|
|
||||||
}
|
|
||||||
impl Hash for ExternFn {
|
|
||||||
fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.name.hash(state) }
|
|
||||||
}
|
|
||||||
impl Debug for ExternFn {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
write!(f, "##EXTERN[{}]:{:?} -> {:?}##", self.name(), self.param, self.rttype)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait Atomic: Any + Debug where Self: 'static {
|
|
||||||
fn as_any(&self) -> &dyn Any;
|
|
||||||
fn definitely_eq(&self, _other: &dyn Any) -> bool;
|
|
||||||
fn hash(&self, hasher: &mut dyn std::hash::Hasher);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Represents a unit of information from the perspective of the executor. This may be
|
|
||||||
/// something like a file descriptor which functions can operate on, but it can also be
|
|
||||||
/// information in the universe of types or kinds such as the type of signed integers or
|
|
||||||
/// the kind of types. Ad absurdum it can also be just a number, although Literal is
|
|
||||||
/// preferable for types it's defined on.
|
|
||||||
pub struct Atom {
|
|
||||||
typ: Mrc<Expr>,
|
|
||||||
data: Mrc<dyn Atomic>
|
|
||||||
}
|
|
||||||
impl Atom {
|
|
||||||
pub fn new<T: 'static + Atomic>(data: T, typ: Mrc<Expr>) -> Self { Self{
|
|
||||||
typ,
|
|
||||||
data: Mrc::map(Mrc::new(data), |d| d as &dyn Atomic)
|
|
||||||
} }
|
|
||||||
pub fn data(&self) -> &dyn Atomic { self.data.as_ref() as &dyn Atomic }
|
|
||||||
pub fn try_cast<T: Atomic>(&self) -> Result<&T, ()> {
|
|
||||||
self.data().as_any().downcast_ref().ok_or(())
|
|
||||||
}
|
|
||||||
pub fn is<T: 'static>(&self) -> bool { self.data().as_any().is::<T>() }
|
|
||||||
pub fn cast<T: 'static>(&self) -> &T {
|
|
||||||
self.data().as_any().downcast_ref().expect("Type mismatch on Atom::cast")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Clone for Atom {
|
|
||||||
fn clone(&self) -> Self { Self {
|
|
||||||
typ: Mrc::clone(&self.typ),
|
|
||||||
data: Mrc::clone(&self.data)
|
|
||||||
} }
|
|
||||||
}
|
|
||||||
impl Hash for Atom {
|
|
||||||
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
|
|
||||||
self.data.hash(state);
|
|
||||||
self.typ.hash(state)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl Debug for Atom {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
write!(f, "##ATOM[{:?}]:{:?}##", self.data(), self.typ)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl Eq for Atom {}
|
|
||||||
impl PartialEq for Atom {
|
|
||||||
fn eq(&self, other: &Self) -> bool {
|
|
||||||
self.data().definitely_eq(other.data().as_any())
|
|
||||||
}
|
|
||||||
}
|
|
||||||

@@ -5,26 +5,26 @@ use crate::utils::collect_to_mrc;
use super::super::representations::typed::{Clause, Expr};

fn normalize(Expr(clause, typ): Expr) -> Expr {
  todo!()
}

fn collect_autos(
  Expr(clause, typ): Expr,
  arg_types: Vec<Mrc<[Clause]>>,
  indirect_argt_trees: Vec<Mrc<[Clause]>>,
  sunk_types: &mut dyn Iterator<Item = Clause>
) -> (Vec<Mrc<[Clause]>>, Expr) {
  if let Clause::Auto(argt, body) = clause {

  }
  else {(
    arg_types,
    Expr(
      clause,
      collect_to_mrc(
        typ.iter().cloned()
          .chain(sunk_types)
      )
    )
  )}
}

@@ -8,49 +8,41 @@ use super::super::representations::typed::{Clause, Expr};
use super::super::utils::Stackframe;

const PARAMETRICS_INLINE_COUNT:usize = 5;
-type Parametrics<'a> = ProtoMap<'a, u64, bool, PARAMETRICS_INLINE_COUNT>;
+// type Parametrics<'a> = ProtoMap<'a, u64, bool, PARAMETRICS_INLINE_COUNT>;

/// Hash the parts of an expression that are required to be equal for syntactic equality.
pub fn partial_hash_rec<H: Hasher>(
  Expr(clause, _): &Expr, state: &mut H,
- mut parametrics: Parametrics
+ parametrics: Option<&Stackframe<u64>>
) {
  match clause {
    // Skip autos
    Clause::Auto(id, _, body) => {
-     parametrics.set(id, true);
      partial_hash_rec(body, state, parametrics)
    }
    // Annotate everything else with a prefix
    // - Recurse into the tree of lambdas and calls - classic lambda calc
    Clause::Lambda(id, _, body) => {
      state.write_u8(0);
-     parametrics.set(id, false);
-     partial_hash_rec(body, state, parametrics)
+     partial_hash_rec(body, state, Some(&Stackframe::opush(parametrics, *id)))
    }
    Clause::Apply(f, x) => {
      state.write_u8(1);
      partial_hash_rec(f, state, parametrics.clone());
      partial_hash_rec(x, state, parametrics);
    }
+   Clause::AutoArg(..) => state.write_u8(2),
    // - Only recognize the depth of an argument if it refers to a non-auto parameter
-   Clause::Argument(own_id) => {
-     let (pos, is_auto) = parametrics.iter()
-       .filter_map(|(id, is_auto)| is_auto.map(|is_auto| (*id, is_auto)))
-       .find_position(|(id, is_auto)| id == own_id)
-       .map(|(pos, (_, is_auto))| (pos, is_auto))
-       .unwrap_or((usize::MAX, false));
-     // If the argument references an auto, acknowledge its existence
-     if is_auto {
-       state.write_u8(2)
-     } else {
-       state.write_u8(3);
-       state.write_usize(pos)
-     }
-   }
+   Clause::LambdaArg(own_id) => {
+     let pos = parametrics
+       .and_then(|sf| sf.iter().position(|id| id == own_id))
+       .unwrap_or(usize::MAX);
+     state.write_u8(3);
+     state.write_usize(pos)
+   }
    // - Hash leaves like normal
    Clause::Literal(lit) => { state.write_u8(4); lit.hash(state) }
    Clause::Atom(at) => { state.write_u8(5); at.hash(state) }
    Clause::ExternFn(f) => { state.write_u8(6); f.hash(state) }
  }
}

@@ -10,88 +10,88 @@ use super::super::representations::typed::{Clause, Expr};
/// Call the function with the first Expression that isn't an Auto,
/// wrap all elements in the returned iterator back in the original sequence of Autos.
pub fn skip_autos<'a,
  F: 'a + FnOnce(Mrc<Expr>) -> I,
  I: Iterator<Item = Mrc<Expr>> + 'static
>(
  expr: Mrc<Expr>, function: F
) -> BoxedIter<'static, Mrc<Expr>> {
  if let Expr(Clause::Auto(id, arg, body), typ) = expr.as_ref() {
    return Box::new(skip_autos(Mrc::clone(body), function).map({
-     let arg = arg.as_ref().map(Mrc::clone);
+     let arg = Mrc::clone(arg);
      let typ = Mrc::clone(typ);
      move |body| {
        Mrc::new(Expr(Clause::Auto(
          *id,
-         arg.as_ref().map(Mrc::clone),
+         Mrc::clone(&arg),
          body
        ), Mrc::clone(&typ)))
      }
    })) as BoxedIter<'static, Mrc<Expr>>
  }
  Box::new(function(expr))
}

/// Produces an iterator of every expression that can be produced from this one through B-reduction.
fn direct_reductions(ex: Mrc<Expr>) -> impl Iterator<Item = Mrc<Expr>> {
  skip_autos(ex, |mexpr| {
    let Expr(clause, typ_ref) = mexpr.as_ref();
    match clause {
      Clause::Apply(f, x) => box_chain!(
        skip_autos(Mrc::clone(f), |mexpr| {
          let Expr(f, _) = mexpr.as_ref();
          match f {
            Clause::Lambda(id, _, body) => box_once(
              apply_lambda(*id, Mrc::clone(x), Mrc::clone(body))
            ),
            Clause::ExternFn(xfn) => {
              let Expr(xval, xtyp) = x.as_ref();
              xfn.apply(xval.clone())
                .map(|ret| box_once(Mrc::new(Expr(ret, Mrc::clone(xtyp)))))
                .unwrap_or(box_empty())
            },
            // Parametric newtypes are atoms of function type
-           Clause::Atom(..) | Clause::Argument(..) | Clause::Apply(..) => box_empty(),
+           Clause::Atom(..) | Clause::LambdaArg(..) | Clause::AutoArg(..) | Clause::Apply(..) => box_empty(),
            Clause::Literal(lit) =>
              panic!("Literal expression {lit:?} can't be applied as function"),
            Clause::Auto(..) => unreachable!("skip_autos should have filtered this"),
          }
        }),
        direct_reductions(Mrc::clone(f)).map({
          let typ = Mrc::clone(typ_ref);
          let x = Mrc::clone(x);
          move |f| Mrc::new(Expr(Clause::Apply(
            f,
            Mrc::clone(&x)
          ), Mrc::clone(&typ)))
        }),
        direct_reductions(Mrc::clone(x)).map({
          let typ = Mrc::clone(typ_ref);
          let f = Mrc::clone(f);
          move |x| Mrc::new(Expr(Clause::Apply(
            Mrc::clone(&f),
            x
          ), Mrc::clone(&typ)))
        })
      ),
      Clause::Lambda(id, argt, body) => {
        let id = *id;
        let typ = Mrc::clone(typ_ref);
-       let argt = argt.as_ref().map(Mrc::clone);
+       let argt = Mrc::clone(argt);
        let body = Mrc::clone(body);
        let body_reductions = direct_reductions(body)
          .map(move |body| {
-           let argt = argt.as_ref().map(Mrc::clone);
+           let argt = Mrc::clone(&argt);
            Mrc::new(Expr(
              Clause::Lambda(id, argt, body),
              Mrc::clone(&typ)
            ))
          });
        Box::new(body_reductions)
      },
-     Clause::Literal(..) | Clause::ExternFn(..) | Clause::Atom(..) | Clause::Argument(..) =>
-       box_empty(),
      Clause::Auto(..) => unreachable!("skip_autos should have filtered this"),
+     Clause::Literal(..) | Clause::ExternFn(..) | Clause::Atom(..)
+       | Clause::LambdaArg(..) | Clause::AutoArg(..) => box_empty(),
    }
  })
}

@@ -1,13 +1,11 @@
use std::collections::HashMap;
-use std::hash::{Hasher, Hash};
-use std::iter;

+use itertools::Itertools;
use mappable_rc::Mrc;

-use crate::utils::{ProtoMap, Side};
+use crate::utils::{ProtoMap, Side, mrc_empty_slice, collect_to_mrc, Stackframe, mrc_concat, Product2};

use super::super::representations::typed::{Clause, Expr};
-use super::super::utils::Stackframe;

pub fn swap<T, U>((t, u): (T, U)) -> (U, T) { (u, t) }

@@ -17,24 +15,92 @@ pub fn swap<T, U>((t, u): (T, U)) -> (U, T) { (u, t) }
|
|||||||
// - get rid of leftovers from Explicit
|
// - get rid of leftovers from Explicit
|
||||||
// - adapt to new index-based system
|
// - adapt to new index-based system
|
||||||
|
|
||||||
// =@= =&= =%= =#= =$= =?= =!= =/=
|
enum UnifError {
|
||||||
// <@> <&> <%> <#> <$> <?> <!> </>
|
Conflict,
|
||||||
// |@| |&| |%| |#| |$| |?| |!| |/|
|
}
|
||||||
// {@} {&} {%} {#} {$} {?} {!} {/}
|
|
||||||
// (@) (&) (%) (#) ($) (?) (!) (/)
|
type LambdaMap<'a> = Option<&'a Stackframe<'a, (u64, u64)>>;
|
||||||
// [@] [&] [%] [#] [$] [?] [!] [/]
|
|
||||||
|
|
||||||
/// The context associates a given variable (by absolute index) on a given side to
|
/// The context associates a given variable (by absolute index) on a given side to
|
||||||
/// an expression on the opposite side rooted at the specified depth.
|
/// an expression on the opposite side rooted at the specified depth.
|
||||||
/// The root depths are used to translate betwee de Brujin arguments and absolute indices.
|
/// The root depths are used to translate betwee de Brujin arguments and absolute indices.
|
||||||
struct Context(HashMap<u64, Mrc<Expr>>);
|
struct Context(HashMap<u64, Mrc<Expr>>);
|
||||||
impl Context {
|
impl Context {
|
||||||
fn set(&mut self, id: u64, value: Mrc<Expr>) {
|
fn set(&mut self, id: u64, value: &Mrc<Expr>, lambdas: LambdaMap) -> Result<Option<Mrc<Expr>>, UnifError> {
|
||||||
// If already defined, then it must be an argument
|
Ok(
|
||||||
if let Some(value) = self.0.get(&id) {
|
if let Some(local) = self.0.get(&id) {
|
||||||
if let Clause::Argument(opposite_up) ex.0
|
Some(
|
||||||
}
|
self.unify_expr(local, value, lambdas)?
|
||||||
}
|
.pick(Mrc::clone(local), Mrc::clone(value))
|
||||||
|
)
|
||||||
|
} else { None }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn unify_expr(&mut self,
|
||||||
|
left: &Mrc<Expr>, right: &Mrc<Expr>, lambdas: LambdaMap
|
||||||
|
) -> Result<Product2<Mrc<Expr>>, UnifError> {
|
||||||
|
let Expr(left_val, left_typs) = left.as_ref();
|
||||||
|
let Expr(right_val, right_typs) = right.as_ref();
|
||||||
|
let val = match (left_val, right_val) {
|
||||||
|
(Clause::AutoArg(l), Clause::AutoArg(r)) if l == r => Product2::Either,
|
||||||
|
(Clause::AutoArg(id), _) => self.set(*id, left, lambdas)?.as_ref()
|
||||||
|
.map_or(Product2::Left, |e| Product2::New(e.0.clone())),
|
||||||
|
(_, Clause::AutoArg(id)) => self.set(*id, right, lambdas)?.as_ref()
|
||||||
|
.map_or(Product2::Right, |e| Product2::New(e.0.clone())),
|
||||||
|
_ => self.unify_clause(left_val, right_val, lambdas)?
|
||||||
|
};
|
||||||
|
Ok(match val {
|
||||||
|
Product2::Either if right_typs.is_empty() && left_typs.is_empty() => Product2::Either,
|
||||||
|
Product2::Left | Product2::Either if right_typs.is_empty() => Product2::Left,
|
||||||
|
Product2::Right | Product2::Either if left_typs.is_empty() => Product2::Right,
|
||||||
|
product => {
|
||||||
|
let all_types = mrc_concat(left_typs, right_typs);
|
||||||
|
Product2::New(Mrc::new(Expr(
|
||||||
|
product.pick(left_val.clone(), right_val.clone()),
|
||||||
|
all_types
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn unify_clause(&mut self,
|
||||||
|
left: &Clause, right: &Clause, lambdas: LambdaMap
|
||||||
|
) -> Result<Product2<Clause>, UnifError> {
|
||||||
|
Ok(match (left, right) {
|
||||||
|
(Clause::Literal(l), Clause::Literal(r)) if l == r => Product2::Either,
|
||||||
|
(Clause::Atom(l), Clause::Atom(r)) if l == r => Product2::Either,
|
||||||
|
(Clause::ExternFn(l), Clause::ExternFn(r)) if l == r => Product2::Either,
|
||||||
|
(Clause::LambdaArg(l), Clause::LambdaArg(r)) => if l == r {Product2::Either} else {
|
||||||
|
let is_equal = Stackframe::o_into_iter(lambdas)
|
||||||
|
.first_some(|(l_candidate, r_candidate)| {
|
||||||
|
if l_candidate == l && r_candidate == r {Some(true)} // match
|
||||||
|
else if l_candidate == l || r_candidate == r {Some(false)} // shadow
|
||||||
|
else {None} // irrelevant
|
||||||
|
}).unwrap_or(false);
|
||||||
|
// Reference:
|
||||||
|
if is_equal {Product2::Left} else {return Err(UnifError::Conflict)}
|
||||||
|
}
|
||||||
|
(Clause::AutoArg(_), _) | (_, Clause::AutoArg(_)) => {
|
||||||
|
unreachable!("unify_expr should have handled this")
|
||||||
|
}
|
||||||
|
(Clause::Lambda(l_id, l_arg, l_body), Clause::Lambda(r_id, r_arg, r_body)) => {
|
||||||
|
let lambdas = Stackframe::opush(lambdas, (*l_id, *r_id));
|
||||||
|
self.unify_expr(l_body, r_body, Some(&lambdas))?
|
||||||
|
.map(|ex| Clause::Lambda(*l_id, mrc_empty_slice(), ex))
|
||||||
|
}
|
||||||
|
(Clause::Apply(l_f, l_x), Clause::Apply(r_f, r_x)) => {
|
||||||
|
self.unify_expr(l_f, r_f, lambdas)?.join((Mrc::clone(l_f), Mrc::clone(r_f)),
|
||||||
|
self.unify_expr(l_x, r_x, lambdas)?, (Mrc::clone(l_x), Mrc::clone(r_x))
|
||||||
|
).map(|(f, x)| Clause::Apply(f, x))
|
||||||
|
}
|
||||||
|
(Clause::Auto(l_id, l_arg, l_body), Clause::Auto(r_id, r_arg, r_body)) => {
|
||||||
|
let typ = self.unify(l_arg, r_arg, lambdas)?;
|
||||||
|
let body = self.unify_expr(l_body, r_body, lambdas)?;
|
||||||
|
typ.join((l_arg, r_arg), )
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const IS_AUTO_INLINE:usize = 5;
|
const IS_AUTO_INLINE:usize = 5;
|
||||||
@@ -42,22 +108,22 @@ const IS_AUTO_INLINE:usize = 5;
// All data to be forwarded during recursion about one half of a unification task
#[derive(Clone)]
struct UnifHalfTask<'a> {
  /// The expression to be unified
  expr: &'a Expr,
  /// Stores whether a given uid is auto or lambda
  is_auto: ProtoMap<'a, usize, bool, IS_AUTO_INLINE>
}

impl<'a> UnifHalfTask<'a> {
  fn push_auto(&mut self, body: &Expr, key: usize) {
    self.expr = body;
    self.is_auto.set(&key, true);
  }

  fn push_lambda(&mut self, body: &Expr, key: usize) {
    self.expr = body;
    self.is_auto.set(&key, false);
  }
}

type Ctx = HashMap<usize, Mrc<Expr>>;
@@ -68,63 +134,63 @@ type Ctx = HashMap<usize, Mrc<Expr>>;
///
/// Context associates variables with subtrees resolved on the opposite side
pub fn unify_syntax_rec( // the stacks store true for autos, false for lambdas
  ctx: &mut HashMap<(Side, usize), (usize, Mrc<Expr>)>,
  ltask@UnifHalfTask{ expr: lexpr@Expr(lclause, _), .. }: UnifHalfTask,
  rtask@UnifHalfTask{ expr: rexpr@Expr(rclause, _), .. }: UnifHalfTask
) -> Option<(UnifResult, UnifResult)> {
  // Ensure that ex1 is a value-level construct
  match lclause {
    Clause::Auto(id, _, body) => {
      let res = unify_syntax_rec(ltask.push_auto(body).0, rtask);
      return if ltask.explicits.is_some() {
        res.map(|(r1, r2)| (r1.useExplicit(), r2))
      } else {res}
    }
    _ => ()
  };
  // Reduce ex2's auto handling to ex1's. In the optimizer we trust
  if let Clause::Auto(..) | Clause::Explicit(..) = rclause {
    return unify_syntax_rec(rtask, ltask).map(swap);
  }
  // Neither ex1 nor ex2 can be Auto or Explicit
  match (lclause, rclause) {
    // recurse into both
    (Clause::Lambda(_, lbody), Clause::Lambda(_, rbody)) => unify_syntax_rec(
      ltask.push_lambda(lbody),
      rtask.push_lambda(rbody)
    ),
    (Clause::Apply(lf, lx), Clause::Apply(rf, rx)) => {
      let (lpart, rpart) = unify_syntax_rec(
        ltask.push_expr(lf),
        rtask.push_expr(rf)
      )?;
      lpart.dropUsedExplicits(&mut ltask);
      rpart.dropUsedExplicits(&mut rtask);
      unify_syntax_rec(ltask.push_expr(lx), rtask.push_expr(rx))
    }
    (Clause::Atom(latom), Clause::Atom(ratom)) => {
      if latom != ratom { None }
      else { Some((UnifResult::default(), UnifResult::default())) }
    }
    (Clause::ExternFn(lf), Clause::ExternFn(rf)) => {
      if lf != rf { None }
      else { Some((UnifResult::default(), UnifResult::default())) }
    }
    (Clause::Literal(llit), Clause::Literal(rlit)) => {
      if llit != rlit { None }
      else { Some((UnifResult::default(), UnifResult::default())) }
    }
    // TODO Select a representative
    (Clause::Argument(depth1), Clause::Argument(depth2)) => {
      !*stack1.iter().nth(*depth1).unwrap_or(&false)
      && !*stack2.iter().nth(*depth2).unwrap_or(&false)
      && stack1.iter().count() - depth1 == stack2.iter().count() - depth2
    }
    // TODO Assign a substitute
    (Clause::Argument(placeholder), _) => {

    }
  }
}

// Tricky unifications
104
src/foreign.rs
Normal file
104
src/foreign.rs
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
use std::any::Any;
|
||||||
|
use std::fmt::{Display, Debug};
|
||||||
|
use std::hash::Hash;
|
||||||
|
|
||||||
|
use mappable_rc::Mrc;
|
||||||
|
|
||||||
|
use crate::representations::typed::{Expr, Clause};
|
||||||
|
|
||||||
|
pub trait ExternError: Display {}
|
||||||
|
|
||||||
|
/// Represents an externally defined function from the perspective of the executor
|
||||||
|
/// Since Orchid lacks basic numerical operations, these are also external functions.
|
||||||
|
pub struct ExternFn {
|
||||||
|
name: String, param: Mrc<Expr>, rttype: Mrc<Expr>,
|
||||||
|
  function: Mrc<dyn Fn(Clause) -> Result<Clause, Mrc<dyn ExternError>>>
}

impl ExternFn {
  pub fn new<F: 'static + Fn(Clause) -> Result<Clause, Mrc<dyn ExternError>>>(
    name: String, param: Mrc<Expr>, rttype: Mrc<Expr>, f: F
  ) -> Self {
    Self {
      name, param, rttype,
      function: Mrc::map(Mrc::new(f), |f| {
        f as &dyn Fn(Clause) -> Result<Clause, Mrc<dyn ExternError>>
      })
    }
  }
  pub fn name(&self) -> &str {&self.name}
  pub fn apply(&self, arg: Clause) -> Result<Clause, Mrc<dyn ExternError>> {(self.function)(arg)}
}

impl Clone for ExternFn { fn clone(&self) -> Self { Self {
  name: self.name.clone(),
  param: Mrc::clone(&self.param),
  rttype: Mrc::clone(&self.rttype),
  function: Mrc::clone(&self.function)
}}}
impl Eq for ExternFn {}
impl PartialEq for ExternFn {
  fn eq(&self, other: &Self) -> bool { self.name() == other.name() }
}
impl Hash for ExternFn {
  fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.name.hash(state) }
}
impl Debug for ExternFn {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    write!(f, "##EXTERN[{}]:{:?} -> {:?}##", self.name(), self.param, self.rttype)
  }
}

pub trait Atomic: Any + Debug where Self: 'static {
  fn as_any(&self) -> &dyn Any;
  fn definitely_eq(&self, _other: &dyn Any) -> bool;
  fn hash(&self, hasher: &mut dyn std::hash::Hasher);
}

/// Represents a unit of information from the perspective of the executor. This may be
/// something like a file descriptor which functions can operate on, but it can also be
/// information in the universe of types or kinds such as the type of signed integers or
/// the kind of types. Ad absurdum it can also be just a number, although Literal is
/// preferable for types it's defined on.
pub struct Atom {
  typ: Mrc<Expr>,
  data: Mrc<dyn Atomic>
}
impl Atom {
  pub fn new<T: 'static + Atomic>(data: T, typ: Mrc<Expr>) -> Self { Self{
    typ,
    data: Mrc::map(Mrc::new(data), |d| d as &dyn Atomic)
  } }
  pub fn data(&self) -> &dyn Atomic { self.data.as_ref() as &dyn Atomic }
  pub fn try_cast<T: Atomic>(&self) -> Result<&T, ()> {
    self.data().as_any().downcast_ref().ok_or(())
  }
  pub fn is<T: 'static>(&self) -> bool { self.data().as_any().is::<T>() }
  pub fn cast<T: 'static>(&self) -> &T {
    self.data().as_any().downcast_ref().expect("Type mismatch on Atom::cast")
  }
}

impl Clone for Atom {
  fn clone(&self) -> Self { Self {
    typ: Mrc::clone(&self.typ),
    data: Mrc::clone(&self.data)
  } }
}
impl Hash for Atom {
  fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
    self.data.hash(state);
    self.typ.hash(state)
  }
}
impl Debug for Atom {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    write!(f, "##ATOM[{:?}]:{:?}##", self.data(), self.typ)
  }
}
impl Eq for Atom {}
impl PartialEq for Atom {
  fn eq(&self, other: &Self) -> bool {
    self.data().definitely_eq(other.data().as_any())
  }
}
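The Atomic trait and Atom wrapper above lean on std::any::Any for runtime downcasting. A minimal standalone sketch of that pattern, not part of the commit (the FileDescriptor type and the free-standing try_cast helper are invented for illustration):

```rust
use std::any::Any;

// Hypothetical payload standing in for whatever an Atomic implementor wraps.
#[derive(Debug, PartialEq)]
struct FileDescriptor(i32);

// Mirrors the shape of Atom::try_cast: recover a concrete &T from a
// type-erased value, or fail cleanly when the stored type does not match.
fn try_cast<T: 'static>(data: &dyn Any) -> Result<&T, ()> {
    data.downcast_ref::<T>().ok_or(())
}

fn main() {
    let erased: Box<dyn Any> = Box::new(FileDescriptor(3));
    assert_eq!(try_cast::<FileDescriptor>(erased.as_ref()), Ok(&FileDescriptor(3)));
    assert!(try_cast::<String>(erased.as_ref()).is_err());
}
```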
107 src/main.rs
@@ -2,16 +2,19 @@
#![feature(core_intrinsics)]
#![feature(adt_const_params)]
#![feature(generic_const_exprs)]
+#![feature(generators, generator_trait)]

use std::env::current_dir;

-mod executor;
+// mod executor;
mod parse;
mod project;
mod utils;
mod representations;
mod rule;
-mod types;
+mod scheduler;
+pub(crate) mod foreign;
use file_loader::LoadingError;
pub use representations::ast;
use ast::{Expr, Clause};
@@ -19,14 +22,14 @@ use representations::typed as t;
use mappable_rc::Mrc;
use project::{rule_collector, Loaded, file_loader};
use rule::Repository;
-use utils::to_mrc_slice;
+use utils::{to_mrc_slice, mrc_empty_slice, one_mrc_slice};

fn literal(orig: &[&str]) -> Mrc<[String]> {
  to_mrc_slice(vliteral(orig))
}

fn vliteral(orig: &[&str]) -> Vec<String> {
  orig.iter().map(|&s| s.to_owned()).collect()
}

static PRELUDE:&str = r#"
@@ -40,62 +43,62 @@ export (match_sequence $lhs) >>= (match_sequence $rhs) =100=> (bind ($lhs) ($rhs

fn initial_tree() -> Mrc<[Expr]> {
  to_mrc_slice(vec![Expr(Clause::Name {
    local: None,
    qualified: literal(&["main", "main"])
  }, to_mrc_slice(vec![]))])
}

#[allow(unused)]
fn typed_notation_debug() {
-  let true_ex = t::Clause::Auto(0, None,
-    t::Clause::Lambda(1, Some(Mrc::new(t::Clause::Argument(0))),
-      t::Clause::Lambda(2, Some(Mrc::new(t::Clause::Argument(0))),
-        t::Clause::Argument(1).wrap_t(t::Clause::Argument(0))
+  let true_ex = t::Clause::Auto(0, mrc_empty_slice(),
+    t::Clause::Lambda(1, one_mrc_slice(t::Clause::AutoArg(0)),
+      t::Clause::Lambda(2, one_mrc_slice(t::Clause::AutoArg(0)),
+        t::Clause::LambdaArg(1).wrap_t(t::Clause::AutoArg(0))
      ).wrap()
    ).wrap()
  ).wrap();
-  let false_ex = t::Clause::Auto(0, None,
-    t::Clause::Lambda(1, Some(Mrc::new(t::Clause::Argument(0))),
-      t::Clause::Lambda(2, Some(Mrc::new(t::Clause::Argument(0))),
-        t::Clause::Argument(2).wrap_t(t::Clause::Argument(0))
+  let false_ex = t::Clause::Auto(0, mrc_empty_slice(),
+    t::Clause::Lambda(1, one_mrc_slice(t::Clause::AutoArg(0)),
+      t::Clause::Lambda(2, one_mrc_slice(t::Clause::AutoArg(0)),
+        t::Clause::LambdaArg(2).wrap_t(t::Clause::AutoArg(0))
      ).wrap()
    ).wrap()
  ).wrap();
  println!("{:?}", t::Clause::Apply(t::Clause::Apply(Mrc::clone(&true_ex), true_ex).wrap(), false_ex))
}

#[allow(unused)]
fn load_project() {
  let cwd = current_dir().unwrap();
  let collect_rules = rule_collector(move |n| -> Result<Loaded, LoadingError> {
    if n == literal(&["prelude"]) { Ok(Loaded::Module(PRELUDE.to_string())) }
    else { file_loader(cwd.clone())(n) }
  }, vliteral(&["...", ">>", ">>=", "[", "]", ",", "=", "=>"]));
  let rules = match collect_rules.try_find(&literal(&["main"])) {
    Ok(rules) => rules,
    Err(err) => panic!("{:#?}", err)
  };
  let mut tree = initial_tree();
  println!("Start processing {tree:?}");
  let repo = Repository::new(rules.as_ref().to_owned());
  println!("Ruleset: {repo:?}");
  xloop!(let mut i = 0; i < 10; i += 1; {
    match repo.step(Mrc::clone(&tree)) {
      Ok(Some(phase)) => {
        println!("Step {i}: {phase:?}");
        tree = phase;
      },
      Ok(None) => {
        println!("Execution complete");
        break
      },
      Err(e) => panic!("Rule error: {e:?}")
    }
  }; println!("Macro execution didn't halt"));
}

fn main() {
  // lambda_notation_debug();
  load_project();
}
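typed_notation_debug builds Church-encoded booleans as typed clauses. As a plain-Rust analogy (a sketch only, not the project's representation), Church true returns its first argument and Church false its second:

```rust
// Sketch: Church booleans as closures. `tru` selects the first argument and
// `fls` the second -- the same selection behaviour the nested Auto/Lambda
// clauses above encode for the typed notation.
fn main() {
    let tru = |a: &str, _b: &str| a.to_string();
    let fls = |_a: &str, b: &str| b.to_string();
    assert_eq!(tru("then-branch", "else-branch"), "then-branch");
    assert_eq!(fls("then-branch", "else-branch"), "else-branch");
}
```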
@@ -2,12 +2,12 @@ pub use chumsky::{self, prelude::*, Parser};

/// Parses Lua-style comments
pub fn comment_parser() -> impl Parser<char, String, Error = Simple<char>> {
  choice((
    just("--[").ignore_then(take_until(
      just("]--").ignored()
    )),
    just("--").ignore_then(take_until(
      just("\n").rewind().ignored().or(end())
    ))
  )).map(|(vc, ())| vc).collect().labelled("comment")
}
@@ -6,27 +6,27 @@
/// ```
#[macro_export]
macro_rules! enum_parser {
  ($p:path | $m:tt) => {
    {
      ::chumsky::prelude::filter_map(|s, l| {
        if let $p(x) = l { Ok(x) }
        else { Err(::chumsky::prelude::Simple::custom(s, $m))}
      })
    }
  };
  ($p:path >> $q:path; $i:ident) => {
    {
      use $p as srcpath;
      use $q as tgtpath;
      enum_parser!(srcpath::$i | (concat!("Expected ", stringify!($i)))).map(tgtpath::$i)
    }
  };
  ($p:path >> $q:path; $($i:ident),+) => {
    {
      ::chumsky::prelude::choice((
        $( enum_parser!($p >> $q; $i) ),+
      ))
    }
  };
  ($p:path) => { enum_parser!($p | (concat!("Expected ", stringify!($p)))) };
}
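enum_parser! wraps a chumsky filter_map around the pattern of pulling one variant's payload out of a token or reporting an error. A standalone sketch of that underlying pattern, with a made-up Token type and no chumsky involved:

```rust
#[derive(Debug)]
enum Token { Name(String), Int(u64) }

// What `enum_parser!(Token::Name)` does per token, minus the parser plumbing:
// extract the payload if the variant matches, otherwise produce an error.
fn expect_name(tok: Token) -> Result<String, String> {
    if let Token::Name(x) = tok { Ok(x) }
    else { Err(format!("Expected Name, found {:?}", tok)) }
}

fn main() {
    assert_eq!(expect_name(Token::Name("main".into())), Ok("main".to_string()));
    assert!(expect_name(Token::Int(42)).is_err());
}
```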
@@ -8,120 +8,120 @@ use super::lexer::Lexeme;

/// Parses any number of expr wrapped in (), [] or {}
fn sexpr_parser<P>(
  expr: P
) -> impl Parser<Lexeme, Clause, Error = Simple<Lexeme>> + Clone
where P: Parser<Lexeme, Expr, Error = Simple<Lexeme>> + Clone {
  Lexeme::paren_parser(expr.repeated()).map(|(del, b)| Clause::S(del, to_mrc_slice(b)))
}

/// Parses `\name.body` or `\name:type.body` where name is any valid name and type and body are
/// both expressions. Comments are allowed and ignored everywhere in between the tokens
fn lambda_parser<P>(
  expr: P
) -> impl Parser<Lexeme, Clause, Error = Simple<Lexeme>> + Clone
where P: Parser<Lexeme, Expr, Error = Simple<Lexeme>> + Clone {
  just(Lexeme::BS)
  .then_ignore(enum_parser!(Lexeme::Comment).repeated())
  .ignore_then(enum_parser!(Lexeme::Name))
  .then_ignore(enum_parser!(Lexeme::Comment).repeated())
  .then(
    just(Lexeme::Type)
    .then_ignore(enum_parser!(Lexeme::Comment).repeated())
    .ignore_then(expr.clone().repeated())
    .then_ignore(enum_parser!(Lexeme::Comment).repeated())
    .or_not().map(Option::unwrap_or_default)
  )
  .then_ignore(just(Lexeme::name(".")))
  .then_ignore(enum_parser!(Lexeme::Comment).repeated())
  .then(expr.repeated().at_least(1))
  .map(|((name, typ), body): ((String, Vec<Expr>), Vec<Expr>)| {
    // for ent in &mut body { ent.bind_parameter(&name) };
    Clause::Lambda(name, to_mrc_slice(typ), to_mrc_slice(body))
  })
}

/// see [lambda_parser] but `@` instead of `\` and the name is optional
fn auto_parser<P>(
  expr: P
) -> impl Parser<Lexeme, Clause, Error = Simple<Lexeme>> + Clone
where P: Parser<Lexeme, Expr, Error = Simple<Lexeme>> + Clone {
  just(Lexeme::At)
  .then_ignore(enum_parser!(Lexeme::Comment).repeated())
  .ignore_then(enum_parser!(Lexeme::Name).or_not())
  .then_ignore(enum_parser!(Lexeme::Comment).repeated())
  .then(
    just(Lexeme::Type)
    .then_ignore(enum_parser!(Lexeme::Comment).repeated())
    .ignore_then(expr.clone().repeated())
    .then_ignore(enum_parser!(Lexeme::Comment).repeated())
    .or_not().map(Option::unwrap_or_default)
  )
  .then_ignore(just(Lexeme::name(".")))
  .then_ignore(enum_parser!(Lexeme::Comment).repeated())
  .then(expr.repeated().at_least(1))
  .try_map(|((name, typ), body): ((Option<String>, Vec<Expr>), Vec<Expr>), s| {
    if name.is_none() && typ.is_empty() {
      Err(Simple::custom(s, "Auto without name or type has no effect"))
    } else {
      Ok(Clause::Auto(name, to_mrc_slice(typ), to_mrc_slice(body)))
    }
  })
}

/// Parses a sequence of names separated by :: <br/>
/// Comments are allowed and ignored in between
fn name_parser() -> impl Parser<Lexeme, Vec<String>, Error = Simple<Lexeme>> + Clone {
  enum_parser!(Lexeme::Name).separated_by(
    enum_parser!(Lexeme::Comment).repeated()
    .then(just(Lexeme::NS))
    .then(enum_parser!(Lexeme::Comment).repeated())
  ).at_least(1)
}

/// Parse any legal argument name starting with a `$`
fn placeholder_parser() -> impl Parser<Lexeme, String, Error = Simple<Lexeme>> + Clone {
  enum_parser!(Lexeme::Name).try_map(|name, span| {
    name.strip_prefix('$').map(&str::to_string)
      .ok_or_else(|| Simple::custom(span, "Not a placeholder"))
  })
}

/// Parse an expression
pub fn xpr_parser() -> impl Parser<Lexeme, Expr, Error = Simple<Lexeme>> {
  recursive(|expr| {
    let clause =
    enum_parser!(Lexeme::Comment).repeated()
    .ignore_then(choice((
      enum_parser!(Lexeme >> Literal; Int, Num, Char, Str).map(Clause::Literal),
      placeholder_parser().map(|key| Clause::Placeh{key, vec: None}),
      just(Lexeme::name("...")).to(true)
        .or(just(Lexeme::name("..")).to(false))
        .then(placeholder_parser())
        .then(
          just(Lexeme::Type)
          .ignore_then(enum_parser!(Lexeme::Int))
          .or_not().map(Option::unwrap_or_default)
        )
        .map(|((nonzero, key), prio)| Clause::Placeh{key, vec: Some((
          prio.try_into().unwrap(),
          nonzero
        ))}),
      name_parser().map(|qualified| Clause::Name {
        local: if qualified.len() == 1 {Some(qualified[0].clone())} else {None},
        qualified: to_mrc_slice(qualified)
      }),
      sexpr_parser(expr.clone()),
      lambda_parser(expr.clone()),
      auto_parser(expr.clone()),
      just(Lexeme::At).ignore_then(expr.clone()).map(|arg| {
        Clause::Explicit(Mrc::new(arg))
      })
    ))).then_ignore(enum_parser!(Lexeme::Comment).repeated());
    clause.clone().then(
      just(Lexeme::Type)
      .ignore_then(clause.clone())
      .repeated()
    )
    .map(|(val, typ)| Expr(val, to_mrc_slice(typ)))
  }).labelled("Expression")
}
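placeholder_parser accepts a name only when it carries a `$` prefix. A tiny sketch of that check outside the parser combinators (the helper name is invented):

```rust
// A token is a placeholder exactly when it starts with '$'; the prefix is
// stripped and the remainder becomes the placeholder key, as in `$lhs` -> "lhs".
fn as_placeholder(name: &str) -> Option<String> {
    name.strip_prefix('$').map(str::to_string)
}

fn main() {
    assert_eq!(as_placeholder("$lhs"), Some("lhs".to_string()));
    assert_eq!(as_placeholder("lhs"), None);
}
```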
@@ -9,15 +9,15 @@ use super::lexer::Lexeme;

#[derive(Debug, Clone)]
pub struct Import {
  pub path: Mrc<[String]>,
  /// If name is None, this is a wildcard import
  pub name: Option<String>
}

/// initialize a BoxedIter<BoxedIter<String>> with a single element.
fn init_table(name: String) -> BoxedIterIter<'static, String> {
  // I'm not at all confident that this is a good approach.
  box_once(box_once(name))
}

/// Parse an import command
@@ -26,44 +26,44 @@ fn init_table(name: String) -> BoxedIterIter<'static, String> {
/// crossplatform filename-legal characters but the symbols are explicitly allowed
/// to go wild. There's a blacklist in [name]
pub fn import_parser() -> impl Parser<Lexeme, Vec<Import>, Error = Simple<Lexeme>> {
  // TODO: this algorithm isn't cache friendly, copies a lot and is generally pretty bad.
  recursive(|expr: Recursive<Lexeme, BoxedIterIter<String>, Simple<Lexeme>>| {
    enum_parser!(Lexeme::Name)
    .separated_by(just(Lexeme::NS))
    .then(
      just(Lexeme::NS)
      .ignore_then(
        choice((
          expr.clone()
            .separated_by(just(Lexeme::name(",")))
            .delimited_by(just(Lexeme::LP('(')), just(Lexeme::RP('(')))
            .map(|v| box_flatten(v.into_iter()))
            .labelled("import group"),
          // Each expr returns a list of imports, flatten those into a common list
          just(Lexeme::name("*")).map(|_| init_table("*".to_string()))
            .labelled("wildcard import"), // Just a *, wrapped
          enum_parser!(Lexeme::Name).map(init_table)
            .labelled("import terminal") // Just a name, wrapped
        ))
      ).or_not()
    )
    .map(|(name, opt_post): (Vec<String>, Option<BoxedIterIter<String>>)| -> BoxedIterIter<String> {
      if let Some(post) = opt_post {
        Box::new(post.map(move |el| {
          box_chain!(name.clone().into_iter(), el)
        }))
      } else {
        box_once(into_boxed_iter(name))
      }
    })
  }).map(|paths| {
    paths.filter_map(|namespaces| {
      let path = to_mrc_slice(namespaces.collect_vec());
      let path_prefix = mrc_derive(&path, |p| &p[..p.len() - 1]);
      match path.last()?.as_str() {
        "*" => Some(Import { path: path_prefix, name: None }),
        name => Some(Import { path: path_prefix, name: Some(name.to_owned()) })
      }
    }).collect()
  }).labelled("import")
}
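import_parser turns a nested import such as `std::io::(readln, println)` into one fully qualified path per imported name by chaining the shared prefix onto every leaf. A simplified sketch of that flattening (the helper is hypothetical; the real code streams boxed iterators instead of building Vecs):

```rust
// Expand a shared prefix over a group of imported names.
fn flatten_import(prefix: &[&str], names: &[&str]) -> Vec<Vec<String>> {
    names.iter()
        .map(|name| {
            prefix.iter().chain(std::iter::once(name))
                .map(|s| s.to_string())
                .collect()
        })
        .collect()
}

fn main() {
    let paths = flatten_import(&["std", "io"], &["readln", "println"]);
    assert_eq!(paths[0], vec!["std", "io", "readln"]);
    assert_eq!(paths[1], vec!["std", "io", "println"]);
}
```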
@@ -9,141 +9,141 @@ use super::{number, string, name, comment};
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Entry(pub Lexeme, pub Range<usize>);
impl Debug for Entry {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    write!(f, "{:?}", self.0)
    // f.debug_tuple("Entry").field(&self.0).field(&self.1).finish()
  }
}

impl From<Entry> for (Lexeme, Range<usize>) {
  fn from(ent: Entry) -> Self {
    (ent.0, ent.1)
  }
}

#[derive(Clone, PartialEq, Eq, Hash)]
pub enum Lexeme {
  Num(NotNan<f64>),
  Int(u64),
  Char(char),
  Str(String),
  Name(String),
  Rule(NotNan<f64>),
  NS, // namespace separator
  LP(char),
  RP(char),
  BS, // Backslash
  At,
  Type, // type operator
  Comment(String)
}

impl Debug for Lexeme {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    match self {
      Self::Num(n) => write!(f, "{}", n),
      Self::Int(i) => write!(f, "{}", i),
      Self::Char(c) => write!(f, "{:?}", c),
      Self::Str(s) => write!(f, "{:?}", s),
      Self::Name(name) => write!(f, "{}", name),
      Self::Rule(prio) => write!(f, "={}=>", prio),
      Self::NS => write!(f, "::"),
      Self::LP(l) => write!(f, "{}", l),
      Self::RP(l) => match l {
        '(' => write!(f, ")"),
        '[' => write!(f, "]"),
        '{' => write!(f, "}}"),
        _ => f.debug_tuple("RP").field(l).finish()
      },
      Self::BS => write!(f, "\\"),
      Self::At => write!(f, "@"),
      Self::Type => write!(f, ":"),
      Self::Comment(text) => write!(f, "--[{}]--", text),
    }
  }
}

impl Lexeme {
  pub fn name<T: ToString>(n: T) -> Self {
    Lexeme::Name(n.to_string())
  }
  pub fn rule<T>(prio: T) -> Self where T: Into<f64> {
    Lexeme::Rule(NotNan::new(prio.into()).expect("Rule priority cannot be NaN"))
  }
  pub fn paren_parser<T, P>(
    expr: P
  ) -> impl Parser<Lexeme, (char, T), Error = Simple<Lexeme>> + Clone
  where P: Parser<Lexeme, T, Error = Simple<Lexeme>> + Clone {
    choice((
      expr.clone().delimited_by(just(Lexeme::LP('(')), just(Lexeme::RP('(')))
        .map(|t| ('(', t)),
      expr.clone().delimited_by(just(Lexeme::LP('[')), just(Lexeme::RP('[')))
        .map(|t| ('[', t)),
      expr.delimited_by(just(Lexeme::LP('{')), just(Lexeme::RP('{')))
        .map(|t| ('{', t)),
    ))
  }
}

#[derive(Clone, PartialEq, Eq, Hash)]
pub struct LexedText(pub Vec<Vec<Entry>>);

impl Debug for LexedText {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    for row in &self.0 {
      for tok in row {
        tok.fmt(f)?;
        f.write_str(" ")?
      }
      f.write_str("\n")?
    }
    Ok(())
  }
}

type LexSubres<'a> = BoxedIter<'a, Entry>;

fn paren_parser<'a>(
  expr: Recursive<'a, char, LexSubres<'a>, Simple<char>>,
  lp: char, rp: char
) -> impl Parser<char, LexSubres<'a>, Error=Simple<char>> + 'a {
  expr.padded().repeated()
    .map(|x| box_flatten(x.into_iter()))
    .delimited_by(just(lp), just(rp)).map_with_span(move |b, s| {
      box_chain!(
        iter::once(Entry(Lexeme::LP(lp), s.start..s.start+1)),
        b,
        iter::once(Entry(Lexeme::RP(lp), s.end-1..s.end))
      )
    })
}

pub fn lexer<'a, T: 'a>(ops: &[T]) -> impl Parser<char, LexedText, Error=Simple<char>> + 'a
where T: AsRef<str> + Clone {
  let all_ops = ops.iter().map(|o| o.as_ref().to_string())
    .chain(iter::once(".".to_string())).collect::<Vec<_>>();
  recursive(move |recurse: Recursive<char, LexSubres, Simple<char>>| {
    choice((
      paren_parser(recurse.clone(), '(', ')'),
      paren_parser(recurse.clone(), '[', ']'),
      paren_parser(recurse.clone(), '{', '}'),
      choice((
        just(":=").padded().to(Lexeme::rule(0f64)),
        just("=").ignore_then(number::float_parser()).then_ignore(just("=>")).map(Lexeme::rule),
        comment::comment_parser().map(Lexeme::Comment),
        just("::").padded().to(Lexeme::NS),
        just('\\').padded().to(Lexeme::BS),
        just('@').padded().to(Lexeme::At),
        just(':').to(Lexeme::Type),
        number::int_parser().map(Lexeme::Int), // all ints are valid floats so it takes precedence
        number::float_parser().map(Lexeme::Num),
        string::char_parser().map(Lexeme::Char),
        string::str_parser().map(Lexeme::Str),
        name::name_parser(&all_ops).map(Lexeme::Name), // includes namespacing
      )).map_with_span(|lx, span| box_once(Entry(lx, span)) as LexSubres)
    ))
  }).separated_by(one_of("\t ").repeated())
    .flatten().collect()
    .separated_by(just('\n').then(text::whitespace()).ignored())
    .map(LexedText)
}
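The lexer recognises `:=` as a priority-0 rule and `=N=>` as a rule with an explicit priority, stored as a NotNan<f64> (from the ordered_float crate the project already uses) so priorities can be hashed and ordered. A rough string-level sketch of that mapping, not the chumsky implementation:

```rust
use ordered_float::NotNan;

// Map a rule-arrow token to its priority: ":=" is priority 0, "=N=>" carries N.
// NotNan::new rejects NaN, the same invariant Lexeme::rule enforces.
fn rule_priority(tok: &str) -> Option<NotNan<f64>> {
    let prio = if tok == ":=" {
        0.0
    } else {
        tok.strip_prefix('=')?.strip_suffix("=>")?.parse().ok()?
    };
    NotNan::new(prio).ok()
}

fn main() {
    assert_eq!(rule_priority(":="), Some(NotNan::new(0.0).unwrap()));
    assert_eq!(rule_priority("=100=>"), Some(NotNan::new(100.0).unwrap()));
    assert_eq!(rule_priority("main"), None);
}
```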
@@ -2,13 +2,13 @@ use chumsky::{self, prelude::*, Parser};

/// Matches any one of the passed operators, longest-first
fn op_parser<'a, T: AsRef<str> + Clone>(ops: &[T]) -> BoxedParser<'a, char, String, Simple<char>> {
  let mut sorted_ops: Vec<String> = ops.iter().map(|t| t.as_ref().to_string()).collect();
  sorted_ops.sort_by_key(|op| -(op.len() as i64));
  sorted_ops.into_iter()
    .map(|op| just(op).boxed())
    .reduce(|a, b| a.or(b).boxed())
    .unwrap_or_else(|| empty().map(|()| panic!("Empty isn't meant to match")).boxed())
    .labelled("operator").boxed()
}

/// Matches anything that's allowed as an operator
@@ -30,31 +30,31 @@ fn op_parser<'a, T: AsRef<str> + Clone>(ops: &[T]) -> BoxedParser<'a, char, Stri
/// TODO: `.` could possibly be parsed as an operator depending on context. This operator is very
/// common in maths so it's worth a try. Investigate.
pub fn modname_parser<'a>() -> impl Parser<char, String, Error = Simple<char>> + 'a {
  let not_name_char: Vec<char> = vec![':', '\\', '@', '"', '\'', '(', ')', '[', ']', '{', '}', ',', '.'];
  filter(move |c| !not_name_char.contains(c) && !c.is_whitespace())
    .repeated().at_least(1)
    .collect()
    .labelled("modname")
}

/// Parse an operator or name. Failing both, parse everything up to the next whitespace or
/// blacklisted character as a new operator.
pub fn name_parser<'a, T: AsRef<str> + Clone>(
  ops: &[T]
) -> impl Parser<char, String, Error = Simple<char>> + 'a {
  choice((
    op_parser(ops), // First try to parse a known operator
    text::ident().labelled("plain text"), // Failing that, parse plain text
    modname_parser() // Finally parse everything until the next terminal as a new operator
  ))
  .labelled("name")
}

/// Decide if a string can be an operator. Operators can include digits and text, just not at the
/// start.
pub fn is_op<T: AsRef<str>>(s: T) -> bool {
  return match s.as_ref().chars().next() {
    Some(x) => !x.is_alphanumeric(),
    None => false
  }
}
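op_parser sorts the operator list by descending length so the longest registered operator wins at any position. A standalone sketch of that longest-first strategy using plain string matching rather than the boxed chumsky parsers:

```rust
// Try every known operator, longest first, and return the one matching at the
// start of the input; ">>=" must win over ">>" and "=".
fn match_op(ops: &[&str], input: &str) -> Option<String> {
    let mut sorted: Vec<&str> = ops.to_vec();
    sorted.sort_by_key(|op| std::cmp::Reverse(op.len()));
    sorted.into_iter()
        .find(|op| input.starts_with(*op))
        .map(|op| op.to_string())
}

fn main() {
    let ops = [">>", ">>=", "="];
    assert_eq!(match_op(&ops, ">>= b"), Some(">>=".to_string()));
    assert_eq!(match_op(&ops, ">> b"), Some(">>".to_string()));
    assert_eq!(match_op(&ops, "name"), None);
}
```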
@@ -2,111 +2,111 @@ use chumsky::{self, prelude::*, Parser};
use ordered_float::NotNan;

fn assert_not_digit(base: u32, c: char) {
  if base > (10 + (c as u32 - 'a' as u32)) {
    panic!("The character '{}' is a digit in base ({})", c, base)
  }
}

/// Parse an arbitrarily grouped sequence of digits starting with an underscore.
///
/// TODO: this should use separated_by and parse the leading group too
fn separated_digits_parser(base: u32) -> impl Parser<char, String, Error = Simple<char>> {
  just('_')
    .ignore_then(text::digits(base))
    .repeated()
    .map(|sv| sv.iter().flat_map(|s| s.chars()).collect())
}

/// parse a grouped uint
///
/// Not to be confused with [int_parser] which does a lot more
fn uint_parser(base: u32) -> impl Parser<char, u64, Error = Simple<char>> {
  text::int(base)
    .then(separated_digits_parser(base))
    .map(move |(s1, s2): (String, String)| {
      u64::from_str_radix(&(s1 + &s2), base).unwrap()
    })
}

/// parse exponent notation, or return 0 as the default exponent.
/// The exponent is always in decimal.
fn pow_parser() -> impl Parser<char, i32, Error = Simple<char>> {
  choice((
    just('p')
      .ignore_then(text::int(10))
      .map(|s: String| s.parse().unwrap()),
    just("p-")
      .ignore_then(text::int(10))
      .map(|s: String| -s.parse::<i32>().unwrap()),
  )).or_else(|_| Ok(0))
}

/// returns a mapper that converts a mantissa and an exponent into an uint
///
/// TODO it panics if it finds a negative exponent
fn nat2u(base: u64) -> impl Fn((u64, i32),) -> u64 {
  move |(val, exp)| {
    if exp == 0 {val}
    else {val * base.checked_pow(exp.try_into().unwrap()).unwrap()}
  }
}

/// returns a mapper that converts a mantissa and an exponent into a float
fn nat2f(base: u64) -> impl Fn((NotNan<f64>, i32),) -> NotNan<f64> {
  move |(val, exp)| {
    if exp == 0 {val}
    else {val * (base as f64).powf(exp.try_into().unwrap())}
  }
}

/// parse an uint from exponential notation (panics if 'p' is a digit in base)
fn pow_uint_parser(base: u32) -> impl Parser<char, u64, Error = Simple<char>> {
  assert_not_digit(base, 'p');
  uint_parser(base).then(pow_parser()).map(nat2u(base.into()))
}

/// parse an uint from a base determined by its prefix or lack thereof
///
/// Not to be confused with [uint_parser] which is a component of it.
pub fn int_parser() -> impl Parser<char, u64, Error = Simple<char>> {
  choice((
    just("0b").ignore_then(pow_uint_parser(2)),
    just("0x").ignore_then(pow_uint_parser(16)),
    just('0').ignore_then(pow_uint_parser(8)),
    pow_uint_parser(10), // Dec has no prefix
  ))
}

/// parse a float from dot notation
fn dotted_parser(base: u32) -> impl Parser<char, NotNan<f64>, Error = Simple<char>> {
  uint_parser(base)
    .then(
      just('.').ignore_then(
        text::digits(base).then(separated_digits_parser(base))
      ).map(move |(frac1, frac2)| {
        let frac = frac1 + &frac2;
        let frac_num = u64::from_str_radix(&frac, base).unwrap() as f64;
        let dexp = base.pow(frac.len().try_into().unwrap());
        frac_num / dexp as f64
      }).or_not().map(|o| o.unwrap_or_default())
    ).try_map(|(wh, f), s| {
      NotNan::new(wh as f64 + f).map_err(|_| Simple::custom(s, "Float literal evaluates to NaN"))
    })
}

/// parse a float from dotted and optionally also exponential notation
fn pow_float_parser(base: u32) -> impl Parser<char, NotNan<f64>, Error = Simple<char>> {
  assert_not_digit(base, 'p');
  dotted_parser(base).then(pow_parser()).map(nat2f(base.into()))
}

/// parse a float with dotted and optionally exponential notation from a base determined by its
/// prefix
pub fn float_parser() -> impl Parser<char, NotNan<f64>, Error = Simple<char>> {
  choice((
    just("0b").ignore_then(pow_float_parser(2)),
    just("0x").ignore_then(pow_float_parser(16)),
    just('0').ignore_then(pow_float_parser(8)),
    pow_float_parser(10),
  )).labelled("float")
}
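nat2u combines the parsed mantissa with the `p` exponent by multiplying with a power of the literal's base (the exponent digits themselves are decimal). A worked sketch of that arithmetic, outside the parser:

```rust
// 0x2p3 parses as mantissa 2 in base 16 with exponent 3, i.e. 2 * 16^3.
fn nat2u(base: u64, mantissa: u64, exp: u32) -> u64 {
    if exp == 0 { mantissa } else { mantissa * base.checked_pow(exp).unwrap() }
}

fn main() {
    assert_eq!(nat2u(16, 0x2, 3), 8192);   // 0x2p3
    assert_eq!(nat2u(10, 7, 2), 700);      // 7p2
    assert_eq!(nat2u(2, 0b101, 4), 80);    // 0b101p4
}
```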
@@ -11,58 +11,58 @@ use super::{Lexeme, FileEntry, lexer, line_parser, LexerEntry};

#[derive(Error, Debug, Clone)]
pub enum ParseError {
  #[error("Could not tokenize {0:?}")]
  Lex(Vec<Simple<char>>),
  #[error("Could not parse {0:#?}")]
  Ast(Vec<Simple<Lexeme>>)
}

pub fn parse<'a, Iter, S, Op>(ops: &[Op], stream: S) -> Result<Vec<FileEntry>, ParseError>
where
  Op: 'a + AsRef<str> + Clone,
  Iter: Iterator<Item = (char, Range<usize>)> + 'a,
  S: Into<Stream<'a, char, Range<usize>, Iter>> {
  let lexed = lexer(ops).parse(stream).map_err(ParseError::Lex)?;
  println!("Lexed:\n{:?}", lexed);
  let LexedText(token_batchv) = lexed;
  let parsr = line_parser().then_ignore(end());
  let (parsed_lines, errors_per_line) = token_batchv.into_iter().filter(|v| {
    !v.is_empty()
  }).map(|v| {
    // Find the first invalid position for Stream::for_iter
    let LexerEntry(_, Range{ end, .. }) = v.last().unwrap().clone();
    // Stream expects tuples, lexer outputs structs
    let tuples = v.into_iter().map_into::<(Lexeme, Range<usize>)>();
    parsr.parse(Stream::from_iter(end..end+1, tuples))
    //                            ^^^^^^^^^^
    // I haven't the foggiest idea why this is needed, parsers are supposed to be lazy so the
    // end of input should make little difference
  }).map(|res| match res {
    Ok(r) => (Some(r), vec![]),
    Err(e) => (None, e)
  }).unzip::<_, _, Vec<_>, Vec<_>>();
  let total_err = errors_per_line.into_iter()
    .flat_map(Vec::into_iter)
    .collect::<Vec<_>>();
  if !total_err.is_empty() { Err(ParseError::Ast(total_err)) }
  else { Ok(parsed_lines.into_iter().map(Option::unwrap).collect()) }
}

pub fn reparse<'a, Iter, S, Op>(ops: &[Op], stream: S, pre: &[FileEntry])
-> Result<Vec<FileEntry>, ParseError>
where
  Op: 'a + AsRef<str> + Clone,
  Iter: Iterator<Item = (char, Range<usize>)> + 'a,
  S: Into<Stream<'a, char, Range<usize>, Iter>> {
  let result = parse(ops, stream)?;
  Ok(result.into_iter().zip(pre.iter()).map(|(mut output, donor)| {
    if let FileEntry::Rule(Rule{source, ..}, _) = &mut output {
      if let FileEntry::Rule(Rule{source: s2, ..}, _) = donor {
        *source = s2.clone()
      } else {
        panic!("Preparse and reparse received different row types!")
      }
    }
    output
  }).collect())
}
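parse() runs the line parser over every token batch, keeps successes and error lists side by side with unzip, and fails only if any line produced errors. The same collection pattern in isolation, with simplified types rather than the real Simple<Lexeme> errors:

```rust
// Each line yields either a parsed value or a list of errors; unzip splits
// them, and parsing succeeds only if every error list is empty.
fn collect_lines(results: Vec<Result<u32, Vec<String>>>) -> Result<Vec<u32>, Vec<String>> {
    let (ok, errs): (Vec<_>, Vec<_>) = results.into_iter()
        .map(|res| match res {
            Ok(r) => (Some(r), vec![]),
            Err(e) => (None, e),
        })
        .unzip();
    let total_err: Vec<String> = errs.into_iter().flatten().collect();
    if !total_err.is_empty() { Err(total_err) }
    else { Ok(ok.into_iter().map(Option::unwrap).collect()) }
}

fn main() {
    assert_eq!(collect_lines(vec![Ok(1), Ok(2)]), Ok(vec![1, 2]));
    assert!(collect_lines(vec![Ok(1), Err(vec!["bad line".into()])]).is_err());
}
```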
@@ -16,50 +16,50 @@ use ordered_float::NotNan;
/// Anything we might encounter in a file
#[derive(Debug, Clone)]
pub enum FileEntry {
  Import(Vec<import::Import>),
  Comment(String),
  Rule(Rule, bool),
  Export(Vec<Vec<String>>)
}

fn visit_all_names_clause_recur<'a, F>(
  clause: &'a Clause,
  binds: Stackframe<String>,
  cb: &mut F
) where F: FnMut(&'a [String]) {
  match clause {
    Clause::Auto(name, typ, body) => {
      for x in typ.iter() {
        visit_all_names_expr_recur(x, binds.clone(), cb)
      }
      let binds_dup = binds.clone();
      let new_binds = if let Some(n) = name {
        binds_dup.push(n.to_owned())
      } else {
        binds
      };
      for x in body.iter() {
        visit_all_names_expr_recur(x, new_binds.clone(), cb)
      }
    },
    Clause::Lambda(name, typ, body) => {
      for x in typ.iter() {
        visit_all_names_expr_recur(x, binds.clone(), cb)
      }
      for x in body.iter() {
        visit_all_names_expr_recur(x, binds.push(name.to_owned()), cb)
      }
    },
    Clause::S(_, body) => for x in body.iter() {
      visit_all_names_expr_recur(x, binds.clone(), cb)
    },
    Clause::Name{ local: Some(name), qualified } => {
      if binds.iter().all(|x| x != name) {
        cb(qualified)
      }
    }
    _ => (),
  }
}

/// Recursively iterate through all "names" in an expression. It also finds a lot of things that
@@ -68,88 +68,88 @@ fn visit_all_names_clause_recur<'a, F>(
|
|||||||
///
|
///
|
||||||
/// TODO: find a way to exclude parameters
|
/// TODO: find a way to exclude parameters
|
||||||
fn visit_all_names_expr_recur<'a, F>(
|
fn visit_all_names_expr_recur<'a, F>(
|
||||||
expr: &'a Expr,
|
expr: &'a Expr,
|
||||||
binds: Stackframe<String>,
|
binds: Stackframe<String>,
|
||||||
cb: &mut F
|
cb: &mut F
|
||||||
) where F: FnMut(&'a [String]) {
|
) where F: FnMut(&'a [String]) {
|
||||||
let Expr(val, typ) = expr;
|
let Expr(val, typ) = expr;
|
||||||
visit_all_names_clause_recur(val, binds.clone(), cb);
|
visit_all_names_clause_recur(val, binds.clone(), cb);
|
||||||
for typ in typ.as_ref() {
|
for typ in typ.as_ref() {
|
||||||
visit_all_names_clause_recur(typ, binds.clone(), cb);
|
visit_all_names_clause_recur(typ, binds.clone(), cb);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Collect all names that occur in an expression
|
/// Collect all names that occur in an expression
|
||||||
fn find_all_names(expr: &Expr) -> HashSet<&[String]> {
|
fn find_all_names(expr: &Expr) -> HashSet<&[String]> {
|
||||||
let mut ret = HashSet::new();
|
let mut ret = HashSet::new();
|
||||||
visit_all_names_expr_recur(expr, Stackframe::new(String::new()), &mut |n| {
|
visit_all_names_expr_recur(expr, Stackframe::new(String::new()), &mut |n| {
|
||||||
if !n.last().unwrap().starts_with('$') {
|
if !n.last().unwrap().starts_with('$') {
|
||||||
ret.insert(n);
|
ret.insert(n);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
ret
|
ret
|
||||||
}
|
}
|
||||||
|
|
||||||
fn rule_parser() -> impl Parser<Lexeme, (Vec<Expr>, NotNan<f64>, Vec<Expr>), Error = Simple<Lexeme>> {
|
fn rule_parser() -> impl Parser<Lexeme, (Vec<Expr>, NotNan<f64>, Vec<Expr>), Error = Simple<Lexeme>> {
|
||||||
xpr_parser().repeated()
|
xpr_parser().repeated()
|
||||||
.then(enum_parser!(Lexeme::Rule))
|
.then(enum_parser!(Lexeme::Rule))
|
||||||
.then(xpr_parser().repeated())
|
.then(xpr_parser().repeated())
|
||||||
// .map(|((lhs, prio), rhs)| )
|
// .map(|((lhs, prio), rhs)| )
|
||||||
.map(|((a, b), c)| (a, b, c))
|
.map(|((a, b), c)| (a, b, c))
|
||||||
.labelled("Rule")
|
.labelled("Rule")
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn line_parser() -> impl Parser<Lexeme, FileEntry, Error = Simple<Lexeme>> {
|
pub fn line_parser() -> impl Parser<Lexeme, FileEntry, Error = Simple<Lexeme>> {
|
||||||
choice((
|
choice((
|
||||||
// In case the usercode wants to parse doc
|
// In case the usercode wants to parse doc
|
||||||
enum_parser!(Lexeme >> FileEntry; Comment),
|
enum_parser!(Lexeme >> FileEntry; Comment),
|
||||||
just(Lexeme::name("import"))
|
just(Lexeme::name("import"))
|
||||||
.ignore_then(import_parser().map(FileEntry::Import))
|
.ignore_then(import_parser().map(FileEntry::Import))
|
||||||
.then_ignore(enum_parser!(Lexeme::Comment)),
|
.then_ignore(enum_parser!(Lexeme::Comment)),
|
||||||
just(Lexeme::name("export")).map_err_with_span(|e, s| {
|
just(Lexeme::name("export")).map_err_with_span(|e, s| {
|
||||||
println!("{:?} could not yield an export", s); e
|
println!("{:?} could not yield an export", s); e
|
||||||
}).ignore_then(
|
}).ignore_then(
|
||||||
just(Lexeme::NS).ignore_then(
|
just(Lexeme::NS).ignore_then(
|
||||||
enum_parser!(Lexeme::Name).map(|n| vec![n])
|
enum_parser!(Lexeme::Name).map(|n| vec![n])
|
||||||
.separated_by(just(Lexeme::name(",")))
|
.separated_by(just(Lexeme::name(",")))
|
||||||
.delimited_by(just(Lexeme::LP('(')), just(Lexeme::RP('(')))
|
.delimited_by(just(Lexeme::LP('(')), just(Lexeme::RP('(')))
|
||||||
).map(FileEntry::Export)
|
).map(FileEntry::Export)
|
||||||
).or(rule_parser().map(|(source, prio, target)| {
|
).or(rule_parser().map(|(source, prio, target)| {
|
||||||
FileEntry::Rule(Rule {
|
FileEntry::Rule(Rule {
|
||||||
source: to_mrc_slice(source),
|
source: to_mrc_slice(source),
|
||||||
prio,
|
prio,
|
||||||
target: to_mrc_slice(target)
|
target: to_mrc_slice(target)
|
||||||
}, true)
|
}, true)
|
||||||
})),
|
})),
|
||||||
// This could match almost anything so it has to go last
|
// This could match almost anything so it has to go last
|
||||||
rule_parser().map(|(source, prio, target)| FileEntry::Rule(Rule{
|
rule_parser().map(|(source, prio, target)| FileEntry::Rule(Rule{
|
||||||
source: to_mrc_slice(source),
|
source: to_mrc_slice(source),
|
||||||
prio,
|
prio,
|
||||||
target: to_mrc_slice(target)
|
target: to_mrc_slice(target)
|
||||||
}, false)),
|
}, false)),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Collect all exported names (and a lot of other words) from a file
|
/// Collect all exported names (and a lot of other words) from a file
|
||||||
pub fn exported_names(src: &[FileEntry]) -> HashSet<&[String]> {
|
pub fn exported_names(src: &[FileEntry]) -> HashSet<&[String]> {
|
||||||
src.iter().flat_map(|ent| match ent {
|
src.iter().flat_map(|ent| match ent {
|
||||||
FileEntry::Rule(Rule{source, target, ..}, true) =>
|
FileEntry::Rule(Rule{source, target, ..}, true) =>
|
||||||
box_chain!(source.iter(), target.iter()),
|
box_chain!(source.iter(), target.iter()),
|
||||||
_ => box_empty()
|
_ => box_empty()
|
||||||
}).flat_map(find_all_names).chain(
|
}).flat_map(find_all_names).chain(
|
||||||
src.iter().filter_map(|ent| {
|
src.iter().filter_map(|ent| {
|
||||||
if let FileEntry::Export(names) = ent {Some(names.iter())} else {None}
|
if let FileEntry::Export(names) = ent {Some(names.iter())} else {None}
|
||||||
}).flatten().map(Vec::as_slice)
|
}).flatten().map(Vec::as_slice)
|
||||||
).collect()
|
).collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Summarize all imports from a file in a single list of qualified names
|
/// Summarize all imports from a file in a single list of qualified names
|
||||||
pub fn imports<'a, 'b, I>(
|
pub fn imports<'a, 'b, I>(
|
||||||
src: I
|
src: I
|
||||||
) -> impl Iterator<Item = &'b import::Import> + 'a
|
) -> impl Iterator<Item = &'b import::Import> + 'a
|
||||||
where I: Iterator<Item = &'b FileEntry> + 'a {
|
where I: Iterator<Item = &'b FileEntry> + 'a {
|
||||||
src.filter_map(|ent| match ent {
|
src.filter_map(|ent| match ent {
|
||||||
FileEntry::Import(impv) => Some(impv.iter()),
|
FileEntry::Import(impv) => Some(impv.iter()),
|
||||||
_ => None
|
_ => None
|
||||||
}).flatten()
|
}).flatten()
|
||||||
}
|
}
|
||||||
|
|||||||
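The TODO above notes that lambda parameters are not yet excluded from the collected names. A minimal, self-contained sketch of what binding-aware traversal could look like; the `Node` type and `visit_names` below are simplified stand-ins for illustration, not the crate's own `Expr`/`Clause`:

```rust
use std::collections::HashSet;

// Simplified stand-ins for the real expression types.
enum Node {
    Name(Vec<String>),
    Lambda(String, Vec<Node>),
    Group(Vec<Node>),
}

// Walk the tree, reporting every Name that is not shadowed by an enclosing Lambda binding.
fn visit_names<'a>(node: &'a Node, binds: &mut Vec<String>, cb: &mut impl FnMut(&'a [String])) {
    match node {
        Node::Name(qualified) => {
            // A single unqualified segment may refer to a lambda parameter.
            let shadowed = qualified.len() == 1 && binds.contains(&qualified[0]);
            if !shadowed { cb(qualified.as_slice()) }
        }
        Node::Lambda(param, body) => {
            binds.push(param.clone());
            for n in body { visit_names(n, binds, cb) }
            binds.pop();
        }
        Node::Group(items) => for n in items { visit_names(n, binds, cb) },
    }
}

fn main() {
    let tree = Node::Lambda("x".to_string(), vec![
        Node::Group(vec![
            Node::Name(vec!["std".into(), "add".into()]),
            Node::Name(vec!["x".into()]), // shadowed, not reported
        ]),
    ]);
    let mut found: HashSet<&[String]> = HashSet::new();
    visit_names(&tree, &mut Vec::new(), &mut |n| { found.insert(n); });
    assert!(found.iter().any(|n| n.last().unwrap() == "add"));
}
```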
@@ -2,45 +2,45 @@ use chumsky::{self, prelude::*, Parser};

/// Parses a text character that is not the specified delimiter
fn text_parser(delim: char) -> impl Parser<char, char, Error = Simple<char>> {
  // Copied directly from Chumsky's JSON example.
  let escape = just('\\').ignore_then(
    just('\\')
      .or(just('/'))
      .or(just('"'))
      .or(just('b').to('\x08'))
      .or(just('f').to('\x0C'))
      .or(just('n').to('\n'))
      .or(just('r').to('\r'))
      .or(just('t').to('\t'))
      .or(just('u').ignore_then(
        filter(|c: &char| c.is_ascii_hexdigit())
          .repeated()
          .exactly(4)
          .collect::<String>()
          .validate(|digits, span, emit| {
            char::from_u32(u32::from_str_radix(&digits, 16).unwrap())
              .unwrap_or_else(|| {
                emit(Simple::custom(span, "invalid unicode character"));
                '\u{FFFD}' // unicode replacement character
              })
          }),
      )),
  );
  filter(move |&c| c != '\\' && c != delim).or(escape)
}

/// Parse a character literal between single quotes
pub fn char_parser() -> impl Parser<char, char, Error = Simple<char>> {
  just('\'').ignore_then(text_parser('\'')).then_ignore(just('\''))
}

/// Parse a string between double quotes
pub fn str_parser() -> impl Parser<char, String, Error = Simple<char>> {
  just('"')
    .ignore_then(
      text_parser('"').map(Some)
        .or(just("\\\n").map(|_| None)) // Newlines preceded by backslashes are ignored.
        .repeated()
    ).then_ignore(just('"'))
    .flatten().collect()
}
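For reference, the escape table accepted by `text_parser` can also be decoded with plain std code. The `unescape` function below is illustrative only and not part of the parser; it mirrors the same sequences (`\\ \/ \" \b \f \n \r \t \uXXXX`) and the same U+FFFD fallback:

```rust
// Decode the escape sequences the quoted-text parser above accepts.
fn unescape(src: &str) -> Result<String, String> {
    let mut out = String::new();
    let mut chars = src.chars();
    while let Some(c) = chars.next() {
        if c != '\\' { out.push(c); continue }
        match chars.next().ok_or("dangling backslash")? {
            '\\' => out.push('\\'),
            '/' => out.push('/'),
            '"' => out.push('"'),
            'b' => out.push('\x08'),
            'f' => out.push('\x0C'),
            'n' => out.push('\n'),
            'r' => out.push('\r'),
            't' => out.push('\t'),
            'u' => {
                let hex: String = chars.by_ref().take(4).collect();
                let code = u32::from_str_radix(&hex, 16).map_err(|e| e.to_string())?;
                // Fall back to the replacement character for invalid code points,
                // like the validate step above.
                out.push(char::from_u32(code).unwrap_or('\u{FFFD}'));
            }
            other => return Err(format!("unknown escape \\{other}")),
        }
    }
    Ok(out)
}

fn main() {
    assert_eq!(unescape(r#"a\nb\u0041"#).unwrap(), "a\nbA");
}
```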
@@ -9,43 +9,43 @@ use super::loaded::Loaded;

#[derive(Clone, Debug)]
pub enum LoadingError {
  IOErr(Rc<io::Error>),
  UnknownNode(String),
  Missing(String)
}

impl From<io::Error> for LoadingError {
  fn from(inner: io::Error) -> Self {
    LoadingError::IOErr(Rc::new(inner))
  }
}

pub fn file_loader(proj: PathBuf) -> impl FnMut(Mrc<[String]>) -> Result<Loaded, LoadingError> + 'static {
  move |path| {
    let dirpath = proj.join(path.join("/"));
    if dirpath.is_dir() || dirpath.is_symlink() {
      return Ok(Loaded::Namespace(
        dirpath.read_dir()?
          .filter_map(|entr| {
            let ent = entr.ok()?;
            let typ = ent.file_type().ok()?;
            let path = ent.path();
            if typ.is_dir() || typ.is_symlink() {
              Some(ent.file_name().to_string_lossy().into_owned())
            } else if typ.is_file() && path.extension()? == "orc" {
              Some(path.file_stem()?.to_string_lossy().into_owned())
            } else { None }
          })
          .collect()
      ))
    }
    let orcfile = dirpath.with_extension("orc");
    if orcfile.is_file() {
      read_to_string(orcfile).map(Loaded::Module).map_err(LoadingError::from)
    } else {
      let pathstr = dirpath.to_string_lossy().into_owned();
      Err(if dirpath.exists() { LoadingError::UnknownNode(pathstr) }
      else { LoadingError::Missing(pathstr) })
    }
  }
}
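`file_loader` resolves a qualified path either to a namespace (a directory listing) or to a module (the text of an `.orc` file). A stripped-down sketch of the same decision using only `std::fs`; the `Loaded` enum and `load` function here shadow the real ones purely for a self-contained example and omit the `UnknownNode`/`Missing` distinction:

```rust
use std::fs;
use std::path::Path;

// Either a module's source text or the entries of a namespace folder.
enum Loaded {
    Module(String),
    Namespace(Vec<String>),
}

// Resolve ["foo", "bar"] under `root` to either foo/bar/ (a namespace) or foo/bar.orc (a module).
fn load(root: &Path, path: &[String]) -> std::io::Result<Loaded> {
    let dir = root.join(path.join("/"));
    if dir.is_dir() {
        let names = fs::read_dir(&dir)?
            .filter_map(|ent| ent.ok())
            .filter_map(|ent| ent.path().file_stem().map(|s| s.to_string_lossy().into_owned()))
            .collect();
        return Ok(Loaded::Namespace(names));
    }
    fs::read_to_string(dir.with_extension("orc")).map(Loaded::Module)
}

fn main() {
    match load(Path::new("."), &["example".to_string()]) {
        Ok(Loaded::Module(src)) => println!("module, {} bytes", src.len()),
        Ok(Loaded::Namespace(names)) => println!("namespace: {names:?}"),
        Err(e) => println!("not found: {e}"),
    }
}
```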
@@ -1,5 +1,5 @@
#[derive(Debug, Clone)]
pub enum Loaded {
  Module(String),
  Namespace(Vec<String>),
}
@@ -6,26 +6,26 @@ use super::name_resolver::ResolutionError;

#[derive(Error, Debug, Clone)]
pub enum ModuleError<ELoad> where ELoad: Clone {
  #[error("Resolution cycle")]
  ResolutionCycle,
  #[error("File not found: {0}")]
  Load(ELoad),
  #[error("Failed to parse: {0:?}")]
  Syntax(ParseError),
  #[error("Not a module")]
  None
}

impl<T> From<ParseError> for ModuleError<T> where T: Clone {
  fn from(pars: ParseError) -> Self { Self::Syntax(pars) }
}

impl<T> From<ResolutionError<ModuleError<T>>> for ModuleError<T> where T: Clone {
  fn from(res: ResolutionError<ModuleError<T>>) -> Self {
    match res {
      ResolutionError::Cycle(_) => ModuleError::ResolutionCycle,
      ResolutionError::NoModule(_) => ModuleError::None,
      ResolutionError::Delegate(d) => d
    }
  }
}
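The `From` impls above exist so that parser and resolver failures can be bubbled into `ModuleError` with `?`. A compact, dependency-free sketch of that pattern; the names are simplified and the `thiserror` derive is omitted:

```rust
#[derive(Debug, Clone)]
struct ParseError(String);

// Simplified analogue of ModuleError: one variant per failure source, plus a From impl
// so that `?` can lift the inner error automatically.
#[derive(Debug, Clone)]
enum ModuleError<E> {
    Load(E),
    Syntax(ParseError),
    None,
}

impl<E> From<ParseError> for ModuleError<E> {
    fn from(e: ParseError) -> Self { Self::Syntax(e) }
}

fn parse(src: &str) -> Result<usize, ParseError> {
    if src.is_empty() { Err(ParseError("empty module".into())) } else { Ok(src.len()) }
}

fn preparse<E>(src: &str) -> Result<usize, ModuleError<E>> {
    // ParseError is converted into ModuleError::Syntax by the From impl above.
    Ok(parse(src)?)
}

fn main() {
    assert!(matches!(preparse::<()>(""), Err(ModuleError::Syntax(_))));
    assert_eq!(preparse::<()>("x := 1").unwrap(), 6);
}
```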
@@ -10,12 +10,12 @@ type ImportMap = HashMap<String, Mrc<[String]>>;

#[derive(Debug, Clone, Error)]
pub enum ResolutionError<Err> {
  #[error("Reference cycle at {0:?}")]
  Cycle(Vec<Mrc<[String]>>),
  #[error("No module provides {0:?}")]
  NoModule(Mrc<[String]>),
  #[error(transparent)]
  Delegate(#[from] Err)
}

type ResolutionResult<E> = Result<Mrc<[String]>, ResolutionError<E>>;
@@ -24,108 +24,108 @@ type ResolutionResult<E> = Result<Mrc<[String]>, ResolutionError<E>>;
/// resolution. This makes the resolution process lightning fast and invalidation completely
/// impossible since the intermediate steps of a resolution aren't stored.
pub struct NameResolver<FSplit, FImps, E> {
  cache: HashMap<Mrc<[String]>, ResolutionResult<E>>,
  get_modname: FSplit,
  get_imports: FImps
}

impl<FSplit, FImps, E> NameResolver<FSplit, FImps, E>
where
  FSplit: FnMut(Mrc<[String]>) -> Option<Mrc<[String]>>,
  FImps: FnMut(Mrc<[String]>) -> Result<ImportMap, E>,
  E: Clone
{
  pub fn new(get_modname: FSplit, get_imports: FImps) -> Self {
    Self {
      cache: HashMap::new(),
      get_modname,
      get_imports
    }
  }

  /// Obtains a symbol's original name
  /// Uses a substack to detect loops
  fn find_origin_rec(
    &mut self,
    symbol: Mrc<[String]>,
    import_path: Stackframe<Mrc<[String]>>
  ) -> Result<Mrc<[String]>, ResolutionError<E>> {
    if let Some(cached) = self.cache.get(&symbol) {
      return cached.as_ref().map_err(|e| e.clone()).map(Mrc::clone)
    }
    // The imports and path of the referenced file and the local name
    let path = (self.get_modname)(Mrc::clone(&symbol)).ok_or_else(|| {
      ResolutionError::NoModule(Mrc::clone(&symbol))
    })?;
    let name = &symbol[path.len()..];
    if name.is_empty() {
      panic!("get_modname matched all to module and nothing to name in {:?}", import_path)
    }
    let imports = (self.get_imports)(Mrc::clone(&path))?;
    let result = if let Some(source) = imports.get(&name[0]) {
      let new_sym: Vec<String> = source.iter().chain(name.iter()).cloned().collect();
      if import_path.iter().any(|el| el.as_ref() == new_sym.as_slice()) {
        Err(ResolutionError::Cycle(import_path.iter().map(Mrc::clone).collect()))
      } else {
        self.find_origin_rec(to_mrc_slice(new_sym), import_path.push(Mrc::clone(&symbol)))
      }
    } else {
      Ok(symbol.clone()) // If not imported, it must be locally defined
    };
    self.cache.insert(symbol, result.clone());
    result
  }

  fn process_exprv_rec(&mut self, exv: &[Expr]) -> Result<Vec<Expr>, ResolutionError<E>> {
    exv.iter().map(|ex| self.process_expression_rec(ex)).collect()
  }

  fn process_exprmrcopt_rec(&mut self,
    exbo: &Option<Mrc<Expr>>
  ) -> Result<Option<Mrc<Expr>>, ResolutionError<E>> {
    exbo.iter().map(|exb| Ok(Mrc::new(self.process_expression_rec(exb.as_ref())?)))
      .next().transpose()
  }

  fn process_clause_rec(&mut self, tok: &Clause) -> Result<Clause, ResolutionError<E>> {
    Ok(match tok {
      Clause::S(c, exv) => Clause::S(*c, to_mrc_slice(
        exv.as_ref().iter().map(|e| self.process_expression_rec(e))
          .collect::<Result<Vec<Expr>, ResolutionError<E>>>()?
      )),
      Clause::Lambda(name, typ, body) => Clause::Lambda(name.clone(),
        to_mrc_slice(self.process_exprv_rec(typ.as_ref())?),
        to_mrc_slice(self.process_exprv_rec(body.as_ref())?)
      ),
      Clause::Auto(name, typ, body) => Clause::Auto(name.clone(),
        to_mrc_slice(self.process_exprv_rec(typ.as_ref())?),
        to_mrc_slice(self.process_exprv_rec(body.as_ref())?)
      ),
      Clause::Name{local, qualified} => Clause::Name{
        local: local.clone(),
        qualified: self.find_origin(Mrc::clone(qualified))?
      },
      x => x.clone()
    })
  }

  fn process_expression_rec(&mut self, Expr(token, typ): &Expr) -> Result<Expr, ResolutionError<E>> {
    Ok(Expr(
      self.process_clause_rec(token)?,
      typ.iter().map(|t| self.process_clause_rec(t)).collect::<Result<_, _>>()?
    ))
  }

  pub fn find_origin(&mut self, symbol: Mrc<[String]>) -> Result<Mrc<[String]>, ResolutionError<E>> {
    self.find_origin_rec(Mrc::clone(&symbol), Stackframe::new(symbol))
  }

  #[allow(dead_code)]
  pub fn process_clause(&mut self, clause: &Clause) -> Result<Clause, ResolutionError<E>> {
    self.process_clause_rec(clause)
  }

  pub fn process_expression(&mut self, ex: &Expr) -> Result<Expr, ResolutionError<E>> {
    self.process_expression_rec(ex)
  }
}
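`find_origin_rec` follows import edges from module to module and uses the substack of visited symbols to refuse cycles. The same idea in isolation, with plain `String` module names instead of `Mrc<[String]>`; types and names below are illustrative only:

```rust
use std::collections::HashMap;

// For each (module, local name), the module the name is imported from, if any.
// A missing entry means the name is defined locally in that module.
type ImportTable = HashMap<(String, String), String>;

#[derive(Debug, PartialEq)]
enum Resolution {
    Origin(String),
    Cycle(Vec<String>),
}

// Follow import edges until the defining module is found, recording the trail to detect cycles.
fn find_origin(imports: &ImportTable, module: &str, name: &str) -> Resolution {
    let mut trail = vec![module.to_string()];
    let mut current = module.to_string();
    while let Some(source) = imports.get(&(current.clone(), name.to_string())) {
        if trail.contains(source) {
            return Resolution::Cycle(trail);
        }
        trail.push(source.clone());
        current = source.clone();
    }
    Resolution::Origin(current)
}

fn main() {
    let mut imports = ImportTable::new();
    // main imports `add` from prelude, prelude imports it from num.
    imports.insert(("main".into(), "add".into()), "prelude".into());
    imports.insert(("prelude".into(), "add".into()), "num".into());
    assert_eq!(find_origin(&imports, "main", "add"), Resolution::Origin("num".into()));

    // A re-export loop is reported as a cycle instead of recursing forever.
    imports.insert(("a".into(), "x".into()), "b".into());
    imports.insert(("b".into(), "x".into()), "a".into());
    assert!(matches!(find_origin(&imports, "a", "x"), Resolution::Cycle(_)));
}
```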
@@ -7,35 +7,35 @@ use crate::{ast::{Expr, Clause}, utils::{collect_to_mrc, to_mrc_slice}};
/// Produce a Token object for any value of Expr other than Typed.
/// Called by [#prefix] which handles Typed.
fn prefix_clause(
  expr: &Clause,
  namespace: Mrc<[String]>
) -> Clause {
  match expr {
    Clause::S(c, v) => Clause::S(*c,
      collect_to_mrc(v.iter().map(|e| prefix_expr(e, Mrc::clone(&namespace))))
    ),
    Clause::Auto(name, typ, body) => Clause::Auto(
      name.clone(),
      collect_to_mrc(typ.iter().map(|e| prefix_expr(e, Mrc::clone(&namespace)))),
      collect_to_mrc(body.iter().map(|e| prefix_expr(e, Mrc::clone(&namespace)))),
    ),
    Clause::Lambda(name, typ, body) => Clause::Lambda(
      name.clone(),
      collect_to_mrc(typ.iter().map(|e| prefix_expr(e, Mrc::clone(&namespace)))),
      collect_to_mrc(body.iter().map(|e| prefix_expr(e, Mrc::clone(&namespace)))),
    ),
    Clause::Name{local, qualified} => Clause::Name{
      local: local.clone(),
      qualified: collect_to_mrc(namespace.iter().chain(qualified.iter()).cloned())
    },
    x => x.clone()
  }
}

/// Produce an Expr object for any value of Expr
pub fn prefix_expr(Expr(clause, typ): &Expr, namespace: Mrc<[String]>) -> Expr {
  Expr(
    prefix_clause(clause, Mrc::clone(&namespace)),
    to_mrc_slice(typ.iter().map(|e| prefix_clause(e, Mrc::clone(&namespace))).collect())
  )
}
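`prefix_expr` rewrites every qualified name so that it begins with the defining module's path. The core of that transformation on bare name vectors, as a minimal sketch with assumed stand-in types:

```rust
// Qualify a name by prepending the module's own path, mirroring what
// prefix_clause does to Clause::Name nodes.
fn prefix(namespace: &[String], qualified: &[String]) -> Vec<String> {
    namespace.iter().chain(qualified.iter()).cloned().collect()
}

fn main() {
    let ns = vec!["mylib".to_string(), "vec".to_string()];
    let name = vec!["map".to_string()];
    assert_eq!(prefix(&ns, &name), ["mylib", "vec", "map"]);
}
```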
@@ -18,198 +18,198 @@ type ParseResult<T, ELoad> = Result<T, ModuleError<ELoad>>;

#[derive(Debug, Clone)]
pub struct Module {
  pub rules: Vec<Rule>,
  pub exports: Vec<String>,
  pub references: Vec<Mrc<[String]>>
}

pub type RuleCollectionResult<ELoad> = Result<Vec<super::Rule>, ModuleError<ELoad>>;

pub fn rule_collector<F: 'static, ELoad>(
  load_mod: F,
  prelude: Vec<String>
) -> Cache<'static, Mrc<[String]>, RuleCollectionResult<ELoad>>
where
  F: FnMut(Mrc<[String]>) -> Result<Loaded, ELoad>,
  ELoad: Clone + Debug
{
  let load_mod_rc = RefCell::new(load_mod);
  // Map paths to a namespace with name list (folder) or module with source text (file)
  let loaded = Rc::new(Cache::new(move |path: Mrc<[String]>, _|
  -> ParseResult<Loaded, ELoad> {
    (load_mod_rc.borrow_mut())(path).map_err(ModuleError::Load)
  }));
  // Map names to the longest prefix that points to a valid module
  // At least one segment must be in the prefix, and the prefix must not be the whole name
  let modname = Rc::new(Cache::new({
    let loaded = Rc::clone(&loaded);
    move |symbol: Mrc<[String]>, _| -> Result<Mrc<[String]>, Vec<ModuleError<ELoad>>> {
      let mut errv: Vec<ModuleError<ELoad>> = Vec::new();
      let reg_err = |e, errv: &mut Vec<ModuleError<ELoad>>| {
        errv.push(e);
        if symbol.len() == errv.len() { Err(errv.clone()) }
        else { Ok(()) }
      };
      loop {
        let path = mrc_derive(&symbol, |s| &s[..s.len() - errv.len() - 1]);
        match loaded.try_find(&path) {
          Ok(imports) => match imports.as_ref() {
            Loaded::Module(_) => break Ok(path),
            _ => reg_err(ModuleError::None, &mut errv)?
          },
          Err(err) => reg_err(err, &mut errv)?
        }
      }
    }
  }));
  // Preliminarily parse a file, substitution rules and imports are valid
  let preparsed = Rc::new(Cache::new({
    let loaded = Rc::clone(&loaded);
    let prelude2 = prelude.clone();
    move |path: Mrc<[String]>, _| -> ParseResult<Vec<FileEntry>, ELoad> {
      let loaded = loaded.try_find(&path)?;
      if let Loaded::Module(source) = loaded.as_ref() {
        Ok(parse::parse(&prelude2, source.as_str())?)
      } else {Err(ModuleError::None)}
    }
  }));
  // Collect all toplevel names exported from a given file
  let exports = Rc::new(Cache::new({
    let loaded = Rc::clone(&loaded);
    let preparsed = Rc::clone(&preparsed);
    move |path: Mrc<[String]>, _| -> ParseResult<Vec<String>, ELoad> {
      let loaded = loaded.try_find(&path)?;
      if let Loaded::Namespace(names) = loaded.as_ref() {
        return Ok(names.clone());
      }
      let preparsed = preparsed.try_find(&path)?;
      Ok(parse::exported_names(&preparsed)
        .into_iter()
        .map(|n| n[0].clone())
        .collect())
    }
  }));
  // Collect all toplevel names imported by a given file
  let imports = Rc::new(Cache::new({
    let preparsed = Rc::clone(&preparsed);
    let exports = Rc::clone(&exports);
    move |path: Mrc<[String]>, _| -> ParseResult<HashMap<String, Mrc<[String]>>, ELoad> {
      let entv = preparsed.try_find(&path)?;
      let import_entries = parse::imports(entv.iter());
      let mut imported_symbols: HashMap<String, Mrc<[String]>> = HashMap::new();
      for imp in import_entries {
        let export = exports.try_find(&imp.path)?;
        if let Some(ref name) = imp.name {
          if export.contains(name) {
            imported_symbols.insert(name.clone(), Mrc::clone(&imp.path));
          }
        } else {
          for exp in export.as_ref() {
            imported_symbols.insert(exp.clone(), Mrc::clone(&imp.path));
          }
        }
      }
      Ok(imported_symbols)
    }
  }));
  // Final parse, operators are correctly separated
  let parsed = Rc::new(Cache::new({
    let preparsed = Rc::clone(&preparsed);
    let imports = Rc::clone(&imports);
    let loaded = Rc::clone(&loaded);
    move |path: Mrc<[String]>, _| -> ParseResult<Vec<FileEntry>, ELoad> {
      let imported_ops: Vec<String> =
        imports.try_find(&path)?
          .keys()
          .chain(prelude.iter())
          .filter(|s| parse::is_op(s))
          .cloned()
          .collect();
      // let parser = file_parser(&prelude, &imported_ops);
      let pre = preparsed.try_find(&path)?;
      if let Loaded::Module(source) = loaded.try_find(&path)?.as_ref() {
        Ok(parse::reparse(&imported_ops, source.as_str(), &pre)?)
      } else { Err(ModuleError::None) }
    }
  }));
  let name_resolver_rc = RefCell::new(NameResolver::new({
    let modname = Rc::clone(&modname);
    move |path| {
      Some(modname.try_find(&path).ok()?.as_ref().clone())
    }
  }, {
    let imports = Rc::clone(&imports);
    move |path| {
      imports.try_find(&path).map(|f| f.as_ref().clone())
    }
  }));
  // Turn parsed files into a bag of rules and a list of toplevel export names
  let resolved = Rc::new(Cache::new({
    let parsed = Rc::clone(&parsed);
    let exports = Rc::clone(&exports);
    let imports = Rc::clone(&imports);
    let modname = Rc::clone(&modname);
    move |path: Mrc<[String]>, _| -> ParseResult<Module, ELoad> {
      let mut name_resolver = name_resolver_rc.borrow_mut();
      let module = Module {
        rules: parsed.try_find(&path)?
          .iter()
          .filter_map(|ent| {
            if let FileEntry::Rule(Rule{source, prio, target}, _) = ent {
              Some(Rule {
                source: source.iter()
                  .map(|ex| {
                    prefix_expr(ex, Mrc::clone(&path))
                  }).collect(),
                target: target.iter().map(|ex| {
                  prefix_expr(ex, Mrc::clone(&path))
                }).collect(),
                prio: *prio,
              })
            } else { None }
          })
          .map(|rule| Ok(super::Rule {
            source: to_mrc_slice(rule.source.iter()
              .map(|ex| name_resolver.process_expression(ex))
              .collect::<Result<Vec<_>, _>>()?),
            target: to_mrc_slice(rule.target.iter()
              .map(|ex| name_resolver.process_expression(ex))
              .collect::<Result<Vec<_>, _>>()?),
            ..rule
          }))
          .collect::<ParseResult<Vec<super::Rule>, ELoad>>()?,
        exports: exports.try_find(&path)?.as_ref().clone(),
        references: imports.try_find(&path)?
          .values()
          .filter_map(|imps| {
            modname.try_find(imps).ok().map(|r| r.as_ref().clone())
          })
          .collect()
      };
      Ok(module)
    }
  }));
  Cache::new({
    let resolved = Rc::clone(&resolved);
    move |path: Mrc<[String]>, _| -> ParseResult<Vec<super::Rule>, ELoad> {
      // Breadth-first search
      let mut processed: HashSet<Mrc<[String]>> = HashSet::new();
      let mut rules: Vec<super::Rule> = Vec::new();
      let mut pending: VecDeque<Mrc<[String]>> = VecDeque::new();
      pending.push_back(path);
      while let Some(el) = pending.pop_front() {
        let resolved = resolved.try_find(&el)?;
        processed.insert(el.clone());
        pending.extend(
          resolved.references.iter()
            .filter(|&v| !processed.contains(v))
            .cloned()
        );
        rules.extend(
          resolved.rules.iter().cloned()
        )
      };
      Ok(rules)
    }
  })
}
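The cache returned at the end performs a breadth-first walk over module references, so every reachable module's rules are gathered exactly once even when references form cycles. A self-contained sketch of that traversal; the `Module` struct and `collect_rules` below are simplified stand-ins:

```rust
use std::collections::{HashMap, HashSet, VecDeque};

// A resolved module: its own rules plus the modules it references.
struct Module {
    rules: Vec<String>,
    references: Vec<String>,
}

// Breadth-first walk over the reference graph, gathering every reachable module's rules once.
fn collect_rules(modules: &HashMap<String, Module>, entry: &str) -> Vec<String> {
    let mut processed: HashSet<String> = HashSet::new();
    let mut pending: VecDeque<String> = VecDeque::new();
    let mut rules = Vec::new();
    pending.push_back(entry.to_string());
    while let Some(path) = pending.pop_front() {
        if !processed.insert(path.clone()) { continue }
        if let Some(module) = modules.get(&path) {
            rules.extend(module.rules.iter().cloned());
            pending.extend(
                module.references.iter()
                    .filter(|r| !processed.contains(*r))
                    .cloned()
            );
        }
    }
    rules
}

fn main() {
    let mut modules = HashMap::new();
    modules.insert("main".to_string(), Module {
        rules: vec!["main.rule".into()],
        references: vec!["std".into()],
    });
    modules.insert("std".to_string(), Module {
        rules: vec!["std.rule".into()],
        references: vec!["main".into()], // cycles are fine: each module is visited once
    });
    assert_eq!(collect_rules(&modules, "main"), ["main.rule", "std.rule"]);
}
```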
@@ -4,7 +4,7 @@ use ordered_float::NotNan;
use std::{hash::Hash, intrinsics::likely};
use std::fmt::Debug;
use crate::utils::mrc_empty_slice;
-use crate::{executor::{ExternFn, Atom}, utils::one_mrc_slice};
+use crate::{foreign::{ExternFn, Atom}, utils::one_mrc_slice};

use super::Literal;
@@ -12,167 +12,167 @@ use super::Literal;
#[derive(PartialEq, Eq, Hash)]
pub struct Expr(pub Clause, pub Mrc<[Clause]>);
impl Expr {
  pub fn into_clause(self) -> Clause {
    if likely(self.1.len() == 0) { self.0 }
    else { Clause::S('(', one_mrc_slice(self)) }
  }
}

impl Clone for Expr {
  fn clone(&self) -> Self {
    Self(self.0.clone(), Mrc::clone(&self.1))
  }
}

impl Debug for Expr {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    let Expr(val, typ) = self;
    write!(f, "{:?}", val)?;
    for typ in typ.as_ref() {
      write!(f, ":{:?}", typ)?
    }
    Ok(())
  }
}

/// An S-expression as read from a source file
#[derive(PartialEq, Eq, Hash)]
pub enum Clause {
  Literal(Literal),
  Name{
    local: Option<String>,
    qualified: Mrc<[String]>
  },
  S(char, Mrc<[Expr]>),
  Explicit(Mrc<Expr>),
  Lambda(String, Mrc<[Expr]>, Mrc<[Expr]>),
  Auto(Option<String>, Mrc<[Expr]>, Mrc<[Expr]>),
  ExternFn(ExternFn),
  Atom(Atom),
  Placeh{
    key: String,
    /// None => matches one token
    /// Some((prio, nonzero)) =>
    /// prio is the sizing priority for the vectorial (higher prio grows first)
    /// nonzero is whether the vectorial matches 1..n or 0..n tokens
    vec: Option<(usize, bool)>
  },
}
impl Clause {
  pub fn body(&self) -> Option<Mrc<[Expr]>> {
    match self {
      Self::Auto(_, _, body) |
      Self::Lambda(_, _, body) |
      Self::S(_, body) => Some(Mrc::clone(body)),
      _ => None
    }
  }
  pub fn typ(&self) -> Option<Mrc<[Expr]>> {
    match self {
      Self::Auto(_, typ, _) | Self::Lambda(_, typ, _) => Some(Mrc::clone(typ)),
      _ => None
    }
  }
  pub fn into_expr(self) -> Expr {
    if let Self::S('(', body) = &self {
      if body.len() == 1 { body[0].clone() }
      else { Expr(self, mrc_empty_slice()) }
    } else { Expr(self, mrc_empty_slice()) }
  }
  pub fn from_exprv(exprv: Mrc<[Expr]>) -> Option<Clause> {
    if exprv.len() == 0 { None }
    else if exprv.len() == 1 { Some(exprv[0].clone().into_clause()) }
    else { Some(Self::S('(', exprv)) }
  }
}

impl Clone for Clause {
  fn clone(&self) -> Self {
    match self {
      Self::S(c, b) => Self::S(*c, Mrc::clone(b)),
      Self::Auto(n, t, b) => Self::Auto(
        n.clone(), Mrc::clone(t), Mrc::clone(b)
      ),
      Self::Name { local: l, qualified: q } => Self::Name {
        local: l.clone(), qualified: Mrc::clone(q)
      },
      Self::Lambda(n, t, b) => Self::Lambda(
        n.clone(), Mrc::clone(t), Mrc::clone(b)
      ),
      Self::Placeh{key, vec} => Self::Placeh{key: key.clone(), vec: *vec},
      Self::Literal(l) => Self::Literal(l.clone()),
      Self::ExternFn(nc) => Self::ExternFn(nc.clone()),
      Self::Atom(a) => Self::Atom(a.clone()),
      Self::Explicit(expr) => Self::Explicit(Mrc::clone(expr))
    }
  }
}

fn fmt_expr_seq(it: &mut dyn Iterator<Item = &Expr>, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
  for item in Itertools::intersperse(it.map(Some), None) { match item {
    Some(expr) => write!(f, "{:?}", expr),
    None => f.write_str(" "),
  }? }
  Ok(())
}

impl Debug for Clause {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    match self {
      Self::Literal(arg0) => write!(f, "{:?}", arg0),
      Self::Name{local, qualified} =>
        if let Some(local) = local {write!(f, "{}`{}`", qualified.join("::"), local)}
        else {write!(f, "{}", qualified.join("::"))},
      Self::S(del, items) => {
        f.write_str(&del.to_string())?;
        fmt_expr_seq(&mut items.iter(), f)?;
        f.write_str(match del {
          '(' => ")", '[' => "]", '{' => "}",
          _ => "CLOSING_DELIM"
        })
      },
      Self::Lambda(name, argtyp, body) => {
        f.write_str("\\")?;
        f.write_str(name)?;
        f.write_str(":")?; fmt_expr_seq(&mut argtyp.iter(), f)?; f.write_str(".")?;
        fmt_expr_seq(&mut body.iter(), f)
      },
      Self::Auto(name, argtyp, body) => {
        f.write_str("@")?;
        f.write_str(&name.clone().unwrap_or_default())?;
        f.write_str(":")?; fmt_expr_seq(&mut argtyp.iter(), f)?; f.write_str(".")?;
        fmt_expr_seq(&mut body.iter(), f)
      },
      Self::Placeh{key, vec: None} => write!(f, "${key}"),
      Self::Placeh{key, vec: Some((prio, true))} => write!(f, "...${key}:{prio}"),
      Self::Placeh{key, vec: Some((prio, false))} => write!(f, "..${key}:{prio}"),
      Self::ExternFn(nc) => write!(f, "{nc:?}"),
      Self::Atom(a) => write!(f, "{a:?}"),
      Self::Explicit(expr) => write!(f, "@{:?}", expr.as_ref())
    }
  }
}

/// A substitution rule as read from the source
#[derive(PartialEq, Eq, Hash)]
pub struct Rule {
  pub source: Mrc<[Expr]>,
  pub prio: NotNan<f64>,
  pub target: Mrc<[Expr]>
}

impl Clone for Rule {
  fn clone(&self) -> Self {
    Self {
      source: Mrc::clone(&self.source),
      prio: self.prio,
      target: Mrc::clone(&self.target)
    }
  }
}

impl Debug for Rule {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    write!(f, "{:?} ={}=> {:?}", self.source, self.prio, self.target)
  }
}
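`Expr` and `Clause` implement `Clone` by hand so that the `Mrc`-backed slices are shared rather than deep-copied. The same idea with std's `Rc<[T]>`, as a minimal sketch with a simplified node type (illustrative only, not the crate's API):

```rust
use std::rc::Rc;

// Nodes hold their children in a reference-counted slice, so Clone
// only bumps a counter instead of copying the whole subtree.
#[derive(Debug)]
enum Clause {
    Name(String),
    S(char, Rc<[Clause]>),
}

impl Clone for Clause {
    fn clone(&self) -> Self {
        match self {
            Self::Name(n) => Self::Name(n.clone()),
            Self::S(c, body) => Self::S(*c, Rc::clone(body)),
        }
    }
}

fn main() {
    let body: Rc<[Clause]> = vec![Clause::Name("x".into()), Clause::Name("y".into())].into();
    let a = Clause::S('(', Rc::clone(&body));
    let b = a.clone();
    // Both clones point at the same backing slice.
    if let (Clause::S(_, x), Clause::S(_, y)) = (&a, &b) {
        assert!(Rc::ptr_eq(x, y));
    }
    println!("{b:?}");
}
```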
@@ -1,187 +1,188 @@
use mappable_rc::Mrc;

-use crate::utils::{Stackframe, to_mrc_slice, mrc_empty_slice, ProtoMap};
+use crate::utils::{Stackframe, to_mrc_slice, mrc_empty_slice, ProtoMap, one_mrc_slice};

use super::{ast, typed};

#[derive(Clone)]
pub enum Error {
  /// `()` as a clause is meaningless in lambda calculus
  EmptyS,
  /// Only `(...)` may be converted to typed lambdas. `[...]` and `{...}` left in the code are
  /// signs of incomplete macro execution
  BadGroup(char),
  /// `foo:bar:baz` will be parsed as `(foo:bar):baz`, explicitly specifying `foo:(bar:baz)`
  /// is forbidden and it's also meaningless since `baz` can only ever be the kind of types
  ExplicitBottomKind,
  /// Name never bound in an enclosing scope - indicates incomplete macro substitution
  Unbound(String),
  /// Namespaced names can never occur in the code, these are signs of incomplete macro execution
  Symbol,
  /// Placeholders shouldn't even occur in the code during macro execution. Something is clearly
  /// terribly wrong
  Placeholder,
  /// It's possible to try and transform the clause `(foo:bar)` into a typed clause,
  /// however the correct value of this ast clause is a typed expression (included in the error)
  ///
  /// [expr] handles this case, so it's only really possible to get this
  /// error if you're calling [clause] directly
  ExprToClause(typed::Expr),
  /// @ tokens only ever occur between a function and a parameter
  NonInfixAt
}

/// Try to convert an expression from AST format to typed lambda
pub fn expr(expr: &ast::Expr) -> Result<typed::Expr, Error> {
  Ok(expr_rec(expr, ProtoMap::new(), &mut 0, None)?.0)
}

/// Try and convert a single clause from AST format to typed lambda
pub fn clause(clause: &ast::Clause) -> Result<typed::Clause, Error> {
  Ok(clause_rec(clause, ProtoMap::new(), &mut 0, None)?.0)
}

/// Try and convert a sequence of expressions from AST format to typed lambda
pub fn exprv(exprv: &[ast::Expr]) -> Result<typed::Expr, Error> {
  Ok(exprv_rec(exprv, ProtoMap::new(), &mut 0, None)?.0)
}

const NAMES_INLINE_COUNT:usize = 3;

/// Recursive state of [exprv]
fn exprv_rec(
  v: &[ast::Expr],
-  names: ProtoMap<&str, u64, NAMES_INLINE_COUNT>,
+  names: ProtoMap<&str, (u64, bool), NAMES_INLINE_COUNT>,
  next_id: &mut u64,
  explicits: Option<&Stackframe<Mrc<typed::Expr>>>,
) -> Result<(typed::Expr, usize), Error> {
  let (last, rest) = v.split_last().ok_or(Error::EmptyS)?;
  if rest.len() == 0 {return expr_rec(&v[0], names, next_id, explicits)}
  if let ast::Expr(ast::Clause::Explicit(inner), empty_slice) = last {
    assert!(empty_slice.len() == 0,
      "It is assumed that Explicit nodes can never have type annotations as the \
      wrapped expression node matches all trailing colons."
    );
    let (x, _) = expr_rec(inner.as_ref(), names, next_id, None)?;
    let new_explicits = Some(&Stackframe::opush(explicits, Mrc::new(x)));
    let (body, used_expls) = exprv_rec(rest, names, next_id, new_explicits)?;
    Ok((body, used_expls.saturating_sub(1)))
  } else {
    let (f, f_used_expls) = exprv_rec(rest, names, next_id, explicits)?;
    let x_explicits = Stackframe::opop(explicits, f_used_expls);
    let (x, x_used_expls) = expr_rec(last, names, next_id, x_explicits)?;
    Ok((typed::Expr(
      typed::Clause::Apply(Mrc::new(f), Mrc::new(x)),
      mrc_empty_slice()
    ), x_used_expls + f_used_expls))
  }
}

/// Recursive state of [expr]
fn expr_rec(
  ast::Expr(val, typ): &ast::Expr,
-  names: ProtoMap<&str, u64, NAMES_INLINE_COUNT>,
+  names: ProtoMap<&str, (u64, bool), NAMES_INLINE_COUNT>,
  next_id: &mut u64,
  explicits: Option<&Stackframe<Mrc<typed::Expr>>> // known explicit values
) -> Result<(typed::Expr, usize), Error> { // (output, used_explicits)
  let typ: Vec<typed::Clause> = typ.iter()
    .map(|c| Ok(clause_rec(c, names, next_id, None)?.0))
    .collect::<Result<_, _>>()?;
  if let ast::Clause::S(paren, body) = val {
    if *paren != '(' {return Err(Error::BadGroup(*paren))}
    let (typed::Expr(inner, inner_t), used_expls) = exprv_rec(
      body.as_ref(), names, next_id, explicits
    )?;
    let new_t = if typ.len() == 0 { inner_t } else {
      to_mrc_slice(if inner_t.len() == 0 { typ } else {
        inner_t.iter().chain(typ.iter()).cloned().collect()
      })
    };
    Ok((typed::Expr(inner, new_t), used_expls))
  } else {
    let (cls, used_expls) = clause_rec(&val, names, next_id, explicits)?;
    Ok((typed::Expr(cls, to_mrc_slice(typ)), used_expls))
  }
}

/// Recursive state of [clause]
fn clause_rec(
  cls: &ast::Clause,
-  names: ProtoMap<&str, u64, NAMES_INLINE_COUNT>,
+  names: ProtoMap<&str, (u64, bool), NAMES_INLINE_COUNT>,
  next_id: &mut u64,
  mut explicits: Option<&Stackframe<Mrc<typed::Expr>>>
) -> Result<(typed::Clause, usize), Error> {
  match cls { // (\t:(@T. Pair T T). t \left.\right. left) @number -- this will fail
    ast::Clause::ExternFn(e) => Ok((typed::Clause::ExternFn(e.clone()), 0)),
    ast::Clause::Atom(a) => Ok((typed::Clause::Atom(a.clone()), 0)),
    ast::Clause::Auto(no, t, b) => {
      // Allocate id
      let id = *next_id;
      *next_id += 1;
      // Pop an explicit if available
      let (value, rest_explicits) = explicits.map(
        |Stackframe{ prev, item, .. }| {
          (Some(item), *prev)
        }
      ).unwrap_or_default();
      explicits = rest_explicits;
      // Convert the type
      let typ = if t.len() == 0 {None} else {
        let (typed::Expr(c, t), _) = exprv_rec(
          t.as_ref(), names, next_id, None
        )?;
        if t.len() > 0 {return Err(Error::ExplicitBottomKind)}
|
|
||||||
else {Some(Mrc::new(c))}
|
|
||||||
};
|
|
||||||
// Traverse body with extended context
|
|
||||||
if let Some(name) = no {names.set(&&**name, id)}
|
|
||||||
let (body, used_expls) = exprv_rec(
|
|
||||||
b.as_ref(), names, next_id, explicits
|
|
||||||
)?;
|
|
||||||
// Produce a binding instead of an auto if explicit was available
|
|
||||||
if let Some(known_value) = value {
|
|
||||||
Ok((typed::Clause::Apply(
|
|
||||||
typed::Clause::Lambda(id, typ, Mrc::new(body)).wrap(),
|
|
||||||
Mrc::clone(known_value)
|
|
||||||
), used_expls + 1))
|
|
||||||
} else {
|
|
||||||
Ok((typed::Clause::Auto(id, typ, Mrc::new(body)), 0))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
ast::Clause::Lambda(n, t, b) => {
|
).unwrap_or_default();
|
||||||
// Allocate id
|
explicits = rest_explicits;
|
||||||
let id = *next_id;
|
// Convert the type
|
||||||
*next_id += 1;
|
let typ = if t.len() == 0 {mrc_empty_slice()} else {
|
||||||
// Convert the type
|
let (typed::Expr(c, t), _) = exprv_rec(
|
||||||
let typ = if t.len() == 0 {None} else {
|
t.as_ref(), names, next_id, None
|
||||||
let (typed::Expr(c, t), _) = exprv_rec(
|
)?;
|
||||||
t.as_ref(), names, next_id, None
|
if t.len() > 0 {return Err(Error::ExplicitBottomKind)}
|
||||||
)?;
|
else {one_mrc_slice(c)}
|
||||||
if t.len() > 0 {return Err(Error::ExplicitBottomKind)}
|
};
|
||||||
else {Some(Mrc::new(c))}
|
// Traverse body with extended context
|
||||||
};
|
if let Some(name) = no {names.set(&&**name, (id, true))}
|
||||||
names.set(&&**n, id);
|
let (body, used_expls) = exprv_rec(
|
||||||
let (body, used_expls) = exprv_rec(
|
b.as_ref(), names, next_id, explicits
|
||||||
b.as_ref(), names, next_id, explicits
|
)?;
|
||||||
)?;
|
// Produce a binding instead of an auto if explicit was available
|
||||||
Ok((typed::Clause::Lambda(id, typ, Mrc::new(body)), used_expls))
|
if let Some(known_value) = value {
|
||||||
}
|
Ok((typed::Clause::Apply(
|
||||||
ast::Clause::Literal(l) => Ok((typed::Clause::Literal(l.clone()), 0)),
|
typed::Clause::Lambda(id, typ, Mrc::new(body)).wrap(),
|
||||||
ast::Clause::Name { local: Some(arg), .. } => {
|
Mrc::clone(known_value)
|
||||||
let uid = names.get(&&**arg)
|
), used_expls + 1))
|
||||||
.ok_or_else(|| Error::Unbound(arg.clone()))?;
|
} else {
|
||||||
Ok((typed::Clause::Argument(*uid), 0))
|
Ok((typed::Clause::Auto(id, typ, Mrc::new(body)), 0))
|
||||||
}
|
}
|
||||||
ast::Clause::S(paren, entries) => {
|
|
||||||
if *paren != '(' {return Err(Error::BadGroup(*paren))}
|
|
||||||
let (typed::Expr(val, typ), used_expls) = exprv_rec(
|
|
||||||
entries.as_ref(), names, next_id, explicits
|
|
||||||
)?;
|
|
||||||
if typ.len() == 0 {Ok((val, used_expls))}
|
|
||||||
else {Err(Error::ExprToClause(typed::Expr(val, typ)))}
|
|
||||||
},
|
|
||||||
ast::Clause::Name { local: None, .. } => Err(Error::Symbol),
|
|
||||||
ast::Clause::Placeh { .. } => Err(Error::Placeholder),
|
|
||||||
ast::Clause::Explicit(..) => Err(Error::NonInfixAt)
|
|
||||||
}
|
}
|
||||||
|
ast::Clause::Lambda(n, t, b) => {
|
||||||
|
// Allocate id
|
||||||
|
let id = *next_id;
|
||||||
|
*next_id += 1;
|
||||||
|
// Convert the type
|
||||||
|
let typ = if t.len() == 0 {mrc_empty_slice()} else {
|
||||||
|
let (typed::Expr(c, t), _) = exprv_rec(
|
||||||
|
t.as_ref(), names, next_id, None
|
||||||
|
)?;
|
||||||
|
if t.len() > 0 {return Err(Error::ExplicitBottomKind)}
|
||||||
|
else {one_mrc_slice(c)}
|
||||||
|
};
|
||||||
|
names.set(&&**n, (id, false));
|
||||||
|
let (body, used_expls) = exprv_rec(
|
||||||
|
b.as_ref(), names, next_id, explicits
|
||||||
|
)?;
|
||||||
|
Ok((typed::Clause::Lambda(id, typ, Mrc::new(body)), used_expls))
|
||||||
|
}
|
||||||
|
ast::Clause::Literal(l) => Ok((typed::Clause::Literal(l.clone()), 0)),
|
||||||
|
ast::Clause::Name { local: Some(arg), .. } => {
|
||||||
|
let (uid, is_auto) = names.get(&&**arg)
|
||||||
|
.ok_or_else(|| Error::Unbound(arg.clone()))?;
|
||||||
|
let label = if *is_auto {typed::Clause::AutoArg} else {typed::Clause::LambdaArg};
|
||||||
|
Ok((label(*uid), 0))
|
||||||
|
}
|
||||||
|
ast::Clause::S(paren, entries) => {
|
||||||
|
if *paren != '(' {return Err(Error::BadGroup(*paren))}
|
||||||
|
let (typed::Expr(val, typ), used_expls) = exprv_rec(
|
||||||
|
entries.as_ref(), names, next_id, explicits
|
||||||
|
)?;
|
||||||
|
if typ.len() == 0 {Ok((val, used_expls))}
|
||||||
|
else {Err(Error::ExprToClause(typed::Expr(val, typ)))}
|
||||||
|
},
|
||||||
|
ast::Clause::Name { local: None, .. } => Err(Error::Symbol),
|
||||||
|
ast::Clause::Placeh { .. } => Err(Error::Placeholder),
|
||||||
|
ast::Clause::Explicit(..) => Err(Error::NonInfixAt)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
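// Illustrative sketch, not part of the diff: `exprv` above left-associates a
// sequence of expressions into nested Apply nodes, so a slice parsed from
// "f x y" becomes Apply(Apply(f, x), y). Only `exprv` and the Debug impl of
// typed::Expr (shown further down) are taken from this commit; the function
// name here is hypothetical.
fn show_conversion(seq: &[ast::Expr]) {
  if let Ok(expr) = exprv(seq) {
    println!("typed: {expr:?}");
  }
}
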
@@ -4,19 +4,19 @@ use std::fmt::Debug;
/// An exact value, read from the AST and unmodified in shape until compilation
#[derive(Clone, PartialEq, Eq, Hash)]
pub enum Literal {
  Num(NotNan<f64>),
  Int(u64),
  Char(char),
  Str(String),
}

impl Debug for Literal {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    match self {
      Self::Num(arg0) => write!(f, "{:?}", arg0),
      Self::Int(arg0) => write!(f, "{:?}", arg0),
      Self::Char(arg0) => write!(f, "{:?}", arg0),
      Self::Str(arg0) => write!(f, "{:?}", arg0),
    }
  }
}

@@ -1,7 +1,7 @@
use mappable_rc::Mrc;
use crate::foreign::{Atom, ExternFn};
use crate::utils::{to_mrc_slice, one_mrc_slice};
use crate::utils::string_from_charset;

use super::{Literal, ast_to_typed};
use super::ast;

@@ -16,121 +16,124 @@ struct Wrap(bool, bool);
#[derive(PartialEq, Eq, Hash)]
pub struct Expr(pub Clause, pub Mrc<[Clause]>);
impl Expr {
  fn deep_fmt(&self, f: &mut std::fmt::Formatter<'_>, tr: Wrap) -> std::fmt::Result {
    let Expr(val, typ) = self;
    if typ.len() > 0 {
      val.deep_fmt(f, Wrap(true, true))?;
      for typ in typ.as_ref() {
        f.write_char(':')?;
        typ.deep_fmt(f, Wrap(true, true))?;
      }
    } else {
      val.deep_fmt(f, tr)?;
    }
    Ok(())
  }
}

impl Clone for Expr {
  fn clone(&self) -> Self {
    Self(self.0.clone(), Mrc::clone(&self.1))
  }
}

impl Debug for Expr {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    self.deep_fmt(f, Wrap(false, false))
  }
}

#[derive(PartialEq, Eq, Hash)]
pub enum Clause {
  Literal(Literal),
  Apply(Mrc<Expr>, Mrc<Expr>),
  Lambda(u64, Mrc<[Clause]>, Mrc<Expr>),
  Auto(u64, Mrc<[Clause]>, Mrc<Expr>),
  LambdaArg(u64), AutoArg(u64),
  ExternFn(ExternFn),
  Atom(Atom)
}

const ARGNAME_CHARSET: &str = "abcdefghijklmnopqrstuvwxyz";

fn parametric_fmt(
  f: &mut std::fmt::Formatter<'_>,
  prefix: &str, argtyp: Mrc<[Clause]>, body: Mrc<Expr>, uid: u64, wrap_right: bool
) -> std::fmt::Result {
  if wrap_right { f.write_char('(')?; }
  f.write_str(prefix)?;
  f.write_str(&string_from_charset(uid, ARGNAME_CHARSET))?;
  for typ in argtyp.iter() {
    f.write_str(":")?;
    typ.deep_fmt(f, Wrap(false, false))?;
  }
  f.write_str(".")?;
  body.deep_fmt(f, Wrap(false, false))?;
  if wrap_right { f.write_char(')')?; }
  Ok(())
}

impl Clause {
  fn deep_fmt(&self, f: &mut std::fmt::Formatter<'_>, Wrap(wl, wr): Wrap)
  -> std::fmt::Result {
    match self {
      Self::Literal(arg0) => write!(f, "{arg0:?}"),
      Self::ExternFn(nc) => write!(f, "{nc:?}"),
      Self::Atom(a) => write!(f, "{a:?}"),
      Self::Lambda(uid, argtyp, body) => parametric_fmt(f,
        "\\", Mrc::clone(argtyp), Mrc::clone(body), *uid, wr
      ),
      Self::Auto(uid, argtyp, body) => parametric_fmt(f,
        "@", Mrc::clone(argtyp), Mrc::clone(body), *uid, wr
      ),
      Self::LambdaArg(uid) | Self::AutoArg(uid) => f.write_str(&
        string_from_charset(*uid, ARGNAME_CHARSET)
      ),
      Self::Apply(func, x) => {
        if wl { f.write_char('(')?; }
        func.deep_fmt(f, Wrap(false, true) )?;
        f.write_char(' ')?;
        x.deep_fmt(f, Wrap(true, wr && !wl) )?;
        if wl { f.write_char(')')?; }
        Ok(())
      }
    }
  }
  pub fn wrap(self) -> Mrc<Expr> { Mrc::new(Expr(self, to_mrc_slice(vec![]))) }
  pub fn wrap_t(self, t: Clause) -> Mrc<Expr> { Mrc::new(Expr(self, one_mrc_slice(t))) }
}

impl Clone for Clause {
  fn clone(&self) -> Self {
    match self {
      Clause::Auto(uid, t, b) => Clause::Auto(*uid, Mrc::clone(t), Mrc::clone(b)),
      Clause::Lambda(uid, t, b) => Clause::Lambda(*uid, Mrc::clone(t), Mrc::clone(b)),
      Clause::Literal(l) => Clause::Literal(l.clone()),
      Clause::ExternFn(nc) => Clause::ExternFn(nc.clone()),
      Clause::Atom(a) => Clause::Atom(a.clone()),
      Clause::Apply(f, x) => Clause::Apply(Mrc::clone(f), Mrc::clone(x)),
      Clause::LambdaArg(id) => Clause::LambdaArg(*id),
      Clause::AutoArg(id) => Clause::AutoArg(*id)
    }
  }
}

impl Debug for Clause {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    self.deep_fmt(f, Wrap(false, false))
  }
}

impl TryFrom<&ast::Expr> for Expr {
  type Error = ast_to_typed::Error;
  fn try_from(value: &ast::Expr) -> Result<Self, Self::Error> {
    ast_to_typed::expr(value)
  }
}

impl TryFrom<&ast::Clause> for Clause {
  type Error = ast_to_typed::Error;
  fn try_from(value: &ast::Clause) -> Result<Self, Self::Error> {
    ast_to_typed::clause(value)
  }
}

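// Illustrative sketch, not part of the diff: building a typed term with the
// slice-typed constructors above and printing it through the Debug impl.
// The argument id 0 is arbitrary and the function name is hypothetical;
// `to_mrc_slice` is the crate utility imported at the top of this file.
fn demo_typed_term() {
  let body = Clause::LambdaArg(0).wrap();                            // the bound argument
  let id_fn = Clause::Lambda(0, to_mrc_slice(vec![]), body).wrap();  // \a. a, no type annotation
  let one = Clause::Literal(Literal::Int(1)).wrap();
  let app = Clause::Apply(id_fn, one);
  println!("{app:?}"); // prints roughly (\a.a) 1
}
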
@@ -11,55 +11,55 @@ use super::super::RuleError;

fn verify_scalar_vec(pattern: &Expr, is_vec: &mut HashMap<String, bool>)
-> Result<(), String> {
  let verify_clause = |clause: &Clause, is_vec: &mut HashMap<String, bool>| -> Result<(), String> {
    match clause {
      Clause::Placeh{key, vec} => {
        if let Some(known) = is_vec.get(key) {
          if known != &vec.is_some() { return Err(key.to_string()) }
        } else {
          is_vec.insert(key.clone(), vec.is_some());
        }
      }
      Clause::Auto(name, typ, body) => {
        if let Some(key) = name.as_ref().and_then(|key| key.strip_prefix('$')) {
          if is_vec.get(key) == Some(&true) { return Err(key.to_string()) }
        }
        typ.iter().try_for_each(|e| verify_scalar_vec(e, is_vec))?;
        body.iter().try_for_each(|e| verify_scalar_vec(e, is_vec))?;
      }
      Clause::Lambda(name, typ, body) => {
        if let Some(key) = name.strip_prefix('$') {
          if is_vec.get(key) == Some(&true) { return Err(key.to_string()) }
        }
        typ.iter().try_for_each(|e| verify_scalar_vec(e, is_vec))?;
        body.iter().try_for_each(|e| verify_scalar_vec(e, is_vec))?;
      }
      Clause::S(_, body) => {
        body.iter().try_for_each(|e| verify_scalar_vec(e, is_vec))?;
      }
      _ => ()
    };
    Ok(())
  };
  let Expr(val, typ) = pattern;
  verify_clause(val, is_vec)?;
  for typ in typ.as_ref() {
    verify_clause(typ, is_vec)?;
  }
  Ok(())
}

fn slice_to_vec(src: &mut Mrc<[Expr]>, tgt: &mut Mrc<[Expr]>) {
  let prefix_expr = Expr(Clause::Placeh{key: "::prefix".to_string(), vec: Some((0, false))}, to_mrc_slice(vec![]));
  let postfix_expr = Expr(Clause::Placeh{key: "::postfix".to_string(), vec: Some((0, false))}, to_mrc_slice(vec![]));
  // Prefix or postfix to match the full vector
  let head_multi = matches!(src.first().expect("Src can never be empty!").0, Clause::Placeh{vec: Some(_), ..});
  let tail_multi = matches!(src.last().expect("Impossible branch!").0, Clause::Placeh{vec: Some(_), ..});
  let prefix_vec = if head_multi {vec![]} else {vec![prefix_expr]};
  let postfix_vec = if tail_multi {vec![]} else {vec![postfix_expr]};
  *src = to_mrc_slice(prefix_vec.iter().chain(src.iter()).chain(postfix_vec.iter()).cloned().collect());
  *tgt = to_mrc_slice(prefix_vec.iter().chain(tgt.iter()).chain(postfix_vec.iter()).cloned().collect());
}

/// Traverse the tree, calling pred on every sibling list until it returns some vec
@@ -67,117 +67,117 @@ fn slice_to_vec(src: &mut Mrc<[Expr]>, tgt: &mut Mrc<[Expr]>) {
/// return false if pred never returned some
fn update_first_seq_rec<F>(input: Mrc<[Expr]>, pred: &mut F) -> Option<Mrc<[Expr]>>
where F: FnMut(Mrc<[Expr]>) -> Option<Mrc<[Expr]>> {
  if let o@Some(_) = pred(Mrc::clone(&input)) {o} else {
    for Expr(cls, _) in input.iter() {
      if let Some(t) = cls.typ() {
        if let o@Some(_) = update_first_seq_rec(t, pred) {return o}
      }
      if let Some(b) = cls.body() {
        if let o@Some(_) = update_first_seq_rec(b, pred) {return o}
      }
    }
    None
  }
}

/// keep re-probing the input with pred until it stops matching
fn update_all_seqs<F>(input: Mrc<[Expr]>, pred: &mut F) -> Option<Mrc<[Expr]>>
where F: FnMut(Mrc<[Expr]>) -> Option<Mrc<[Expr]>> {
  let mut tmp = update_first_seq_rec(input, pred);
  while let Some(xv) = tmp {
    tmp = update_first_seq_rec(Mrc::clone(&xv), pred);
    if tmp.is_none() {return Some(xv)}
  }
  None
}

// fn write_clause_rec(state: &State, clause: &Clause) ->

fn write_expr_rec(state: &State, Expr(tpl_clause, tpl_typ): &Expr) -> Box<dyn Iterator<Item = Expr>> {
  let out_typ = tpl_typ.iter()
    .flat_map(|c| write_expr_rec(state, &c.clone().into_expr()))
    .map(Expr::into_clause)
    .collect::<Mrc<[Clause]>>();
  match tpl_clause {
    Clause::Auto(name_opt, typ, body) => box_once(Expr(Clause::Auto(
      name_opt.as_ref().and_then(|name| {
        if let Some(state_key) = name.strip_prefix('$') {
          match &state[state_key] {
            Entry::NameOpt(name) => name.as_ref().map(|s| s.as_ref().to_owned()),
            Entry::Name(name) => Some(name.as_ref().to_owned()),
            _ => panic!("Auto template name may only be derived from Auto or Lambda name")
          }
        } else {
          Some(name.to_owned())
        }
      }),
      write_slice_rec(state, typ),
      write_slice_rec(state, body)
    ), out_typ.to_owned())),
    Clause::Lambda(name, typ, body) => box_once(Expr(Clause::Lambda(
      if let Some(state_key) = name.strip_prefix('$') {
        if let Entry::Name(name) = &state[state_key] {
          name.as_ref().to_owned()
        } else {panic!("Lambda template name may only be derived from Lambda name")}
      } else {
        name.to_owned()
      },
      write_slice_rec(state, typ),
      write_slice_rec(state, body)
    ), out_typ.to_owned())),
    Clause::S(c, body) => box_once(Expr(Clause::S(
      *c,
      write_slice_rec(state, body)
    ), out_typ.to_owned())),
    Clause::Placeh{key, vec: None} => {
      let real_key = unwrap_or!(key.strip_prefix('_'); key);
      match &state[real_key] {
        Entry::Scalar(x) => box_once(x.as_ref().to_owned()),
        Entry::Name(n) => box_once(Expr(Clause::Name {
          local: Some(n.as_ref().to_owned()),
          qualified: one_mrc_slice(n.as_ref().to_owned())
        }, mrc_empty_slice())),
        _ => panic!("Scalar template may only be derived from scalar placeholder"),
      }
    },
    Clause::Placeh{key, vec: Some(_)} => if let Entry::Vec(v) = &state[key] {
      into_boxed_iter(v.as_ref().to_owned())
    } else {panic!("Vectorial template may only be derived from vectorial placeholder")},
    Clause::Explicit(param) => {
      assert!(out_typ.len() == 0, "Explicit should never have a type annotation");
      box_once(Clause::Explicit(Mrc::new(
        Clause::from_exprv(write_expr_rec(state, param).collect())
          .expect("Result shorter than template").into_expr()
      )).into_expr())
    },
    // Explicit base case so that we get an error if Clause gets new values
    c@Clause::Literal(_) | c@Clause::Name { .. } | c@Clause::ExternFn(_) | c@Clause::Atom(_) =>
      box_once(Expr(c.to_owned(), out_typ.to_owned()))
  }
}

/// Fill in a template from a state as produced by a pattern
fn write_slice_rec(state: &State, tpl: &Mrc<[Expr]>) -> Mrc<[Expr]> {
  eprintln!("Writing {tpl:?} with state {state:?}");
  tpl.iter().flat_map(|xpr| write_expr_rec(state, xpr)).collect()
}

/// Apply a rule (a pair of pattern and template) to an expression
pub fn execute(mut src: Mrc<[Expr]>, mut tgt: Mrc<[Expr]>, input: Mrc<[Expr]>)
-> Result<Option<Mrc<[Expr]>>, RuleError> {
  // Dimension check
  let mut is_vec_db = HashMap::new();
  src.iter().try_for_each(|e| verify_scalar_vec(e, &mut is_vec_db))
    .map_err(RuleError::ScalarVecMismatch)?;
  tgt.iter().try_for_each(|e| verify_scalar_vec(e, &mut is_vec_db))
    .map_err(RuleError::ScalarVecMismatch)?;
  // Padding
  slice_to_vec(&mut src, &mut tgt);
  // Generate matcher
  let matcher = SliceMatcherDnC::new(src);
  let matcher_cache = SliceMatcherDnC::get_matcher_cache();
  Ok(update_all_seqs(Mrc::clone(&input), &mut |p| {
    let state = matcher.match_range_cached(p, &matcher_cache)?;
    Some(write_slice_rec(&state, &tgt))
  }))
}

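// Illustrative sketch, not part of the diff: applying one rewrite rule with
// `execute` above. The three slices (pattern, template, subject) would come
// from the parsed rule table; the function and variable names are placeholders.
fn apply_rule_once(
  pattern: Mrc<[Expr]>, template: Mrc<[Expr]>, subject: Mrc<[Expr]>
) -> Result<Option<Mrc<[Expr]>>, RuleError> {
  // Ok(None) when the pattern never matched, Ok(Some(result)) with the
  // rewritten sequence otherwise, Err on a scalar/vectorial keying mismatch.
  execute(pattern, template, subject)
}
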
@@ -14,10 +14,10 @@ use super::split_at_max_vec::split_at_max_vec;
#[derive(Debug, Eq, PartialEq, Hash)]
pub struct CacheEntry<'a>(Mrc<[Expr]>, &'a SliceMatcherDnC);
impl<'a> Clone for CacheEntry<'a> {
  fn clone(&self) -> Self {
    let CacheEntry(mrc, matcher) = self;
    CacheEntry(Mrc::clone(mrc), matcher)
  }
}

@@ -31,281 +31,281 @@ impl<'a> Clone for CacheEntry<'a> {
/// a pattern on the entire tree.
#[derive(Clone, Eq)]
pub struct SliceMatcherDnC {
  /// The entire pattern this will match
  pattern: Mrc<[Expr]>,
  /// The exact clause this can match
  clause: Mrc<Clause>,
  /// Matcher for the parts of the pattern right from us
  right_subm: Option<Box<SliceMatcherDnC>>,
  /// Matcher for the parts of the pattern left from us
  left_subm: Option<Box<SliceMatcherDnC>>,
  /// Matcher for the body of this clause if it has one.
  /// Must be Some if pattern is (Auto, Lambda or S)
  body_subm: Option<Box<SliceMatcherDnC>>,
  /// Matcher for the type of this expression if it has one (Auto usually does)
  /// Optional
  typ_subm: Option<Box<SliceMatcherDnC>>,
}

impl PartialEq for SliceMatcherDnC {
  fn eq(&self, other: &Self) -> bool {
    self.pattern == other.pattern
  }
}

impl std::hash::Hash for SliceMatcherDnC {
  fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
    self.pattern.hash(state);
  }
}

impl SliceMatcherDnC {
  /// If this is true, `clause`, `typ_subm`, `body_subm` and `clause_qual_name` are meaningless.
  /// If it's false, it's also false for both side matchers.
  pub fn clause_is_vectorial(&self) -> bool {
    matches!(self.clause.as_ref(), Clause::Placeh{vec: Some(..), ..})
  }
  /// If clause is a name, the qualified name this can match
  pub fn clause_qual_name(&self) -> Option<Mrc<[String]>> {
    if let Clause::Name { qualified, .. } = self.clause.as_ref() {Some(Mrc::clone(qualified))} else {None}
  }
  /// If clause is a Placeh, the key in the state the match will be stored at
  pub fn state_key(&self) -> Option<&String> {
    if let Clause::Placeh { key, .. } = self.clause.as_ref() {Some(key)} else {None}
  }
  pub fn own_max_size(&self, total: usize) -> Option<usize> {
    if !self.clause_is_vectorial() {
      if total == self.len() {Some(total)} else {None}
    } else {
      let margin = self.min(Side::Left) + self.min(Side::Right);
      if margin + self.own_min_size() <= total {Some(total - margin)} else {None}
    }
  }
  pub fn own_min_size(&self) -> usize {
    if let Clause::Placeh { vec: Some((_, nonzero)), .. } = self.clause.as_ref() {
      if *nonzero {1} else {0}
    } else {self.len()}
  }

  /// Enumerate all valid subdivisions based on the reported size constraints of self and
  /// the two subranges
  pub fn valid_subdivisions(&self,
    range: Mrc<[Expr]>
  ) -> impl Iterator<Item = (Mrc<[Expr]>, Mrc<[Expr]>, Mrc<[Expr]>)> {
    let own_max = unwrap_or!(self.own_max_size(range.len()); return box_empty());
    let own_min = self.own_min_size();
    let lmin = self.min(Side::Left);
    let _lmax = self.max(Side::Left, range.len());
    let rmin = self.min(Side::Right);
    let _rmax = self.max(Side::Right, range.len());
    let full_len = range.len();
    Box::new((own_min..=own_max).rev().flat_map(move |own_len| {
      let wiggle = full_len - lmin - rmin - own_len;
      let range = Mrc::clone(&range);
      (0..=wiggle).map(move |offset| {
        let first_break = lmin + offset;
        let second_break = first_break + own_len;
        let left = mrc_derive(&range, |p| &p[0..first_break]);
        let mid = mrc_derive(&range, |p| &p[first_break..second_break]);
        let right = mrc_derive(&range, |p| &p[second_break..]);
        (left, mid, right)
      })
    }))
  }

  pub fn new(pattern: Mrc<[Expr]>) -> Self {
    let (clause, left_subm, right_subm) = mrc_try_derive(&pattern, |p| {
      if p.len() == 1 {Some(&p[0].0)} else {None}
    }).map(|e| (e, None, None))
    .or_else(|| split_at_max_vec(Mrc::clone(&pattern)).map(|(left, _, right)| (
      mrc_derive(&pattern, |p| &p[left.len()].0),
      if !left.is_empty() {Some(Box::new(Self::new(left)))} else {None},
      if !right.is_empty() {Some(Box::new(Self::new(right)))} else {None}
    )))
    .unwrap_or_else(|| (
      mrc_derive(&pattern, |p| &p[0].0),
      None,
      Some(Box::new(Self::new(mrc_derive(&pattern, |p| &p[1..]))))
    ));
    Self {
      pattern, right_subm, left_subm,
      clause: Mrc::clone(&clause),
      body_subm: clause.body().map(|b| Box::new(Self::new(b))),
      typ_subm: clause.typ().map(|t| Box::new(Self::new(t)))
    }
  }

  /// The shortest slice this pattern can match
  fn len(&self) -> usize {
    if self.clause_is_vectorial() {
      self.min(Side::Left) + self.min(Side::Right) + self.own_min_size()
    } else {self.pattern.len()}
  }
  /// Pick a subpattern based on the parameter
  fn side(&self, side: Side) -> Option<&SliceMatcherDnC> {
    match side {
      Side::Left => &self.left_subm,
      Side::Right => &self.right_subm
    }.as_ref().map(|b| b.as_ref())
  }
  /// The shortest slice the given side can match
  fn min(&self, side: Side) -> usize {self.side(side).map_or(0, |right| right.len())}
  /// The longest slice the given side can match
  fn max(&self, side: Side, total: usize) -> usize {
    self.side(side).map_or(0, |m| if m.clause_is_vectorial() {
      total - self.min(side.opposite()) - self.own_min_size()
    } else {m.len()})
  }
  /// Take the smallest possible slice from the given side
  fn slice_min<'a>(&self, side: Side, range: &'a [Expr]) -> &'a [Expr] {
    side.slice(self.min(side), range)
  }

  /// Matches the body on a range
  /// # Panics
  /// when called on an instance that does not have a body (not Auto, Lambda or S)
  fn match_body<'a>(&'a self,
    range: Mrc<[Expr]>, cache: &Cache<CacheEntry<'a>, Option<State>>
  ) -> Option<State> {
    self.body_subm.as_ref()
      .expect("Missing body matcher")
      .match_range_cached(range, cache)
  }
  /// Matches the type and body on respective ranges
  /// # Panics
  /// when called on an instance that does not have a body (not Auto, Lambda or S)
  fn match_parts<'a>(&'a self,
    typ_range: Mrc<[Expr]>, body_range: Mrc<[Expr]>,
    cache: &Cache<CacheEntry<'a>, Option<State>>
  ) -> Option<State> {
    let typ_state = if let Some(typ) = &self.typ_subm {
      typ.match_range_cached(typ_range, cache)?
    } else {State::new()};
    let body_state = self.match_body(body_range, cache)?;
    typ_state + body_state
  }

  /// Match the specified side-submatcher on the specified range with the cache
  /// In absence of a side-submatcher empty ranges are matched to empty state
  fn apply_side_with_cache<'a>(&'a self,
    side: Side, range: Mrc<[Expr]>,
    cache: &Cache<CacheEntry<'a>, Option<State>>
  ) -> Option<State> {
    match &self.side(side) {
      None => {
        if !range.is_empty() {None}
        else {Some(State::new())}
      },
      Some(m) => cache.try_find(&CacheEntry(range, m)).map(|s| s.as_ref().to_owned())
    }
  }

  fn match_range_scalar_cached<'a>(&'a self,
    target: Mrc<[Expr]>,
    cache: &Cache<CacheEntry<'a>, Option<State>>
  ) -> Option<State> {
    let pos = self.min(Side::Left);
    if target.len() != self.pattern.len() {return None}
    let mut own_state = (
      self.apply_side_with_cache(Side::Left, mrc_derive(&target, |t| &t[0..pos]), cache)?
      + self.apply_side_with_cache(Side::Right, mrc_derive(&target, |t| &t[pos+1..]), cache)
    )?;
    match (self.clause.as_ref(), &target.as_ref()[pos].0) {
      (Clause::Literal(val), Clause::Literal(tgt)) => {
        if val == tgt {Some(own_state)} else {None}
      }
      (Clause::Placeh{key, vec: None}, tgt_clause) => {
        if let Some(real_key) = key.strip_prefix('_') {
          if let Clause::Name { local: Some(value), .. } = tgt_clause {
            own_state.insert_name(real_key, value)
          } else {None}
        } else {own_state.insert_scalar(&key, &target[pos])}
      }
      (Clause::S(c, _), Clause::S(c_tgt, body_range)) => {
        if c != c_tgt {return None}
        own_state + self.match_parts(to_mrc_slice(vec![]), Mrc::clone(body_range), cache)
      }
      (Clause::Name{qualified, ..}, Clause::Name{qualified: q_tgt, ..}) => {
        if qualified == q_tgt {Some(own_state)} else {None}
      }
      (Clause::Lambda(name, _, _), Clause::Lambda(name_tgt, typ_tgt, body_tgt)) => {
        // Primarily, the name works as a placeholder
        if let Some(state_key) = name.strip_prefix('$') {
          own_state = own_state.insert_name(state_key, name_tgt)?
        } else if name != name_tgt {return None}
        // ^ But if you're weird like that, it can also work as a constraint
        own_state + self.match_parts(Mrc::clone(typ_tgt), Mrc::clone(body_tgt), cache)
      }
      (Clause::Auto(name_opt, _, _), Clause::Auto(name_range, typ_range, body_range)) => {
        if let Some(name) = name_opt {
          // TODO: Enforce this at construction, on a type system level
          let state_key = name.strip_prefix('$')
            .expect("Auto patterns may only reference, never enforce the name");
          own_state = own_state.insert_name_opt(state_key, name_range.as_ref())?
        }
        own_state + self.match_parts(Mrc::clone(typ_range), Mrc::clone(body_range), cache)
      },
      _ => None
    }
  }

  /// Match the range with a vectorial _assuming we are a vectorial_
  fn match_range_vectorial_cached<'a>(&'a self,
    name: &str,
    target: Mrc<[Expr]>,
    cache: &Cache<CacheEntry<'a>, Option<State>>
  ) -> Option<State> {
    // Step through valid slicings based on reported size constraints in order
    // from longest own section to shortest and from left to right
    for (left, own, right) in self.valid_subdivisions(target) {
      return Some(unwrap_or!(
        self.apply_side_with_cache(Side::Left, left, cache)
          .and_then(|lres| lres + self.apply_side_with_cache(Side::Right, right, cache))
          .and_then(|side_res| side_res.insert_vec(name, own.as_ref()));
        continue
      ))
    }
    None
  }

  /// Try and match the specified range
  pub fn match_range_cached<'a>(&'a self,
    target: Mrc<[Expr]>,
    cache: &Cache<CacheEntry<'a>, Option<State>>
  ) -> Option<State> {
    if self.pattern.is_empty() {
      return if target.is_empty() {Some(State::new())} else {None}
    }
    if self.clause_is_vectorial() {
      let key = self.state_key().expect("Vectorial implies key");
      self.match_range_vectorial_cached(key, target, cache)
    } else {self.match_range_scalar_cached(target, cache)}
  }

  pub fn get_matcher_cache<'a>()
  -> Cache<'a, CacheEntry<'a>, Option<State>> {
    Cache::new(
      |CacheEntry(tgt, matcher), cache| {
        matcher.match_range_cached(tgt, cache)
      }
    )
  }

  pub fn match_range(&self, target: Mrc<[Expr]>) -> Option<State> {
    self.match_range_cached(target, &Self::get_matcher_cache())
  }
}

impl Debug for SliceMatcherDnC {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("Matcher")
      .field("clause", &self.clause)
      .field("vectorial", &self.clause_is_vectorial())
      .field("min", &self.len())
      .field("left", &self.left_subm)
      .field("right", &self.right_subm)
      .field("lmin", &self.min(Side::Left))
      .field("rmin", &self.min(Side::Right))
      .finish()
  }
}

@@ -7,27 +7,27 @@ use crate::utils::{mrc_derive, mrc_try_derive};
pub type MaxVecSplit = (Mrc<[Expr]>, (Mrc<str>, usize, bool), Mrc<[Expr]>);
/// Derive the details of the central vectorial and the two sides from a slice of Expr's
pub fn split_at_max_vec(pattern: Mrc<[Expr]>) -> Option<MaxVecSplit> {
  let rngidx = pattern.iter().position_max_by_key(|ex| {
    if let Expr(Clause::Placeh{vec: Some((prio, _)), ..}, _) = ex {
      *prio as i64
    } else { -1 }
  })?;
  let left = mrc_derive(&pattern, |p| &p[0..rngidx]);
  let placeh = mrc_derive(&pattern, |p| &p[rngidx].0);
  let right = if rngidx == pattern.len() {
    mrc_derive(&pattern, |x| &x[0..1])
  } else {
    mrc_derive(&pattern, |x| &x[rngidx + 1..])
  };
  mrc_try_derive(&placeh, |p| {
    if let Clause::Placeh{key, vec: Some(_)} = p {
      Some(key)
    } else {None} // Repeated below on unchanged data
  }).map(|key| {
    let key = mrc_derive(&key, String::as_str);
    if let Clause::Placeh{vec: Some((prio, nonzero)), ..} = placeh.as_ref() {
      (left, (key, *prio, *nonzero), right)
    }
    else {panic!("Impossible branch")} // Duplicate of above
  })
}
@@ -6,10 +6,10 @@ use crate::ast::Expr;

#[derive(Debug, PartialEq, Eq)]
pub enum Entry {
  Vec(Rc<Vec<Expr>>),
  Scalar(Rc<Expr>),
  Name(Rc<String>),
  NameOpt(Option<Rc<String>>)
}

/// A bucket of indexed expression fragments. Addition may fail if there's a conflict.
@@ -19,129 +19,129 @@ pub struct State(HashMap<String, Entry>);
/// Clone without also cloning arbitrarily heavy Expr objects.
/// Key is expected to be a very short string with an allocator overhead close to zero.
impl Clone for Entry {
  fn clone(&self) -> Self {
    match self {
      Self::Name(n) => Self::Name(Rc::clone(n)),
      Self::Scalar(x) => Self::Scalar(Rc::clone(x)),
      Self::Vec(v) => Self::Vec(Rc::clone(v)),
      Self::NameOpt(o) => Self::NameOpt(o.as_ref().map(Rc::clone))
    }
  }
}

impl State {
  pub fn new() -> Self {
    Self(HashMap::new())
  }
  pub fn insert_vec<S>(mut self, k: &S, v: &[Expr]) -> Option<Self>
  where S: AsRef<str> + ToString + ?Sized + Debug {
    if let Some(old) = self.0.get(k.as_ref()) {
      if let Entry::Vec(val) = old {
        if val.as_slice() != v {return None}
      } else {return None}
    } else {
      self.0.insert(k.to_string(), Entry::Vec(Rc::new(v.to_vec())));
    }
    Some(self)
  }
  pub fn insert_scalar<S>(mut self, k: &S, v: &Expr) -> Option<Self>
  where S: AsRef<str> + ToString + ?Sized {
    if let Some(old) = self.0.get(k.as_ref()) {
      if let Entry::Scalar(val) = old {
        if val.as_ref() != v {return None}
      } else {return None}
    } else {
      self.0.insert(k.to_string(), Entry::Scalar(Rc::new(v.to_owned())));
    }
    Some(self)
  }
  pub fn insert_name<S1, S2>(mut self, k: &S1, v: &S2) -> Option<Self>
  where
    S1: AsRef<str> + ToString + ?Sized,
    S2: AsRef<str> + ToString + ?Sized
  {
    if let Some(old) = self.0.get(k.as_ref()) {
      if let Entry::Name(val) = old {
        if val.as_str() != v.as_ref() {return None}
      } else {return None}
    } else {
      self.0.insert(k.to_string(), Entry::Name(Rc::new(v.to_string())));
    }
    Some(self)
  }
  pub fn insert_name_opt<S1, S2>(mut self, k: &S1, v: Option<&S2>) -> Option<Self>
  where
    S1: AsRef<str> + ToString + ?Sized,
    S2: AsRef<str> + ToString + ?Sized
  {
    if let Some(old) = self.0.get(k.as_ref()) {
      if let Entry::NameOpt(val) = old {
        if val.as_ref().map(|s| s.as_ref().as_str()) != v.map(|s| s.as_ref()) {
          return None
        }
      } else {return None}
    } else {
      self.0.insert(k.to_string(), Entry::NameOpt(v.map(|s| Rc::new(s.to_string()))));
    }
    Some(self)
  }
  /// Insert a new entry, return None on conflict
  pub fn insert_pair(mut self, (k, v): (String, Entry)) -> Option<State> {
    if let Some(old) = self.0.get(&k) {
      if old != &v {return None}
    } else {
      self.0.insert(k, v);
    }
    Some(self)
  }
  /// Returns `true` if the state contains no data
  pub fn empty(&self) -> bool {
    self.0.is_empty()
  }
}

impl Add for State {
  type Output = Option<State>;

  fn add(mut self, rhs: Self) -> Self::Output {
    if self.empty() {
      return Some(rhs)
    }
    for pair in rhs.0 {
      self = self.insert_pair(pair)?
    }
    Some(self)
  }
}

impl Add<Option<State>> for State {
  type Output = Option<State>;

  fn add(self, rhs: Option<State>) -> Self::Output {
    rhs.and_then(|s| self + s)
  }
}

impl<S> Index<S> for State where S: AsRef<str> {
  type Output = Entry;

  fn index(&self, index: S) -> &Self::Output {
    return &self.0[index.as_ref()]
  }
}

impl IntoIterator for State {
  type Item = (String, Entry);

  type IntoIter = hashbrown::hash_map::IntoIter<String, Entry>;

  fn into_iter(self) -> Self::IntoIter {
    self.0.into_iter()
  }
}

impl Debug for State {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    write!(f, "{:?}", self.0)
  }
}
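
The conflict semantics above are easiest to see in a small sketch. The following is illustrative only and not part of the commit; it assumes the `State` type above is in scope (the exact module path is a guess) and uses only name entries so it stays independent of `Expr`.

```rust
// Hypothetical demo, not part of the commit; assumes the State type above.
fn state_merge_demo() {
  // Disjoint keys merge cleanly through the Add impl.
  let a = State::new().insert_name("fname", "printf").unwrap();
  let b = State::new().insert_name("arg", "out").unwrap();
  let merged = (a + b).expect("disjoint keys never conflict");
  assert!(!merged.empty());

  // The same key bound to two different names is a conflict, so addition yields None.
  let x = State::new().insert_name("fname", "printf").unwrap();
  let y = State::new().insert_name("fname", "println").unwrap();
  assert!((x + y).is_none());
}
```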
@@ -9,44 +9,44 @@ use super::{super::ast::Rule, executor::execute, RuleError};
/// Manages a priority queue of substitution rules and allows to apply them
pub struct Repository(Vec<Rule>);
impl Repository {
  pub fn new(mut rules: Vec<Rule>) -> Self {
    rules.sort_by_key(|r| r.prio);
    Self(rules)
  }

  /// Attempt to run each rule in priority order once
  pub fn step(&self, mut code: Mrc<[Expr]>) -> Result<Option<Mrc<[Expr]>>, RuleError> {
    let mut ran_once = false;
    for rule in self.0.iter() {
      if let Some(tmp) = execute(
        Mrc::clone(&rule.source), Mrc::clone(&rule.target),
        Mrc::clone(&code)
      )? {
        ran_once = true;
        code = tmp;
      }
    }
    Ok(if ran_once {Some(code)} else {None})
  }

  /// Attempt to run each rule in priority order `limit` times. Returns the final
  /// tree and the number of iterations left to the limit.
  pub fn long_step(&self, mut code: Mrc<[Expr]>, mut limit: usize)
  -> Result<(Mrc<[Expr]>, usize), RuleError> {
    while let Some(tmp) = self.step(Mrc::clone(&code))? {
      if 0 >= limit {break}
      limit -= 1;
      code = tmp
    }
    Ok((code, limit))
  }
}

impl Debug for Repository {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    for rule in self.0.iter() {
      writeln!(f, "{rule:?}")?
    }
    Ok(())
  }
}
@@ -2,17 +2,17 @@ use std::{fmt, error::Error};

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum RuleError {
  BadState(String),
  ScalarVecMismatch(String)
}

impl fmt::Display for RuleError {
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    match self {
      Self::BadState(key) => write!(f, "Key {:?} not in match pattern", key),
      Self::ScalarVecMismatch(key) =>
        write!(f, "Key {:?} used inconsistently with and without ellipsis", key)
    }
  }
}
impl Error for RuleError {}
51 src/scheduler/generator_task.rs Normal file
@@ -0,0 +1,51 @@
use std::{ops::{Generator, GeneratorState}, pin::Pin};

use super::{Task, Nice, TaskState};

pub struct GeneratorTask<G: Generator<(), Yield = ()>> {
  nice: Nice,
  generator: Pin<Box<G>>
}

impl<G> GeneratorTask<G> where G: Generator<(), Yield = ()> {
  fn new(nice: Nice, generator: G) -> Self { Self {
    nice,
    generator: Box::pin(generator)
  } }
}

impl<G> Task for GeneratorTask<G>
where G: Generator<(), Yield = ()> {
  type Result = G::Return;

  fn run_once(&mut self) -> super::TaskState<Self::Result> {
    match self.generator.as_mut().resume(()) {
      GeneratorState::Yielded(()) => super::TaskState::Yield,
      GeneratorState::Complete(r) => super::TaskState::Complete(r)
    }
  }
}

impl<T> Task for Pin<Box<T>> where T: Generator<(), Yield = ()> {
  type Result = T::Return;

  fn run_once(&mut self) -> super::TaskState<Self::Result> {
    match self.as_mut().resume(()) {
      GeneratorState::Yielded(()) => TaskState::Yield,
      GeneratorState::Complete(r) => TaskState::Complete(r)
    }
  }
}

#[macro_export]
macro_rules! subtask {
  ($g:tt) => { {
    let task = $g;
    loop {
      match task.run_once() {
        TaskState::Yield => yield;
        TaskState::Complete(r) => break r;
      }
    }
  } };
}
47 src/scheduler/mod.rs Normal file
@@ -0,0 +1,47 @@
mod generator_task;
mod task_pair;
mod task_vec;

pub type Nice = u16;
pub type Priority = i32;

pub enum TaskState<R> {
  Yield,
  Complete(R)
}

pub trait Task {
  type Result;

  fn run_once(&mut self) -> TaskState<Self::Result>;

  fn run_n_times(&mut self, count: u64) -> TaskState<Self::Result> {
    for _ in 0..count {
      if let r@TaskState::Complete(_) = self.run_once() {
        return r
      }
    }
    return TaskState::Yield
  }

  fn run_to_completion(&mut self) -> Self::Result {
    loop { if let TaskState::Complete(r) = self.run_once() {return r} }
  }

  fn boxed<'a>(self) -> TaskBox<'a, Self::Result> where Self: 'a + Sized { Box::new(self) }
}

pub type TaskBox<'a, T> = Box<dyn Task<Result = T> + 'a>;

impl<'a, R> Task for TaskBox<'a, R> {
  type Result = R;

  fn run_once(&mut self) -> TaskState<Self::Result> { self.as_mut().run_once() }
  fn run_n_times(&mut self, count: u64) -> TaskState<Self::Result> {
    self.as_mut().run_n_times(count)
  }

  fn run_to_completion(&mut self) -> Self::Result {
    self.as_mut().run_to_completion()
  }
}
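
To make the contract concrete, here is a hypothetical task implementation that is not part of the commit: a countdown that yields a fixed number of times before completing, driven through the default methods above.

```rust
// Hypothetical example (not in the commit); assumes the Task and TaskState
// definitions above, e.g. `use crate::scheduler::{Task, TaskState};`.
struct Countdown(u64);

impl Task for Countdown {
  type Result = &'static str;

  fn run_once(&mut self) -> TaskState<Self::Result> {
    if self.0 == 0 { return TaskState::Complete("done") }
    self.0 -= 1;
    TaskState::Yield
  }
}

fn countdown_demo() {
  let mut t = Countdown(5);
  // Three steps are not enough, so the task reports that it yielded.
  assert!(matches!(t.run_n_times(3), TaskState::Yield));
  // A cooperative task can always be driven to completion deterministically.
  assert_eq!(t.run_to_completion(), "done");
  // Heterogeneous tasks with the same Result type can sit behind TaskBox.
  let _boxed = Countdown(2).boxed();
}
```

This is the interface that `GeneratorTask`, `TaskPair` and `TaskVec` below all build on.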
3 src/scheduler/notes.md Normal file
@@ -0,0 +1,3 @@
# Purpose

Type expressions are trees. Any single branch could terminate the solver and any branch may be nonterminating, therefore all of them must be run concurrently. Thread-based concurrency isn't an option because a compiler must be perfectly deterministic. It is also beneficial to have fine-grained control over the relative priority of different tasks.
67 src/scheduler/task_pair.rs Normal file
@@ -0,0 +1,67 @@
use crate::utils::translate::process;

use super::{Task, Nice, Priority, TaskState};

enum TaskPairState<T: Task, U: Task> {
  Empty,
  Left(T, U::Result),
  Right(T::Result, U),
  Both(T, U)
}

pub struct TaskPair<T: Task, U: Task> {
  l_nice: Nice,
  r_nice: Nice,
  state: TaskPairState<T, U>,
  tally: Priority,
}

impl<T: Task, U: Task> TaskPair<T, U> {
  pub fn new(l_nice: Nice, left: T, r_nice: Nice, right: U) -> Self {
    Self {
      l_nice, r_nice,
      tally: 0,
      state: TaskPairState::Both(left, right)
    }
  }
}

impl<T: Task, U: Task> Task for TaskPair<T, U> {
  type Result = (T::Result, U::Result);

  fn run_once(&mut self) -> TaskState<Self::Result> {
    let TaskPair{ state, tally, l_nice, r_nice } = self;
    let ret = process(state, |s| match s {
      TaskPairState::Empty => panic!("Generator completed and empty"),
      TaskPairState::Left(mut l_task, r_res) => {
        match l_task.run_once() {
          TaskState::Complete(r) => (TaskPairState::Empty, TaskState::Complete((r, r_res))),
          TaskState::Yield => (TaskPairState::Left(l_task, r_res), TaskState::Yield),
        }
      }
      TaskPairState::Right(l_res, mut r_task) => {
        match r_task.run_once() {
          TaskState::Complete(r) => (TaskPairState::Empty, TaskState::Complete((l_res, r))),
          TaskState::Yield => (TaskPairState::Right(l_res, r_task), TaskState::Yield),
        }
      }
      TaskPairState::Both(mut l_task, mut r_task) => {
        let state = if 0 <= *tally {
          *tally -= *l_nice as Priority;
          match l_task.run_once() {
            TaskState::Complete(r) => TaskPairState::Right(r, r_task),
            TaskState::Yield => TaskPairState::Both(l_task, r_task),
          }
        } else {
          *tally += *r_nice as Priority;
          match r_task.run_once() {
            TaskState::Complete(r) => TaskPairState::Left(l_task, r),
            TaskState::Yield => TaskPairState::Both(l_task, r_task),
          }
        };
        (state, TaskState::Yield)
      }
    });
    ret
  }
}
107 src/scheduler/task_vec.rs Normal file
@@ -0,0 +1,107 @@
use std::iter;

use itertools::Itertools;

use super::{Task, Nice, TaskState};

const NORMALIZATION_THRESHOLD:Nice = Nice::MAX / 4;

struct TaskEntry<T: Task> {
  nice: Nice,
  position: usize,
  tally: Nice,
  task: T
}

struct TaskVec<T: Task> {
  results: Vec<Option<T::Result>>,
  task_heap: Vec<Option<TaskEntry<T>>>,
}

impl<T: Task> TaskVec<T> {
  pub fn new(tasks: Vec<(Nice, T)>) -> Self {
    let mut results = Vec::with_capacity(tasks.len());
    results.resize_with(tasks.len(), || None);
    let task_heap = tasks.into_iter().enumerate()
      .map(|(position, (nice, task))| Some(TaskEntry{ nice, task, position, tally: 1 }))
      .collect_vec();
    Self { results, task_heap }
  }

  fn entry(&self, i: usize) -> Option<&TaskEntry<T>> {
    if self.task_heap.len() <= i {None}
    else {self.task_heap[i].as_ref()}
  }
  fn entry_mut(&mut self, i: usize) -> Option<&mut TaskEntry<T>> {
    if self.task_heap.len() <= i {None}
    else {self.task_heap[i].as_mut()}
  }
  fn tally(&self, i: usize) -> Nice {
    self.task_heap[i].as_ref().map(|e| e.tally).unwrap_or(0)
  }
  fn swap(&mut self, a: usize, b: usize) {
    self.task_heap.swap(a, b);
  }
  fn iter_mut(&mut self) -> impl Iterator<Item = &mut TaskEntry<T>> {
    self.task_heap.iter_mut().filter_map(|e| e.as_mut())
  }

  fn normalize(&mut self) {
    let shrink_count = self.task_heap.iter().rev().take_while(|e| e.is_none()).count();
    let new_len = self.task_heap.len() - shrink_count;
    self.task_heap.splice(0..new_len, iter::empty());
    let head = self.entry_mut(0);
    let offset = if let Some(e) = head {
      let offset = e.tally - 1;
      if offset < NORMALIZATION_THRESHOLD {return}
      e.tally = 1;
      offset
    } else {return};
    for entry in self.iter_mut() { entry.tally -= offset }
  }

  fn sink(&mut self, i: usize) {
    let lchi = 2*i + 1;
    let rchi = 2*i + 2;
    let t = self.tally(i);
    let lcht = if let Some(e) = self.entry(lchi) {e.tally} else {
      if self.tally(rchi) < t {
        self.swap(rchi, i);
        self.sink(rchi);
      }
      return
    };
    let rcht = if let Some(e) = self.entry(rchi) {e.tally} else {
      if self.tally(lchi) < t {
        self.swap(lchi, i);
        self.sink(lchi);
      }
      return
    };
    let mchi = {
      if rcht < t && rcht < lcht {rchi}
      else if lcht < t && lcht < rcht {lchi}
      else {return}
    };
    self.swap(i, mchi);
    self.sink(mchi);
  }
}

impl<T: Task> Task for TaskVec<T> {
  fn run_once(&mut self) -> super::TaskState<Self::Result> {
    let head = &mut self.task_heap[0];
    let head_entry = head.as_mut().expect("All completed, cannot run further");
    head_entry.tally += head_entry.nice;
    match head_entry.task.run_once() {
      TaskState::Complete(r) => {
        self.results[head_entry.position] = Some(r);
        *head = None;
        self.sink(0);
        if self.entry(0).is_some() {

        }
      }
    }
  }
}
@@ -1,52 +0,0 @@
use std::{borrow::Borrow};
use std::hash::Hash;

use hashbrown::HashMap;
use mappable_rc::Mrc;

use crate::{ast::{Expr, Clause}, utils::mrc_to_iter};

pub struct Substitution(HashMap<String, Mrc<Expr>>);
impl Substitution {
  fn new() -> Self { Self(HashMap::new()) }
  fn apply<Q: ?Sized + Hash + Eq>(&self, q: &Q) -> Option<Mrc<Expr>>
  where String: Borrow<Q> {
    self.0.get(q).map(Mrc::clone)
  }
}

pub fn hindley_milner(a: Mrc<[Expr]>, b: Mrc<[Expr]>) -> Result<Substitution, ()> {
  hindley_milner_rec(Substitution::new(), a, b)
}

pub fn hindley_milner_rec(mut s: Substitution, a: Mrc<[Expr]>, b: Mrc<[Expr]>)
-> Result<Substitution, ()> {
  if a.len() != b.len() {return Err(())}
  for (mut a, mut b) in mrc_to_iter(a).zip(mrc_to_iter(b)) {
    if let Clause::Placeh{key, ..} = &a.0 {
      if let Some(ex) = s.apply(key) { a = ex }
    }
    if let Clause::Placeh{key, ..} = &b.0 {
      if let Some(ex) = s.apply(key) { b = ex }
    }
    if !matches!(&a.0, Clause::Placeh{..}) { (a, b) = (b, a) }
    match (&a.0, &b.0) {
      (Clause::Placeh{key:a_key,..}, Clause::Placeh{key:b_key,..}) =>
        if a_key == b_key {return Ok(s)},

      _ => return Err(())
    }
    if let (Clause::Placeh{key: a_key,..}, Clause::Placeh{key: b_key,..}) = (&a.0, &b.0) {
      if a_key == b_key {return Ok(s)}
    } else if let (Clause::S(_, a_body), Clause::S(_, b_body)) = (&a.0, &b.0) {
      s = hindley_milner_rec(s, Mrc::clone(a_body), Mrc::clone(b_body))?
    } else if let ()
  }
  Ok(s)
}

pub fn occurs(key: &str, val: &Expr) -> bool {
  match val.0 {
    Clause::Auto(_, _, body) => body.
  }
}
@@ -1,13 +0,0 @@
// mod hindley_milner;

#[derive(Clone, Hash, PartialEq, Eq)]
pub enum Expression<L, V, O, F> {
  Literal(L),
  Variable(V),
  Operation(O, Vec<Expression<L, V, O, F>>),
  Lazy(F)
}

pub struct Rule {

}
148 src/utils/bfs.rs
@@ -17,27 +17,27 @@ use crate::utils::BoxedIter;
pub fn bfs<T, F, I>(init: T, neighbors: F)
-> impl Iterator<Item = T>
where T: Eq + Hash + Clone + std::fmt::Debug,
  F: Fn(T) -> I, I: Iterator<Item = T>
{
  let mut visited: HashSet<T> = HashSet::new();
  let mut visit_queue: VecDeque<T> = VecDeque::from([init]);
  let mut unpack_queue: VecDeque<T> = VecDeque::new();
  iter::from_fn(move || {
    let next = {loop {
      let next = unwrap_or!(visit_queue.pop_front(); break None);
      if !visited.contains(&next) { break Some(next) }
    }}.or_else(|| loop {
      let unpacked = unwrap_or!(unpack_queue.pop_front(); break None);
      let mut nbv = neighbors(unpacked).filter(|t| !visited.contains(t));
      if let Some(next) = nbv.next() {
        visit_queue.extend(nbv);
        break Some(next)
      }
    })?;
    visited.insert(next.clone());
    unpack_queue.push_back(next.clone());
    Some(next)
  })
}

/// Same as [bfs] but with a recursion depth limit
@@ -48,66 +48,66 @@ where T: Eq + Hash + Clone + std::fmt::Debug,
pub fn bfs_upto<'a, T: 'a, F: 'a, I: 'a>(init: T, neighbors: F, limit: usize)
-> impl Iterator<Item = T> + 'a
where T: Eq + Hash + Clone + std::fmt::Debug,
  F: Fn(T) -> I, I: Iterator<Item = T>
{
  /// Newtype to store the recursion depth but exclude it from equality comparisons
  /// Because BFS visits nodes in increasing distance order, when a node is visited for the
  /// second time it will never override the earlier version of itself. This is not the case
  /// with Dijkstra's algorithm, which can be conceptualised as a "weighted BFS".
  #[derive(Eq, Clone, Debug)]
  struct Wrap<U>(usize, U);
  impl<U: PartialEq> PartialEq for Wrap<U> {
    fn eq(&self, other: &Self) -> bool { self.1.eq(&other.1) }
  }
  impl<U: Hash> Hash for Wrap<U> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.1.hash(state) }
  }
  bfs(Wrap(0, init), move |Wrap(dist, t)| -> BoxedIter<Wrap<T>> { // boxed because we branch
    if dist == limit {Box::new(iter::empty())}
    else {Box::new(neighbors(t).map(move |t| Wrap(dist + 1, t)))}
  }).map(|Wrap(_, t)| t)
}

#[cfg(test)]
mod tests {
  use itertools::Itertools;

  use super::*;

  type Graph = Vec<Vec<usize>>;
  fn neighbors(graph: &Graph, pt: usize) -> impl Iterator<Item = usize> + '_ {
    graph[pt].iter().copied()
  }
  fn from_neighborhood_matrix(matrix: Vec<Vec<usize>>) -> Graph {
    matrix.into_iter().map(|v| {
      v.into_iter().enumerate().filter_map(|(i, ent)| {
        if ent > 1 {panic!("Neighborhood matrices must contain binary values")}
        else if ent == 1 {Some(i)}
        else {None}
      }).collect()
    }).collect()
  }

  #[test]
  fn test_square() {
    let simple_graph = from_neighborhood_matrix(vec![
      vec![0,1,0,1,1,0,0,0],
      vec![1,0,1,0,0,1,0,0],
      vec![0,1,0,1,0,0,1,0],
      vec![1,0,1,0,0,0,0,1],
      vec![1,0,0,0,0,1,0,1],
      vec![0,1,0,0,1,0,1,0],
      vec![0,0,1,0,0,1,0,1],
      vec![0,0,0,1,1,0,1,0],
    ]);
    let scan = bfs(0, |n| neighbors(&simple_graph, n)).collect_vec();
    assert_eq!(scan, vec![0, 1, 3, 4, 2, 5, 7, 6])
  }
  #[test]
  fn test_stringbuilder() {
    let scan = bfs("".to_string(), |s| {
      vec![s.clone()+";", s.clone()+"a", s+"aaa"].into_iter()
    }).take(30).collect_vec();
    println!("{scan:?}")
  }
}
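
The tests above only exercise `bfs`; the depth-limited variant could be demonstrated the same way. The sketch below is not part of the commit and would sit alongside the tests above, reusing their adjacency-list representation.

```rust
// Hypothetical companion test, not in the commit.
#[test]
fn test_depth_limit() {
  // A simple chain 0 -> 1 -> 2 -> 3; with limit 2 only nodes within two hops appear.
  let chain: Vec<Vec<usize>> = vec![vec![1], vec![2], vec![3], vec![]];
  let seen: Vec<usize> = bfs_upto(0, |n| chain[n].iter().copied(), 2).collect();
  assert_eq!(seen, vec![0, 1, 2]);
}
```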
@@ -4,93 +4,93 @@ use mappable_rc::Mrc;

/// Convenience trait for overriding Mrc's strange cloning logic
pub trait MyClone {
  fn my_clone(&self) -> Self;
}

impl<T> MyClone for T where T: Clone {
  default fn my_clone(&self) -> Self { self.clone() }
}

impl<T: ?Sized> MyClone for Rc<T> {
  fn my_clone(&self) -> Self { Rc::clone(self) }
}
impl<T: ?Sized> MyClone for Mrc<T> {
  fn my_clone(&self) -> Self { Mrc::clone(self) }
}

/// Cache the return values of an effectless closure in a hashmap
/// Inspired by the closure_cacher crate.
pub struct Cache<'a, I, O: 'static> {
  store: RefCell<HashMap<I, Mrc<O>>>,
  closure: Box<dyn Fn (I, &Self) -> Mrc<O> + 'a>
}

impl<'a, I, O> Cache<'a, I, O> where
  I: Eq + Hash + MyClone
{
  pub fn new<F: 'a>(closure: F) -> Self where F: Fn(I, &Self) -> O {
    Self::new_raw(move |o, s| Mrc::new(closure(o, s)))
  }

  /// Take an Mrc<O> closure rather than an O closure
  /// Used internally to derive caches from other systems working with Mrc-s
  pub fn new_raw<F: 'a>(closure: F) -> Self where F: Fn(I, &Self) -> Mrc<O> {
    Self {
      store: RefCell::new(HashMap::new()),
      closure: Box::new(closure)
    }
  }

  /// Produce and cache a result by cloning I if necessary
  pub fn find(&self, i: &I) -> Mrc<O> {
    let closure = &self.closure;
    if let Some(v) = self.store.borrow().get(i) {
      return Mrc::clone(v)
    }
    // In the moment of invocation the refcell is on immutable
    // this is important for recursive calculations
    let result = closure(i.my_clone(), self);
    let mut store = self.store.borrow_mut();
    Mrc::clone(store.raw_entry_mut().from_key(i)
      .or_insert_with(|| (i.my_clone(), result)).1)
  }

  #[allow(dead_code)]
  /// Return the result if it has already been computed
  pub fn known(&self, i: &I) -> Option<Mrc<O>> {
    let store = self.store.borrow();
    store.get(i).map(Mrc::clone)
  }
  #[allow(dead_code)]
  /// Forget the output for the given input
  pub fn drop(&self, i: &I) -> bool {
    self.store.borrow_mut().remove(i).is_some()
  }
}

impl<'a, I, O, E> Cache<'a, I, Result<O, E>> where
  I: Eq + Hash + MyClone,
  // O: Clone,
  E: Clone
{
  /// Sink the ref from a Result into the Ok value, such that cloning only occurs on the sad path
  /// but the return value can be short-circuited
  pub fn try_find(&self, i: &I) -> Result<Mrc<O>, E> {
    let ent = self.find(i);
    Mrc::try_map(ent, |t| t.as_ref().ok())
      .map_err(|res| Result::as_ref(&res).err().unwrap().to_owned())
  }
}

impl<'a, I, O> Cache<'a, I, Option<O>> where
  I: Eq + Hash + MyClone,
  // O: Clone
{
  #[allow(dead_code)]
  /// Sink the ref from an Option into the Some value such that the return value can be
  /// short-circuited
  pub fn try_find(&self, i: &I) -> Option<Mrc<O>> where I: Clone {
    let ent = self.find(i);
    Mrc::try_map(ent, |o| o.as_ref()).ok()
  }
}
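
Because the closure receives the cache itself, recursive definitions memoise naturally; that is exactly what the note about keeping the RefCell borrow immutable during invocation protects. A hedged usage sketch, not part of the commit:

```rust
// Hypothetical demo assuming `use crate::utils::Cache;`. The closure can call
// back into the cache, so recursive computations are memoised per key.
fn fibonacci_cache_demo() {
  let fib = Cache::new(|n: u64, cache: &Cache<u64, u64>| {
    if n < 2 { n } else { *cache.find(&(n - 1)) + *cache.find(&(n - 2)) }
  });
  assert_eq!(*fib.find(&10u64), 55);
  // Values computed along the way are now known without re-running the closure.
  assert_eq!(fib.known(&9u64).as_deref().copied(), Some(34));
}
```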
@@ -11,10 +11,10 @@
///
/// ```
/// xloop!(for i in 0..10; {
///   connection.try_connect()
///   if connection.ready() {
///     break Some(connection)
///   }
/// }; None)
/// ```
///
@@ -22,17 +22,17 @@
///
/// ```
/// xloop!(while socket.is_open(); {
///   let (data, is_end) = socket.read();
///   all_data.append(data)
///   if is_end { break Ok(all_data) }
/// }; {
///   if let Ok(new_sock) = open_socket(socket.position()) {
///     new_sock.set_position(socket.position());
///     socket = new_sock;
///     continue
///   } else {
///     Err(DownloadError::ConnectionLost)
///   }
/// })
/// ```
///
@@ -40,7 +40,7 @@
///
/// ```
/// xloop!(let mut leap = 1; own_id*2 + leap < batch_size; leap *= 2; {
///   batch[own_id*2] += batch[own_id*2 + leap]
/// })
/// ```
///
@@ -51,41 +51,41 @@
/// **todo** find a valid use case for While let for a demo
#[macro_export]
macro_rules! xloop {
  (for $p:pat in $it:expr; $body:stmt) => {
    xloop!(for $p in $it; $body; ())
  };
  (for $p:pat in $it:expr; $body:stmt; $exit:stmt) => {
    {
      let mut __xloop__ = $it.into_iter();
      xloop!(let Some($p) = __xloop__.next(); $body; $exit)
    }
  };
  (let $p:pat = $e:expr; $body:stmt) => {
    xloop!(let $p = $e; $body; ())
  };
  (let $p:pat = $e:expr; $body:stmt; $exit:stmt) => {
    {
      loop {
        if let $p = $e { $body }
        else { break { $exit } }
      }
    }
  };
  (while $cond:expr; $body:stmt) => {
    xloop!($cond; $body; ())
  };
  (while $cond:expr; $body:stmt; $exit:stmt) => {
    {
      loop {
        if $cond { break { $exit } }
        else { $body }
      }
    }
  };
  ($init:stmt; $cond:expr; $step:stmt; $body:stmt) => {
    xloop!(for ( $init; $cond; $step ) $body; ())
  };
  ($init:stmt; $cond:expr; $step:stmt; $body:stmt; $exit:stmt) => {
    { $init; xloop!(while !($cond); { $body; $step }; $exit) }
  };
}
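
The docs above still carry a TODO for a while-let style demo. One hypothetical candidate, not part of the commit and assuming the `let` arm behaves as written above, is draining a queue: the loop keeps running for as long as the pattern matches.

```rust
// Hypothetical use case for the `let` arm of xloop!, assuming the macro is
// imported from this crate (e.g. `use crate::xloop;`).
use std::collections::VecDeque;

fn drain_sum(mut queue: VecDeque<u32>) -> u32 {
  let mut total = 0;
  // Loops while pop_front() yields Some(x); once the queue is empty it breaks
  // with the exit expression, so the whole invocation evaluates to `total`.
  xloop!(let Some(x) = queue.pop_front(); { total += x }; total)
}
```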
@@ -6,31 +6,31 @@ pub type BoxedIter<'a, T> = Box<dyn Iterator<Item = T> + 'a>;
pub type BoxedIterIter<'a, T> = BoxedIter<'a, BoxedIter<'a, T>>;
/// BoxedIter of a single element
pub fn box_once<'a, T: 'a>(t: T) -> BoxedIter<'a, T> {
  Box::new(iter::once(t))
}
/// BoxedIter of no elements
pub fn box_empty<'a, T: 'a>() -> BoxedIter<'a, T> {
  Box::new(iter::empty())
}

#[macro_export]
macro_rules! box_chain {
  ($curr:expr) => {
    Box::new($curr) as BoxedIter<_>
  };
  ($curr:expr, $($rest:expr),*) => {
    Box::new($curr$(.chain($rest))*) as $crate::utils::iter::BoxedIter<_>
  };
}

pub fn box_flatten<'a, T: 'a, I: 'a, J: 'a>(i: I) -> BoxedIter<'a, T>
where
  J: Iterator<Item = T>,
  I: Iterator<Item = J>,
{
  Box::new(i.flatten())
}
pub fn into_boxed_iter<'a, T: 'a>(t: T) -> BoxedIter<'a, <T as IntoIterator>::Item>
where T: IntoIterator {
  Box::new(t.into_iter())
}
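
These helpers exist so that branches returning differently shaped iterators can share one type, the same reason the `bfs_upto` code above notes "boxed because we branch". An illustrative sketch, not part of the commit:

```rust
// Hypothetical example assuming it lives in this crate, with
// `use crate::utils::iter::{BoxedIter, box_empty};` and `use crate::box_chain;`.
use std::iter;

fn evens_then_marker(n: usize) -> BoxedIter<'static, usize> {
  if n == 0 { box_empty() }  // one branch: a boxed empty iterator
  else {
    // the other branch: a filtered range chained with a sentinel value
    box_chain!((0..n).filter(|i| i % 2 == 0), iter::once(usize::MAX))
  }
}
```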
@@ -5,23 +5,23 @@ use std::mem;
/// Merge two sorted iterators into a sorted iterator.
pub fn merge_sorted<T, I, J, F, O>(mut i: I, mut j: J, mut f: F) -> impl Iterator<Item = T>
where
  I: Iterator<Item = T>, J: Iterator<Item = T>,
  F: FnMut(&T) -> O, O: Ord,
{
  let mut i_item: Option<T> = None;
  let mut j_item: Option<T> = None;
  std::iter::from_fn(move || {
    match (&mut i_item, &mut j_item) {
      (&mut None, &mut None) => None,
      (&mut None, j_item @ &mut Some(_)) => Some((j_item, None)),
      (i_item @ &mut Some(_), &mut None) => Some((i_item, i.next())),
      (Some(i_val), Some(j_val)) => Some(
        if f(i_val) < f(j_val) {
          (&mut i_item, i.next())
        } else {
          (&mut j_item, j.next())
        }
      )
    }.and_then(|(dest, value)| mem::replace(dest, value))
  })
}
@@ -1,78 +1,85 @@
mod cache;
pub mod translate;
pub use cache::Cache;
mod substack;
pub use substack::Stackframe;
mod side;
pub use side::Side;
mod merge_sorted;
pub use merge_sorted::merge_sorted;
mod unwrap_or;
pub mod iter;
pub use iter::BoxedIter;
mod bfs;
mod unless_let;
mod string_from_charset;
pub use string_from_charset::string_from_charset;
mod for_loop;
mod protomap;

pub use protomap::ProtoMap;
mod product2;
pub use product2::Product2;

use mappable_rc::Mrc;

pub fn mrc_derive<T: ?Sized, P, U: ?Sized>(m: &Mrc<T>, p: P) -> Mrc<U>
where P: for<'a> FnOnce(&'a T) -> &'a U {
  Mrc::map(Mrc::clone(m), p)
}

pub fn mrc_try_derive<T: ?Sized, P, U: ?Sized>(m: &Mrc<T>, p: P) -> Option<Mrc<U>>
where P: for<'a> FnOnce(&'a T) -> Option<&'a U> {
  Mrc::try_map(Mrc::clone(m), p).ok()
}

pub fn mrc_empty_slice<T>() -> Mrc<[T]> {
  mrc_derive_slice(&Mrc::new(Vec::new()))
}

pub fn to_mrc_slice<T>(v: Vec<T>) -> Mrc<[T]> {
  Mrc::map(Mrc::new(v), |v| v.as_slice())
}

pub fn collect_to_mrc<I>(iter: I) -> Mrc<[I::Item]> where I: Iterator {
  to_mrc_slice(iter.collect())
}

pub fn mrc_derive_slice<T>(mv: &Mrc<Vec<T>>) -> Mrc<[T]> {
  mrc_derive(mv, |v| v.as_slice())
}

pub fn one_mrc_slice<T>(t: T) -> Mrc<[T]> {
  Mrc::map(Mrc::new([t; 1]), |v| v.as_slice())
}

pub fn mrc_to_iter<T>(ms: Mrc<[T]>) -> impl Iterator<Item = Mrc<T>> {
  let mut i = 0;
  std::iter::from_fn(move || if i < ms.len() {
    let out = Some(mrc_derive(&ms, |s| &s[i]));
    i += 1;
    out
  } else {None})
}

pub fn mrc_unnest<T>(m: &Mrc<Mrc<T>>) -> Mrc<T> {
  Mrc::clone(m.as_ref())
}

pub fn mrc_slice_to_only<T>(m: Mrc<[T]>) -> Result<Mrc<T>, ()> {
  Mrc::try_map(m, |slice| {
    if slice.len() != 1 {None}
    else {Some(&slice[0])}
  }).map_err(|_| ())
}

pub fn mrc_slice_to_only_option<T>(m: Mrc<[T]>) -> Result<Option<Mrc<T>>, ()> {
  if m.len() > 1 {return Err(())}
  Ok(Mrc::try_map(m, |slice| {
    if slice.len() == 0 {None}
    else {Some(&slice[0])}
  }).ok())
}

pub fn mrc_concat<T: Clone>(a: &Mrc<[T]>, b: &Mrc<[T]>) -> Mrc<[T]> {
  collect_to_mrc(a.iter().chain(b.iter()).cloned())
}
53
src/utils/product2.rs
Normal file
53
src/utils/product2.rs
Normal file
@@ -0,0 +1,53 @@
|
|||||||
use super::Side;

/// The output of a two-part algorithm. The values are:
///
/// - [Product2::Left] or [Product2::Right] if one of the arguments is the product
/// - [Product2::Either] if the arguments are identical
/// - [Product2::New] if the product is a different value from either
pub enum Product2<T> {
  Left,
  Right,
  Either,
  New(T)
}
impl<T> Product2<T> {
  /// Convert the product into a concrete value by providing the original arguments
  pub fn pick(self, left: T, right: T) -> T {
    match self {
      Self::Left | Self::Either => left,
      Self::Right => right,
      Self::New(t) => t
    }
  }

  /// Combine some subresults into a tuple representing a greater result
  pub fn join<U>(
    self, (lt, rt): (T, T),
    second: Product2<U>, (lu, ru): (U, U)
  ) -> Product2<(T, U)> {
    match (self, second) {
      (Self::Either, Product2::Either) => Product2::Either,
      (Self::Left | Self::Either, Product2::Left | Product2::Either) => Product2::Left,
      (Self::Right | Self::Either, Product2::Right | Product2::Either) => Product2::Right,
      (t, u) => Product2::New((t.pick(lt, rt), u.pick(lu, ru)))
    }
  }

  /// Translate results back into the type of the original problem.
  pub fn map<A, F: FnOnce(T) -> A>(self, f: F) -> Product2<A> {
    match self {
      Product2::Left => Product2::Left,
      Product2::Right => Product2::Right,
      Product2::Either => Product2::Either,
      Product2::New(t) => Product2::New(f(t))
    }
  }
}

/// Technically very different but sometimes necessary to translate
impl<T> From<Side> for Product2<T> {
  fn from(value: Side) -> Self {match value {
    Side::Left => Self::Left,
    Side::Right => Self::Right
  }}
}
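To make the semantics concrete, here is a small hypothetical example: tagging the larger of two numbers and then combining two such comparisons with `join`. The helper name `max_product` is made up for illustration.

```rust
// Hypothetical helper: pick the larger of two numbers and record its origin.
fn max_product(a: u32, b: u32) -> Product2<u32> {
  if a == b { Product2::Either }
  else if a > b { Product2::Left }
  else { Product2::Right }
}

fn product2_demo() {
  // `pick` resolves the tag back into a concrete value
  assert_eq!(max_product(3, 5).pick(3, 5), 5);
  // `join` lifts two subresults into a product over pairs
  let pair = max_product(3, 5).join((3, 5), max_product(7, 7), (7, 7));
  assert_eq!(pair.pick((3, 7), (5, 7)), (5, 7));
}
```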
@@ -13,152 +13,152 @@ const INLINE_ENTRIES: usize = 2;
/// plus wasted stack space which is likely wasted L1 as well. The cost of underruns is wasted stack
/// space.
pub struct ProtoMap<'a, K, V, const STACK_COUNT: usize = 2> {
  entries: SmallVec<[(K, Option<V>); STACK_COUNT]>,
  prototype: Option<&'a ProtoMap<'a, K, V, STACK_COUNT>>
}

impl<'a, K, V, const STACK_COUNT: usize> ProtoMap<'a, K, V, STACK_COUNT> {
  pub fn new() -> Self {
    Self {
      entries: SmallVec::new(),
      prototype: None
    }
  }

  /// Mutable reference to entry without checking proto in O(m)
  fn local_entry_mut<'b, Q: ?Sized>(&'b mut self, query: &Q)
  -> Option<(usize, &'b mut K, &'b mut Option<V>)>
  where K: Borrow<Q>, Q: Eq
  {
    self.entries.iter_mut().enumerate().find_map(|(i, (k, v))| {
      if query.eq((*k).borrow()) { Some((i, k, v)) } else { None }
    })
  }

  /// Entry without checking proto in O(m)
  fn local_entry<'b, Q: ?Sized>(&'b self, query: &Q)
  -> Option<(usize, &'b K, &'b Option<V>)>
  where K: Borrow<Q>, Q: Eq
  {
    self.entries.iter().enumerate().find_map(|(i, (k, v))| {
      if query.eq((*k).borrow()) { Some((i, k, v)) } else { None }
    })
  }

  /// Find entry in prototype chain in O(n)
  pub fn get<'b, Q: ?Sized>(&'b self, query: &Q) -> Option<&'b V>
  where K: Borrow<Q>, Q: Eq
  {
    if let Some((_, _, v)) = self.local_entry(query) {
      v.as_ref()
    } else {
      self.prototype?.get(query)
    }
  }

  /// Record a value for the given key in O(m)
  pub fn set(&mut self, key: &K, value: V) where K: Eq + Clone {
    if let Some((_, _, v)) = self.local_entry_mut(key) {
      *v = Some(value);
    } else {
      self.entries.push((key.clone(), Some(value)))
    }
  }

  /// Delete in a memory-efficient way in O(n)
  pub fn delete_small(&mut self, key: &K) where K: Eq + Clone {
    let exists_up = self.prototype.and_then(|p| p.get(key)).is_some();
    let local_entry = self.local_entry_mut(key);
    match (exists_up, local_entry) {
      (false, None) => (), // nothing to do
      (false, Some((i, _, _))) => { self.entries.remove(i); }, // forget locally
      (true, Some((_, _, v))) => *v = None, // update local override to cover
      (true, None) => self.entries.push((key.clone(), None)), // create new cover
    }
  }

  /// Delete in O(m) without checking the prototype chain.
  /// May produce an unnecessary cover over a previously unknown key
  pub fn delete_fast(&mut self, key: &K) where K: Eq + Clone {
    if let Some((_, _, v)) = self.local_entry_mut(key) {
      *v = None
    } else {
      self.entries.push((key.clone(), None))
    }
  }

  /// Iterate over the entries defined here and on the prototype chain.
  /// Note that this will visit shadowed keys multiple times
  pub fn iter(&self) -> impl Iterator<Item = &(K, Option<V>)> {
    // Walk the prototype chain, yielding each map's own entries in turn,
    // including those of the last map in the chain.
    let mut map = Some(self);
    iter::from_fn(move || {
      let cur = map?;
      map = cur.prototype;
      Some(cur.entries.iter())
    }).flatten()
  }

  /// Visit the keys in an unspecified order, possibly repeated
  pub fn keys(&self) -> impl Iterator<Item = &K> {
    self.iter().map(|(k, _)| k)
  }

  /// Visit the values in an unspecified order, possibly repeated
  pub fn values(&self) -> impl Iterator<Item = &V> {
    self.iter().filter_map(|(_, v)| v.as_ref())
  }

  /// Update the prototype, and correspondingly the lifetime of the map
  pub fn set_proto<'b>(self, proto: &'b ProtoMap<'b, K, V, STACK_COUNT>)
  -> ProtoMap<'b, K, V, STACK_COUNT> {
    ProtoMap {
      entries: self.entries,
      prototype: Some(proto)
    }
  }
}

impl<T, K, V, const STACK_COUNT: usize>
From<T> for ProtoMap<'_, K, V, STACK_COUNT>
where T: IntoIterator<Item = (K, V)> {
  fn from(value: T) -> Self {
    Self {
      entries: value.into_iter().map(|(k, v)| (k, Some(v))).collect(),
      prototype: None
    }
  }
}

impl<Q: ?Sized, K, V, const STACK_COUNT: usize>
Index<&Q> for ProtoMap<'_, K, V, STACK_COUNT>
where K: Borrow<Q>, Q: Eq {
  type Output = V;
  fn index(&self, index: &Q) -> &Self::Output {
    self.get(index).expect("Index not found in map")
  }
}

impl<K: Clone, V: Clone, const STACK_COUNT: usize>
Clone for ProtoMap<'_, K, V, STACK_COUNT> {
  fn clone(&self) -> Self {
    Self {
      entries: self.entries.clone(),
      prototype: self.prototype
    }
  }
}

impl<'a, K: 'a, V: 'a, const STACK_COUNT: usize>
Add<(K, V)> for &'a ProtoMap<'a, K, V, STACK_COUNT> {
  type Output = ProtoMap<'a, K, V, STACK_COUNT>;
  fn add(self, rhs: (K, V)) -> Self::Output {
    ProtoMap::from([rhs]).set_proto(self)
  }
}

#[macro_export]
macro_rules! protomap {
  ($($ent:expr),*) => {
    ProtoMap::from([$($ent),*])
  };
}
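A brief hypothetical usage sketch of the prototype map, using only the public API above: a child map shadows and covers inherited entries without touching its parent.

```rust
// Hypothetical usage: a child map shadows and covers inherited entries.
fn protomap_demo() {
  let base: ProtoMap<&str, i32> = ProtoMap::from([("x", 1), ("y", 2)]);
  let mut child = ProtoMap::new().set_proto(&base);
  child.set(&"x", 10);      // shadow the inherited value
  child.delete_small(&"y"); // cover the inherited value
  assert_eq!(child.get(&"x"), Some(&10));
  assert_eq!(child.get(&"y"), None);
  assert_eq!(base.get(&"y"), Some(&2)); // the prototype is untouched
}
```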
@@ -4,50 +4,50 @@ use std::fmt::Display;
pub enum Side {Left, Right}

impl Display for Side {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    match self {
      Self::Left => write!(f, "Left"),
      Self::Right => write!(f, "Right"),
    }
  }
}

impl Side {
  pub fn opposite(&self) -> Self {
    match self {
      Self::Left => Self::Right,
      Self::Right => Self::Left
    }
  }
  /// Shorthand for opposite
  pub fn inv(&self) -> Self { self.opposite() }
  /// Take N elements from this end of a slice
  pub fn slice<'a, T>(&self, size: usize, slice: &'a [T]) -> &'a [T] {
    match self {
      Side::Left => &slice[..size],
      Side::Right => &slice[slice.len() - size..]
    }
  }
  /// Ignore N elements from this end of a slice
  pub fn crop<'a, T>(&self, margin: usize, slice: &'a [T]) -> &'a [T] {
    self.opposite().slice(slice.len() - margin, slice)
  }
  /// Ignore N elements from this end and M elements from the other end of a slice
  pub fn crop_both<'a, T>(&self, margin: usize, opposite: usize, slice: &'a [T]) -> &'a [T] {
    self.crop(margin, self.opposite().crop(opposite, slice))
  }
  /// Pick this side from a pair of things
  pub fn pick<T>(&self, pair: (T, T)) -> T {
    match self {
      Side::Left => pair.0,
      Side::Right => pair.1
    }
  }
  /// Make a pair with the first element on this side
  pub fn pair<T>(&self, this: T, opposite: T) -> (T, T) {
    match self {
      Side::Left => (this, opposite),
      Side::Right => (opposite, this)
    }
  }
}
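A quick hypothetical sanity check of the slice helpers, illustrating which end each operation acts on.

```rust
// Hypothetical illustration of the Side slice helpers.
fn side_demo() {
  let xs = [1, 2, 3, 4, 5];
  assert_eq!(Side::Left.slice(2, &xs), &[1, 2]);
  assert_eq!(Side::Right.slice(2, &xs), &[4, 5]);
  assert_eq!(Side::Left.crop(1, &xs), &[2, 3, 4, 5]);
  assert_eq!(Side::Right.crop_both(1, 2, &xs), &[3, 4]);
  assert_eq!(Side::Right.pick((1, 2)), 2);
  assert_eq!(Side::Left.pair('a', 'b'), ('a', 'b'));
}
```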
@@ -1,14 +1,14 @@
fn string_from_charset_rec(val: u64, digits: &str) -> String {
  let radix = digits.len() as u64;
  // Bijective base conversion: `val` is 1-based, so shift by one before
  // splitting off the leading digits and the final digit.
  let mut prefix = if val > radix {
    string_from_charset_rec((val - 1) / radix, digits)
  } else {String::new()};
  let digit = (val - 1) % radix;
  prefix.push(digits.chars().nth(digit as usize).unwrap_or_else(
    || panic!("Overindexed digit set \"{}\" with {}", digits, digit)
  ));
  prefix
}

pub fn string_from_charset(val: u64, digits: &str) -> String {
  string_from_charset_rec(val + 1, digits)
}
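With the bijective conversion above, small indices map to single characters and larger ones roll over into multi-character names; a hypothetical spot check over a three-symbol digit set:

```rust
fn charset_demo() {
  // 0-based public index over the digit set "abc"
  assert_eq!(string_from_charset(0, "abc"), "a");
  assert_eq!(string_from_charset(2, "abc"), "c");
  assert_eq!(string_from_charset(3, "abc"), "aa");
  assert_eq!(string_from_charset(5, "abc"), "ac");
}
```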
@@ -5,70 +5,84 @@ use std::fmt::Debug;
/// deep enough to warrant a heap-allocated set
#[derive(Clone, Copy)]
pub struct Stackframe<'a, T> {
  pub item: T,
  pub prev: Option<&'a Stackframe<'a, T>>,
  pub len: usize
}

impl<'a, T: 'a> Stackframe<'a, T> {
  pub fn new(item: T) -> Self {
    Self {
      item,
      prev: None,
      len: 1
    }
  }
  /// Get the item owned by this listlike, very fast O(1)
  pub fn item(&self) -> &T { &self.item }
  /// Get the next link in the list, very fast O(1)
  pub fn prev(&self) -> Option<&'a Stackframe<T>> { self.prev }
  /// Construct an iterator over the listlike, very fast O(1)
  pub fn iter(&self) -> StackframeIterator<T> {
    StackframeIterator { curr: Some(self) }
  }
  /// Add a new frame on top of this one, borrowing it as the tail
  pub fn push(&self, item: T) -> Stackframe<'_, T> {
    Stackframe {
      item,
      prev: Some(self),
      len: self.len + 1
    }
  }
  /// Like [Stackframe::push], but the tail may be empty
  pub fn opush(prev: Option<&'a Self>, item: T) -> Self {
    Self {
      item,
      prev,
      len: prev.map_or(1, |s| s.len + 1)
    }
  }
  pub fn len(&self) -> usize { self.len }
  /// Walk `count` steps towards the bottom of the stack
  pub fn pop(&self, count: usize) -> Option<&Self> {
    if count == 0 {Some(self)}
    else {self.prev.expect("Index out of range").pop(count - 1)}
  }
  /// Like [Stackframe::pop], but the starting frame may be empty
  pub fn opop(cur: Option<&Self>, count: usize) -> Option<&Self> {
    if count == 0 {cur}
    else {Self::opop(cur.expect("Index out of range").prev, count - 1)}
  }
  /// Iterate over a possibly empty stack
  pub fn o_into_iter(curr: Option<&Self>) -> StackframeIterator<T> {
    StackframeIterator { curr }
  }
}

impl<'a, T> Debug for Stackframe<'a, T> where T: Debug {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    write!(f, "Substack")?;
    f.debug_list().entries(self.iter()).finish()
  }
}

pub struct StackframeIterator<'a, T> {
  curr: Option<&'a Stackframe<'a, T>>
}

impl<'a, T> StackframeIterator<'a, T> {
  /// Find the first mapped value while walking down the stack
  pub fn first_some<U, F: Fn(&T) -> Option<U>>(&mut self, f: F) -> Option<U> {
    while let Some(x) = self.next() {
      if let Some(result) = f(x) {
        return Some(result)
      }
    }
    None
  }
}

impl<'a, T> Iterator for StackframeIterator<'a, T> {
  type Item = &'a T;
  fn next(&mut self) -> Option<&'a T> {
    let curr = self.curr?;
    let item = curr.item();
    let prev = curr.prev();
    self.curr = prev;
    Some(item)
  }
}
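A hypothetical usage sketch: threading a parent chain through recursive calls without heap allocation, then querying it through the iterator. The helper names are made up for illustration.

```rust
// Hypothetical use: track the recursion path on the stack while walking
// a nested structure, without any heap allocation.
fn contains_above(name: &str, stack: Option<&Stackframe<&str>>) -> bool {
  Stackframe::o_into_iter(stack).any(|n| *n == name)
}

fn stackframe_demo() {
  let root = Stackframe::new("root");
  let child = root.push("child");
  assert_eq!(child.len(), 2);
  assert!(contains_above("root", Some(&child)));
  assert!(!contains_above("orphan", Some(&child)));
  // `pop` walks back up the chain
  assert_eq!(child.pop(1).map(|s| *s.item()), Some("root"));
}
```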
22 src/utils/translate.rs Normal file
@@ -0,0 +1,22 @@
use std::ptr;

/// Replace the value behind a mutable reference using a function that
/// consumes the old value.
///
/// Safety note: the old value is moved out before `f` runs, so `f` must not
/// panic; unwinding here would cause the old value to be dropped twice.
pub fn translate<T, F: FnOnce(T) -> T>(data: &mut T, f: F) {
  unsafe {
    let old = ptr::read(data);
    let new = f(old);
    ptr::write(data, new);
  }
}

/// Like [translate], but the callback also produces a secondary value
/// which is returned to the caller.
pub fn process<T, U, F: FnOnce(T) -> (T, U)>(data: &mut T, f: F) -> U {
  unsafe {
    let old = ptr::read(data);
    let (new, ret) = f(old);
    ptr::write(data, new);
    ret
  }
}
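A hypothetical example of the intended use: advancing a state machine in place through a consuming transition function.

```rust
// Hypothetical use of `translate`: replace a value behind &mut by consuming it.
enum State { Idle, Running(u32) }

fn step(state: &mut State) {
  translate(state, |s| match s {
    State::Idle => State::Running(0),
    State::Running(n) => State::Running(n + 1),
  });
}

fn translate_demo() {
  let mut s = State::Idle;
  step(&mut s);
  step(&mut s);
  assert!(matches!(s, State::Running(1)));
}
```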
@@ -1,6 +1,6 @@
#[macro_export]
macro_rules! unless_let {
  ($m:pat_param = $expr:tt) => {
    if let $m = $expr {} else
  }
}
@@ -1,6 +1,6 @@
#[macro_export]
macro_rules! unwrap_or {
  ($m:expr; $fail:expr) => {
    { if let Some(res) = ($m) {res} else {$fail} }
  }
}
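A hypothetical use of `unwrap_or!` (assuming the macro is in scope, e.g. via `#[macro_use]` or `use crate::unwrap_or`): bail out of the enclosing function when an `Option` is empty.

```rust
fn first_char_upper(s: &str) -> Option<char> {
  // Early-return None if the string is empty
  let c = unwrap_or!(s.chars().next(); return None);
  Some(c.to_ascii_uppercase())
}
```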