pre-recording backup

This commit is contained in:
2023-05-17 03:49:26 +01:00
parent 126494c63f
commit 330ddbe399
29 changed files with 404 additions and 195 deletions

View File

@@ -0,0 +1,15 @@
export Y := \f.(\x.f (x x))(\x.f (x x))
export loop $r on (..$parameters) with ...$tail =0x5p129=> Y (\$r.
bind_names (..$parameters) (...$tail)
) ..$parameters
-- bind each of the names in the first argument as a parameter for the second argument
bind_names ($name ..$rest) $payload =0x1p250=> \$name. bind_names (..$rest) $payload
bind_names () (...$payload) =0x1p250=> ...$payload
export ...$prefix $ ...$suffix:1 =0x1p34=> ...$prefix (...$suffix)
export ...$prefix |> $fn ..$suffix:1 =0x2p32=> $fn (...$prefix) ..$suffix
export (...$argv) => ...$body =0x2p129=> (bind_names (...$argv) (...$body))
$name => ...$body =0x1p129=> (\$name. ...$body)

View File

@@ -2,10 +2,13 @@ import std::(parse_float, to_string)
import std::(readline, print)
export main := do{
cps print "left operand: ";
cps data = readline;
let a = parse_float data;
cps print "operator: ";
cps op = readline;
cps print ("\"" ++ op ++ "\"\n");
cps print ("you selected \"" ++ op ++ "\"\n");
cps print "right operand: ";
cps data = readline;
let b = parse_float data;
let result = (
@@ -13,8 +16,8 @@ export main := do{
else if op == "-" then a - b
else if op == "*" then a * b
else if op == "/" then a / b
else "Unsupported operation" -- dynamically typed shenanigans
else (panic "Unsupported operation")
);
cps print (to_string result ++ "\n");
cps print ("Result: " ++ to_string result ++ "\n");
0
}

View File

@@ -1,15 +1,15 @@
export Y := \f.(\x.f (x x))(\x.f (x x))
export loop $r on (...$parameters) with ...$tail =0x5p512=> Y (\$r.
bind_names (...$parameters) (...$tail)
) ...$parameters
export loop $r on (..$parameters) with ...$tail =0x5p129=> Y (\$r.
bind_names (..$parameters) (...$tail)
) ..$parameters
-- bind each of the names in the first argument as a parameter for the second argument
bind_names ($name ..$rest) $payload =0x2p1000=> \$name. bind_names (..$rest) $payload
bind_names () (...$payload) =0x1p1000=> ...$payload
bind_names ($name ..$rest) $payload =0x1p250=> \$name. bind_names (..$rest) $payload
bind_names () (...$payload) =0x1p250=> ...$payload
export ...$prefix $ ...$suffix:1 =0x1p130=> ...$prefix (...$suffix)
export ...$prefix |> $fn ..$suffix:1 =0x2p130=> $fn (...$prefix) ..$suffix
export ...$prefix $ ...$suffix:1 =0x1p34=> ...$prefix (...$suffix)
export ...$prefix |> $fn ..$suffix:1 =0x2p32=> $fn (...$prefix) ..$suffix
export (...$argv) => ...$body =0x2p512=> (bind_names (...$argv) (...$body))
$name => ...$body =0x1p512=> (\$name. ...$body)
export (...$argv) => ...$body =0x2p129=> (bind_names (...$argv) (...$body))
$name => ...$body =0x1p129=> (\$name. ...$body)

View File

@@ -34,8 +34,8 @@ export take := \list.\n. (
else pop list end \head.\tail. cons head $ r tail $ n - 1
)
new[...$item, ...$rest:1] =0x2p333=> (cons (...$item) new[...$rest])
new[...$end] =0x1p333=> (cons (...$end) end)
new[] =0x1p333=> end
new[...$item, ...$rest:1] =0x2p84=> (cons (...$item) new[...$rest])
new[...$end] =0x1p84=> (cons (...$end) end)
new[] =0x1p84=> end
export ::(new)

View File

@@ -1,15 +1,15 @@
export Y := \f.(\x.f (x x))(\x.f (x x))
export loop $r on (...$parameters) with ...$tail =0x5p512=> Y (\$r.
bind_names (...$parameters) (...$tail)
) ...$parameters
export loop $r on (..$parameters) with ...$tail =0x5p129=> Y (\$r.
bind_names (..$parameters) (...$tail)
) ..$parameters
-- bind each of the names in the first argument as a parameter for the second argument
bind_names ($name ..$rest) $payload =0x2p1000=> \$name. bind_names (..$rest) $payload
bind_names () (...$payload) =0x1p1000=> ...$payload
bind_names ($name ..$rest) $payload =0x1p250=> \$name. bind_names (..$rest) $payload
bind_names () (...$payload) =0x1p250=> ...$payload
export ...$prefix $ ...$suffix:1 =0x1p130=> ...$prefix (...$suffix)
export ...$prefix |> $fn ..$suffix:1 =0x2p130=> $fn (...$prefix) ..$suffix
export ...$prefix $ ...$suffix:1 =0x1p34=> ...$prefix (...$suffix)
export ...$prefix |> $fn ..$suffix:1 =0x2p32=> $fn (...$prefix) ..$suffix
export (...$argv) => ...$body =0x2p512=> (bind_names (...$argv) (...$body))
$name => ...$body =0x1p512=> (\$name. ...$body)
export (...$argv) => ...$body =0x2p129=> (bind_names (...$argv) (...$body))
$name => ...$body =0x1p129=> (\$name. ...$body)

View File

@@ -41,8 +41,8 @@ export get := \list.\n. (
else r tail (n - 1)
)
new[...$item, ...$rest:1] =0x2p333=> (cons (...$item) new[...$rest])
new[...$end] =0x1p333=> (cons (...$end) end)
new[] =0x1p333=> end
new[...$item, ...$rest:1] =0x2p84=> (cons (...$item) new[...$rest])
new[...$end] =0x1p84=> (cons (...$end) end)
new[] =0x1p84=> end
export ::(new)

View File

@@ -2,6 +2,7 @@ import list
import map
import option
import fn::*
import std::(print, to_string)
export main := do{
let foo = map::new[
@@ -10,13 +11,8 @@ export main := do{
"baz" = 3,
"bar" = 4
];
map::get foo "bar"
|> option::unwrap
}
--[
export main := do{
let foo = list::new[1, 2, 3];
map::fst foo
}
]--
let num = map::get foo "bar"
|> option::unwrap;
cps print (to_string num ++ "\n");
0
}

View File

@@ -16,9 +16,6 @@ export snd := \l. (
(panic "2 elements expected")
\x.x
)
export print_pair := \l. (
to_string (fst l) ++ " = " ++ to_string (snd l)
)
-- constructors
@@ -51,7 +48,7 @@ export del := \m.\k. (
)
-- remove all occurrences of a key
export clear := \m.\k. (
export delall := \m.\k. (
loop r on (m) with
list::pop m list::end \head.\tail.
if (fst head) == k then r tail
@@ -65,10 +62,18 @@ export set := \m.\k.\v. (
|> add k v
)
new[...$tail:2, ...$key = ...$value:1] =0x2p333=> (
-- ensure that there's only one instance of each key in the map
export normalize := \m. do{
let normal = empty
loop r on (m normal) with
list::pop m normal \head.\tail.
r tail $ set normal (fst head) (snd head)
}
new[...$tail:2, ...$key = ...$value:1] =0x2p84=> (
set new[...$tail] (...$key) (...$value)
)
new[...$key = ...$value:1] =0x1p333=> (add empty (...$key) (...$value))
new[] =0x1p333=> empty
new[...$key = ...$value:1] =0x1p84=> (add empty (...$key) (...$value))
new[] =0x1p84=> empty
export ::(new)

View File

@@ -0,0 +1,3 @@
export main := do{
}

View File

@@ -0,0 +1,3 @@
# List
In order to use lists as tuples, one needs to be able to access arbitrary elements by index. This is done by the new `list::get` function which returns an `option`. Since most lists in complex datastructures are of known length, this leads to a lot of unreachable branches. The marking and elimination of these called for the definition of `option::unwrap` and `std::panic`.

View File

@@ -0,0 +1,6 @@
This example demonstrates the construction of a basic functional map.
The `fn.orc` file is identical to [the version in list-processing][1].
`list.orc` and `option.orc` are extended to accommodate additional functionality.
[1]: ../list-processing/fn.md

View File

@@ -0,0 +1,10 @@
# Map
A map implemented using a list of 2-length lists each containing a key and a corresponding value. Although `list` defines a `pair` for internal use, a binary `list` was chosen to test the performance of the interpreter.
While using a Church-pair instead of a list to store individual entries could multiply the performance of this map, a greater improvement can be achieved by using some sort of tree structure. This implementation is meant for very small maps such as those representing a typical struct.
## cover vs erase
In a list map like this one, most operations are O(n), except insertion which has an O(1) variant - appending a new frame with the new value without checking if one already exists. This is not generally a good idea, but in some extreme situations the time it saves can be very valuable.

View File

@@ -0,0 +1,5 @@
# Option
This example uses a lot of lists of known length, but with the introduction of `list::get` a lot of `option`s are added to the flow of logic. A way to mark impossible branches is needed.
This is handled using a new external function called `std::panic`. Since Orchid is a sandboxed language this doesn't actually cause a Rust panic, instead it produces a dedicated ExternError when it's first reduced. Using this, `option::unwrap` is trivial to define.

View File

@@ -0,0 +1,76 @@
# Macros
Left-associative unparenthesized function calls are intuitive in the typical case of just applying functions to a limited number of arguments, but they're not very flexible. Haskell solves this problem by defining a diverse array of syntax primitives for individual use cases such as `do` blocks for monadic operations. This system is fairly rigid. In contrast, Rust and Lisp enable library developers to invent their own syntax that intuitively describes the concepts the library at hand encodes. In Orchid's codebase, I defined several macros to streamline tasks like defining functions in Rust that are visible to Orchid, or translating between various intermediate representations.
## Generalized kerning
In the referenced video essay, a proof of the Turing completeness of generalized kerning is presented. The proof involves encoding a Turing machine in a string and some kerning rules. The state of the machine is next to the read-write head and all previous states are enumerated next to the tape because kerning rules are reversible. The end result looks something like this:
```
abcbcddddef|1110000110[0]a00111010011101110
```
The rules are translated into kerning rules. For a rule
> in state `a` seeing `0`: new state is `b`, write `1` and go `left`
the kerning rule would look like this (template instantiated for all possible characters):
```
$1 [ 0 ] a equals a < $1 ] b 0
```
Some global rules are also needed, also instantiated for all possible characters in the templated positions
```
$1 $2 < equals $2 < $1 unless $1 is |
| $1 < equals $1 | >
> $1 $2 equals $1 > $2 unless $2 is ]
> $1 ] equals [ $1 ]
```
What I really appreciate in this proof is how visual it is; based on this, it's easy to imagine how one would go about encoding a pushdown automaton, lambda calculus or other interesting tree-walking procedures. This is exactly why I based my preprocessor on this system.
## Namespaced tokens
Rust macros operate on the bare tokens and therefore are prone to accidental aliasing. Every other item in Rust follows a rigorous namespacing scheme, but macros break this structure, probably because macro execution happens before namespace resolution. The language doesn't suffer too much from this problem, but the relativity of namespacing
limits their potential.
Orchid's substitution rules operate on namespaced tokens. This means that the macros can hook into each other. Consider the following example, which is a modified version of a real rule included in the prelude:
in _procedural.orc_
```orchid
export do { ...$statement ; ...$rest:1 } =10_001=> (
statement (...$statement) do { ...$rest }
)
export do { ...$return } =10_000=> (...$return)
export statement (let $_name = ...$value) ...$next =10_000=> (
(\$_name. ...$next) (...$value)
)
```
in _cpsio.orc_
```orchid
import procedural::statement
export statement (cps $_name = ...$operation) ...$next =10_001=> (
(...$operation) \$_name. ...$next
)
export statement (cps ...$operation) ...$next =10_000=> (
(...$operation) (...$next)
)
```
in _main.orc_
```orchid
import procedural::(do, let, ;)
import cpsio::cps
export main := do{
cps data = readline;
let a = parse_float data * 2;
cps print (data ++ " doubled is " ++ stringify a)
}
```
Notice how, despite heavy use of macros, it's never ambiguous where a particular name is coming from. Namespacing, including import statements, is entirely unaffected by the macro system. The source of names is completely invariant.

View File

@@ -0,0 +1,33 @@
# Implementation
The optimization of this macro execution algorithm is an interesting challenge with a diverse range of potential optimizations. The current solution is very far from ideal, but it scales to the small experimental workloads I've tried so far and it can accommodate future improvements without any major restructuring.
The scheduling of macros is delegated to a unit called the rule repository, while the matching of rules to a given clause sequence is delegated to a unit called the matcher. Other tasks are split out into distinct self-contained functions, but these two have well-defined interfaces and encapsulate data. Constants are processed by the repository one at a time, which means that the data processed by this subsystem typically corresponds to a single struct, function or other top-level source item.
## keyword dependencies
The most straightforward optimization is to skip rules whose patterns contain tokens that don't appear in the code at all. This is done by the repository to skip entire rules, but not by the rules on the level of individual slices. This is a possible path of improvement for the future.
## Matchers
There are various ways to implement matching. To keep the architecture flexible, the repository is generic over the matcher bounded with a very small trait.
The current implementation of the matcher attempts to build a tree of matchers rooted in the highest priority vectorial placeholder. On each level, the specializations are defined as follows:
- `VecMatcher` corresponds to a subpattern that starts and ends with a vectorial. Each matcher also matches the scalars in between its submatchers, this is not explicitly mentioned.
- `Placeholder` corresponds to a vectorial placeholder with no lower priority vectorials around it
It may reject zero-length slices but contains no other logic
- `Scan` corresponds to a high priority vectorial on one side of the pattern with lower priority vectorials next to it.
It moves the boundary - consisting of scalars - from one side to the other
- `Middle` corresponds to a high priority vectorial surrounded on both sides by lower priority vectorials.
This requires by far the most complicated logic, collecting matches for its scalar separators on either side, sorting their pairings by the length of the gap, then applying the submatchers on either side until a match is found. This uses copious heap allocations and it's generally not very efficient. Luckily, this kind of pattern almost never appears in practice.
- `ScalMatcher` tests a single token. Since vectorials in subtrees are strictly lower priority than those in parent enclosing sequences, `S` and `Lambda` don't require a lot of advanced negotiation logic. They normally appear in sequence, as their operations are trivially generalizable to a static sequence of them.
- `AnyMatcher` tests a sequence and wraps either a sequence of `ScalMatcher` or a single `VecMatcher` surrounded by two sequences of `ScalMatcher`.

View File

@@ -1,10 +1,8 @@
# Macros
The macros describe several independent sequential programs that are expected to be able to interact with each other. To make debugging easier, the order of execution of independent macros should also be relatively static.
## Execution order
The macro executor follows a manually specified priority cascade, with priorities ranging from 0 to f64 max (0x2p1023, exclusive). Priorities are accepted in any valid floating point format, but usually written in binary or hexadecimal natural form, as this format represents floating point precision on the syntax level, thus making precision errors extremely unlikely.
The macros describe several independent sequential programs that are expected to be able to interact with each other. To make debugging easier, the order of execution of internal steps within independent macros has to be relatively static.
The macro executor follows a manually specified priority cascade, with priorities ranging from 0 to 0xep255, exclusive. Priorities are accepted in any valid floating point format, but usually written in binary or hexadecimal natural form, as this format represents floating point precision on the syntax level, thus making precision errors extremely unlikely.
The range of valid priorities is divided up into bands, much like radio bands. In this case, the bands serve to establish a high level ordering between instructions.
@@ -12,26 +10,26 @@ The bands are each an even 32 orders of magnitude, with space in between for fut
| | | | |
| :-----------: | :------: | :---------: | :----------: |
| 0-31 | 32-63 | 64-95 | 96-127 |
| 0-7 | 8-15 | 16-23 | 24-31 |
| optimizations | x | | |
| 128-159 | 160-191 | 192-223 | 224-255 |
| 32-39 | 40-47 | 48-55 | 56-63 |
| operators | | | x |
| 256-287 | 288-319 | 320-351 | 352-383 |
| 64-71 | 72-79 | 80-87 | 88-95 |
| | | expressions | |
| 384-415 | 416-447 | 448-479 | 480-511 |
| 96-103 | 104-111 | 112-119 | 120-127 |
| | x | | |
| 512-543 | 544-575 | 576-607 | 608-639 |
| 128-135 | 136-143 | 144-151 | 152-159 |
| bindings | | | x |
| 640-671 | 672-703 | 704-735 | 736-767 |
| 160-167 | 168-175 | 176-183 | 184-191 |
| | | x | |
| 768-799 | 800-831 | 832-863 | 864-895 |
| 192-199 | 200-207 | 208-215 | 216-223 |
| | aliases* | | |
| 896-927 | 928-959 | 960-991 | 992- |
| 224-231 | 232-239 | 240-247 | 248- |
| integrations | | | transitional |
### Transitional states
Transitional states produced and consumed by the same macro program occupy the range above 0x1p991. Nothing in this range should be written by the user or triggered by an interaction of distinct macro programs, the purpose of this high range is to prevent devices such as carriages from interacting. Any transformation sequence in this range can assume that the tree is inert other than its own operation.
Transitional states produced and consumed by the same macro program occupy the unbounded top region of the f64 field. Nothing in this range should be written by the user or triggered by an interaction of distinct macro programs, the purpose of this high range is to prevent devices such as carriages from interacting. Any transformation sequence in this range can assume that the tree is inert other than its own operation.
### Integrations

View File

@@ -19,80 +19,3 @@ The merits of pure functional code are well known, but I would like to highlight
Reactive programming is an increasingly popular paradigm for enabling systems to interact with changing state without recomputing subresults that have not been modified. It is getting popular despite the fact that enabling this kind of programming in classical languages - most notably javascript, where it appears to be the most popular - involves lots of boilerplate and complicated constructs using many many lambda functions. In a lazy language this is essentially the default.
In addition, lazy, pure code lends itself to optimization. Deforestation and TCO are implied and CTFE (or in the case of an interpreted language ahead-of-time function execution) along with a host of other optimizations are more convenient.
# Macros
Left-associative unparenthesized function calls are intuitive in the typical case of just applying functions to a limited number of arguments, but they're not very flexible. Haskell solves this problem by defining a diverse array of syntax primitives for individual use cases such as `do` blocks for monadic operations. This system is fairly rigid. In contrast, Rust enables library developers to invent their own syntax that intuitively describes the concepts the library at hand encodes. In Orchid's codebase, I defined several macros to streamline tasks like defining functions in Rust that are visible to Orchid, or translating between various intermediate representations.
## Generalized kerning
In the referenced video essay, a proof of the Turing completeness of generalized kerning is presented. The proof involves encoding a Turing machine in a string and some kerning rules. The state of the machine is next to the read-write head and all previous states are enumerated next to the tape because kerning rules are reversible. The end result looks something like this:
```
abcbcddddef|1110000110[0]a00111010011101110
```
The rules are translated into kerning rules. For a rule
> in state `a` seeing `0`: new state is `b`, write `1` and go `left`
the kerning rule would look like this (template instantiated for all possible characters):
```
$1 [ 0 ] a equals a < $1 ] b 0
```
Some global rules are also needed, also instantiated for all possible characters in the templated positions
```
$1 $2 < equals $2 < $1 unless $1 is |
| $1 < equals $1 | >
> $1 $2 equals $1 > $2 unless $2 is ]
> $1 ] equals [ $1 ]
```
What I really appreciate in this proof is how visual it is; based on this, it's easy to imagine how one would go about encoding a pushdown automaton, lambda calculus or other interesting tree-walking procedures. This is exactly why I based my preprocessor on this system.
## Namespaced tokens
Rust macros operate on the bare tokens and therefore are prone to accidental aliasing. Every other item in Rust follows a rigorous namespacing scheme, but macros break this structure, probably because macro execution happens before namespace resolution. The language doesn't suffer too much from this problem, but the relativity of namespacing
limits their potential.
Orchid's substitution rules operate on namespaced tokens. This means that the macros can hook into each other. Consider the following example, which is a modified version of a real rule included in the prelude:
in _procedural.orc_
```orchid
export do { ...$statement ; ...$rest:1 } =10_001=> (
statement (...$statement) do { ...$rest }
)
export do { ...$return } =10_000=> (...$return)
export statement (let $_name = ...$value) ...$next =10_000=> (
(\$_name. ...$next) (...$value)
)
```
in _cpsio.orc_
```orchid
import procedural::statement
export statement (cps $_name = ...$operation) ...$next =10_001=> (
(...$operation) \$_name. ...$next
)
export statement (cps ...$operation) ...$next =10_000=> (
(...$operation) (...$next)
)
```
in _main.orc_
```orchid
import procedural::(do, let, ;)
import cpsio::cps
export main := do{
cps data = readline;
let a = parse_float data * 2;
cps print (data ++ " doubled is " ++ stringify a)
}
```
Notice how, despite heavy use of macros, it's never ambiguous where a particular name is coming from. Namespacing, including import statements, is entirely unaffected by the macro system. The source of names is completely invariant.

35
src/external/cpsio/io.rs vendored Normal file
View File

@@ -0,0 +1,35 @@
use std::io::{self, Write, stdin};
use crate::{representations::{interpreted::{ExprInst, Clause}, Primitive, Literal}, atomic_inert, interpreter::{HandlerParm, HandlerRes}, unwrap_or, external::runtime_error::RuntimeError};
/// Side effects produced by the cpsio module and executed by the
/// interpreter's effect handler (see `handle` below).
#[derive(Clone, Debug)]
pub enum IO {
  /// Print the string to stdout, then continue with the wrapped expression
  Print(String, ExprInst),
  /// Read a line from stdin and apply the wrapped continuation to it
  Readline(ExprInst)
}
// Register IO as an inert Orchid atom so it survives reduction until a
// handler claims it.
atomic_inert!(IO);
/// Effect handler for [IO] atoms, suitable for passing to `run_handler`.
///
/// Returns `Err(effect)` unchanged when the atom is not an [IO] value so
/// that another handler in the chain may claim it. Otherwise performs the
/// side effect and yields the continuation expression to resume with.
pub fn handle(effect: HandlerParm) -> HandlerRes {
  // Hand the atom back untouched if it isn't ours.
  let io: &IO = unwrap_or!(
    effect.as_any().downcast_ref();
    return Err(effect)
  );
  match io {
    IO::Print(str, cont) => {
      print!("{}", str);
      // Flush so prompts appear before a subsequent blocking read.
      io::stdout().flush().unwrap();
      Ok(Ok(cont.clone()))
    },
    IO::Readline(cont) => {
      let mut buf = String::new();
      if let Err(e) = stdin().read_line(&mut buf) {
        // Surface the I/O failure as a runtime error rather than panicking.
        return Ok(Err(RuntimeError::ext(e.to_string(), "reading from stdin")));
      }
      // Drop the trailing newline captured by read_line.
      buf.pop();
      // Resume by applying the continuation to the line as an Orchid string.
      Ok(Ok(Clause::Apply {
        f: cont.clone(),
        x: Clause::P(Primitive::Literal(Literal::Str(buf))).wrap()
      }.wrap()))
    }
  }
}

View File

@@ -4,6 +4,9 @@ mod print;
mod readline;
mod debug;
mod panic;
mod io;
pub use io::{IO, handle};
pub fn cpsio(i: &Interner) -> ConstTree {
ConstTree::tree([

View File

@@ -1,11 +1,12 @@
use std::fmt::Debug;
use std::io::{self, Write};
use std::rc::Rc;
use crate::external::litconv::with_str;
use crate::representations::PathSet;
use crate::{atomic_impl, atomic_redirect, externfn_impl};
use crate::representations::interpreted::{Clause, ExprInst};
use crate::foreign::{Atomic, AtomicResult, AtomicReturn};
use crate::interpreter::Context;
use crate::{atomic_impl, atomic_redirect, externfn_impl, atomic_defaults};
use crate::representations::interpreted::ExprInst;
use super::io::IO;
/// Print function
///
@@ -22,13 +23,21 @@ externfn_impl!(Print2, |_: &Self, x: ExprInst| Ok(Print1{x}));
#[derive(Debug, Clone)]
pub struct Print1{ x: ExprInst }
atomic_redirect!(Print1, x);
atomic_impl!(Print1, |Self{ x }: &Self, _| {
with_str(x, |s| {
print!("{}", s);
io::stdout().flush().unwrap();
Ok(Clause::Lambda {
args: Some(PathSet{ steps: Rc::new(vec![]), next: None }),
body: Clause::LambdaArg.wrap()
})
atomic_impl!(Print1);
externfn_impl!(Print1, |this: &Self, x: ExprInst| {
with_str(&this.x, |s| {
Ok(Print0{ s: s.clone(), x })
})
});
/// Fully applied print call: holds the text to print and the continuation
/// expression to resume with after the effect runs.
#[derive(Debug, Clone)]
pub struct Print0{ s: String, x: ExprInst }
impl Atomic for Print0 {
  atomic_defaults!();
  // Instead of printing here, reduce into an IO::Print effect so the
  // supervising handler performs the actual output.
  fn run(&self, ctx: Context) -> AtomicResult {
    Ok(AtomicReturn::from_data(
      IO::Print(self.s.clone(), self.x.clone()),
      ctx
    ))
  }
}

View File

@@ -1,10 +1,11 @@
use std::fmt::Debug;
use std::io::stdin;
use crate::external::runtime_error::RuntimeError;
use crate::{atomic_impl, atomic_redirect, externfn_impl};
use crate::representations::{Primitive, Literal};
use crate::representations::interpreted::{Clause, ExprInst};
use crate::foreign::{Atomic, AtomicResult, AtomicReturn};
use crate::interpreter::Context;
use crate::{externfn_impl, atomic_defaults};
use crate::representations::interpreted::ExprInst;
use super::io::IO;
/// Readln function
///
@@ -20,14 +21,12 @@ externfn_impl!(Readln2, |_: &Self, x: ExprInst| Ok(Readln1{x}));
#[derive(Debug, Clone)]
pub struct Readln1{ x: ExprInst }
atomic_redirect!(Readln1, x);
atomic_impl!(Readln1, |Self{ x }: &Self, _| {
let mut buf = String::new();
stdin().read_line(&mut buf)
.map_err(|e| RuntimeError::ext(e.to_string(), "reading from stdin"))?;
buf.pop();
Ok(Clause::Apply {
f: x.clone(),
x: Clause::P(Primitive::Literal(Literal::Str(buf))).wrap()
})
});
impl Atomic for Readln1 {
  atomic_defaults!();
  // Instead of blocking on stdin here, reduce into an IO::Readline effect;
  // the supervising handler performs the read and applies the continuation.
  fn run(&self, ctx: Context) -> AtomicResult {
    Ok(AtomicReturn::from_data(
      IO::Readline(self.x.clone()),
      ctx
    ))
  }
}

2
src/external/mod.rs vendored
View File

@@ -7,3 +7,5 @@ mod cpsio;
mod runtime_error;
mod bool;
mod litconv;
pub use cpsio::{IO, handle};

View File

@@ -17,6 +17,16 @@ pub struct AtomicReturn {
pub gas: Option<usize>,
pub inert: bool
}
impl AtomicReturn {
  /// Package an atomic value as a clause for delivery to the supervisor,
  /// carrying over the remaining gas from the context.
  // NOTE(review): the original comment called this "an inert atomic", yet
  // `inert` is hardcoded to `false` below — confirm which is intended.
  pub fn from_data<D: Atomic>(d: D, c: Context) -> Self {
    AtomicReturn {
      // Convert the payload into an atom clause.
      clause: d.to_atom_cls(),
      gas: c.gas,
      inert: false
    }
  }
}
// Aliases for concise macros
pub type RcError = Rc<dyn ExternError>;

View File

@@ -5,4 +5,4 @@ mod run;
pub use context::{Context, Return};
pub use error::RuntimeError;
pub use run::{run};
pub use run::{run, run_handler, Handler, HandlerParm, HandlerRes};

View File

@@ -1,4 +1,7 @@
use crate::foreign::AtomicReturn;
use std::mem;
use std::rc::Rc;
use crate::foreign::{AtomicReturn, Atomic, ExternError, Atom};
use crate::representations::Primitive;
use crate::representations::interpreted::{Clause, ExprInst};
@@ -6,9 +9,10 @@ use super::apply::apply;
use super::error::RuntimeError;
use super::context::{Context, Return};
pub fn run(expr: ExprInst, mut ctx: Context)
-> Result<Return, RuntimeError>
{
pub fn run(
expr: ExprInst,
mut ctx: Context
) -> Result<Return, RuntimeError> {
let (state, (gas, inert)) = expr.try_normalize(|cls| -> Result<(Clause, _), RuntimeError> {
let mut i = cls.clone();
while ctx.gas.map(|g| g > 0).unwrap_or(true) {
@@ -39,4 +43,67 @@ pub fn run(expr: ExprInst, mut ctx: Context)
Ok((i, (ctx.gas, false)))
})?;
Ok(Return { state, gas, inert })
}
/// The boxed atomic extracted from an interpreter result, offered to a
/// handler for inspection.
pub type HandlerParm = Box<dyn Atomic>;
/// Outcome of offering an effect to a handler:
/// - `Ok(Ok(expr))` — effect executed, continue running `expr`
/// - `Ok(Err(e))` — effect executed but failed with an external error
/// - `Err(parm)` — effect not recognized; returned so another handler
///   may claim it
pub type HandlerRes = Result<
  Result<ExprInst, Rc<dyn ExternError>>,
  HandlerParm
>;
/// An effect interpreter that can claim some subset of atoms produced by
/// reduction. Handlers compose into a chain via [Handler::then].
pub trait Handler {
  /// Attempt to execute the effect; see [HandlerRes] for the contract.
  fn resolve(&mut self, data: HandlerParm) -> HandlerRes;
  /// Chain another handler to try whenever this one rejects an effect.
  fn then<T: Handler>(self, t: T) -> impl Handler
  where Self: Sized {
    Pair(self, t)
  }
}
// Any FnMut with the right signature is itself a handler.
impl<F> Handler for F
where F: FnMut(HandlerParm) -> HandlerRes
{
  fn resolve(&mut self, data: HandlerParm) -> HandlerRes {
    self(data)
  }
}
/// Two handlers tried in order; the second only sees effects the first
/// rejected.
pub struct Pair<T, U>(T, U);
impl<T: Handler, U: Handler> Handler for Pair<T, U> {
  fn resolve(&mut self, data: HandlerParm) -> HandlerRes {
    match self.0.resolve(data) {
      Ok(out) => Ok(out),
      // First handler passed on it; offer the atom to the second.
      Err(data) => self.1.resolve(data)
    }
  }
}
/// Run an expression to completion, routing any atoms it settles on through
/// the given effect handler and resuming with the handler's continuation.
///
/// Terminates when the expression settles on a non-atom value, when the
/// handler rejects the atom, or when gas runs out.
pub fn run_handler(
  mut expr: ExprInst,
  mut handler: impl Handler,
  mut ctx: Context
) -> Result<Return, RuntimeError> {
  loop {
    let ret = run(expr.clone(), ctx.clone())?;
    // Out of gas: return the partial result as-is.
    if ret.gas == Some(0) {
      return Ok(ret)
    }
    let state_ex = ret.state.expr();
    let a = if let Clause::P(Primitive::Atom(a)) = &state_ex.clause {a}
    else {
      // Settled on a non-atom value; nothing for the handler to do.
      // drop the borrow on ret.state before moving ret out
      mem::drop(state_ex);
      return Ok(ret)
    };
    let boxed = a.clone().0;
    expr = match handler.resolve(boxed) {
      // Effect executed; continue reducing its continuation.
      Ok(r) => r.map_err(RuntimeError::Extern)?,
      // Handler rejected the atom; re-wrap it and return it unreduced.
      Err(e) => return Ok(Return{
        gas: ret.gas,
        inert: ret.inert,
        state: Clause::P(Primitive::Atom(Atom(e))).wrap()
      })
    };
    // Carry the remaining gas into the next round of reduction.
    ctx.gas = ret.gas;
  }
}

View File

@@ -10,6 +10,7 @@
#![feature(map_try_insert)]
#![feature(slice_group_by)]
#![feature(trait_alias)]
#![feature(return_position_impl_trait_in_trait)]
mod parse;
mod interner;

View File

@@ -1,7 +1,6 @@
use std::rc::Rc;
use hashbrown::HashMap;
use itertools::Itertools;
use crate::pipeline::error::ProjectError;
use crate::interner::{Token, Interner};

View File

@@ -1,7 +1,9 @@
use std::format;
use std::rc::Rc;
use std::fmt::{Debug, Write};
use hashbrown::HashSet;
use ordered_float::NotNan;
use crate::interner::{Token, Interner, InternedDisplay};
use crate::utils::Substack;
@@ -33,7 +35,7 @@ impl<M: InternedDisplay + Matcher> InternedDisplay for CachedRule<M> {
/// Manages a priority queue of substitution rules and allows to apply them
pub struct Repository<M: Matcher> {
cache: Vec<(CachedRule<M>, HashSet<Token<Vec<Token<String>>>>)>
cache: Vec<(CachedRule<M>, HashSet<Token<Vec<Token<String>>>>, NotNan<f64>)>
}
impl<M: Matcher> Repository<M> {
pub fn new(mut rules: Vec<Rule>, i: &Interner)
@@ -42,6 +44,7 @@ impl<M: Matcher> Repository<M> {
rules.sort_by_key(|r| -r.prio);
let cache = rules.into_iter()
.map(|r| {
let prio = r.prio;
let rule = prepare_rule(r.clone(), i)
.map_err(|e| (r, e))?;
let mut glossary = HashSet::new();
@@ -56,7 +59,7 @@ impl<M: Matcher> Repository<M> {
source: rule.source,
template: rule.target
};
Ok((prep, glossary))
Ok((prep, glossary, prio))
})
.collect::<Result<Vec<_>, _>>()?;
Ok(Self{cache})
@@ -67,7 +70,7 @@ impl<M: Matcher> Repository<M> {
let mut glossary = HashSet::new();
code.visit_names(Substack::Bottom, &mut |op| { glossary.insert(op); });
// println!("Glossary for code: {:?}", print_nname_seq(glossary.iter(), i));
for (rule, deps) in self.cache.iter() {
for (rule, deps, _) in self.cache.iter() {
if !deps.is_subset(&glossary) { continue; }
let product = update_first_seq::expr(code, &mut |exprv| {
let state = rule.matcher.apply(exprv.as_slice())?;
@@ -122,11 +125,17 @@ impl<M: Debug + Matcher> Debug for Repository<M> {
}
}
/// Render a rule priority in Orchid's hexadecimal natural notation, where
/// the exponent counts powers of 16 (e.g. `512.0` becomes `"0x2p2"`).
fn fmt_hex(num: f64) -> String {
  // One hex digit corresponds to four binary orders of magnitude.
  let exponent = (num.log2() / 4.0).floor() as i64;
  let mantissa = (num / 16_f64.powi(exponent as i32)) as i64;
  format!("0x{mantissa:x}p{exponent}")
}
impl<M: InternedDisplay + Matcher> InternedDisplay for Repository<M> {
fn fmt_i(&self, f: &mut std::fmt::Formatter<'_>, i: &Interner) -> std::fmt::Result {
writeln!(f, "Repository[")?;
for (item, _) in self.cache.iter() {
write!(f, "\t")?;
for (item, _, p) in self.cache.iter() {
write!(f, "\t{}", fmt_hex(f64::from(*p)))?;
item.fmt_i(f, i)?;
writeln!(f)?;
}

View File

@@ -4,6 +4,7 @@ use std::rc::Rc;
use hashbrown::HashMap;
use itertools::Itertools;
use crate::external::handle;
use crate::interpreter::Return;
use crate::representations::{ast_to_postmacro, postmacro_to_interpreted};
use crate::{external, xloop, interpreter};
@@ -20,30 +21,28 @@ import std::(
concatenate
)
export ...$a + ...$b =1001=> (add (...$a) (...$b))
export ...$a - ...$b:1 =1001=> (subtract (...$a) (...$b))
export ...$a * ...$b =1000=> (multiply (...$a) (...$b))
export ...$a % ...$b:1 =1000=> (remainder (...$a) (...$b))
export ...$a / ...$b:1 =1000=> (divide (...$a) (...$b))
export ...$a == ...$b =1002=> (equals (...$a) (...$b))
export ...$a ++ ...$b =1003=> (concatenate (...$a) (...$b))
export ...$a + ...$b =0x2p36=> (add (...$a) (...$b))
export ...$a - ...$b:1 =0x2p36=> (subtract (...$a) (...$b))
export ...$a * ...$b =0x1p36=> (multiply (...$a) (...$b))
export ...$a % ...$b:1 =0x1p36=> (remainder (...$a) (...$b))
export ...$a / ...$b:1 =0x1p36=> (divide (...$a) (...$b))
export ...$a == ...$b =0x3p36=> (equals (...$a) (...$b))
export ...$a ++ ...$b =0x4p36=> (concatenate (...$a) (...$b))
export do { ...$statement ; ...$rest:1 } =0x2p543=> (
statement (...$statement) do { ...$rest }
)
export do { ...$return } =0x1p543=> (...$return)
export do { ...$statement ; ...$rest:1 } =0x2p130=> statement (...$statement) do { ...$rest }
export do { ...$return } =0x1p130=> ...$return
export statement (let $name = ...$value) ...$next =0x1p1000=> (
export statement (let $name = ...$value) ...$next =0x1p230=> (
(\$name. ...$next) (...$value)
)
export statement (cps $name = ...$operation) ...$next =0x2p1000=> (
export statement (cps $name = ...$operation) ...$next =0x2p230=> (
(...$operation) \$name. ...$next
)
export statement (cps ...$operation) ...$next =0x1p1000=> (
export statement (cps ...$operation) ...$next =0x1p230=> (
(...$operation) (...$next)
)
export if ...$cond then ...$true else ...$false:1 =0x1p320=> (
export if ...$cond then ...$true else ...$false:1 =0x1p84=> (
ifthenelse (...$cond) (...$true) (...$false)
)
@@ -102,7 +101,7 @@ pub fn run_dir(dir: &Path) {
rule.bundle(&i)
)
});
// println!("Repo dump: {}", repo.bundle(&i));
println!("Repo dump: {}", repo.bundle(&i));
let mut exec_table = HashMap::new();
for (name, source) in consts.iter() {
// let nval = entrypoint(&i); let name = &nval; let source = &consts[name];
@@ -114,7 +113,7 @@ pub fn run_dir(dir: &Path) {
match repo.step(&tree) {
None => break tree,
Some(phase) => {
println!("Step {idx}/{macro_timeout}: {}", phase.bundle(&i));
// println!("Step {idx}/{macro_timeout}: {}", phase.bundle(&i));
tree = phase;
},
}
@@ -138,10 +137,11 @@ pub fn run_dir(dir: &Path) {
.join(", ")
)
});
let Return{ gas, state, inert } = interpreter::run(entrypoint.clone(), ctx)
let io_handler = handle;
let ret = interpreter::run_handler(entrypoint.clone(), io_handler, ctx);
let Return{ gas, state, inert } = ret
.unwrap_or_else(|e| panic!("Runtime error: {}", e));
if inert {
println!("Expression not reducible");
println!("Settled at {}", state.expr().clause.bundle(&i));
println!("Remaining gas: {}",
gas.map(|g| g.to_string())
@@ -149,5 +149,4 @@ pub fn run_dir(dir: &Path) {
);
}
if gas == Some(0) {println!("Ran out of gas!")}
else {println!("Expression not reducible.")}
}