forked from Orchid/orchid
Backup commit before crunch
This commit is contained in:
20
examples/calculator/main.orc
Normal file
20
examples/calculator/main.orc
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import std::(parse_float, to_string)
|
||||||
|
import std::(readline, print)
|
||||||
|
|
||||||
|
export main := do{
|
||||||
|
cps data = readline;
|
||||||
|
let a = parse_float data;
|
||||||
|
cps op = readline;
|
||||||
|
cps print ("\"" ++ op ++ "\"\n");
|
||||||
|
cps data = readline;
|
||||||
|
let b = parse_float data;
|
||||||
|
let result = (
|
||||||
|
if op == "+" then a + b
|
||||||
|
else if op == "-" then a - b
|
||||||
|
else if op == "*" then a * b
|
||||||
|
else if op == "/" then a / b
|
||||||
|
else "Unsupported operation" -- dynamically typed shenanigans
|
||||||
|
);
|
||||||
|
cps print (to_string result ++ "\n");
|
||||||
|
0
|
||||||
|
}
|
||||||
26
examples/list-processing/main.orc
Normal file
26
examples/list-processing/main.orc
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
import std::(to_string, print)
|
||||||
|
import super::list
|
||||||
|
import fn::*
|
||||||
|
|
||||||
|
export main := do{
|
||||||
|
let foo = list::new[1, 2, 3, 4, 5, 6];
|
||||||
|
let bar = list::map foo n => n * 2;
|
||||||
|
let sum = bar
|
||||||
|
|> list::skip 2
|
||||||
|
|> list::take 3
|
||||||
|
|> list::reduce 0 (a b) => a + b;
|
||||||
|
cps print $ to_string sum ++ "\n";
|
||||||
|
0
|
||||||
|
}
|
||||||
|
|
||||||
|
--[
|
||||||
|
export main := do{
|
||||||
|
let n = 1;
|
||||||
|
let acc = 1;
|
||||||
|
loop r on (n acc) with (
|
||||||
|
if n == 5
|
||||||
|
then print acc
|
||||||
|
else r (n + 1) (acc * 2)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
]--
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
import std::(parse_float, to_string)
|
|
||||||
import std::(readline, print, debug)
|
|
||||||
import std::(concatenate)
|
|
||||||
import super::list
|
|
||||||
import fn::*
|
|
||||||
|
|
||||||
--[ export main := do{
|
|
||||||
cps data = readline;
|
|
||||||
let a = parse_float data;
|
|
||||||
cps op = readline;
|
|
||||||
cps print ("\"" ++ op ++ "\"\n");
|
|
||||||
cps data = readline;
|
|
||||||
let b = parse_float data;
|
|
||||||
let result = (
|
|
||||||
if op == "+" then a + b
|
|
||||||
else if op == "-" then a - b
|
|
||||||
else if op == "*" then a * b
|
|
||||||
else if op == "/" then a / b
|
|
||||||
else "Unsupported operation" -- dynamically typed shenanigans
|
|
||||||
);
|
|
||||||
cps print (to_string result ++ "\n");
|
|
||||||
0
|
|
||||||
} ]--
|
|
||||||
|
|
||||||
export main := do{
|
|
||||||
let foo = list::new[1, 2, 3, 4, 5, 6];
|
|
||||||
let bar = list::map foo n => n * 2;
|
|
||||||
let sum = bar
|
|
||||||
|> list::skip 2
|
|
||||||
|> list::take 3
|
|
||||||
|> list::reduce 0 (a b) => a + b;
|
|
||||||
cps print $ to_string sum ++ "\n";
|
|
||||||
0
|
|
||||||
}
|
|
||||||
|
|
||||||
--[
|
|
||||||
export main := do{
|
|
||||||
let n = 1;
|
|
||||||
let acc = 1;
|
|
||||||
loop r on (n acc) with (
|
|
||||||
if n == 5
|
|
||||||
then print acc
|
|
||||||
else r (n + 1) (acc * 2)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
]--
|
|
||||||
--[
|
|
||||||
export main := do{
|
|
||||||
let n = 1;
|
|
||||||
loop r on (n) with (
|
|
||||||
debug r
|
|
||||||
)
|
|
||||||
}
|
|
||||||
]--
|
|
||||||
15
examples/maps/fn.orc
Normal file
15
examples/maps/fn.orc
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
export Y := \f.(\x.f (x x))(\x.f (x x))
|
||||||
|
|
||||||
|
export loop $r on (...$parameters) with ...$tail =0x5p512=> Y (\$r.
|
||||||
|
bind_names (...$parameters) (...$tail)
|
||||||
|
) ...$parameters
|
||||||
|
|
||||||
|
-- bind each of the names in the first argument as a parameter for the second argument
|
||||||
|
bind_names ($name ..$rest) $payload =0x2p1000=> \$name. bind_names (..$rest) $payload
|
||||||
|
bind_names () (...$payload) =0x1p1000=> ...$payload
|
||||||
|
|
||||||
|
export ...$prefix $ ...$suffix:1 =0x1p130=> ...$prefix (...$suffix)
|
||||||
|
export ...$prefix |> $fn ..$suffix:1 =0x2p130=> $fn (...$prefix) ..$suffix
|
||||||
|
|
||||||
|
export (...$argv) => ...$body =0x2p512=> (bind_names (...$argv) (...$body))
|
||||||
|
$name => ...$body =0x1p512=> (\$name. ...$body)
|
||||||
48
examples/maps/list.orc
Normal file
48
examples/maps/list.orc
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
import option
|
||||||
|
import super::fn::*
|
||||||
|
|
||||||
|
pair := \a.\b. \f. f a b
|
||||||
|
|
||||||
|
-- Constructors
|
||||||
|
|
||||||
|
export cons := \hd.\tl. option::some (pair hd tl)
|
||||||
|
export end := option::none
|
||||||
|
|
||||||
|
export pop := \list.\default.\f.list default \cons.cons f
|
||||||
|
|
||||||
|
-- Operators
|
||||||
|
|
||||||
|
export reduce := \list.\acc.\f. (
|
||||||
|
loop r on (list acc) with
|
||||||
|
pop list acc \head.\tail. r tail (f acc head)
|
||||||
|
)
|
||||||
|
|
||||||
|
export map := \list.\f. (
|
||||||
|
loop r on (list) with
|
||||||
|
pop list end \head.\tail. cons (f head) (r tail)
|
||||||
|
)
|
||||||
|
|
||||||
|
export skip := \list.\n. (
|
||||||
|
loop r on (list n) with
|
||||||
|
if n == 0 then list
|
||||||
|
else pop list end \head.\tail. r tail (n - 1)
|
||||||
|
)
|
||||||
|
|
||||||
|
export take := \list.\n. (
|
||||||
|
loop r on (list n) with
|
||||||
|
if n == 0 then end
|
||||||
|
else pop list end \head.\tail. cons head $ r tail $ n - 1
|
||||||
|
)
|
||||||
|
|
||||||
|
export get := \list.\n. (
|
||||||
|
loop r on (list n) with
|
||||||
|
pop list option::none \head.\tail.
|
||||||
|
if n == 0 then option::some head
|
||||||
|
else r tail (n - 1)
|
||||||
|
)
|
||||||
|
|
||||||
|
new[...$item, ...$rest:1] =0x2p333=> (cons (...$item) new[...$rest])
|
||||||
|
new[...$end] =0x1p333=> (cons (...$end) end)
|
||||||
|
new[] =0x1p333=> end
|
||||||
|
|
||||||
|
export ::(new)
|
||||||
22
examples/maps/main.orc
Normal file
22
examples/maps/main.orc
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
import list
|
||||||
|
import map
|
||||||
|
import option
|
||||||
|
import fn::*
|
||||||
|
|
||||||
|
export main := do{
|
||||||
|
let foo = map::new[
|
||||||
|
"foo" = 1,
|
||||||
|
"bar" = 2,
|
||||||
|
"baz" = 3,
|
||||||
|
"bar" = 4
|
||||||
|
];
|
||||||
|
map::get foo "bar"
|
||||||
|
|> option::unwrap
|
||||||
|
}
|
||||||
|
|
||||||
|
--[
|
||||||
|
export main := do{
|
||||||
|
let foo = list::new[1, 2, 3];
|
||||||
|
map::fst foo
|
||||||
|
}
|
||||||
|
]--
|
||||||
74
examples/maps/map.orc
Normal file
74
examples/maps/map.orc
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
import list
|
||||||
|
import option
|
||||||
|
import fn::*
|
||||||
|
import std::to_string
|
||||||
|
import std::debug
|
||||||
|
|
||||||
|
-- utilities for using lists as pairs
|
||||||
|
|
||||||
|
export fst := \l. (
|
||||||
|
list::get l 0
|
||||||
|
(panic "nonempty expected")
|
||||||
|
\x.x
|
||||||
|
)
|
||||||
|
export snd := \l. (
|
||||||
|
list::get l 1
|
||||||
|
(panic "2 elements expected")
|
||||||
|
\x.x
|
||||||
|
)
|
||||||
|
export print_pair := \l. (
|
||||||
|
to_string (fst l) ++ " = " ++ to_string (snd l)
|
||||||
|
)
|
||||||
|
|
||||||
|
-- constructors
|
||||||
|
|
||||||
|
export empty := list::end
|
||||||
|
export add := \m.\k.\v. (
|
||||||
|
list::cons
|
||||||
|
list::new[k, v]
|
||||||
|
m
|
||||||
|
)
|
||||||
|
|
||||||
|
-- queries
|
||||||
|
|
||||||
|
-- return the last occurrence of a key if exists
|
||||||
|
export get := \m.\k. (
|
||||||
|
loop r on (m) with
|
||||||
|
list::pop m option::none \head.\tail.
|
||||||
|
if fst head == k
|
||||||
|
then option::some $ snd head
|
||||||
|
else r tail
|
||||||
|
)
|
||||||
|
|
||||||
|
-- commands
|
||||||
|
|
||||||
|
-- remove one occurrence of a key
|
||||||
|
export del := \m.\k. (
|
||||||
|
loop r on (m) with
|
||||||
|
list::pop m list::end \head.\tail.
|
||||||
|
if fst head == k then tail
|
||||||
|
else list::cons head $ r tail
|
||||||
|
)
|
||||||
|
|
||||||
|
-- remove all occurrences of a key
|
||||||
|
export clear := \m.\k. (
|
||||||
|
loop r on (m) with
|
||||||
|
list::pop m list::end \head.\tail.
|
||||||
|
if (fst head) == k then r tail
|
||||||
|
else list::cons head $ r tail
|
||||||
|
)
|
||||||
|
|
||||||
|
-- replace at most one occurrence of a key
|
||||||
|
export set := \m.\k.\v. (
|
||||||
|
m
|
||||||
|
|> del k
|
||||||
|
|> add k v
|
||||||
|
)
|
||||||
|
|
||||||
|
new[...$tail:2, ...$key = ...$value:1] =0x2p333=> (
|
||||||
|
set new[...$tail] (...$key) (...$value)
|
||||||
|
)
|
||||||
|
new[...$key = ...$value:1] =0x1p333=> (add empty (...$key) (...$value))
|
||||||
|
new[] =0x1p333=> empty
|
||||||
|
|
||||||
|
export ::(new)
|
||||||
9
examples/maps/option.orc
Normal file
9
examples/maps/option.orc
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
import std::panic
|
||||||
|
|
||||||
|
export some := \v. \d.\f. f v
|
||||||
|
export none := \d.\f. d
|
||||||
|
|
||||||
|
export map := \option.\f. option none f
|
||||||
|
export flatten := \option. option none \opt. opt
|
||||||
|
export flatmap := \option.\f. option none \opt. map opt f
|
||||||
|
export unwrap := \option. option (panic "value expected") \x.x
|
||||||
19
notes/papers/demo/notes.md
Normal file
19
notes/papers/demo/notes.md
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
Orchid is a lazy, pure functional language with an execution model inspired by Haskell. It has a simple, principled syntax resembling mathematical notation commonly used to describe the lambda calculus. State is held in closures and multi-parameter functions are represented using currying.
|
||||||
|
|
||||||
|
This minimalism is in an effort to make parsing and code generation easier, as complex structures are defined using syntax-level macros. The macro system is inspired by generalized kerning which is a well-known Turing-complete system.
|
||||||
|
|
||||||
|
Macros consist of substitution rules applied to the tokenized, namespaced source. These rules can make use of placeholders to transform the expression tree. Placeholders can match exactly one, at least one, or any number of tokens. Macros are used to define infix operators, name bindings, friendly loop syntax over the Y-combinator and more.
|
||||||
|
|
||||||
|
Because substitution rules are applied to the namespaced tokens, macro programs can interact with each other; parts of the pattern that trigger a macro can be generated by other macros while other parts may be provided by the user. In this way, libraries can define extension interfaces where other libraries can integrate with their constructs, and an individual token can take on many meanings depending on context.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
Orchid is designed to be embedded in a Rust application. The entire program lifecycle consists of three stages which can be individually configured:
|
||||||
|
|
||||||
|
1. The parser pipeline is responsible for converting text - usually files - into a module tree. It allows the embedder to define the environment the code will see in terms of a series of file trees that are parsed in the context of preceding layers.
|
||||||
|
|
||||||
|
2. The macro executor operates entirely on the output of the pipeline. Macro programs don't necessarily halt, so the executor provides an API to find and resolve one match at a time.
|
||||||
|
|
||||||
|
3. The interpreter is a single function operating on an expression with a symbol table for resolving named constants. It also allows setting a limit to the number of normalization steps - this is commonly known as gas.
|
||||||
|
|
||||||
|
Interfacing between eager procedural and lazy functional code can be challenging, especially with the vastly different suites of optimizations. To make this a little easier, we provide an array of Rust macros that streamline the process of exposing Rust functions to Orchid code. The usage of these is demonstrated in the standard library.
|
||||||
121
notes/papers/demo/slides.md
Normal file
121
notes/papers/demo/slides.md
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
---
|
||||||
|
marp: true
|
||||||
|
class: invert
|
||||||
|
---
|
||||||
|
|
||||||
|
# Orchid
|
||||||
|
|
||||||
|
some tagline
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Syntax
|
||||||
|
|
||||||
|
basically lambda calc
|
||||||
|
```
|
||||||
|
half := \n. div n 2
|
||||||
|
pair := \a.\b. \f. f a b
|
||||||
|
increment := add 1
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Macros
|
||||||
|
|
||||||
|
match and replace token sequences
|
||||||
|
```
|
||||||
|
if ...$cond then ...$true else ...$false ==> (ifthenelse (...$cond) (...$true) (...$false))
|
||||||
|
```
|
||||||
|
...while keeping parameters intact
|
||||||
|
```
|
||||||
|
$data -- a single token (including parenthesized sequences)
|
||||||
|
...$data -- at least one token
|
||||||
|
..$data -- zero or more tokens
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Macros
|
||||||
|
|
||||||
|
define operators...
|
||||||
|
```
|
||||||
|
...$a + ...$b ==> (add (...$a) (...$b))
|
||||||
|
```
|
||||||
|
...and name bindings...
|
||||||
|
```
|
||||||
|
let $name = ...$value in ...$body ==> (\$name. ...$body) ...$value
|
||||||
|
```
|
||||||
|
...and control structures
|
||||||
|
```
|
||||||
|
loop $r on (...$parameters) with ...$tail ==> Y (\$r.
|
||||||
|
bind_names (...$parameters) (...$tail)
|
||||||
|
) ...$parameters
|
||||||
|
|
||||||
|
-- bind each of the names in the first argument as a parameter for the second argument
|
||||||
|
bind_names ($name ..$rest) $payload ==> \$name. bind_names (..$rest) $payload
|
||||||
|
bind_names () (...$payload) ==> ...$payload
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Macros
|
||||||
|
|
||||||
|
can expose interfaces...
|
||||||
|
```
|
||||||
|
do { ...$statement ; ...$rest } ==> (statement (...$statement) do { ...$rest })
|
||||||
|
do { ...$return } ==> (...$return)
|
||||||
|
```
|
||||||
|
...to be used by others...
|
||||||
|
```
|
||||||
|
statement (let $name = ...$value) ...$next ==> ((\$name. ...$next) (...$value))
|
||||||
|
statement (cps $name = ...$operation) ...$next ==> ((...$operation) \$name. ...$next)
|
||||||
|
statement (cps ...$operation) ...$next ==> ((...$operation) (...$next))
|
||||||
|
```
|
||||||
|
...to define any syntax
|
||||||
|
```
|
||||||
|
export main := do{
|
||||||
|
cps data = readline;
|
||||||
|
let double = parse_float data * 2;
|
||||||
|
cps print (to_string double ++ "\n")
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Control
|
||||||
|
|
||||||
|
remains with the embedder
|
||||||
|
|
||||||
|
| | extension | supervision |
|
||||||
|
| ----------: | :----------------: | :--------------------: |
|
||||||
|
| pipeline | external libraries | file IO interception |
|
||||||
|
| macros | | step-by-step execution |
|
||||||
|
| interpreter | constants, input | gas |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Extensions
|
||||||
|
|
||||||
|
```rs
|
||||||
|
use std::fmt::Debug;
|
||||||
|
use crate::external::litconv::with_lit;
|
||||||
|
use crate::representations::{interpreted::ExprInst, Literal};
|
||||||
|
use crate::{atomic_impl, atomic_redirect, externfn_impl};
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct ToString1;
|
||||||
|
externfn_impl!(ToString1, |_: &Self, x: ExprInst| Ok(ToString0{x}));
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct ToString0{ x: ExprInst }
|
||||||
|
atomic_redirect!(ToString0, x);
|
||||||
|
atomic_impl!(ToString0, |Self{ x }: &Self, _| {
|
||||||
|
let string = with_lit(x, |l| Ok(match l {
|
||||||
|
Literal::Char(c) => c.to_string(),
|
||||||
|
Literal::Uint(i) => i.to_string(),
|
||||||
|
Literal::Num(n) => n.to_string(),
|
||||||
|
Literal::Str(s) => s.clone()
|
||||||
|
}))?;
|
||||||
|
Ok(string.into())
|
||||||
|
});
|
||||||
|
```
|
||||||
54
notes/papers/report/parts/examples/+index.md
Normal file
54
notes/papers/report/parts/examples/+index.md
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
# Examples
|
||||||
|
|
||||||
|
The following examples all work in the submitted version of Orchid; they're included in various subdirectories of `examples`.
|
||||||
|
|
||||||
|
## Prelude
|
||||||
|
|
||||||
|
All code files implicitly include the head statement
|
||||||
|
|
||||||
|
```
|
||||||
|
import prelude::*
|
||||||
|
```
|
||||||
|
|
||||||
|
The `prelude` module is a string literal compiled into the interpreter. Its contents are as follows:
|
||||||
|
|
||||||
|
```rs
|
||||||
|
static PRELUDE_TXT:&str = r#"
|
||||||
|
import std::(
|
||||||
|
add, subtract, multiply, remainder, divide,
|
||||||
|
equals, ifthenelse,
|
||||||
|
concatenate
|
||||||
|
)
|
||||||
|
|
||||||
|
export ...$a + ...$b =1001=> (add (...$a) (...$b))
|
||||||
|
export ...$a - ...$b:1 =1001=> (subtract (...$a) (...$b))
|
||||||
|
export ...$a * ...$b =1000=> (multiply (...$a) (...$b))
|
||||||
|
export ...$a % ...$b:1 =1000=> (remainder (...$a) (...$b))
|
||||||
|
export ...$a / ...$b:1 =1000=> (divide (...$a) (...$b))
|
||||||
|
export ...$a == ...$b =1002=> (equals (...$a) (...$b))
|
||||||
|
export ...$a ++ ...$b =1003=> (concatenate (...$a) (...$b))
|
||||||
|
|
||||||
|
export do { ...$statement ; ...$rest:1 } =0x2p543=> (
|
||||||
|
statement (...$statement) do { ...$rest }
|
||||||
|
)
|
||||||
|
export do { ...$return } =0x1p543=> (...$return)
|
||||||
|
|
||||||
|
export statement (let $name = ...$value) ...$next =0x1p1000=> (
|
||||||
|
(\$name. ...$next) (...$value)
|
||||||
|
)
|
||||||
|
export statement (cps $name = ...$operation) ...$next =0x2p1000=> (
|
||||||
|
(...$operation) \$name. ...$next
|
||||||
|
)
|
||||||
|
export statement (cps ...$operation) ...$next =0x1p1000=> (
|
||||||
|
(...$operation) (...$next)
|
||||||
|
)
|
||||||
|
|
||||||
|
export if ...$cond then ...$true else ...$false:1 =0x1p320=> (
|
||||||
|
ifthenelse (...$cond) (...$true) (...$false)
|
||||||
|
)
|
||||||
|
|
||||||
|
export ::(,)
|
||||||
|
"#;
|
||||||
|
```
|
||||||
|
|
||||||
|
The meaning of each of these rules is explained in the [calculator example](./calculator.md). The exact file is included here just as a reference while reading the other examples.
|
||||||
@@ -1,24 +1,7 @@
|
|||||||
This example showcases common list processing functions and some functional programming utilities. It is also the first multi-file demo.
|
# Fn
|
||||||
|
|
||||||
_in main.orc_
|
This file contains a variety of utilities for functional programming
|
||||||
```
|
|
||||||
import std::(to_string, print)
|
|
||||||
import super::list
|
|
||||||
import fn::*
|
|
||||||
|
|
||||||
export main := do{
|
|
||||||
let foo = list::new[1, 2, 3, 4, 5, 6];
|
|
||||||
let bar = list::map foo n => n * 2;
|
|
||||||
let sum = bar
|
|
||||||
|> list::skip 2
|
|
||||||
|> list::take 3
|
|
||||||
|> list::reduce 0 (a b) => a + b;
|
|
||||||
cps print $ to_string sum ++ "\n";
|
|
||||||
0
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
_in fn.orc_
|
|
||||||
```
|
```
|
||||||
export Y := \f.(\x.f (x x))(\x.f (x x))
|
export Y := \f.(\x.f (x x))(\x.f (x x))
|
||||||
|
|
||||||
@@ -37,70 +20,11 @@ export (...$argv) => ...$body =0x2p512=> (bind_names (...$argv) (...$body))
|
|||||||
$name => ...$body =0x1p512=> (\$name. ...$body)
|
$name => ...$body =0x1p512=> (\$name. ...$body)
|
||||||
```
|
```
|
||||||
|
|
||||||
_in list.orc_
|
## bind_names
|
||||||
```
|
|
||||||
import option
|
|
||||||
import super::fn::*
|
|
||||||
|
|
||||||
pair := \a.\b. \f. f a b
|
|
||||||
|
|
||||||
-- Constructors
|
|
||||||
|
|
||||||
export cons := \hd.\tl. option::some (pair hd tl)
|
|
||||||
export end := option::none
|
|
||||||
|
|
||||||
export pop := \list.\default.\f. list default \cons.cons f
|
|
||||||
|
|
||||||
-- Operators
|
|
||||||
|
|
||||||
export reduce := \list.\acc.\f. (
|
|
||||||
loop r on (list acc) with
|
|
||||||
pop list acc \head.\tail. r tail (f acc head)
|
|
||||||
)
|
|
||||||
|
|
||||||
export map := \list.\f. (
|
|
||||||
loop r on (list) with
|
|
||||||
pop list end \head.\tail. cons (f head) (r tail)
|
|
||||||
)
|
|
||||||
|
|
||||||
export skip := \list.\n. (
|
|
||||||
loop r on (list n) with
|
|
||||||
if n == 0 then list
|
|
||||||
else pop list end \head.\tail. r tail (n - 1)
|
|
||||||
)
|
|
||||||
|
|
||||||
export take := \list.\n. (
|
|
||||||
loop r on (list n) with
|
|
||||||
if n == 0 then end
|
|
||||||
else pop list end \head.\tail. cons head $ r tail $ n - 1
|
|
||||||
)
|
|
||||||
|
|
||||||
new[...$item, ...$rest:1] =0x2p333=> (cons (...$item) new[...$rest])
|
|
||||||
new[...$end] =0x1p333=> (cons (...$end) end)
|
|
||||||
new[] =0x1p333=> end
|
|
||||||
|
|
||||||
export ::(new)
|
|
||||||
```
|
|
||||||
|
|
||||||
_in option.orc_
|
|
||||||
```
|
|
||||||
export some := \v. \d.\f. f v
|
|
||||||
export none := \d.\f. d
|
|
||||||
|
|
||||||
export map := \option.\f. option none f
|
|
||||||
export flatten := \option. option none \opt. opt
|
|
||||||
export flatmap := \option.\f. option none \opt. map opt f
|
|
||||||
```
|
|
||||||
|
|
||||||
The `main` function uses a `do{}` block to enclose a series of name bindings. It imports `list` as a sibling module and `fn` as a top-level file. These files are in identical position, the purpose of this is just to test various ways to reference modules.
|
|
||||||
|
|
||||||
## fn
|
|
||||||
|
|
||||||
### bind_names
|
|
||||||
|
|
||||||
This is a utility macro for binding a list of names on an expression. It demonstrates how to extract reusable macro program fragments to simplify common tasks. This demonstrative version simply takes a sequence of name tokens without any separators or custom programming, but its functionality can be extended in the future to include eg. destructuring.
|
This is a utility macro for binding a list of names on an expression. It demonstrates how to extract reusable macro program fragments to simplify common tasks. This demonstrative version simply takes a sequence of name tokens without any separators or custom programming, but its functionality can be extended in the future to include eg. destructuring.
|
||||||
|
|
||||||
### arrow functions
|
## arrow functions
|
||||||
|
|
||||||
The arrow `=>` operator here is used to define inline functions. It is very similar to the native `\x.` lambda, except that native lambdas use higher priority than any macro so they can't appear inside a `do{}` block as all of the subsequent lines would be consumed by them. It is parsed using the following rules:
|
The arrow `=>` operator here is used to define inline functions. It is very similar to the native `\x.` lambda, except that native lambdas use higher priority than any macro so they can't appear inside a `do{}` block as all of the subsequent lines would be consumed by them. It is parsed using the following rules:
|
||||||
```
|
```
|
||||||
@@ -108,7 +32,7 @@ export (...$argv) => ...$body =0x2p512=> (bind_names (...$argv) (...$body))
|
|||||||
$name => ...$body =0x1p512=> (\$name. ...$body)
|
$name => ...$body =0x1p512=> (\$name. ...$body)
|
||||||
```
|
```
|
||||||
|
|
||||||
### pipelines
|
## pipelines
|
||||||
|
|
||||||
This is a concept borrowed from Elixir. The `|>` operator simply inserts the output of the previous expression to the first argument of the following function.
|
This is a concept borrowed from Elixir. The `|>` operator simply inserts the output of the previous expression to the first argument of the following function.
|
||||||
```
|
```
|
||||||
@@ -117,11 +41,11 @@ export ...$prefix |> $fn ..$suffix:1 =0x2p130=> $fn (...$prefix) ..$suffix
|
|||||||
|
|
||||||
It is processed left-to-right, but leaves the suffix on the same level as the function and sinks the prefix, which means that long pipelines eventually become left associative despite the inverted processing order.
|
It is processed left-to-right, but leaves the suffix on the same level as the function and sinks the prefix, which means that long pipelines eventually become left associative despite the inverted processing order.
|
||||||
|
|
||||||
### right-associative function call operator
|
## right-associative function call operator
|
||||||
|
|
||||||
The `$` operator is analogous to its Haskell counterpart. It is right-associative and very low priority. Its purpose is to eliminate trailing parentheses.
|
The `$` operator is analogous to its Haskell counterpart. It is right-associative and very low priority. Its purpose is to eliminate trailing parentheses.
|
||||||
|
|
||||||
### Loop expression
|
## Loop expression
|
||||||
|
|
||||||
Recursion in lambda calculus is achieved using a fixpoint combinator. The classic version of this combinator described by Church is the [Y-combinator][hb_tlc], defined like so:
|
Recursion in lambda calculus is achieved using a fixpoint combinator. The classic version of this combinator described by Church is the [Y-combinator][hb_tlc], defined like so:
|
||||||
```
|
```
|
||||||
71
notes/papers/report/parts/examples/list-processing/list.md
Normal file
71
notes/papers/report/parts/examples/list-processing/list.md
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
# List
|
||||||
|
|
||||||
|
These files demonstrate building datastructures using closures.
|
||||||
|
|
||||||
|
## Option.orc
|
||||||
|
|
||||||
|
Option is among the simplest datastructures. It either stores a value or nothing. To interact with it, one must provide a default value and a selector.
|
||||||
|
|
||||||
|
```
|
||||||
|
export some := \v. \d.\f. f v
|
||||||
|
export none := \d.\f. d
|
||||||
|
|
||||||
|
export map := \option.\f. option none f
|
||||||
|
export flatten := \option. option none \opt. opt
|
||||||
|
export flatmap := \option.\f. option none \opt. map opt f
|
||||||
|
```
|
||||||
|
|
||||||
|
The selector is required in lambda calculus because the only way to obtain information about values is to evaluate them, but it's not actually necessary in Orchid because it's always possible to pass a primitive of incompatible type as the default value and then use equality comparison to decide whether we got the value in the option or our dud. Regardless, this interface is vastly more convenient and probably more familiar to programmers coming from functional languages.
|
||||||
|
|
||||||
|
## List.orc
|
||||||
|
|
||||||
|
The linked list is an outstandingly powerful and versatile datastructure and the backbone of practical functional programming. This implementation uses a locally defined church pair and the option defined above in an effort to be more transparent, although this means that the essential operation of splitting the head and tail or returning a default value becomes an explicit function (here named `pop`) instead of the intrinsic interface of the list itself.
|
||||||
|
|
||||||
|
_in list.orc_
|
||||||
|
```
|
||||||
|
import option
|
||||||
|
import super::fn::*
|
||||||
|
|
||||||
|
pair := \a.\b. \f. f a b
|
||||||
|
|
||||||
|
-- Constructors
|
||||||
|
|
||||||
|
export cons := \hd.\tl. option::some (pair hd tl)
|
||||||
|
export end := option::none
|
||||||
|
|
||||||
|
-- Operators
|
||||||
|
|
||||||
|
export pop := \list.\default.\f. list default \cons.cons f
|
||||||
|
|
||||||
|
export reduce := \list.\acc.\f. (
|
||||||
|
loop r on (list acc) with
|
||||||
|
pop list acc \head.\tail. r tail (f acc head)
|
||||||
|
)
|
||||||
|
|
||||||
|
export map := \list.\f. (
|
||||||
|
loop r on (list) with
|
||||||
|
pop list end \head.\tail. cons (f head) (r tail)
|
||||||
|
)
|
||||||
|
|
||||||
|
export skip := \list.\n. (
|
||||||
|
loop r on (list n) with
|
||||||
|
if n == 0 then list
|
||||||
|
else pop list end \head.\tail. r tail (n - 1)
|
||||||
|
)
|
||||||
|
|
||||||
|
export take := \list.\n. (
|
||||||
|
loop r on (list n) with
|
||||||
|
if n == 0 then end
|
||||||
|
else pop list end \head.\tail. cons head $ r tail $ n - 1
|
||||||
|
)
|
||||||
|
|
||||||
|
new[...$item, ...$rest:1] =0x2p333=> (cons (...$item) new[...$rest])
|
||||||
|
new[...$end] =0x1p333=> (cons (...$end) end)
|
||||||
|
new[] =0x1p333=> end
|
||||||
|
|
||||||
|
export ::(new)
|
||||||
|
```
|
||||||
|
|
||||||
|
Most of these operations should be self-explanatory in the context of the parts defined in [fn.md](./fn.md).
|
||||||
|
|
||||||
|
The `new[]` macro builds a list from data. Because they are expected to contain expressions, the fields here are comma separated unlike in `fn::=>` and `fn::loop`. I did not find this inconsistency jarring during initial testing, but it may be updated if further improvements to `loop` and `=>`'s syntax open up the possibility of multi-token field descriptions.
|
||||||
30
notes/papers/report/parts/examples/list-processing/main.md
Normal file
30
notes/papers/report/parts/examples/list-processing/main.md
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
This example showcases common list processing functions and some functional programming utilities. It is also the first multi-file demo.
|
||||||
|
|
||||||
|
_in main.orc_
|
||||||
|
```
|
||||||
|
import std::(to_string, print)
|
||||||
|
import super::list
|
||||||
|
import fn::*
|
||||||
|
|
||||||
|
export main := do{
|
||||||
|
let foo = list::new[1, 2, 3, 4, 5, 6];
|
||||||
|
let bar = list::map foo n => n * 2;
|
||||||
|
let sum = bar
|
||||||
|
|> list::skip 2
|
||||||
|
|> list::take 3
|
||||||
|
|> list::reduce 0 (a b) => a + b;
|
||||||
|
cps print $ to_string sum ++ "\n";
|
||||||
|
0
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This file imports `list` as a sibling module and `fn` as a top-level file. These files are in identical position, the purpose of this is just to test various ways to reference modules.
|
||||||
|
|
||||||
|
- The contents of _fn.orc_ are described in [fn](./fn.md)
|
||||||
|
- _list.orc_ and its dependency, _option.orc_ are described in [list](./list.md)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
The `main` function uses a `do{}` block to enclose a series of name bindings. It constructs a list of numbers 1-6. This is done eagerly, or at least a linked list of the same size is constructed eagerly, although the `cons` calls are left until the first read. Due to Orchid's laziness, `bar` gets assigned the `map` call as-is. `sum` is assigned from the `|>` pipe chain, which is essentially the same as a chain of further name bindings; the return value of each function is passed as the first argument of the next, pushing subsequent arguments out of the way.
|
||||||
|
|
||||||
|
When the `print` expression is evaluated, the updates are applied as needed; the mapping is never applied to 1 and 2, and none of the loops in the list processing functions execute their body on the list object containing 6.
|
||||||
@@ -55,9 +55,19 @@ Being a pure language, Orchid carries the potential to serialize functions and s
|
|||||||
|
|
||||||
The flexible macro system enables library developers to invent their own syntax for essentially anything. I considered defining macros for html, music scores / midi data, marble and flow diagrams.
|
The flexible macro system enables library developers to invent their own syntax for essentially anything. I considered defining macros for html, music scores / midi data, marble and flow diagrams.
|
||||||
|
|
||||||
### DMA/MMIO
|
### Unsafe
|
||||||
|
|
||||||
TODO
|
These functions may be exposed by a direct Orchid interpreter but they would probably not be included in the library exposed by an embedder.
|
||||||
|
|
||||||
|
#### system calls
|
||||||
|
|
||||||
|
While individual system APIs can be exposed to the program using dedicated Rust bindings, this takes time and limits the power of the language. The general solution to this in high level languages is to expose the `system()` function which enables high level code to interact with _some kind of shell_, the shell of the operating system. What shell this exactly is and what tools are available through it is up to the user to discover.
|
||||||
|
|
||||||
|
#### DMA/MMIO
|
||||||
|
|
||||||
|
As a high level language, Orchid doesn't inherently have direct memory access, in part because it's not generally required. Regardless, a way of writing to and reading from exact memory addresses may be useful in the development of libraries that interface with hardware such as a Raspberry Pi's GPIO pins.
|
||||||
|
|
||||||
|
In general this is probably better accomplished using Rust functions that interface with Orchid, but this will eventually inevitably lead to several functions that do nothing but read a number from an address or write a number to an address, except the addresses are wrapped in various tagged structs. This repetition could be nipped in the bud by simply exposing a function for MMIO and allowing the Orchid side to define the wrappers.
|
||||||
|
|
||||||
## Type system
|
## Type system
|
||||||
|
|
||||||
@@ -67,5 +77,5 @@ Originally, Orchid was meant to have a type system that used Orchid itself to bu
|
|||||||
|
|
||||||
### Alternatives
|
### Alternatives
|
||||||
|
|
||||||
During initial testing of the working version, I found that the most common kind of programming error in lambda calculus appears to be arity mismatch or syntax errors that result in arity mismatch. Without any kind of type checking this is especially difficult to debug as every function looks the same. This can be addressed with a much simpler type system similar to System-F. Any such type checker would have to be constructed so as to only verify user-provided information regarding the arity of functions without attempting to find the arity of every expression, since System-F is strongly normalising and Orchid like any general purpose language supports potentially infinite loops.
|
During initial testing of the working version, I found that the most common kind of programming error in lambda calculus appears to be arity mismatch or syntax error that results in arity mismatch. Without any kind of type checking this is especially difficult to debug as every function looks the same. This can be addressed with a much simpler type system similar to System-F. Any such type checker would have to be constructed so as to only verify user-provided information regarding the arity of functions without attempting to find the arity of every expression, since System-F is strongly normalising and Orchid like any general purpose language supports potentially infinite loops.
|
||||||
|
|
||||||
|
|||||||
@@ -16,4 +16,4 @@ My plan for Orchid was to use Orchid itself as a type system as well; rather tha
|
|||||||
[tc2]: https://blog.rust-lang.org/2022/10/28/gats-stabilization.html
|
[tc2]: https://blog.rust-lang.org/2022/10/28/gats-stabilization.html
|
||||||
[tc3]: https://wiki.haskell.org/Type_SK
|
[tc3]: https://wiki.haskell.org/Type_SK
|
||||||
|
|
||||||
A description of the planned type system is available in [[type_system/01-main.md|Appendix T]]
|
A description of the planned type system is available in [[type_system/+index|Appendix T]]
|
||||||
@@ -10,45 +10,49 @@ The range of valid priorities is divided up into bands, much like radio bands. I
|
|||||||
|
|
||||||
The bands are each an even 32 orders of magnitude, with space in between for future expansion
|
The bands are each an even 32 orders of magnitude, with space in between for future expansion
|
||||||
|
|
||||||
| | | | |
|
| | | | |
|
||||||
| :----------: | :-----: | :---------: | :----------: |
|
| :-----------: | :------: | :---------: | :----------: |
|
||||||
| 0-31 | 32-63 | 64-95 | 96-127 |
|
| 0-31 | 32-63 | 64-95 | 96-127 |
|
||||||
| | x | | |
|
| optimizations | x | | |
|
||||||
| 128-159 | 160-191 | 192-223 | 224-255 |
|
| 128-159 | 160-191 | 192-223 | 224-255 |
|
||||||
| operators | | | x |
|
| operators | | | x |
|
||||||
| 256-287 | 288-319 | 320-351 | 352-383 |
|
| 256-287 | 288-319 | 320-351 | 352-383 |
|
||||||
| | | expressions | |
|
| | | expressions | |
|
||||||
| 384-415 | 416-447 | 448-479 | 480-511 |
|
| 384-415 | 416-447 | 448-479 | 480-511 |
|
||||||
| | x | | |
|
| | x | | |
|
||||||
| 512-543 | 544-575 | 576-607 | 608-639 |
|
| 512-543 | 544-575 | 576-607 | 608-639 |
|
||||||
| bindings | | | x |
|
| bindings | | | x |
|
||||||
| 640-671 | 672-703 | 704-735 | 736-767 |
|
| 640-671 | 672-703 | 704-735 | 736-767 |
|
||||||
| | | aliases | |
|
| | | x | |
|
||||||
| 768-799 | 800-831 | 832-863 | 864-895 |
|
| 768-799 | 800-831 | 832-863 | 864-895 |
|
||||||
| | x | | |
|
| | aliases* | | |
|
||||||
| 896-927 | 928-959 | 960-991 | 992- |
|
| 896-927 | 928-959 | 960-991 | 992- |
|
||||||
| integrations | | | transitional |
|
| integrations | | | transitional |
|
||||||
|
|
||||||
### Transitional states
|
### Transitional states
|
||||||
|
|
||||||
Transitional states produced and consumed by the same macro program occupy the range above 0x1p991. Nothing in this range should be written by the user or triggered by an interaction of distinct macro programs, the purpose of this high range is to prevent devices such as carriages from interacting. Any transformation sequence in this range can assume that the tree is inert other than its own operation.
|
Transitional states produced and consumed by the same macro program occupy the range above 0x1p991. Nothing in this range should be written by the user or triggered by an interaction of distinct macro programs, the purpose of this high range is to prevent devices such as carriages from interacting. Any transformation sequence in this range can assume that the tree is inert other than its own operation.
|
||||||
|
|
||||||
### Integrations
|
### Integrations
|
||||||
|
|
||||||
Integrations expect an inert syntax tree but at least one hidden token does not belong to the macro program that resolves the rule, so it's additionally important that all macro programs be in a documented state at the time of resolution.
|
Integrations expect an inert syntax tree but at least one token in the pattern is external to the macro program that resolves the rule, so it's critical that all macro programs be in a documented state at the time of resolution.
|
||||||
|
|
||||||
### Aliases
|
### Aliases
|
||||||
|
|
||||||
Fragments of code extracted for readability.
|
Fragments of code extracted for readability are all at exactly 0x1p800. These may be written by programmers who are not comfortable with macros or metaprogramming. They must have unique single token patterns. Because their priority is higher than any entry point, they can safely contain parts of other macro invocations. They have a single priority number because they can't conceivably require internal ordering adjustments and their usage is meant to be as straightforward as possible.
|
||||||
|
|
||||||
### Binding builders
|
### Binding builders
|
||||||
|
|
||||||
Syntax elements that manipulate bindings should be executed earlier. Do blocks and match statements are good examples of this category. Anything with a lower priority trigger can assume that all names are correctly bound.
|
Syntax elements that manipulate bindings should be executed earlier. `do` blocks and (future) `match` statements are good examples of this category. Anything with a lower priority trigger can assume that all names are correctly bound.
|
||||||
|
|
||||||
### Expressions
|
### Expressions
|
||||||
|
|
||||||
Things that essentially work like function calls just with added structure, such as if/then/else
|
Things that essentially work like function calls just with added structure, such as `if`/`then`/`else` or `loop`. These are usually just more intuitive custom forms that are otherwise identical to a macro
|
||||||
|
|
||||||
### Operators
|
### Operators
|
||||||
|
|
||||||
Binary and unary operators that process the chunks of text on either side
|
Binary and unary operators that process the chunks of text on either side. Within the band, these macros are prioritized in inverse precedence order and apply to the entire range of clauses before and after themselves, to ensure that function calls have the highest perceived priority.
|
||||||
|
|
||||||
|
### Optimizations
|
||||||
|
|
||||||
|
Macros that operate on a fully resolved lambda code and look for known patterns that can be simplified. I did not manage to create a working example of this but for instance repeated string concatenation is a good example.
|
||||||
@@ -1,13 +1,6 @@
|
|||||||
# The pipeline
|
# The pipeline
|
||||||
|
|
||||||
The conversion of Orchid files into a collection of macro rules is a relatively complicated process. First, the source files are loaded and an initial parsing pass is executed. Because the set of supported operators influences the correct lexing of expressions, the output of this pass can't directly be used. The parts of each module that are known to be valid are
|
The conversion of Orchid files into a collection of macro rules is a relatively complicated process that took several attempts to get right.
|
||||||
|
|
||||||
- the imports, because they don't use expressions at all
|
|
||||||
- the visibility and pattern of macro rule definitions, because it is required to separate distinct operators with spaces
|
|
||||||
- the visibility and name of constant definitions
|
|
||||||
- the name of submodules and these same elements in their bodies
|
|
||||||
|
|
||||||
This preparsed data is then used to locate all files in the solution, and to collect all operators visible to a certain file for a final parsing pass. It is necessary to refer to imported modules for a complete list of operators because glob imports don't offer any information about the set of names but still import all operators for the purpose of lexing.
|
|
||||||
|
|
||||||
## Push vs pull logistics
|
## Push vs pull logistics
|
||||||
|
|
||||||
@@ -15,4 +8,60 @@ The initial POC implementation of Orchid used pull logistics aka lazy evaluation
|
|||||||
|
|
||||||
Additionally, in a lot of cases lazy evaluation is undesirable. Most programmers other than the developers of Python would like to receive syntax errors in dead functions because statically identifiable errors are usually either typos that are trivial to fix or born out of a misconception on the programmer's part which is worth addressing in case it produces silent errors elsewhere. But errors are produced when the calculation of a value fails, so to produce errors all values about all functions must be calculated.
|
Additionally, in a lot of cases lazy evaluation is undesirable. Most programmers other than the developers of Python would like to receive syntax errors in dead functions because statically identifiable errors are usually either typos that are trivial to fix or born out of a misconception on the programmer's part which is worth addressing in case it produces silent errors elsewhere. But errors are produced when the calculation of a value fails, so to produce errors all values about all functions must be calculated.
|
||||||
|
|
||||||
To address these issues, the second iteration only uses pull logistics for the preparsing and file collection phase, and the only errors guaranteed to be produced by this stage are imports from missing files and syntax errors regarding the structure of the S-expressions.
|
To address these issues, the second iteration only uses pull logistics for the preparsing and file collection phase, and the only errors guaranteed to be produced by this stage are imports from missing files and syntax errors regarding the structure of the S-expressions.
|
||||||
|
|
||||||
|
## Stages
|
||||||
|
|
||||||
|
As of writing, the pipeline consists of three main stages; source loading, tree-building and name resolution. These break down into multiple substages.
|
||||||
|
|
||||||
|
All stages support various ways to introduce blind spots and precomputed values into their processing. This is used to load the standard library, prelude, and possibly externally defined intermediate stages of injected code.
|
||||||
|
|
||||||
|
### Source loading
|
||||||
|
|
||||||
|
This stage encapsulates pull logistics. It collects all source files that should be included in the compilation in a hashmap keyed by their project-relative path. All subsequent operations are executed on every element of this map unconditionally.
|
||||||
|
|
||||||
|
The files and directory listings are obtained from an injected function for flexibility. File collection is motivated by a set of target paths, and injected paths can be ignored with a callback.
|
||||||
|
|
||||||
|
Parsing itself is outsourced to a Chumsky parser defined separately. This parser expects a list of operators for tokenization, but such a list is not available without knowledge of other files because glob imports don't provide information about the operators they define, so much of the parsed data is invalid. What is known to be valid are
|
||||||
|
|
||||||
|
- the types of all lines
|
||||||
|
- line types `import` and `export`
|
||||||
|
- the pattern of `rule` lines
|
||||||
|
- the name of `constant` and `namespace` lines
|
||||||
|
- valid parts of the `exported` variant of lines
|
||||||
|
- valid parts of the body of `namespace` lines
|
||||||
|
|
||||||
|
This information is compiled into a very barebones module representation and returned alongside the loaded source code.
|
||||||
|
|
||||||
|
### Tree building
|
||||||
|
|
||||||
|
This stage aims to collect all modules in a single tree. To achieve this, it re-parses each file with the set of operators collected from the datastructure built during preparsing. The glob imports in the resulting FileEntry lists are eliminated, and the names in the bodies of expressions and macro rules are prefixed with the module path in preparation for macro execution.
|
||||||
|
|
||||||
|
Operator collection can be advised about the exports of injected modules using a callback, and a prelude in the form of a list of line objects - in the shape emitted by the parser - can be injected before the contents of every module to define universally accessible names. Since these lines are processed for every file, it's generally best to just insert a single glob import from a module that defines everything. The interpreter inserts `import prelude::*`.
|
||||||
|
|
||||||
|
### Import resolution
|
||||||
|
|
||||||
|
This stage aims to produce a tree ready for consumption by a macro executor or any other subsystem. It replaces every name originating from imported namespaces in every module with the original name.
|
||||||
|
|
||||||
|
Injection is supported with a function which takes a path and, if it's valid in the injected tree, returns its original value even if that's the path itself. This is used both to skip resolving names in the injected modules - which are expected to have already been processed using this step - and of course to find the origin of imports from the injected tree.
|
||||||
|
|
||||||
|
## Layered parsing
|
||||||
|
|
||||||
|
The most important export of the pipeline is the `parse_layer` function, which acts as a façade over the complex system described above. The environment in which user code runs is bootstrapped using repeated invocations of this function. It has the following options
|
||||||
|
|
||||||
|
1. targets that motivate file loading
|
||||||
|
|
||||||
|
In the case of intermediate layers this can be a list of all included module names. The targets are only required to be valid, global import paths without a globstar.
|
||||||
|
|
||||||
|
2. a function that performs file and directory reads.
|
||||||
|
|
||||||
|
This is normally set to a lambda that relays requests to `pipeline::file_loader`, but it can be replaced with another function if source code is to be loaded from an emulated file system, such as an in-memory tree or an online package repository.
|
||||||
|
|
||||||
|
3. the previous layer as an environment
|
||||||
|
4. a prelude to every file
|
||||||
|
|
||||||
|
The interpreter sets this to `import prelude::*`. If the embedder defines its own prelude it's a good idea to append it.
|
||||||
|
|
||||||
|
### The first layer
|
||||||
|
|
||||||
|
The other important exports of the pipeline are `ConstTree` and `from_const_tree`. These are used to define a base layer that exposes extern functions. `ConstTree` implements `Add` so distinct libraries of extern functions can be intuitively combined.
|
||||||
@@ -2,6 +2,10 @@
|
|||||||
|
|
||||||
Orchid expressions are similar in nature to lambda calculus or haskell, except whitespace is mostly irrelevant.
|
Orchid expressions are similar in nature to lambda calculus or haskell, except whitespace is mostly irrelevant.
|
||||||
|
|
||||||
|
## Comments
|
||||||
|
|
||||||
|
Orchid borrows Lua's comment syntax. Line comments start with `--` and end at a line break. Block comments start with `--[` and end with `]--`.
|
||||||
|
|
||||||
## Names
|
## Names
|
||||||
|
|
||||||
`name` and `ns_name` tokens appear all over the place in this spec. They represent operators, function names, arguments, modules. A `name` is
|
`name` and `ns_name` tokens appear all over the place in this spec. They represent operators, function names, arguments, modules. A `name` is
|
||||||
|
|||||||
@@ -2,6 +2,4 @@
|
|||||||
|
|
||||||
The vast majority of algorithms involved in this project are multiple recursive in nature. Very often information on higher levels would influence the entire subtree. A good example is the resolution of name bindings. The size of the call stack is associated with the growth of the set of names, only the top needs to be mutated, but all names seen in enclosing scopes need to be accessible. The datastructure we need is essentially a linked list on the stack.
|
The vast majority of algorithms involved in this project are multiple recursive in nature. Very often information on higher levels would influence the entire subtree. A good example is the resolution of name bindings. The size of the call stack is associated with the growth of the set of names, only the top needs to be mutated, but all names seen in enclosing scopes need to be accessible. The datastructure we need is essentially a linked list on the stack.
|
||||||
|
|
||||||
This is a very common and not particularly interesting datastructure; much like quicksort, every C project of considerable size that uses recursion includes some definition of it. However, I still think it deserves some attention, precisely because it's so common. For example, my implementation also defines an iterator, and a reasonably efficient implementation of the outstandingly common operation of collecting the stack into a Vec that starts at the bottom.
|
This is a very common and not particularly interesting datastructure; much like quicksort, every C project of considerable size that uses recursion includes some definition of it. However, I still think it deserves some attention, precisely because it's so common. For example, my implementation also defines an iterator, and a reasonably efficient, safe implementation of the outstandingly common operation of collecting the stack into a Vec that starts at the bottom.
|
||||||
|
|
||||||
Another extension to this concept I wrote to help with the type system is a stackbound map. This is not a very good implementation and it definitely needs more work to be worth publishing, but it stands to show that the concept of a substack is versatile and powerful.
|
|
||||||
4
src/external/bool/equals.rs
vendored
4
src/external/bool/equals.rs
vendored
@@ -34,12 +34,14 @@ externfn_impl!(Equals1, |this: &Self, x: ExprInst| {
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct Equals0 { a: Literal, x: ExprInst }
|
pub struct Equals0 { a: Literal, x: ExprInst }
|
||||||
atomic_redirect!(Equals0, x);
|
atomic_redirect!(Equals0, x);
|
||||||
atomic_impl!(Equals0, |Self{ a, x }: &Self| {
|
atomic_impl!(Equals0, |Self{ a, x }: &Self, _| {
|
||||||
let eqls = with_lit(x, |l| Ok(match (a, l) {
|
let eqls = with_lit(x, |l| Ok(match (a, l) {
|
||||||
(Literal::Char(c1), Literal::Char(c2)) => c1 == c2,
|
(Literal::Char(c1), Literal::Char(c2)) => c1 == c2,
|
||||||
(Literal::Num(n1), Literal::Num(n2)) => n1 == n2,
|
(Literal::Num(n1), Literal::Num(n2)) => n1 == n2,
|
||||||
(Literal::Str(s1), Literal::Str(s2)) => s1 == s2,
|
(Literal::Str(s1), Literal::Str(s2)) => s1 == s2,
|
||||||
(Literal::Uint(i1), Literal::Uint(i2)) => i1 == i2,
|
(Literal::Uint(i1), Literal::Uint(i2)) => i1 == i2,
|
||||||
|
(Literal::Num(n1), Literal::Uint(u1)) => *n1 == (*u1 as f64),
|
||||||
|
(Literal::Uint(u1), Literal::Num(n1)) => *n1 == (*u1 as f64),
|
||||||
(_, _) => AssertionError::fail(x.clone(), "the expected type")?,
|
(_, _) => AssertionError::fail(x.clone(), "the expected type")?,
|
||||||
}))?;
|
}))?;
|
||||||
Ok(Boolean::from(eqls).to_atom_cls())
|
Ok(Boolean::from(eqls).to_atom_cls())
|
||||||
|
|||||||
2
src/external/bool/ifthenelse.rs
vendored
2
src/external/bool/ifthenelse.rs
vendored
@@ -22,7 +22,7 @@ externfn_impl!(IfThenElse1, |_: &Self, x: ExprInst| Ok(IfThenElse0{x}));
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct IfThenElse0{ x: ExprInst }
|
pub struct IfThenElse0{ x: ExprInst }
|
||||||
atomic_redirect!(IfThenElse0, x);
|
atomic_redirect!(IfThenElse0, x);
|
||||||
atomic_impl!(IfThenElse0, |this: &Self| {
|
atomic_impl!(IfThenElse0, |this: &Self, _| {
|
||||||
let Boolean(b) = this.x.clone().try_into()
|
let Boolean(b) = this.x.clone().try_into()
|
||||||
.map_err(|_| AssertionError::ext(this.x.clone(), "a boolean"))?;
|
.map_err(|_| AssertionError::ext(this.x.clone(), "a boolean"))?;
|
||||||
Ok(if b { Clause::Lambda {
|
Ok(if b { Clause::Lambda {
|
||||||
|
|||||||
4
src/external/bool/mod.rs
vendored
4
src/external/bool/mod.rs
vendored
@@ -9,6 +9,8 @@ use crate::{pipeline::ConstTree, interner::Interner};
|
|||||||
pub fn bool(i: &Interner) -> ConstTree {
|
pub fn bool(i: &Interner) -> ConstTree {
|
||||||
ConstTree::tree([
|
ConstTree::tree([
|
||||||
(i.i("ifthenelse"), ConstTree::xfn(ifthenelse::IfThenElse1)),
|
(i.i("ifthenelse"), ConstTree::xfn(ifthenelse::IfThenElse1)),
|
||||||
(i.i("equals"), ConstTree::xfn(equals::Equals2))
|
(i.i("equals"), ConstTree::xfn(equals::Equals2)),
|
||||||
|
(i.i("true"), ConstTree::atom(Boolean(true))),
|
||||||
|
(i.i("false"), ConstTree::atom(Boolean(false)))
|
||||||
])
|
])
|
||||||
}
|
}
|
||||||
2
src/external/conv/parse_float.rs
vendored
2
src/external/conv/parse_float.rs
vendored
@@ -24,7 +24,7 @@ externfn_impl!(ParseFloat1, |_: &Self, x: ExprInst| Ok(ParseFloat0{x}));
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct ParseFloat0{ x: ExprInst }
|
pub struct ParseFloat0{ x: ExprInst }
|
||||||
atomic_redirect!(ParseFloat0, x);
|
atomic_redirect!(ParseFloat0, x);
|
||||||
atomic_impl!(ParseFloat0, |Self{ x }: &Self| {
|
atomic_impl!(ParseFloat0, |Self{ x }: &Self, _| {
|
||||||
let number = with_lit(x, |l| Ok(match l {
|
let number = with_lit(x, |l| Ok(match l {
|
||||||
Literal::Str(s) => {
|
Literal::Str(s) => {
|
||||||
let parser = float_parser();
|
let parser = float_parser();
|
||||||
|
|||||||
2
src/external/conv/parse_uint.rs
vendored
2
src/external/conv/parse_uint.rs
vendored
@@ -23,7 +23,7 @@ externfn_impl!(ParseUint1, |_: &Self, x: ExprInst| Ok(ParseUint0{x}));
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct ParseUint0{ x: ExprInst }
|
pub struct ParseUint0{ x: ExprInst }
|
||||||
atomic_redirect!(ParseUint0, x);
|
atomic_redirect!(ParseUint0, x);
|
||||||
atomic_impl!(ParseUint0, |Self{ x }: &Self| {
|
atomic_impl!(ParseUint0, |Self{ x }: &Self, _| {
|
||||||
let uint = with_lit(x, |l| Ok(match l {
|
let uint = with_lit(x, |l| Ok(match l {
|
||||||
Literal::Str(s) => {
|
Literal::Str(s) => {
|
||||||
let parser = int_parser();
|
let parser = int_parser();
|
||||||
|
|||||||
2
src/external/conv/to_string.rs
vendored
2
src/external/conv/to_string.rs
vendored
@@ -20,7 +20,7 @@ externfn_impl!(ToString1, |_: &Self, x: ExprInst| Ok(ToString0{x}));
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct ToString0{ x: ExprInst }
|
pub struct ToString0{ x: ExprInst }
|
||||||
atomic_redirect!(ToString0, x);
|
atomic_redirect!(ToString0, x);
|
||||||
atomic_impl!(ToString0, |Self{ x }: &Self| {
|
atomic_impl!(ToString0, |Self{ x }: &Self, _| {
|
||||||
let string = with_lit(x, |l| Ok(match l {
|
let string = with_lit(x, |l| Ok(match l {
|
||||||
Literal::Char(c) => c.to_string(),
|
Literal::Char(c) => c.to_string(),
|
||||||
Literal::Uint(i) => i.to_string(),
|
Literal::Uint(i) => i.to_string(),
|
||||||
|
|||||||
33
src/external/cpsio/debug.rs
vendored
Normal file
33
src/external/cpsio/debug.rs
vendored
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
use std::fmt::Debug;
|
||||||
|
|
||||||
|
use crate::foreign::{Atomic, AtomicReturn};
|
||||||
|
use crate::interner::InternedDisplay;
|
||||||
|
use crate::interpreter::Context;
|
||||||
|
use crate::{externfn_impl, atomic_defaults};
|
||||||
|
use crate::representations::interpreted::ExprInst;
|
||||||
|
|
||||||
|
/// Debug function
|
||||||
|
///
|
||||||
|
/// Next state: [Debug0]
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct Debug2;
|
||||||
|
externfn_impl!(Debug2, |_: &Self, x: ExprInst| Ok(Debug1{x}));
|
||||||
|
|
||||||
|
/// Partially applied Print function
|
||||||
|
///
|
||||||
|
/// Prev state: [Debug1]
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct Debug1{ x: ExprInst }
|
||||||
|
impl Atomic for Debug1 {
|
||||||
|
atomic_defaults!();
|
||||||
|
fn run(&self, ctx: Context) -> crate::foreign::AtomicResult {
|
||||||
|
println!("{}", self.x.bundle(&ctx.interner));
|
||||||
|
Ok(AtomicReturn{
|
||||||
|
clause: self.x.expr().clause.clone(),
|
||||||
|
gas: ctx.gas.map(|g| g - 1),
|
||||||
|
inert: false
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
6
src/external/cpsio/mod.rs
vendored
6
src/external/cpsio/mod.rs
vendored
@@ -2,10 +2,14 @@ use crate::{interner::Interner, pipeline::ConstTree};
|
|||||||
|
|
||||||
mod print;
|
mod print;
|
||||||
mod readline;
|
mod readline;
|
||||||
|
mod debug;
|
||||||
|
mod panic;
|
||||||
|
|
||||||
pub fn cpsio(i: &Interner) -> ConstTree {
|
pub fn cpsio(i: &Interner) -> ConstTree {
|
||||||
ConstTree::tree([
|
ConstTree::tree([
|
||||||
(i.i("print"), ConstTree::xfn(print::Print2)),
|
(i.i("print"), ConstTree::xfn(print::Print2)),
|
||||||
(i.i("readline"), ConstTree::xfn(readline::Readln2))
|
(i.i("readline"), ConstTree::xfn(readline::Readln2)),
|
||||||
|
(i.i("debug"), ConstTree::xfn(debug::Debug2)),
|
||||||
|
(i.i("panic"), ConstTree::xfn(panic::Panic1))
|
||||||
])
|
])
|
||||||
}
|
}
|
||||||
29
src/external/cpsio/panic.rs
vendored
Normal file
29
src/external/cpsio/panic.rs
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
use std::fmt::Display;
|
||||||
|
|
||||||
|
use crate::{atomic_impl, atomic_redirect, externfn_impl};
|
||||||
|
use crate::external::litconv::with_str;
|
||||||
|
use crate::representations::interpreted::ExprInst;
|
||||||
|
use crate::foreign::ExternError;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct Panic1;
|
||||||
|
externfn_impl!(Panic1, |_: &Self, x: ExprInst| Ok(Panic0{ x }));
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct Panic0{ x: ExprInst }
|
||||||
|
atomic_redirect!(Panic0, x);
|
||||||
|
atomic_impl!(Panic0, |Self{ x }: &Self, _| {
|
||||||
|
with_str(x, |s| {
|
||||||
|
Err(OrchidPanic(s.clone()).into_extern())
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
pub struct OrchidPanic(String);
|
||||||
|
|
||||||
|
impl Display for OrchidPanic {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
write!(f, "Orchid code panicked: {}", self.0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ExternError for OrchidPanic {}
|
||||||
4
src/external/cpsio/print.rs
vendored
4
src/external/cpsio/print.rs
vendored
@@ -1,4 +1,5 @@
|
|||||||
use std::fmt::Debug;
|
use std::fmt::Debug;
|
||||||
|
use std::io::{self, Write};
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use crate::external::litconv::with_str;
|
use crate::external::litconv::with_str;
|
||||||
@@ -21,9 +22,10 @@ externfn_impl!(Print2, |_: &Self, x: ExprInst| Ok(Print1{x}));
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct Print1{ x: ExprInst }
|
pub struct Print1{ x: ExprInst }
|
||||||
atomic_redirect!(Print1, x);
|
atomic_redirect!(Print1, x);
|
||||||
atomic_impl!(Print1, |Self{ x }: &Self| {
|
atomic_impl!(Print1, |Self{ x }: &Self, _| {
|
||||||
with_str(x, |s| {
|
with_str(x, |s| {
|
||||||
print!("{}", s);
|
print!("{}", s);
|
||||||
|
io::stdout().flush().unwrap();
|
||||||
Ok(Clause::Lambda {
|
Ok(Clause::Lambda {
|
||||||
args: Some(PathSet{ steps: Rc::new(vec![]), next: None }),
|
args: Some(PathSet{ steps: Rc::new(vec![]), next: None }),
|
||||||
body: Clause::LambdaArg.wrap()
|
body: Clause::LambdaArg.wrap()
|
||||||
|
|||||||
2
src/external/cpsio/readline.rs
vendored
2
src/external/cpsio/readline.rs
vendored
@@ -21,7 +21,7 @@ externfn_impl!(Readln2, |_: &Self, x: ExprInst| Ok(Readln1{x}));
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct Readln1{ x: ExprInst }
|
pub struct Readln1{ x: ExprInst }
|
||||||
atomic_redirect!(Readln1, x);
|
atomic_redirect!(Readln1, x);
|
||||||
atomic_impl!(Readln1, |Self{ x }: &Self| {
|
atomic_impl!(Readln1, |Self{ x }: &Self, _| {
|
||||||
let mut buf = String::new();
|
let mut buf = String::new();
|
||||||
stdin().read_line(&mut buf)
|
stdin().read_line(&mut buf)
|
||||||
.map_err(|e| RuntimeError::ext(e.to_string(), "reading from stdin"))?;
|
.map_err(|e| RuntimeError::ext(e.to_string(), "reading from stdin"))?;
|
||||||
|
|||||||
2
src/external/num/numeric.rs
vendored
2
src/external/num/numeric.rs
vendored
@@ -48,7 +48,7 @@ impl Sub for Numeric {
|
|||||||
|
|
||||||
fn sub(self, rhs: Self) -> Self::Output {
|
fn sub(self, rhs: Self) -> Self::Output {
|
||||||
match (self, rhs) {
|
match (self, rhs) {
|
||||||
(Numeric::Uint(a), Numeric::Uint(b)) if b < a => Numeric::Uint(a - b),
|
(Numeric::Uint(a), Numeric::Uint(b)) if b <= a => Numeric::Uint(a - b),
|
||||||
(Numeric::Uint(a), Numeric::Uint(b))
|
(Numeric::Uint(a), Numeric::Uint(b))
|
||||||
=> Numeric::num(a as f64 - b as f64),
|
=> Numeric::num(a as f64 - b as f64),
|
||||||
(Numeric::Num(a), Numeric::Num(b)) => Numeric::num(a - b),
|
(Numeric::Num(a), Numeric::Num(b)) => Numeric::num(a - b),
|
||||||
|
|||||||
7
src/external/num/operators/add.rs
vendored
7
src/external/num/operators/add.rs
vendored
@@ -9,7 +9,6 @@ use crate::representations::interpreted::ExprInst;
|
|||||||
/// Add function
|
/// Add function
|
||||||
///
|
///
|
||||||
/// Next state: [Add1]
|
/// Next state: [Add1]
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct Add2;
|
pub struct Add2;
|
||||||
externfn_impl!(Add2, |_: &Self, x: ExprInst| Ok(Add1{x}));
|
externfn_impl!(Add2, |_: &Self, x: ExprInst| Ok(Add1{x}));
|
||||||
@@ -17,7 +16,6 @@ externfn_impl!(Add2, |_: &Self, x: ExprInst| Ok(Add1{x}));
|
|||||||
/// Partially applied Add function
|
/// Partially applied Add function
|
||||||
///
|
///
|
||||||
/// Prev state: [Add2]; Next state: [Add0]
|
/// Prev state: [Add2]; Next state: [Add0]
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct Add1{ x: ExprInst }
|
pub struct Add1{ x: ExprInst }
|
||||||
atomic_redirect!(Add1, x);
|
atomic_redirect!(Add1, x);
|
||||||
@@ -30,11 +28,12 @@ externfn_impl!(Add1, |this: &Self, x: ExprInst| {
|
|||||||
/// Fully applied Add function.
|
/// Fully applied Add function.
|
||||||
///
|
///
|
||||||
/// Prev state: [Add1]
|
/// Prev state: [Add1]
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct Add0 { a: Numeric, x: ExprInst }
|
pub struct Add0 { a: Numeric, x: ExprInst }
|
||||||
atomic_redirect!(Add0, x);
|
atomic_redirect!(Add0, x);
|
||||||
atomic_impl!(Add0, |Self{ a, x }: &Self| {
|
atomic_impl!(Add0, |Self{ a, x }: &Self, _| {
|
||||||
let b: Numeric = x.clone().try_into()?;
|
let b: Numeric = x.clone().try_into()?;
|
||||||
Ok((*a + b).into())
|
Ok((*a + b).into())
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
2
src/external/num/operators/divide.rs
vendored
2
src/external/num/operators/divide.rs
vendored
@@ -34,7 +34,7 @@ externfn_impl!(Divide1, |this: &Self, x: ExprInst| {
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct Divide0 { a: Numeric, x: ExprInst }
|
pub struct Divide0 { a: Numeric, x: ExprInst }
|
||||||
atomic_redirect!(Divide0, x);
|
atomic_redirect!(Divide0, x);
|
||||||
atomic_impl!(Divide0, |Self{ a, x }: &Self| {
|
atomic_impl!(Divide0, |Self{ a, x }: &Self, _| {
|
||||||
let b: Numeric = x.clone().try_into()?;
|
let b: Numeric = x.clone().try_into()?;
|
||||||
Ok((*a / b).into())
|
Ok((*a / b).into())
|
||||||
});
|
});
|
||||||
2
src/external/num/operators/multiply.rs
vendored
2
src/external/num/operators/multiply.rs
vendored
@@ -34,7 +34,7 @@ externfn_impl!(Multiply1, |this: &Self, x: ExprInst| {
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct Multiply0 { a: Numeric, x: ExprInst }
|
pub struct Multiply0 { a: Numeric, x: ExprInst }
|
||||||
atomic_redirect!(Multiply0, x);
|
atomic_redirect!(Multiply0, x);
|
||||||
atomic_impl!(Multiply0, |Self{ a, x }: &Self| {
|
atomic_impl!(Multiply0, |Self{ a, x }: &Self, _| {
|
||||||
let b: Numeric = x.clone().try_into()?;
|
let b: Numeric = x.clone().try_into()?;
|
||||||
Ok((*a * b).into())
|
Ok((*a * b).into())
|
||||||
});
|
});
|
||||||
2
src/external/num/operators/remainder.rs
vendored
2
src/external/num/operators/remainder.rs
vendored
@@ -34,7 +34,7 @@ externfn_impl!(Remainder1, |this: &Self, x: ExprInst| {
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct Remainder0 { a: Numeric, x: ExprInst }
|
pub struct Remainder0 { a: Numeric, x: ExprInst }
|
||||||
atomic_redirect!(Remainder0, x);
|
atomic_redirect!(Remainder0, x);
|
||||||
atomic_impl!(Remainder0, |Self{ a, x }: &Self| {
|
atomic_impl!(Remainder0, |Self{ a, x }: &Self, _| {
|
||||||
let b: Numeric = x.clone().try_into()?;
|
let b: Numeric = x.clone().try_into()?;
|
||||||
Ok((*a % b).into())
|
Ok((*a % b).into())
|
||||||
});
|
});
|
||||||
2
src/external/num/operators/subtract.rs
vendored
2
src/external/num/operators/subtract.rs
vendored
@@ -34,7 +34,7 @@ externfn_impl!(Subtract1, |this: &Self, x: ExprInst| {
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct Subtract0 { a: Numeric, x: ExprInst }
|
pub struct Subtract0 { a: Numeric, x: ExprInst }
|
||||||
atomic_redirect!(Subtract0, x);
|
atomic_redirect!(Subtract0, x);
|
||||||
atomic_impl!(Subtract0, |Self{ a, x }: &Self| {
|
atomic_impl!(Subtract0, |Self{ a, x }: &Self, _| {
|
||||||
let b: Numeric = x.clone().try_into()?;
|
let b: Numeric = x.clone().try_into()?;
|
||||||
Ok((*a - b).into())
|
Ok((*a - b).into())
|
||||||
});
|
});
|
||||||
2
src/external/str/char_at.rs
vendored
2
src/external/str/char_at.rs
vendored
@@ -33,7 +33,7 @@ externfn_impl!(CharAt1, |this: &Self, x: ExprInst| {
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct CharAt0 { s: String, x: ExprInst }
|
pub struct CharAt0 { s: String, x: ExprInst }
|
||||||
atomic_redirect!(CharAt0, x);
|
atomic_redirect!(CharAt0, x);
|
||||||
atomic_impl!(CharAt0, |Self{ s, x }: &Self| {
|
atomic_impl!(CharAt0, |Self{ s, x }: &Self, _| {
|
||||||
with_uint(x, |i| if let Some(c) = s.chars().nth(i as usize) {
|
with_uint(x, |i| if let Some(c) = s.chars().nth(i as usize) {
|
||||||
Ok(Clause::P(Primitive::Literal(Literal::Char(c))))
|
Ok(Clause::P(Primitive::Literal(Literal::Char(c))))
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
2
src/external/str/concatenate.rs
vendored
2
src/external/str/concatenate.rs
vendored
@@ -32,7 +32,7 @@ externfn_impl!(Concatenate1, |this: &Self, c: ExprInst| {
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct Concatenate0 { a: String, c: ExprInst }
|
pub struct Concatenate0 { a: String, c: ExprInst }
|
||||||
atomic_redirect!(Concatenate0, c);
|
atomic_redirect!(Concatenate0, c);
|
||||||
atomic_impl!(Concatenate0, |Self{ a, c }: &Self| {
|
atomic_impl!(Concatenate0, |Self{ a, c }: &Self, _| {
|
||||||
with_str(c, |b| Ok(Clause::P(Primitive::Literal(
|
with_str(c, |b| Ok(Clause::P(Primitive::Literal(
|
||||||
Literal::Str(a.to_owned() + b)
|
Literal::Str(a.to_owned() + b)
|
||||||
))))
|
))))
|
||||||
|
|||||||
@@ -39,7 +39,7 @@ use std::fmt::Debug;
|
|||||||
#[macro_export]
|
#[macro_export]
|
||||||
macro_rules! atomic_impl {
|
macro_rules! atomic_impl {
|
||||||
($typ:ident) => {
|
($typ:ident) => {
|
||||||
atomic_impl!{$typ, |this: &Self| {
|
atomic_impl!{$typ, |this: &Self, _: $crate::interpreter::Context| {
|
||||||
use $crate::foreign::ExternFn;
|
use $crate::foreign::ExternFn;
|
||||||
Ok(this.clone().to_xfn_cls())
|
Ok(this.clone().to_xfn_cls())
|
||||||
}}
|
}}
|
||||||
@@ -64,7 +64,7 @@ macro_rules! atomic_impl {
|
|||||||
>::from((self, state));
|
>::from((self, state));
|
||||||
// branch off or wrap up
|
// branch off or wrap up
|
||||||
let clause = if inert {
|
let clause = if inert {
|
||||||
match ($next_phase)(&next_self) {
|
match ($next_phase)(&next_self, ctx) {
|
||||||
Ok(r) => r,
|
Ok(r) => r,
|
||||||
Err(e) => return Err(
|
Err(e) => return Err(
|
||||||
$crate::interpreter::RuntimeError::Extern(e)
|
$crate::interpreter::RuntimeError::Extern(e)
|
||||||
|
|||||||
@@ -82,7 +82,10 @@ pub fn apply(
|
|||||||
(new_xpr.clause.clone(), (ctx.gas.map(|x| x - 1), false))
|
(new_xpr.clause.clone(), (ctx.gas.map(|x| x - 1), false))
|
||||||
} else {(body.expr().clause.clone(), (ctx.gas, false))}),
|
} else {(body.expr().clause.clone(), (ctx.gas, false))}),
|
||||||
Clause::Constant(name) => {
|
Clause::Constant(name) => {
|
||||||
let symval = ctx.symbols.get(name).expect("missing symbol for function").clone();
|
let symval = if let Some(sym) = ctx.symbols.get(name) {sym.clone()}
|
||||||
|
else { panic!("missing symbol for function {}",
|
||||||
|
ctx.interner.extern_vec(*name).join("::")
|
||||||
|
)};
|
||||||
Ok((Clause::Apply { f: symval, x, }, (ctx.gas, false)))
|
Ok((Clause::Apply { f: symval, x, }, (ctx.gas, false)))
|
||||||
}
|
}
|
||||||
Clause::P(Primitive::Atom(atom)) => { // take a step in expanding atom
|
Clause::P(Primitive::Atom(atom)) => { // take a step in expanding atom
|
||||||
|
|||||||
@@ -1,11 +1,12 @@
|
|||||||
use hashbrown::HashMap;
|
use hashbrown::HashMap;
|
||||||
|
|
||||||
use crate::representations::interpreted::ExprInst;
|
use crate::representations::interpreted::ExprInst;
|
||||||
use crate::interner::Token;
|
use crate::interner::{Token, Interner};
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct Context<'a> {
|
pub struct Context<'a> {
|
||||||
pub symbols: &'a HashMap<Token<Vec<Token<String>>>, ExprInst>,
|
pub symbols: &'a HashMap<Token<Vec<Token<String>>>, ExprInst>,
|
||||||
|
pub interner: &'a Interner,
|
||||||
pub gas: Option<usize>,
|
pub gas: Option<usize>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -66,8 +66,8 @@ fn namespace_parser<'a>(
|
|||||||
.ignore_then(filter_map_lex(enum_filter!(Lexeme::Name)))
|
.ignore_then(filter_map_lex(enum_filter!(Lexeme::Name)))
|
||||||
.then(
|
.then(
|
||||||
any().repeated().delimited_by(
|
any().repeated().delimited_by(
|
||||||
Lexeme::LP('{').parser(),
|
Lexeme::LP('(').parser(),
|
||||||
Lexeme::RP('{').parser()
|
Lexeme::RP('(').parser()
|
||||||
).try_map(move |body, _| {
|
).try_map(move |body, _| {
|
||||||
split_lines(&body)
|
split_lines(&body)
|
||||||
.map(|l| line.parse(l))
|
.map(|l| line.parse(l))
|
||||||
@@ -120,6 +120,7 @@ pub fn line_parser<'a>(ctx: impl Context + 'a)
|
|||||||
pub fn split_lines(data: &[Entry]) -> impl Iterator<Item = &[Entry]> {
|
pub fn split_lines(data: &[Entry]) -> impl Iterator<Item = &[Entry]> {
|
||||||
let mut source = data.iter().enumerate();
|
let mut source = data.iter().enumerate();
|
||||||
let mut last_slice = 0;
|
let mut last_slice = 0;
|
||||||
|
let mut finished = false;
|
||||||
iter::from_fn(move || {
|
iter::from_fn(move || {
|
||||||
let mut paren_count = 0;
|
let mut paren_count = 0;
|
||||||
while let Some((i, Entry{ lexeme, .. })) = source.next() {
|
while let Some((i, Entry{ lexeme, .. })) = source.next() {
|
||||||
@@ -134,6 +135,11 @@ pub fn split_lines(data: &[Entry]) -> impl Iterator<Item = &[Entry]> {
|
|||||||
_ => (),
|
_ => (),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
// Include last line even without trailing newline
|
||||||
|
if !finished {
|
||||||
|
finished = true;
|
||||||
|
return Some(&data[last_slice..])
|
||||||
|
}
|
||||||
None
|
None
|
||||||
}).filter(|s| s.len() > 0)
|
}).filter(|s| s.len() > 0)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,6 +6,20 @@ use crate::{utils::Substack, interner::{Token, Interner}, pipeline::{ProjectModu
|
|||||||
|
|
||||||
use super::{alias_map::AliasMap, decls::InjectedAsFn};
|
use super::{alias_map::AliasMap, decls::InjectedAsFn};
|
||||||
|
|
||||||
|
fn resolve(
|
||||||
|
token: Token<Vec<Token<String>>>,
|
||||||
|
alias_map: &AliasMap,
|
||||||
|
i: &Interner,
|
||||||
|
) -> Option<Vec<Token<String>>> {
|
||||||
|
if let Some(alias) = alias_map.resolve(token) {
|
||||||
|
Some(i.r(alias).clone())
|
||||||
|
} else if let Some((foot, body)) = i.r(token).split_last() {
|
||||||
|
let mut new_beginning = resolve(i.i(body), alias_map, i)?;
|
||||||
|
new_beginning.push(*foot);
|
||||||
|
Some(new_beginning)
|
||||||
|
} else {None}
|
||||||
|
}
|
||||||
|
|
||||||
fn process_expr(
|
fn process_expr(
|
||||||
expr: &Expr,
|
expr: &Expr,
|
||||||
alias_map: &AliasMap,
|
alias_map: &AliasMap,
|
||||||
@@ -14,9 +28,15 @@ fn process_expr(
|
|||||||
) -> Expr {
|
) -> Expr {
|
||||||
expr.map_names(&|n| {
|
expr.map_names(&|n| {
|
||||||
injected_as(&i.r(n)[..]).or_else(|| {
|
injected_as(&i.r(n)[..]).or_else(|| {
|
||||||
alias_map.resolve(n).map(|n| {
|
let next_v = resolve(n, alias_map, i)?;
|
||||||
injected_as(&i.r(n)[..]).unwrap_or(n)
|
// println!("Resolved alias {} to {}",
|
||||||
})
|
// i.extern_vec(n).join("::"),
|
||||||
|
// i.extern_all(&next_v).join("::")
|
||||||
|
// );
|
||||||
|
Some(
|
||||||
|
injected_as(&next_v)
|
||||||
|
.unwrap_or_else(|| i.i(&next_v))
|
||||||
|
)
|
||||||
})
|
})
|
||||||
}).unwrap_or_else(|| expr.clone())
|
}).unwrap_or_else(|| expr.clone())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
|
use itertools::Itertools;
|
||||||
|
|
||||||
use crate::interner::Interner;
|
use crate::interner::Interner;
|
||||||
use crate::pipeline::error::ProjectError;
|
use crate::pipeline::error::ProjectError;
|
||||||
use crate::pipeline::project_tree::ProjectTree;
|
use crate::pipeline::project_tree::ProjectTree;
|
||||||
@@ -23,6 +25,14 @@ pub fn resolve_imports(
|
|||||||
&project, &mut map,
|
&project, &mut map,
|
||||||
i, injected_as
|
i, injected_as
|
||||||
)?;
|
)?;
|
||||||
|
println!("Aliases: {{{:?}}}",
|
||||||
|
map.targets.iter()
|
||||||
|
.map(|(kt, vt)| format!("{} => {}",
|
||||||
|
i.extern_vec(*kt).join("::"),
|
||||||
|
i.extern_vec(*vt).join("::")
|
||||||
|
))
|
||||||
|
.join(", ")
|
||||||
|
);
|
||||||
let new_mod = apply_aliases(project.0.as_ref(), &map, i, injected_as);
|
let new_mod = apply_aliases(project.0.as_ref(), &map, i, injected_as);
|
||||||
Ok(ProjectTree(Rc::new(new_mod)))
|
Ok(ProjectTree(Rc::new(new_mod)))
|
||||||
}
|
}
|
||||||
@@ -39,7 +39,7 @@ pub fn parse_layer<'a>(
|
|||||||
))
|
))
|
||||||
};
|
};
|
||||||
let source = source_loader::load_source(
|
let source = source_loader::load_source(
|
||||||
targets, i, loader, &|path| injected_as(path).is_some()
|
targets, prelude, i, loader, &|path| injected_as(path).is_some()
|
||||||
)?;
|
)?;
|
||||||
let tree = project_tree::build_tree(source, i, prelude, &injected_names)?;
|
let tree = project_tree::build_tree(source, i, prelude, &injected_names)?;
|
||||||
let sum = ProjectTree(Rc::new(
|
let sum = ProjectTree(Rc::new(
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use hashbrown::HashMap;
|
use hashbrown::HashMap;
|
||||||
|
use itertools::Itertools;
|
||||||
|
|
||||||
use crate::pipeline::error::ProjectError;
|
use crate::pipeline::error::ProjectError;
|
||||||
use crate::interner::{Token, Interner};
|
use crate::interner::{Token, Interner};
|
||||||
@@ -138,6 +139,11 @@ fn source_to_module(
|
|||||||
_ => None,
|
_ => None,
|
||||||
})
|
})
|
||||||
.collect::<HashMap<_, _>>();
|
.collect::<HashMap<_, _>>();
|
||||||
|
// println!(
|
||||||
|
// "Constructing file-module {} with members ({})",
|
||||||
|
// i.extern_all(&path_v[..]).join("::"),
|
||||||
|
// exports.keys().map(|t| i.r(*t)).join(", ")
|
||||||
|
// );
|
||||||
Rc::new(Module {
|
Rc::new(Module {
|
||||||
imports,
|
imports,
|
||||||
items,
|
items,
|
||||||
@@ -174,10 +180,15 @@ fn files_to_module(
|
|||||||
(namespace, ModEntry{ exported: true, member })
|
(namespace, ModEntry{ exported: true, member })
|
||||||
})
|
})
|
||||||
.collect::<HashMap<_, _>>();
|
.collect::<HashMap<_, _>>();
|
||||||
let exports = items.keys()
|
let exports: HashMap<_, _> = items.keys()
|
||||||
.copied()
|
.copied()
|
||||||
.map(|name| (name, i.i(&pushed(&path_v, name))))
|
.map(|name| (name, i.i(&pushed(&path_v, name))))
|
||||||
.collect();
|
.collect();
|
||||||
|
// println!(
|
||||||
|
// "Constructing module {} with items ({})",
|
||||||
|
// i.extern_all(&path_v[..]).join("::"),
|
||||||
|
// exports.keys().map(|t| i.r(*t)).join(", ")
|
||||||
|
// );
|
||||||
Rc::new(Module{
|
Rc::new(Module{
|
||||||
items,
|
items,
|
||||||
imports: vec![],
|
imports: vec![],
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
|
use std::println;
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use hashbrown::HashSet;
|
use hashbrown::HashSet;
|
||||||
|
use itertools::Itertools;
|
||||||
|
|
||||||
use crate::representations::tree::WalkErrorKind;
|
use crate::representations::tree::WalkErrorKind;
|
||||||
use crate::pipeline::source_loader::LoadedSourceTable;
|
use crate::pipeline::source_loader::LoadedSourceTable;
|
||||||
@@ -30,7 +32,13 @@ pub fn collect_exported_ops(
|
|||||||
i: &Interner,
|
i: &Interner,
|
||||||
injected: &impl InjectedOperatorsFn
|
injected: &impl InjectedOperatorsFn
|
||||||
) -> OpsResult {
|
) -> OpsResult {
|
||||||
if let Some(i) = injected(path) {return Ok(i)}
|
if let Some(ops) = injected(path) {
|
||||||
|
if path == i.i(&[i.i("prelude")][..]) {
|
||||||
|
println!("%%% Prelude exported ops %%%");
|
||||||
|
println!("{}", ops.iter().map(|t| i.r(*t)).join(", "));
|
||||||
|
}
|
||||||
|
return Ok(ops)
|
||||||
|
}
|
||||||
let is_file = |n: &[Token<String>]| loaded.contains_key(&i.i(n));
|
let is_file = |n: &[Token<String>]| loaded.contains_key(&i.i(n));
|
||||||
let path_s = &i.r(path)[..];
|
let path_s = &i.r(path)[..];
|
||||||
let name_split = split_name(path_s, &is_file);
|
let name_split = split_name(path_s, &is_file);
|
||||||
@@ -59,11 +67,15 @@ pub fn collect_exported_ops(
|
|||||||
.collect()
|
.collect()
|
||||||
}.rc(),
|
}.rc(),
|
||||||
})?;
|
})?;
|
||||||
Ok(Rc::new(module.items.iter()
|
let out: HashSet<_> = module.items.iter()
|
||||||
.filter(|(_, v)| v.exported)
|
.filter(|(_, v)| v.exported)
|
||||||
.map(|(k, _)| *k)
|
.map(|(k, _)| *k)
|
||||||
.collect()
|
.collect();
|
||||||
))
|
if path == i.i(&[i.i("prelude")][..]) {
|
||||||
|
println!("%%% Prelude exported ops %%%");
|
||||||
|
println!("{}", out.iter().map(|t| i.r(*t)).join(", "));
|
||||||
|
}
|
||||||
|
Ok(Rc::new(out))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn mk_cache<'a>(
|
pub fn mk_cache<'a>(
|
||||||
@@ -71,5 +83,7 @@ pub fn mk_cache<'a>(
|
|||||||
i: &'a Interner,
|
i: &'a Interner,
|
||||||
injected: &'a impl InjectedOperatorsFn,
|
injected: &'a impl InjectedOperatorsFn,
|
||||||
) -> ExportedOpsCache<'a> {
|
) -> ExportedOpsCache<'a> {
|
||||||
Cache::new(|path, _this| collect_exported_ops(path, loaded, i, injected))
|
Cache::new(|path, _this| {
|
||||||
|
collect_exported_ops(path, loaded, i, injected)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use hashbrown::HashSet;
|
use hashbrown::HashSet;
|
||||||
|
use itertools::Itertools;
|
||||||
|
|
||||||
use crate::parse::is_op;
|
use crate::parse::is_op;
|
||||||
use crate::pipeline::error::ProjectError;
|
use crate::pipeline::error::ProjectError;
|
||||||
@@ -34,9 +35,11 @@ pub fn collect_ops_for(
|
|||||||
) -> OpsResult {
|
) -> OpsResult {
|
||||||
let tree = &loaded[&i.i(file)].preparsed.0;
|
let tree = &loaded[&i.i(file)].preparsed.0;
|
||||||
let mut ret = HashSet::new();
|
let mut ret = HashSet::new();
|
||||||
|
println!("collecting ops for {}", i.extern_all(file).join("::"));
|
||||||
tree_all_ops(tree.as_ref(), &mut ret);
|
tree_all_ops(tree.as_ref(), &mut ret);
|
||||||
tree.visit_all_imports(&mut |modpath, module, import| {
|
tree.visit_all_imports(&mut |modpath, module, import| {
|
||||||
if let Some(n) = import.name { ret.insert(n); } else {
|
if let Some(n) = import.name { ret.insert(n); } else {
|
||||||
|
println!("\tglob import from {}", i.extern_vec(import.path).join("::"));
|
||||||
let path = import_abs_path(
|
let path = import_abs_path(
|
||||||
&file, modpath, module, &i.r(import.path)[..], i
|
&file, modpath, module, &i.r(import.path)[..], i
|
||||||
).expect("This error should have been caught during loading");
|
).expect("This error should have been caught during loading");
|
||||||
@@ -45,5 +48,9 @@ pub fn collect_ops_for(
|
|||||||
Ok::<_, Rc<dyn ProjectError>>(())
|
Ok::<_, Rc<dyn ProjectError>>(())
|
||||||
})?;
|
})?;
|
||||||
ret.drain_filter(|t| !is_op(i.r(*t)));
|
ret.drain_filter(|t| !is_op(i.r(*t)));
|
||||||
|
if file == &[i.i("map")][..] {
|
||||||
|
println!(" %%% ops in map %%% ");
|
||||||
|
println!("{}", ret.iter().map(|t| i.r(*t)).join(", "))
|
||||||
|
}
|
||||||
Ok(Rc::new(ret))
|
Ok(Rc::new(ret))
|
||||||
}
|
}
|
||||||
@@ -5,7 +5,7 @@ use hashbrown::HashMap;
|
|||||||
use crate::representations::tree::{ModEntry, ModMember, Module};
|
use crate::representations::tree::{ModEntry, ModMember, Module};
|
||||||
use crate::representations::Primitive;
|
use crate::representations::Primitive;
|
||||||
use crate::representations::location::Location;
|
use crate::representations::location::Location;
|
||||||
use crate::foreign::ExternFn;
|
use crate::foreign::{ExternFn, Atomic, Atom};
|
||||||
use crate::interner::{Token, Interner};
|
use crate::interner::{Token, Interner};
|
||||||
use crate::ast::{Expr, Clause};
|
use crate::ast::{Expr, Clause};
|
||||||
use crate::utils::{Substack, pushed};
|
use crate::utils::{Substack, pushed};
|
||||||
@@ -17,12 +17,18 @@ pub enum ConstTree {
|
|||||||
Tree(HashMap<Token<String>, ConstTree>)
|
Tree(HashMap<Token<String>, ConstTree>)
|
||||||
}
|
}
|
||||||
impl ConstTree {
|
impl ConstTree {
|
||||||
pub fn xfn(xfn: impl ExternFn + 'static) -> Self {
|
pub fn primitive(primitive: Primitive) -> Self {
|
||||||
Self::Const(Expr{
|
Self::Const(Expr{
|
||||||
location: Location::Unknown,
|
location: Location::Unknown,
|
||||||
value: Clause::P(Primitive::ExternFn(Box::new(xfn)))
|
value: Clause::P(primitive)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
pub fn xfn(xfn: impl ExternFn + 'static) -> Self {
|
||||||
|
Self::primitive(Primitive::ExternFn(Box::new(xfn)))
|
||||||
|
}
|
||||||
|
pub fn atom(atom: impl Atomic + 'static) -> Self {
|
||||||
|
Self::primitive(Primitive::Atom(Atom(Box::new(atom))))
|
||||||
|
}
|
||||||
pub fn tree(
|
pub fn tree(
|
||||||
arr: impl IntoIterator<Item = (Token<String>, Self)>
|
arr: impl IntoIterator<Item = (Token<String>, Self)>
|
||||||
) -> Self {
|
) -> Self {
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ use crate::pipeline::split_name::split_name;
|
|||||||
use crate::interner::{Token, Interner};
|
use crate::interner::{Token, Interner};
|
||||||
|
|
||||||
use crate::pipeline::file_loader::{Loaded, load_text, IOResult};
|
use crate::pipeline::file_loader::{Loaded, load_text, IOResult};
|
||||||
|
use crate::representations::sourcefile::FileEntry;
|
||||||
use super::loaded_source::{LoadedSourceTable, LoadedSource};
|
use super::loaded_source::{LoadedSourceTable, LoadedSource};
|
||||||
use super::preparse::preparse;
|
use super::preparse::preparse;
|
||||||
|
|
||||||
@@ -15,6 +16,7 @@ use super::preparse::preparse;
|
|||||||
fn load_abs_path_rec(
|
fn load_abs_path_rec(
|
||||||
abs_path: Token<Vec<Token<String>>>,
|
abs_path: Token<Vec<Token<String>>>,
|
||||||
table: &mut LoadedSourceTable,
|
table: &mut LoadedSourceTable,
|
||||||
|
prelude: &[FileEntry],
|
||||||
i: &Interner,
|
i: &Interner,
|
||||||
get_source: &impl Fn(Token<Vec<Token<String>>>) -> IOResult,
|
get_source: &impl Fn(Token<Vec<Token<String>>>) -> IOResult,
|
||||||
is_injected: &impl Fn(&[Token<String>]) -> bool
|
is_injected: &impl Fn(&[Token<String>]) -> bool
|
||||||
@@ -39,7 +41,7 @@ fn load_abs_path_rec(
|
|||||||
.chain(iter::once(i.i(item)))
|
.chain(iter::once(i.i(item)))
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
load_abs_path_rec(
|
load_abs_path_rec(
|
||||||
i.i(&abs_subpath), table, i, get_source, is_injected
|
i.i(&abs_subpath), table, prelude, i, get_source, is_injected
|
||||||
)?
|
)?
|
||||||
}
|
}
|
||||||
return Ok(());
|
return Ok(());
|
||||||
@@ -48,7 +50,7 @@ fn load_abs_path_rec(
|
|||||||
let text = load_text(i.i(filename), &get_source, i)?;
|
let text = load_text(i.i(filename), &get_source, i)?;
|
||||||
let preparsed = preparse(
|
let preparsed = preparse(
|
||||||
filename.iter().map(|t| i.r(*t)).cloned().collect(),
|
filename.iter().map(|t| i.r(*t)).cloned().collect(),
|
||||||
text.as_str(), i
|
text.as_str(), prelude, i
|
||||||
)?;
|
)?;
|
||||||
table.insert(abs_path, LoadedSource{ text, preparsed: preparsed.clone() });
|
table.insert(abs_path, LoadedSource{ text, preparsed: preparsed.clone() });
|
||||||
// recurse on all imported modules
|
// recurse on all imported modules
|
||||||
@@ -58,7 +60,9 @@ fn load_abs_path_rec(
|
|||||||
module, &import.nonglob_path(i), i
|
module, &import.nonglob_path(i), i
|
||||||
)?;
|
)?;
|
||||||
// recurse on imported module
|
// recurse on imported module
|
||||||
load_abs_path_rec(i.i(&abs_pathv), table, i, get_source, is_injected)
|
load_abs_path_rec(
|
||||||
|
i.i(&abs_pathv), table, prelude, i, get_source, is_injected
|
||||||
|
)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -66,6 +70,7 @@ fn load_abs_path_rec(
|
|||||||
/// imports that aren't injected.
|
/// imports that aren't injected.
|
||||||
pub fn load_source(
|
pub fn load_source(
|
||||||
targets: &[Token<Vec<Token<String>>>],
|
targets: &[Token<Vec<Token<String>>>],
|
||||||
|
prelude: &[FileEntry],
|
||||||
i: &Interner,
|
i: &Interner,
|
||||||
get_source: &impl Fn(Token<Vec<Token<String>>>) -> IOResult,
|
get_source: &impl Fn(Token<Vec<Token<String>>>) -> IOResult,
|
||||||
is_injected: &impl Fn(&[Token<String>]) -> bool,
|
is_injected: &impl Fn(&[Token<String>]) -> bool,
|
||||||
@@ -75,6 +80,7 @@ pub fn load_source(
|
|||||||
load_abs_path_rec(
|
load_abs_path_rec(
|
||||||
*target,
|
*target,
|
||||||
&mut table,
|
&mut table,
|
||||||
|
prelude,
|
||||||
i, get_source, is_injected
|
i, get_source, is_injected
|
||||||
)?
|
)?
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -38,23 +38,27 @@ fn add_export<K: Eq + Hash>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Convert source lines into a module
|
/// Convert source lines into a module
|
||||||
fn to_module(src: &[FileEntry], i: &Interner) -> Rc<Module<(), ()>>
|
fn to_module(
|
||||||
{
|
src: &[FileEntry],
|
||||||
let imports = imports(src.iter()).cloned().collect::<Vec<_>>();
|
prelude: &[FileEntry],
|
||||||
let mut items = src.iter().filter_map(|ent| match ent {
|
i: &Interner
|
||||||
|
) -> Rc<Module<(), ()>> {
|
||||||
|
let all_src = || src.iter().chain(prelude.iter());
|
||||||
|
let imports = imports(all_src()).cloned().collect::<Vec<_>>();
|
||||||
|
let mut items = all_src().filter_map(|ent| match ent {
|
||||||
FileEntry::Internal(Member::Namespace(name, data)) => {
|
FileEntry::Internal(Member::Namespace(name, data)) => {
|
||||||
let member = ModMember::Sub(to_module(data, i));
|
let member = ModMember::Sub(to_module(data, prelude, i));
|
||||||
let entry = ModEntry{ exported: false, member };
|
let entry = ModEntry{ exported: false, member };
|
||||||
Some((*name, entry))
|
Some((*name, entry))
|
||||||
}
|
}
|
||||||
FileEntry::Exported(Member::Namespace(name, data)) => {
|
FileEntry::Exported(Member::Namespace(name, data)) => {
|
||||||
let member = ModMember::Sub(to_module(data, i));
|
let member = ModMember::Sub(to_module(data, prelude, i));
|
||||||
let entry = ModEntry{ exported: true, member };
|
let entry = ModEntry{ exported: true, member };
|
||||||
Some((*name, entry))
|
Some((*name, entry))
|
||||||
}
|
}
|
||||||
_ => None
|
_ => None
|
||||||
}).collect::<HashMap<_, _>>();
|
}).collect::<HashMap<_, _>>();
|
||||||
for file_entry in src { match file_entry {
|
for file_entry in all_src() { match file_entry {
|
||||||
FileEntry::Comment(_) | FileEntry::Import(_)
|
FileEntry::Comment(_) | FileEntry::Import(_)
|
||||||
| FileEntry::Internal(Member::Namespace(..))
|
| FileEntry::Internal(Member::Namespace(..))
|
||||||
| FileEntry::Exported(Member::Namespace(..)) => (),
|
| FileEntry::Exported(Member::Namespace(..)) => (),
|
||||||
@@ -83,8 +87,12 @@ fn to_module(src: &[FileEntry], i: &Interner) -> Rc<Module<(), ()>>
|
|||||||
|
|
||||||
/// Preparse the module. At this stage, only the imports and
|
/// Preparse the module. At this stage, only the imports and
|
||||||
/// names defined by the module can be parsed
|
/// names defined by the module can be parsed
|
||||||
pub fn preparse(file: Vec<String>, source: &str, i: &Interner)
|
pub fn preparse(
|
||||||
-> Result<Preparsed, Rc<dyn ProjectError>> {
|
file: Vec<String>,
|
||||||
|
source: &str,
|
||||||
|
prelude: &[FileEntry],
|
||||||
|
i: &Interner,
|
||||||
|
) -> Result<Preparsed, Rc<dyn ProjectError>> {
|
||||||
// Parse with no operators
|
// Parse with no operators
|
||||||
let ctx = ParsingContext::<&str>::new(&[], i, Rc::new(file.clone()));
|
let ctx = ParsingContext::<&str>::new(&[], i, Rc::new(file.clone()));
|
||||||
let entries = parse::parse(source, ctx)
|
let entries = parse::parse(source, ctx)
|
||||||
@@ -98,5 +106,5 @@ pub fn preparse(file: Vec<String>, source: &str, i: &Interner)
|
|||||||
namespace: ns.into_iter().map(|t| i.r(t)).cloned().collect(),
|
namespace: ns.into_iter().map(|t| i.r(t)).cloned().collect(),
|
||||||
file: Rc::new(file.clone())
|
file: Rc::new(file.clone())
|
||||||
}.rc())?;
|
}.rc())?;
|
||||||
Ok(Preparsed(to_module(&normalized, i)))
|
Ok(Preparsed(to_module(&normalized, prelude, i)))
|
||||||
}
|
}
|
||||||
@@ -8,6 +8,7 @@ pub fn any_match<'a>(matcher: &AnyMatcher, seq: &'a [Expr])
|
|||||||
match matcher {
|
match matcher {
|
||||||
AnyMatcher::Scalar(scalv) => scalv_match(scalv, seq),
|
AnyMatcher::Scalar(scalv) => scalv_match(scalv, seq),
|
||||||
AnyMatcher::Vec{ left, mid, right } => {
|
AnyMatcher::Vec{ left, mid, right } => {
|
||||||
|
if seq.len() < left.len() + right.len() {return None};
|
||||||
let left_split = left.len();
|
let left_split = left.len();
|
||||||
let right_split = seq.len() - right.len();
|
let right_split = seq.len() - right.len();
|
||||||
let mut state = scalv_match(left, &seq[..left_split])?;
|
let mut state = scalv_match(left, &seq[..left_split])?;
|
||||||
|
|||||||
@@ -28,24 +28,26 @@ export ...$a / ...$b:1 =1000=> (divide (...$a) (...$b))
|
|||||||
export ...$a == ...$b =1002=> (equals (...$a) (...$b))
|
export ...$a == ...$b =1002=> (equals (...$a) (...$b))
|
||||||
export ...$a ++ ...$b =1003=> (concatenate (...$a) (...$b))
|
export ...$a ++ ...$b =1003=> (concatenate (...$a) (...$b))
|
||||||
|
|
||||||
export do { ...$statement ; ...$rest:1 } =10_001=> (
|
export do { ...$statement ; ...$rest:1 } =0x2p543=> (
|
||||||
statement (...$statement) do { ...$rest }
|
statement (...$statement) do { ...$rest }
|
||||||
)
|
)
|
||||||
export do { ...$return } =10_000=> (...$return)
|
export do { ...$return } =0x1p543=> (...$return)
|
||||||
|
|
||||||
export statement (let $name = ...$value) ...$next =10_000=> (
|
export statement (let $name = ...$value) ...$next =0x1p1000=> (
|
||||||
(\$name. ...$next) (...$value)
|
(\$name. ...$next) (...$value)
|
||||||
)
|
)
|
||||||
export statement (cps $name = ...$operation) ...$next =10_001=> (
|
export statement (cps $name = ...$operation) ...$next =0x2p1000=> (
|
||||||
(...$operation) \$name. ...$next
|
(...$operation) \$name. ...$next
|
||||||
)
|
)
|
||||||
export statement (cps ...$operation) ...$next =10_000=> (
|
export statement (cps ...$operation) ...$next =0x1p1000=> (
|
||||||
(...$operation) (...$next)
|
(...$operation) (...$next)
|
||||||
)
|
)
|
||||||
|
|
||||||
export if ...$cond then ...$true else ...$false:1 =5_000=> (
|
export if ...$cond then ...$true else ...$false:1 =0x1p320=> (
|
||||||
ifthenelse (...$cond) (...$true) (...$false)
|
ifthenelse (...$cond) (...$true) (...$false)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
export ::(,)
|
||||||
"#;
|
"#;
|
||||||
|
|
||||||
fn prelude_path(i: &Interner) -> Token<Vec<Token<String>>>
|
fn prelude_path(i: &Interner) -> Token<Vec<Token<String>>>
|
||||||
@@ -100,7 +102,7 @@ pub fn run_dir(dir: &Path) {
|
|||||||
rule.bundle(&i)
|
rule.bundle(&i)
|
||||||
)
|
)
|
||||||
});
|
});
|
||||||
println!("Repo dump: {}", repo.bundle(&i));
|
// println!("Repo dump: {}", repo.bundle(&i));
|
||||||
let mut exec_table = HashMap::new();
|
let mut exec_table = HashMap::new();
|
||||||
for (name, source) in consts.iter() {
|
for (name, source) in consts.iter() {
|
||||||
// let nval = entrypoint(&i); let name = &nval; let source = &consts[name];
|
// let nval = entrypoint(&i); let name = &nval; let source = &consts[name];
|
||||||
@@ -125,6 +127,7 @@ pub fn run_dir(dir: &Path) {
|
|||||||
println!("macro execution complete");
|
println!("macro execution complete");
|
||||||
let ctx = interpreter::Context {
|
let ctx = interpreter::Context {
|
||||||
symbols: &exec_table,
|
symbols: &exec_table,
|
||||||
|
interner: &i,
|
||||||
gas: None
|
gas: None
|
||||||
};
|
};
|
||||||
let entrypoint = exec_table.get(&entrypoint(&i))
|
let entrypoint = exec_table.get(&entrypoint(&i))
|
||||||
|
|||||||
Reference in New Issue
Block a user