diff --git a/examples/calculator/main.orc b/examples/calculator/main.orc new file mode 100644 index 0000000..dcbee35 --- /dev/null +++ b/examples/calculator/main.orc @@ -0,0 +1,20 @@ +import std::(parse_float, to_string) +import std::(readline, print) + +export main := do{ + cps data = readline; + let a = parse_float data; + cps op = readline; + cps print ("\"" ++ op ++ "\"\n"); + cps data = readline; + let b = parse_float data; + let result = ( + if op == "+" then a + b + else if op == "-" then a - b + else if op == "*" then a * b + else if op == "/" then a / b + else "Unsupported operation" -- dynamically typed shenanigans + ); + cps print (to_string result ++ "\n"); + 0 +} \ No newline at end of file diff --git a/examples/lite/fn.orc b/examples/list-processing/fn.orc similarity index 100% rename from examples/lite/fn.orc rename to examples/list-processing/fn.orc diff --git a/examples/lite/list.orc b/examples/list-processing/list.orc similarity index 100% rename from examples/lite/list.orc rename to examples/list-processing/list.orc diff --git a/examples/list-processing/main.orc b/examples/list-processing/main.orc new file mode 100644 index 0000000..6dce4fd --- /dev/null +++ b/examples/list-processing/main.orc @@ -0,0 +1,26 @@ +import std::(to_string, print) +import super::list +import fn::* + +export main := do{ + let foo = list::new[1, 2, 3, 4, 5, 6]; + let bar = list::map foo n => n * 2; + let sum = bar + |> list::skip 2 + |> list::take 3 + |> list::reduce 0 (a b) => a + b; + cps print $ to_string sum ++ "\n"; + 0 +} + +--[ +export main := do{ + let n = 1; + let acc = 1; + loop r on (n acc) with ( + if n == 5 + then print acc + else r (n + 1) (acc * 2) + ) +} +]-- \ No newline at end of file diff --git a/examples/lite/option.orc b/examples/list-processing/option.orc similarity index 100% rename from examples/lite/option.orc rename to examples/list-processing/option.orc diff --git a/examples/lite/main.orc b/examples/lite/main.orc deleted file mode 100644 index 
c51da12..0000000 --- a/examples/lite/main.orc +++ /dev/null @@ -1,54 +0,0 @@ -import std::(parse_float, to_string) -import std::(readline, print, debug) -import std::(concatenate) -import super::list -import fn::* - ---[ export main := do{ - cps data = readline; - let a = parse_float data; - cps op = readline; - cps print ("\"" ++ op ++ "\"\n"); - cps data = readline; - let b = parse_float data; - let result = ( - if op == "+" then a + b - else if op == "-" then a - b - else if op == "*" then a * b - else if op == "/" then a / b - else "Unsupported operation" -- dynamically typed shenanigans - ); - cps print (to_string result ++ "\n"); - 0 -} ]-- - -export main := do{ - let foo = list::new[1, 2, 3, 4, 5, 6]; - let bar = list::map foo n => n * 2; - let sum = bar - |> list::skip 2 - |> list::take 3 - |> list::reduce 0 (a b) => a + b; - cps print $ to_string sum ++ "\n"; - 0 -} - ---[ -export main := do{ - let n = 1; - let acc = 1; - loop r on (n acc) with ( - if n == 5 - then print acc - else r (n + 1) (acc * 2) - ) -} -]-- ---[ -export main := do{ - let n = 1; - loop r on (n) with ( - debug r - ) -} -]-- \ No newline at end of file diff --git a/examples/maps/fn.orc b/examples/maps/fn.orc new file mode 100644 index 0000000..da42d77 --- /dev/null +++ b/examples/maps/fn.orc @@ -0,0 +1,15 @@ +export Y := \f.(\x.f (x x))(\x.f (x x)) + +export loop $r on (...$parameters) with ...$tail =0x5p512=> Y (\$r. + bind_names (...$parameters) (...$tail) +) ...$parameters + +-- bind each of the names in the first argument as a parameter for the second argument +bind_names ($name ..$rest) $payload =0x2p1000=> \$name. bind_names (..$rest) $payload +bind_names () (...$payload) =0x1p1000=> ...$payload + +export ...$prefix $ ...$suffix:1 =0x1p130=> ...$prefix (...$suffix) +export ...$prefix |> $fn ..$suffix:1 =0x2p130=> $fn (...$prefix) ..$suffix + +export (...$argv) => ...$body =0x2p512=> (bind_names (...$argv) (...$body)) +$name => ...$body =0x1p512=> (\$name. 
...$body) \ No newline at end of file diff --git a/examples/maps/list.orc b/examples/maps/list.orc new file mode 100644 index 0000000..bb15ca3 --- /dev/null +++ b/examples/maps/list.orc @@ -0,0 +1,48 @@ +import option +import super::fn::* + +pair := \a.\b. \f. f a b + +-- Constructors + +export cons := \hd.\tl. option::some (pair hd tl) +export end := option::none + +export pop := \list.\default.\f.list default \cons.cons f + +-- Operators + +export reduce := \list.\acc.\f. ( + loop r on (list acc) with + pop list acc \head.\tail. r tail (f acc head) +) + +export map := \list.\f. ( + loop r on (list) with + pop list end \head.\tail. cons (f head) (r tail) +) + +export skip := \list.\n. ( + loop r on (list n) with + if n == 0 then list + else pop list end \head.\tail. r tail (n - 1) +) + +export take := \list.\n. ( + loop r on (list n) with + if n == 0 then end + else pop list end \head.\tail. cons head $ r tail $ n - 1 +) + +export get := \list.\n. ( + loop r on (list n) with + pop list option::none \head.\tail. 
+ if n == 0 then option::some head + else r tail (n - 1) +) + +new[...$item, ...$rest:1] =0x2p333=> (cons (...$item) new[...$rest]) +new[...$end] =0x1p333=> (cons (...$end) end) +new[] =0x1p333=> end + +export ::(new) diff --git a/examples/maps/main.orc b/examples/maps/main.orc new file mode 100644 index 0000000..8517505 --- /dev/null +++ b/examples/maps/main.orc @@ -0,0 +1,22 @@ +import list +import map +import option +import fn::* + +export main := do{ + let foo = map::new[ + "foo" = 1, + "bar" = 2, + "baz" = 3, + "bar" = 4 + ]; + map::get foo "bar" + |> option::unwrap +} + +--[ +export main := do{ + let foo = list::new[1, 2, 3]; + map::fst foo +} +]-- \ No newline at end of file diff --git a/examples/maps/map.orc b/examples/maps/map.orc new file mode 100644 index 0000000..eadc39c --- /dev/null +++ b/examples/maps/map.orc @@ -0,0 +1,74 @@ +import list +import option +import fn::* +import std::to_string +import std::debug + +-- utilities for using lists as pairs + +export fst := \l. ( + list::get l 0 + (panic "nonempty expected") + \x.x +) +export snd := \l. ( + list::get l 1 + (panic "2 elements expected") + \x.x +) +export print_pair := \l. ( + to_string (fst l) ++ " = " ++ to_string (snd l) +) + +-- constructors + +export empty := list::end +export add := \m.\k.\v. ( + list::cons + list::new[k, v] + m +) + +-- queries + +-- return the last occurrence of a key if exists +export get := \m.\k. ( + loop r on (m) with + list::pop m option::none \head.\tail. + if fst head == k + then option::some $ snd head + else r tail +) + +-- commands + +-- remove one occurrence of a key +export del := \m.\k. ( + loop r on (m) with + list::pop m list::end \head.\tail. + if fst head == k then tail + else list::cons head $ r tail +) + +-- remove all occurrences of a key +export clear := \m.\k. ( + loop r on (m) with + list::pop m list::end \head.\tail. 
+ if (fst head) == k then r tail + else list::cons head $ r tail +) + +-- replace at most one occurrence of a key +export set := \m.\k.\v. ( + m + |> del k + |> add k v +) + +new[...$tail:2, ...$key = ...$value:1] =0x2p333=> ( + set new[...$tail] (...$key) (...$value) +) +new[...$key = ...$value:1] =0x1p333=> (add empty (...$key) (...$value)) +new[] =0x1p333=> empty + +export ::(new) \ No newline at end of file diff --git a/examples/maps/option.orc b/examples/maps/option.orc new file mode 100644 index 0000000..6cd8719 --- /dev/null +++ b/examples/maps/option.orc @@ -0,0 +1,9 @@ +import std::panic + +export some := \v. \d.\f. f v +export none := \d.\f. d + +export map := \option.\f. option none f +export flatten := \option. option none \opt. opt +export flatmap := \option.\f. option none \opt. map opt f +export unwrap := \option. option (panic "value expected") \x.x \ No newline at end of file diff --git a/notes/papers/demo/notes.md b/notes/papers/demo/notes.md new file mode 100644 index 0000000..103ebfb --- /dev/null +++ b/notes/papers/demo/notes.md @@ -0,0 +1,19 @@ +Orchid is a lazy, pure functional language with an execution model inspired by Haskell. It has a simple, principled syntax resembling mathematical notation commonly used to describe the lambda calculus. State is held in closures and multi-parameter functions are represented using currying. + +This minimalism is in an effort to make parsing and code generation easier, as complex structures are defined using syntax-level macros. The macro system is inspired by generalized kerning which is a well-known Turing-complete system. + +Macros consist of substitution rules applied to the tokenized, namespaced source. These rules can make use of placeholders to transform the expression tree. Placeholders can match exactly one, at least one, or any number of tokens. Macros are used to define infix operators, name bindings, friendly loop syntax over the Y-combinator and more. 
+ +Because substitution rules are applied to the namespaced tokens, macro programs can interact with each other; parts of the pattern that triggers a macro can be generated by other macros while other parts may be provided by the user. In this way, libraries can define extension interfaces where other libraries can integrate with their constructs, and an individual token can take on many meanings depending on context. + +--- + +Orchid is designed to be embedded in a Rust application. The entire program lifecycle consists of three stages which can be individually configured: + +1. The parser pipeline is responsible for converting text - usually files - into a module tree. It allows the embedder to define the environment the code will see in terms of a series of file trees that are parsed in the context of preceding layers. + +2. The macro executor operates entirely on the output of the pipeline. Macro programs don't necessarily halt, so the executor provides an API to find and resolve one match at a time. + +3. The interpreter is a single function operating on an expression with a symbol table for resolving named constants. It also allows setting a limit to the number of normalization steps - this is commonly known as gas. + +Interfacing between eager procedural and lazy functional code can be challenging, especially with the vastly different suites of optimizations. To make this a little easier, we provide an array of Rust macros that streamline the process of exposing Rust functions to Orchid code. The usage of these is demonstrated in the standard library. \ No newline at end of file diff --git a/notes/papers/demo/slides.md b/notes/papers/demo/slides.md new file mode 100644 index 0000000..3fce820 --- /dev/null +++ b/notes/papers/demo/slides.md @@ -0,0 +1,121 @@ +--- +marp: true +class: invert +--- + +# Orchid + +some tagline + +--- + +## Syntax + +basically lambda calc +``` +half := \n. div n 2 +pair := \a.\b. \f. 
f a b +increment := add 1 +``` + +--- + +## Macros + +match and replace token sequences +``` +if ...$cond then ...$true else ...$false ==> (ifthenelse (...$cond) (...$true) (...$false)) +``` +...while keeping parameters intact +``` +$data -- a single token (including parenthesized sequences) +...$data -- at least one token +..$data -- zero or more tokens +``` + +--- + +## Macros + +define operators... +``` +...$a + ...$b ==> (add (...$a) (...$b)) +``` +...and name bindings... +``` +let $name = ...$value in ...$body ==> (\$name. ...$body) ...$value +``` +...and control structures +``` +loop $r on (...$parameters) with ...$tail ==> Y (\$r. + bind_names (...$parameters) (...$tail) +) ...$parameters + +-- bind each of the names in the first argument as a parameter for the second argument +bind_names ($name ..$rest) $payload ==> \$name. bind_names (..$rest) $payload +bind_names () (...$payload) ==> ...$payload +``` + +--- + +## Macros + +can expose interfaces... +``` +do { ...$statement ; ...$rest } ==> (statement (...$statement) do { ...$rest }) +do { ...$return } ==> (...$return) +``` +...to be used by others... +``` +statement (let $name = ...$value) ...$next ==> ((\$name. ...$next) (...$value)) +statement (cps $name = ...$operation) ...$next ==> ((...$operation) \$name. 
...$next) +statement (cps ...$operation) ...$next ==> ((...$operation) (...$next)) +``` +...to define any syntax +``` +export main := do{ + cps data = readline; + let double = parse_float data * 2; + cps print (to_string double ++ "\n") +} +``` + +--- + +## Control + +remains with the embedder + +| | extension | supervision | +| ----------: | :----------------: | :--------------------: | +| pipeline | external libraries | file IO interception | +| macros | | step-by-step execution | +| interpreter | constants, input | gas | + +--- + +## Extensions + +```rs +use std::fmt::Debug; +use crate::external::litconv::with_lit; +use crate::representations::{interpreted::ExprInst, Literal}; +use crate::{atomic_impl, atomic_redirect, externfn_impl}; + +#[derive(Clone)] +pub struct ToString1; +externfn_impl!(ToString1, |_: &Self, x: ExprInst| Ok(ToString0{x})); + +#[derive(Debug, Clone)] +pub struct ToString0{ x: ExprInst } +atomic_redirect!(ToString0, x); +atomic_impl!(ToString0, |Self{ x }: &Self, _| { + let string = with_lit(x, |l| Ok(match l { + Literal::Char(c) => c.to_string(), + Literal::Uint(i) => i.to_string(), + Literal::Num(n) => n.to_string(), + Literal::Str(s) => s.clone() + }))?; + Ok(string.into()) +}); +``` \ No newline at end of file diff --git a/notes/papers/report/parts/examples/+index.md b/notes/papers/report/parts/examples/+index.md new file mode 100644 index 0000000..605db6b --- /dev/null +++ b/notes/papers/report/parts/examples/+index.md @@ -0,0 +1,54 @@ +# Examples + +The following examples all work in the submitted version of Orchid; they're included in various subdirectories of `examples`. + +## Prelude + +All code files implicitly include the head statement + +``` +import prelude::* +``` + +The `prelude` module is a string literal compiled into the interpreter. 
Its contents are as follows: + +```rs +static PRELUDE_TXT:&str = r#" +import std::( + add, subtract, multiply, remainder, divide, + equals, ifthenelse, + concatenate +) + +export ...$a + ...$b =1001=> (add (...$a) (...$b)) +export ...$a - ...$b:1 =1001=> (subtract (...$a) (...$b)) +export ...$a * ...$b =1000=> (multiply (...$a) (...$b)) +export ...$a % ...$b:1 =1000=> (remainder (...$a) (...$b)) +export ...$a / ...$b:1 =1000=> (divide (...$a) (...$b)) +export ...$a == ...$b =1002=> (equals (...$a) (...$b)) +export ...$a ++ ...$b =1003=> (concatenate (...$a) (...$b)) + +export do { ...$statement ; ...$rest:1 } =0x2p543=> ( + statement (...$statement) do { ...$rest } +) +export do { ...$return } =0x1p543=> (...$return) + +export statement (let $name = ...$value) ...$next =0x1p1000=> ( + (\$name. ...$next) (...$value) +) +export statement (cps $name = ...$operation) ...$next =0x2p1000=> ( + (...$operation) \$name. ...$next +) +export statement (cps ...$operation) ...$next =0x1p1000=> ( + (...$operation) (...$next) +) + +export if ...$cond then ...$true else ...$false:1 =0x1p320=> ( + ifthenelse (...$cond) (...$true) (...$false) +) + +export ::(,) +"#; +``` + +The meaning of each of these rules is explained in the [calculator example](./calculator.md). The exact file is included here just as a reference while reading the other examples. \ No newline at end of file diff --git a/notes/papers/report/parts/examples/list-processing.md b/notes/papers/report/parts/examples/list-processing/fn.md similarity index 64% rename from notes/papers/report/parts/examples/list-processing.md rename to notes/papers/report/parts/examples/list-processing/fn.md index f8faf05..e122920 100644 --- a/notes/papers/report/parts/examples/list-processing.md +++ b/notes/papers/report/parts/examples/list-processing/fn.md @@ -1,24 +1,7 @@ -This example showcases common list processing functions and some functional programming utilities. It is also the first multi-file demo. 
+# Fn -_in main.orc_ -``` -import std::(to_string, print) -import super::list -import fn::* +This file contains a variety of utilities for functional programming -export main := do{ - let foo = list::new[1, 2, 3, 4, 5, 6]; - let bar = list::map foo n => n * 2; - let sum = bar - |> list::skip 2 - |> list::take 3 - |> list::reduce 0 (a b) => a + b; - cps print $ to_string sum ++ "\n"; - 0 -} -``` - -_in fn.orc_ ``` export Y := \f.(\x.f (x x))(\x.f (x x)) @@ -37,70 +20,11 @@ export (...$argv) => ...$body =0x2p512=> (bind_names (...$argv) (...$body)) $name => ...$body =0x1p512=> (\$name. ...$body) ``` -_in list.orc_ -``` -import option -import super::fn::* - -pair := \a.\b. \f. f a b - --- Constructors - -export cons := \hd.\tl. option::some (pair hd tl) -export end := option::none - -export pop := \list.\default.\f. list default \cons.cons f - --- Operators - -export reduce := \list.\acc.\f. ( - loop r on (list acc) with - pop list acc \head.\tail. r tail (f acc head) -) - -export map := \list.\f. ( - loop r on (list) with - pop list end \head.\tail. cons (f head) (r tail) -) - -export skip := \list.\n. ( - loop r on (list n) with - if n == 0 then list - else pop list end \head.\tail. r tail (n - 1) -) - -export take := \list.\n. ( - loop r on (list n) with - if n == 0 then end - else pop list end \head.\tail. cons head $ r tail $ n - 1 -) - -new[...$item, ...$rest:1] =0x2p333=> (cons (...$item) new[...$rest]) -new[...$end] =0x1p333=> (cons (...$end) end) -new[] =0x1p333=> end - -export ::(new) -``` - -_in option.orc_ -``` -export some := \v. \d.\f. f v -export none := \d.\f. d - -export map := \option.\f. option none f -export flatten := \option. option none \opt. opt -export flatmap := \option.\f. option none \opt. map opt f -``` - -The `main` function uses a `do{}` block to enclose a series of name bindings. It imports `list` as a sibling module and `fn` as a top-level file. 
These files are in identical position, the purpose of this is just to test various ways to reference modules. - -## fn - -### bind_names +## bind_names This is a utility macro for binding a list of names on an expression. It demonstrates how to extract reusable macro program fragments to simplify common tasks. This demonstrative version simply takes a sequence of name tokens without any separators or custom programming, but its functionality can be extended in the future to include eg. destructuring. -### arrow functions +## arrow functions The arrow `=>` operator here is used to define inline functions. It is very similar to the native `\x.` lambda, except that native lambdas use higher priority than any macro so they can't appear inside a `do{}` block as all of the subsequent lines would be consumed by them. It is parsed using the following rules: ``` @@ -108,7 +32,7 @@ export (...$argv) => ...$body =0x2p512=> (bind_names (...$argv) (...$body)) $name => ...$body =0x1p512=> (\$name. ...$body) ``` -### pipelines +## pipelines This is a concept borrowed from Elixir. The `|>` operator simply inserts the output of the previous expression to the first argument of the following function. ``` @@ -117,11 +41,11 @@ export ...$prefix |> $fn ..$suffix:1 =0x2p130=> $fn (...$prefix) ..$suffix It is processed left-to-right, but leaves the suffix on the same level as the function and sinks the prefix, which means that long pipelines eventually become left associative despite the inverted processing order. -### right-associative function call operator +## right-associative function call operator The `$` operator is analogous to its Haskell counterpart. It is right-associative and very low priority. Its purpose is to eliminate trailing parentheses. -### Loop expression +## Loop expression Recursion in lambda calculus is achieved using a fixpoint combinator. 
The classic version of this combinator described by Church is the [Y-combinator][hb_tlc], defined like so: ``` diff --git a/notes/papers/report/parts/examples/list-processing/list.md b/notes/papers/report/parts/examples/list-processing/list.md new file mode 100644 index 0000000..f47dc85 --- /dev/null +++ b/notes/papers/report/parts/examples/list-processing/list.md @@ -0,0 +1,71 @@ +# List + +These files demonstrate building datastructures using closures. + +## Option.orc + +Option is among the simplest datastructures. It either stores a value or nothing. To interact with it, one must provide a default value and a selector. + +``` +export some := \v. \d.\f. f v +export none := \d.\f. d + +export map := \option.\f. option none f +export flatten := \option. option none \opt. opt +export flatmap := \option.\f. option none \opt. map opt f +``` + +The selector is required in lambda calculus because the only way to obtain information about values is to evaluate them, but it's not actually necessary in Orchid because it's always possible to pass a primitive of incompatible type as the default value and then use equality comparison to decide whether we got the value in the option or our dud. Regardless, this interface is vastly more convenient and probably more familiar to programmers coming from functional languages. + +## List.orc + +The linked list is an outstandingly powerful and versatile datastructure and the backbone of practical functional programming. This implementation uses a locally defined church pair and the option defined above in an effort to be more transparent, although this means that the essential operation of splitting the head and tail or returning a default value becomes an explicit function (here named `pop`) instead of the intrinsic interface of the list itself. + +_in list.orc_ +``` +import option +import super::fn::* + +pair := \a.\b. \f. f a b + +-- Constructors + +export cons := \hd.\tl. 
option::some (pair hd tl) +export end := option::none + +-- Operators + +export pop := \list.\default.\f. list default \cons.cons f + +export reduce := \list.\acc.\f. ( + loop r on (list acc) with + pop list acc \head.\tail. r tail (f acc head) +) + +export map := \list.\f. ( + loop r on (list) with + pop list end \head.\tail. cons (f head) (r tail) +) + +export skip := \list.\n. ( + loop r on (list n) with + if n == 0 then list + else pop list end \head.\tail. r tail (n - 1) +) + +export take := \list.\n. ( + loop r on (list n) with + if n == 0 then end + else pop list end \head.\tail. cons head $ r tail $ n - 1 +) + +new[...$item, ...$rest:1] =0x2p333=> (cons (...$item) new[...$rest]) +new[...$end] =0x1p333=> (cons (...$end) end) +new[] =0x1p333=> end + +export ::(new) +``` + +Most of these operations should be self-explanatory in the context of the parts defined in [fn.md](./fn.md). + +The `new[]` macro builds a list from data. Because they are expected to contain expressions, the fields here are comma separated unlike in `fn::=>` and `fn::loop`. I did not find this inconsistency jarring during initial testing, but it may be updated if further improvements to `loop` and `=>`'s syntax open up the possibility of multi-token field descriptions. \ No newline at end of file diff --git a/notes/papers/report/parts/examples/list-processing/main.md b/notes/papers/report/parts/examples/list-processing/main.md new file mode 100644 index 0000000..b4e9ac8 --- /dev/null +++ b/notes/papers/report/parts/examples/list-processing/main.md @@ -0,0 +1,30 @@ +This example showcases common list processing functions and some functional programming utilities. It is also the first multi-file demo. 
+ +_in main.orc_ +``` +import std::(to_string, print) +import super::list +import fn::* + +export main := do{ + let foo = list::new[1, 2, 3, 4, 5, 6]; + let bar = list::map foo n => n * 2; + let sum = bar + |> list::skip 2 + |> list::take 3 + |> list::reduce 0 (a b) => a + b; + cps print $ to_string sum ++ "\n"; + 0 +} +``` + +This file imports `list` as a sibling module and `fn` as a top-level file. These files are in identical position, the purpose of this is just to test various ways to reference modules. + +- The contents of _fn.orc_ are described in [fn](./fn.md) +- _list.orc_ and its dependency, _option.orc_ are described in [list](./list.md) + +--- + +The `main` function uses a `do{}` block to enclose a series of name bindings. It constructs a list of numbers 1-6. This is done eagerly, or at least a linked list of the same size is constructed eagerly, although the `cons` calls are left until the first read. Due to Orchid's laziness, `bar` gets assigned the `map` call as-is. `sum` is assigned from the `|>` pipe chain, which is essentially the same as a chain of further name bindings; the return value of each function is passed as the first argument of the next, pushing subsequent arguments out of the way. + +When the `print` expression is evaluated, the updates are applied as needed; the mapping is never applied to 1 and 2, and none of the loops in the list processing functions execute their body on the list object containing 6. \ No newline at end of file diff --git a/notes/papers/report/parts/future_work.md b/notes/papers/report/parts/future_work.md index 2668f41..54dc7a3 100644 --- a/notes/papers/report/parts/future_work.md +++ b/notes/papers/report/parts/future_work.md @@ -55,9 +55,19 @@ Being a pure language, Orchid carries the potential to serialize functions and s The flexible macro system enables library developers to invent their own syntax for essentially anything. 
I considered defining macros for html, music scores / midi data, marble and flow diagrams. -### DMA/MMIO +### Unsafe -TODO +These functions may be exposed by a direct Orchid interpreter but they would probably not be included in the library exposed by an embedder. + +#### system calls + +While individual system APIs can be exposed to the program using dedicated Rust bindings, this takes time and limits the power of the language. The general solution to this in high level languages is to expose the `system()` function which enables high level code to interact with _some kind of shell_, the shell of the operating system. What shell this exactly is and what tools are available through it is up to the user to discover. + +#### DMA/MMIO + +As a high level language, Orchid doesn't inherently have direct memory access, in part because it's not generally required. Regardless, a way of writing to and reading from exact memory addresses may be useful in the development of libraries that interface with hardware such as a raspberry pi's GPIO pins. + +In general this is probably better accomplished using Rust functions that interface with Orchid, but this will eventually inevitably lead to several functions that do nothing but read a number from an address or write a number to an address, except the addresses are wrapped in various tagged structs. This repetition could be nipped in the bud by simply exposing a function for mmio and allowing the Orchid side to define the wrappers. ## Type system @@ -67,5 +77,5 @@ Originally, Orchid was meant to have a type system that used Orchid itself to bu ### Alternatives -During initial testing of the working version, I found that the most common kind of programming error in lambda calculus appears to be arity mismatch or syntax errors that result in arity mismatch. Without any kind of type checking this is especially difficult to debug as every function looks the same. 
This can be addressed with a much simpler type system similar to System-F. Any such type checker would have to be constructed so as to only verify user-provided information regarding the arity of functions without attempting to find the arity of every expression, since System-F is strongly normalising and Orchid like any general purpose language supports potentially infinite loops. +During initial testing of the working version, I found that the most common kind of programming error in lambda calculus appears to be arity mismatch or syntax error that results in arity mismatch. Without any kind of type checking this is especially difficult to debug as every function looks the same. This can be addressed with a much simpler type system similar to System-F. Any such type checker would have to be constructed so as to only verify user-provided information regarding the arity of functions without attempting to find the arity of every expression, since System-F is strongly normalising and Orchid like any general purpose language supports potentially infinite loops. 
diff --git a/notes/papers/report/parts/haskell.md b/notes/papers/report/parts/haskell.md index 7a89de7..992bdfa 100644 --- a/notes/papers/report/parts/haskell.md +++ b/notes/papers/report/parts/haskell.md @@ -16,4 +16,4 @@ My plan for Orchid was to use Orchid itself as a type system as well; rather tha [tc2]: https://blog.rust-lang.org/2022/10/28/gats-stabilization.html [tc3]: https://wiki.haskell.org/Type_SK -A description of the planned type system is available in [[type_system/01-main.md|Appendix T]] \ No newline at end of file +A description of the planned type system is available in [[type_system/+index|Appendix T]] \ No newline at end of file diff --git a/notes/papers/report/parts/macros.md b/notes/papers/report/parts/macros.md index ff285cd..4508d0b 100644 --- a/notes/papers/report/parts/macros.md +++ b/notes/papers/report/parts/macros.md @@ -10,45 +10,49 @@ The range of valid priorities is divided up into bands, much like radio bands. I The bands are each an even 32 orders of magnitude, with space in between for future expansion -| | | | | -| :----------: | :-----: | :---------: | :----------: | -| 0-31 | 32-63 | 64-95 | 96-127 | -| | x | | | -| 128-159 | 160-191 | 192-223 | 224-255 | -| operators | | | x | -| 256-287 | 288-319 | 320-351 | 352-383 | -| | | expressions | | -| 384-415 | 416-447 | 448-479 | 480-511 | -| | x | | | -| 512-543 | 544-575 | 576-607 | 608-639 | -| bindings | | | x | -| 640-671 | 672-703 | 704-735 | 736-767 | -| | | aliases | | -| 768-799 | 800-831 | 832-863 | 864-895 | -| | x | | | -| 896-927 | 928-959 | 960-991 | 992- | -| integrations | | | transitional | +| | | | | +| :-----------: | :------: | :---------: | :----------: | +| 0-31 | 32-63 | 64-95 | 96-127 | +| optimizations | x | | | +| 128-159 | 160-191 | 192-223 | 224-255 | +| operators | | | x | +| 256-287 | 288-319 | 320-351 | 352-383 | +| | | expressions | | +| 384-415 | 416-447 | 448-479 | 480-511 | +| | x | | | +| 512-543 | 544-575 | 576-607 | 608-639 | +| bindings | | | x 
| +| 640-671 | 672-703 | 704-735 | 736-767 | +| | | x | | +| 768-799 | 800-831 | 832-863 | 864-895 | +| | aliases* | | | +| 896-927 | 928-959 | 960-991 | 992- | +| integrations | | | transitional | ### Transitional states -Transitional states produced and consumed by the same macro program occupy the range above 0x1p991. Nothing in this range should be written by the user or triggered by an interaction of distinct macro programs, the purpose of this high range is to prevent devices such as carriages from interacting. Any transformation sequence in this range can assume that the tree is inert other than its own operation. +Transitional states produced and consumed by the same macro program occupy the range above 0x1p991. Nothing in this range should be written by the user or triggered by an interaction of distinct macro programs, the purpose of this high range is to prevent devices such as carriages from interacting. Any transformation sequence in this range can assume that the tree is inert other than its own operation. ### Integrations -Integrations expect an inert syntax tree but at least one hidden token does not belong to the macro program that resolves the rule, so it's additionally important that all macro programs be in a documented state at the time of resolution. +Integrations expect an inert syntax tree but at least one token in the pattern is external to the macro program that resolves the rule, so it's critical that all macro programs be in a documented state at the time of resolution. ### Aliases -Fragments of code extracted for readability. +Fragments of code extracted for readability are all at exactly 0x1p800. These may be written by programmers who are not comfortable with macros or metaprogramming. They must have unique single token patterns. Because their priority is higher than any entry point, they can safely contain parts of other macro invocations. 
They have a single priority number because they can't conceivably require internal ordering adjustments and their usage is meant to be as straightforward as possible. ### Binding builders -Syntax elements that manipulate bindings should be executed earlier. Do blocks and match statements are good examples of this category. Anything with a lower priority trigger can assume that all names are correctly bound. +Syntax elements that manipulate bindings should be executed earlier. `do` blocks and (future) `match` statements are good examples of this category. Anything with a lower priority trigger can assume that all names are correctly bound. ### Expressions -Things that essentially work like function calls just with added structure, such as if/then/else +Things that essentially work like function calls just with added structure, such as `if`/`then`/`else` or `loop`. These are usually just more intuitive custom forms that are otherwise identical to a macro ### Operators -Binary and unary operators that process the chunks of text on either side \ No newline at end of file +Binary and unary operators that process the chunks of text on either side. Within the band, these macros are prioritized in inverse precedence order and apply to the entire range of clauses before and after themselves, to ensure that function calls have the highest perceived priority. + +### Optimizations + +Macros that operate on a fully resolved lambda code and look for known patterns that can be simplified. I did not manage to create a working example of this but for instance repeated string concatenation is a good example. \ No newline at end of file diff --git a/notes/papers/report/parts/pipeline.md b/notes/papers/report/parts/pipeline.md index cd277db..c80ab47 100644 --- a/notes/papers/report/parts/pipeline.md +++ b/notes/papers/report/parts/pipeline.md @@ -1,13 +1,6 @@ # The pipeline -The conversion of Orchid files into a collection of macro rules is a relatively complicated process. 
First, the source files are loaded and an initial parsing pass is executed. Because the set of supported operators influences the correct lexing of expressions, the output of this pass can't directly be used. The parts of each module that are known to be valid are - -- the imports, because they don't use expressions at all -- the visibility and pattern of macro rule definitions, because it is required to separate distinct operators with spaces -- the visibility and name of constant definitions -- the name of submodules and these same elements in their bodies - -This preparsed data is then used to locate all files in the solution, and to collect all operators visible to a certain file for a final parsing pass. It is necessary to refer to imported modules for a complete list of operators because glob imports don't offer any information about the set of names but still import all operators for the purpose of lexing. +The conversion of Orchid files into a collection of macro rules is a relatively complicated process that took several attempts to get right. ## Push vs pull logistics @@ -15,4 +8,60 @@ The initial POC implementation of Orchid used pull logistics aka lazy evaluation Additionally, in a lot of cases lazy evaluation is undesirable. Most programmers other than the developers of Python would like to receive syntax errors in dead functions because statically identifiable errors are usually either typos that are trivial to fix or born out of a misconception on the programmer's part which is worth addressing in case it produces silent errors elsewhere. But errors are produced when the calculation of a value fails, so to produce errors all values about all functions must be calculated. -To address these issues, the second iteration only uses pull logistics for the preparsing and file collection phase, and the only errors guaranteed to be produced by this stage are imports from missing files and syntax errors regarding the structure of the S-expressions. 
\ No newline at end of file +To address these issues, the second iteration only uses pull logistics for the preparsing and file collection phase, and the only errors guaranteed to be produced by this stage are imports from missing files and syntax errors regarding the structure of the S-expressions. + +## Stages + +As of writing, the pipeline consists of three main stages; source loading, tree-building and name resolution. These break down into multiple substages. + +All stages support various ways to introduce blind spots and precomputed values into their processing. This is used to load the standard library, prelude, and possibly externally defined intermediate stages of injected code. + +### Source loading + +This stage encapsulates pull logistics. It collects all source files that should be included in the compilation in a hashmap keyed by their project-relative path. All subsequent operations are executed on every element of this map unconditionally. + +The files and directory listings are obtained from an injected function for flexibility. File collection is motivated by a set of target paths, and injected paths can be ignored with a callback. + +Parsing itself is outsourced to a Chumsky parser defined separately. This parser expects a list of operators for tokenization, but such a list is not available without knowledge of other files because glob imports don't provide information about the operators they define so much of the parsed data is invalid. What is known to be valid are + +- the types of all lines +- line types `import` and `export` +- the pattern of `rule` lines +- the name of `constant` and `namespace` lines +- valid parts of the `exported` variant of lines +- valid parts of the body of `namespace` lines + +This information is compiled into a very barebones module representation and returned alongside the loaded source code. + +### Tree building + +This stage aims to collect all modules in a single tree. 
To achieve this, it re-parses each file with the set of operators collected from the datastructure built during preparsing. The glob imports in the resulting FileEntry lists are eliminated, and the names in the bodies of expressions and macro rules are prefixed with the module path in preparation for macro execution. + +Operator collection can be advised about the exports of injected modules using a callback, and a prelude in the form of a list of line objects - in the shape emitted by the parser - can be injected before the contents of every module to define universally accessible names. Since these lines are processed for every file, it's generally best to just insert a single glob import from a module that defines everything. The interpreter inserts `import prelude::*`. + +### Import resolution + +This stage aims to produce a tree ready for consumption by a macro executor or any other subsystem. It replaces every name originating from imported namespaces in every module with the original name. + +Injection is supported with a function which takes a path and, if it's valid in the injected tree, returns its original value even if that's the path itself. This is used both to skip resolving names in the injected modules - which are expected to have already been processed using this step - and of course to find the origin of imports from the injected tree. + +## Layered parsing + +The most important export of the pipeline is the `parse_layer` function, which acts as a façade over the complex system described above. The environment in which user code runs is bootstrapped using repeated invocations of this function. It has the following options + +1. targets that motivate file loading + + In the case of intermediate layers this can be a list of all included module names. The targets are only required to be valid, global import paths without a globstar. + +2. a function that performs file and directory reads. 
+ + This is normally set to a lambda that relays requests to `pipeline::file_loader`, but it can be replaced with another function if source code is to be loaded from an emulated file system, such as an in-memory tree or an online package repository. + +3. the previous layer as an environment +4. a prelude to every file + + The interpreter sets this to `import prelude::*`. If the embedder defines its own prelude it's a good idea to append it. + +### The first layer + +The other important exports of the pipeline are `ConstTree` and `from_const_tree`. These are used to define a base layer that exposes extern functions. `ConstTree` implements `Add` so distinct libraries of extern functions can be intuitively combined. \ No newline at end of file diff --git a/notes/papers/report/parts/spec/01-main.md b/notes/papers/report/parts/spec/+index.md similarity index 100% rename from notes/papers/report/parts/spec/01-main.md rename to notes/papers/report/parts/spec/+index.md diff --git a/notes/papers/report/parts/spec/02-parsing.md b/notes/papers/report/parts/spec/02-parsing.md index 1d956cc..fd33092 100644 --- a/notes/papers/report/parts/spec/02-parsing.md +++ b/notes/papers/report/parts/spec/02-parsing.md @@ -2,6 +2,10 @@ Orchid expressions are similar in nature to lambda calculus or haskell, except whitespace is mostly irrelevant. +## Comments + +Orchid borrows Lua's comment syntax. Line comments start with `--` and end at a line break. Block comments start with `--[` and end with `]--`. + ## Names `name` and `ns_name` tokens appear all over the place in this spec. They represent operators, function names, arguments, modules. A `name` is diff --git a/notes/papers/report/parts/substack.md b/notes/papers/report/parts/substack.md index 3f8052b..0641a0c 100644 --- a/notes/papers/report/parts/substack.md +++ b/notes/papers/report/parts/substack.md @@ -2,6 +2,4 @@ The vast majority of algorithms involved in this project are multiple recursive in nature. 
Very often information on higher levels would influence the entire subtree. A good example is the resolution of name bindings. The size of the call stack is associated with the growth of the set of names, only the top needs to be mutated, but all names seen in enclosing scopes need to be accessible. The datastructure we need is essentially a linked list on the stack. -This is a very common and not particularly interesting datastructure; much like quicksort, every C project of considerable size that uses recursion includes some definition of it. However, I still think it deserves some attention, precisely because it's so common. For example, my implementation also defines an iterator, and a reasonably efficient implementation of the outstandingly common operation of collecting the stack into a Vec that starts at the bottom. - -Another extension to this concept I wrote to help with the type system is a stackbound map. This is not a very good implementation and it definitely needs more work to be worth publishing, but it stands to show that the concept of a substack is versatile and powerful. \ No newline at end of file +This is a very common and not particularly interesting datastructure; much like quicksort, every C project of considerable size that uses recursion includes some definition of it. However, I still think it deserves some attention, precisely because it's so common. For example, my implementation also defines an iterator, and a reasonably efficient, safe implementation of the outstandingly common operation of collecting the stack into a Vec that starts at the bottom. 
\ No newline at end of file diff --git a/notes/papers/report/parts/type_system/01-main.md b/notes/papers/report/parts/type_system/+index.md similarity index 100% rename from notes/papers/report/parts/type_system/01-main.md rename to notes/papers/report/parts/type_system/+index.md diff --git a/src/external/bool/equals.rs b/src/external/bool/equals.rs index de74f72..f0281e1 100644 --- a/src/external/bool/equals.rs +++ b/src/external/bool/equals.rs @@ -34,12 +34,14 @@ externfn_impl!(Equals1, |this: &Self, x: ExprInst| { #[derive(Debug, Clone)] pub struct Equals0 { a: Literal, x: ExprInst } atomic_redirect!(Equals0, x); -atomic_impl!(Equals0, |Self{ a, x }: &Self| { +atomic_impl!(Equals0, |Self{ a, x }: &Self, _| { let eqls = with_lit(x, |l| Ok(match (a, l) { (Literal::Char(c1), Literal::Char(c2)) => c1 == c2, (Literal::Num(n1), Literal::Num(n2)) => n1 == n2, (Literal::Str(s1), Literal::Str(s2)) => s1 == s2, (Literal::Uint(i1), Literal::Uint(i2)) => i1 == i2, + (Literal::Num(n1), Literal::Uint(u1)) => *n1 == (*u1 as f64), + (Literal::Uint(u1), Literal::Num(n1)) => *n1 == (*u1 as f64), (_, _) => AssertionError::fail(x.clone(), "the expected type")?, }))?; Ok(Boolean::from(eqls).to_atom_cls()) diff --git a/src/external/bool/ifthenelse.rs b/src/external/bool/ifthenelse.rs index 44aeeb6..db920a0 100644 --- a/src/external/bool/ifthenelse.rs +++ b/src/external/bool/ifthenelse.rs @@ -22,7 +22,7 @@ externfn_impl!(IfThenElse1, |_: &Self, x: ExprInst| Ok(IfThenElse0{x})); #[derive(Debug, Clone)] pub struct IfThenElse0{ x: ExprInst } atomic_redirect!(IfThenElse0, x); -atomic_impl!(IfThenElse0, |this: &Self| { +atomic_impl!(IfThenElse0, |this: &Self, _| { let Boolean(b) = this.x.clone().try_into() .map_err(|_| AssertionError::ext(this.x.clone(), "a boolean"))?; Ok(if b { Clause::Lambda { diff --git a/src/external/bool/mod.rs b/src/external/bool/mod.rs index 6d61294..31aad3d 100644 --- a/src/external/bool/mod.rs +++ b/src/external/bool/mod.rs @@ -9,6 +9,8 @@ use 
crate::{pipeline::ConstTree, interner::Interner}; pub fn bool(i: &Interner) -> ConstTree { ConstTree::tree([ (i.i("ifthenelse"), ConstTree::xfn(ifthenelse::IfThenElse1)), - (i.i("equals"), ConstTree::xfn(equals::Equals2)) + (i.i("equals"), ConstTree::xfn(equals::Equals2)), + (i.i("true"), ConstTree::atom(Boolean(true))), + (i.i("false"), ConstTree::atom(Boolean(false))) ]) } \ No newline at end of file diff --git a/src/external/conv/parse_float.rs b/src/external/conv/parse_float.rs index 6f1252b..639f2a9 100644 --- a/src/external/conv/parse_float.rs +++ b/src/external/conv/parse_float.rs @@ -24,7 +24,7 @@ externfn_impl!(ParseFloat1, |_: &Self, x: ExprInst| Ok(ParseFloat0{x})); #[derive(Debug, Clone)] pub struct ParseFloat0{ x: ExprInst } atomic_redirect!(ParseFloat0, x); -atomic_impl!(ParseFloat0, |Self{ x }: &Self| { +atomic_impl!(ParseFloat0, |Self{ x }: &Self, _| { let number = with_lit(x, |l| Ok(match l { Literal::Str(s) => { let parser = float_parser(); diff --git a/src/external/conv/parse_uint.rs b/src/external/conv/parse_uint.rs index 130b719..2e848c3 100644 --- a/src/external/conv/parse_uint.rs +++ b/src/external/conv/parse_uint.rs @@ -23,7 +23,7 @@ externfn_impl!(ParseUint1, |_: &Self, x: ExprInst| Ok(ParseUint0{x})); #[derive(Debug, Clone)] pub struct ParseUint0{ x: ExprInst } atomic_redirect!(ParseUint0, x); -atomic_impl!(ParseUint0, |Self{ x }: &Self| { +atomic_impl!(ParseUint0, |Self{ x }: &Self, _| { let uint = with_lit(x, |l| Ok(match l { Literal::Str(s) => { let parser = int_parser(); diff --git a/src/external/conv/to_string.rs b/src/external/conv/to_string.rs index cd5b56f..731129e 100644 --- a/src/external/conv/to_string.rs +++ b/src/external/conv/to_string.rs @@ -20,7 +20,7 @@ externfn_impl!(ToString1, |_: &Self, x: ExprInst| Ok(ToString0{x})); #[derive(Debug, Clone)] pub struct ToString0{ x: ExprInst } atomic_redirect!(ToString0, x); -atomic_impl!(ToString0, |Self{ x }: &Self| { +atomic_impl!(ToString0, |Self{ x }: &Self, _| { let string = 
with_lit(x, |l| Ok(match l { Literal::Char(c) => c.to_string(), Literal::Uint(i) => i.to_string(), diff --git a/src/external/cpsio/debug.rs b/src/external/cpsio/debug.rs new file mode 100644 index 0000000..cba1bff --- /dev/null +++ b/src/external/cpsio/debug.rs @@ -0,0 +1,33 @@ +use std::fmt::Debug; + +use crate::foreign::{Atomic, AtomicReturn}; +use crate::interner::InternedDisplay; +use crate::interpreter::Context; +use crate::{externfn_impl, atomic_defaults}; +use crate::representations::interpreted::ExprInst; + +/// Debug function +/// +/// Next state: [Debug0] + +#[derive(Clone)] +pub struct Debug2; +externfn_impl!(Debug2, |_: &Self, x: ExprInst| Ok(Debug1{x})); + +/// Partially applied Print function +/// +/// Prev state: [Debug1] + +#[derive(Debug, Clone)] +pub struct Debug1{ x: ExprInst } +impl Atomic for Debug1 { + atomic_defaults!(); + fn run(&self, ctx: Context) -> crate::foreign::AtomicResult { + println!("{}", self.x.bundle(&ctx.interner)); + Ok(AtomicReturn{ + clause: self.x.expr().clause.clone(), + gas: ctx.gas.map(|g| g - 1), + inert: false + }) + } +} \ No newline at end of file diff --git a/src/external/cpsio/mod.rs b/src/external/cpsio/mod.rs index adc4b25..511417c 100644 --- a/src/external/cpsio/mod.rs +++ b/src/external/cpsio/mod.rs @@ -2,10 +2,14 @@ use crate::{interner::Interner, pipeline::ConstTree}; mod print; mod readline; +mod debug; +mod panic; pub fn cpsio(i: &Interner) -> ConstTree { ConstTree::tree([ (i.i("print"), ConstTree::xfn(print::Print2)), - (i.i("readline"), ConstTree::xfn(readline::Readln2)) + (i.i("readline"), ConstTree::xfn(readline::Readln2)), + (i.i("debug"), ConstTree::xfn(debug::Debug2)), + (i.i("panic"), ConstTree::xfn(panic::Panic1)) ]) } \ No newline at end of file diff --git a/src/external/cpsio/panic.rs b/src/external/cpsio/panic.rs new file mode 100644 index 0000000..b845f8a --- /dev/null +++ b/src/external/cpsio/panic.rs @@ -0,0 +1,29 @@ +use std::fmt::Display; + +use crate::{atomic_impl, atomic_redirect, 
externfn_impl}; +use crate::external::litconv::with_str; +use crate::representations::interpreted::ExprInst; +use crate::foreign::ExternError; + +#[derive(Clone)] +pub struct Panic1; +externfn_impl!(Panic1, |_: &Self, x: ExprInst| Ok(Panic0{ x })); + +#[derive(Debug, Clone)] +pub struct Panic0{ x: ExprInst } +atomic_redirect!(Panic0, x); +atomic_impl!(Panic0, |Self{ x }: &Self, _| { + with_str(x, |s| { + Err(OrchidPanic(s.clone()).into_extern()) + }) +}); + +pub struct OrchidPanic(String); + +impl Display for OrchidPanic { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Orchid code panicked: {}", self.0) + } +} + +impl ExternError for OrchidPanic {} \ No newline at end of file diff --git a/src/external/cpsio/print.rs b/src/external/cpsio/print.rs index 79557a5..61ebbeb 100644 --- a/src/external/cpsio/print.rs +++ b/src/external/cpsio/print.rs @@ -1,4 +1,5 @@ use std::fmt::Debug; +use std::io::{self, Write}; use std::rc::Rc; use crate::external::litconv::with_str; @@ -21,9 +22,10 @@ externfn_impl!(Print2, |_: &Self, x: ExprInst| Ok(Print1{x})); #[derive(Debug, Clone)] pub struct Print1{ x: ExprInst } atomic_redirect!(Print1, x); -atomic_impl!(Print1, |Self{ x }: &Self| { +atomic_impl!(Print1, |Self{ x }: &Self, _| { with_str(x, |s| { print!("{}", s); + io::stdout().flush().unwrap(); Ok(Clause::Lambda { args: Some(PathSet{ steps: Rc::new(vec![]), next: None }), body: Clause::LambdaArg.wrap() diff --git a/src/external/cpsio/readline.rs b/src/external/cpsio/readline.rs index c8e8672..d852480 100644 --- a/src/external/cpsio/readline.rs +++ b/src/external/cpsio/readline.rs @@ -21,7 +21,7 @@ externfn_impl!(Readln2, |_: &Self, x: ExprInst| Ok(Readln1{x})); #[derive(Debug, Clone)] pub struct Readln1{ x: ExprInst } atomic_redirect!(Readln1, x); -atomic_impl!(Readln1, |Self{ x }: &Self| { +atomic_impl!(Readln1, |Self{ x }: &Self, _| { let mut buf = String::new(); stdin().read_line(&mut buf) .map_err(|e| RuntimeError::ext(e.to_string(), 
"reading from stdin"))?; diff --git a/src/external/num/numeric.rs b/src/external/num/numeric.rs index 7d001ca..b520a1b 100644 --- a/src/external/num/numeric.rs +++ b/src/external/num/numeric.rs @@ -48,7 +48,7 @@ impl Sub for Numeric { fn sub(self, rhs: Self) -> Self::Output { match (self, rhs) { - (Numeric::Uint(a), Numeric::Uint(b)) if b < a => Numeric::Uint(a - b), + (Numeric::Uint(a), Numeric::Uint(b)) if b <= a => Numeric::Uint(a - b), (Numeric::Uint(a), Numeric::Uint(b)) => Numeric::num(a as f64 - b as f64), (Numeric::Num(a), Numeric::Num(b)) => Numeric::num(a - b), diff --git a/src/external/num/operators/add.rs b/src/external/num/operators/add.rs index d1b3b02..c430ede 100644 --- a/src/external/num/operators/add.rs +++ b/src/external/num/operators/add.rs @@ -9,7 +9,6 @@ use crate::representations::interpreted::ExprInst; /// Add function /// /// Next state: [Add1] - #[derive(Clone)] pub struct Add2; externfn_impl!(Add2, |_: &Self, x: ExprInst| Ok(Add1{x})); @@ -17,7 +16,6 @@ externfn_impl!(Add2, |_: &Self, x: ExprInst| Ok(Add1{x})); /// Partially applied Add function /// /// Prev state: [Add2]; Next state: [Add0] - #[derive(Debug, Clone)] pub struct Add1{ x: ExprInst } atomic_redirect!(Add1, x); @@ -30,11 +28,12 @@ externfn_impl!(Add1, |this: &Self, x: ExprInst| { /// Fully applied Add function. 
/// /// Prev state: [Add1] - #[derive(Debug, Clone)] pub struct Add0 { a: Numeric, x: ExprInst } atomic_redirect!(Add0, x); -atomic_impl!(Add0, |Self{ a, x }: &Self| { +atomic_impl!(Add0, |Self{ a, x }: &Self, _| { let b: Numeric = x.clone().try_into()?; Ok((*a + b).into()) }); + + diff --git a/src/external/num/operators/divide.rs b/src/external/num/operators/divide.rs index 5b9c310..259293f 100644 --- a/src/external/num/operators/divide.rs +++ b/src/external/num/operators/divide.rs @@ -34,7 +34,7 @@ externfn_impl!(Divide1, |this: &Self, x: ExprInst| { #[derive(Debug, Clone)] pub struct Divide0 { a: Numeric, x: ExprInst } atomic_redirect!(Divide0, x); -atomic_impl!(Divide0, |Self{ a, x }: &Self| { +atomic_impl!(Divide0, |Self{ a, x }: &Self, _| { let b: Numeric = x.clone().try_into()?; Ok((*a / b).into()) }); \ No newline at end of file diff --git a/src/external/num/operators/multiply.rs b/src/external/num/operators/multiply.rs index df990f3..85a75b3 100644 --- a/src/external/num/operators/multiply.rs +++ b/src/external/num/operators/multiply.rs @@ -34,7 +34,7 @@ externfn_impl!(Multiply1, |this: &Self, x: ExprInst| { #[derive(Debug, Clone)] pub struct Multiply0 { a: Numeric, x: ExprInst } atomic_redirect!(Multiply0, x); -atomic_impl!(Multiply0, |Self{ a, x }: &Self| { +atomic_impl!(Multiply0, |Self{ a, x }: &Self, _| { let b: Numeric = x.clone().try_into()?; Ok((*a * b).into()) }); \ No newline at end of file diff --git a/src/external/num/operators/remainder.rs b/src/external/num/operators/remainder.rs index 037eb5a..cf3621d 100644 --- a/src/external/num/operators/remainder.rs +++ b/src/external/num/operators/remainder.rs @@ -34,7 +34,7 @@ externfn_impl!(Remainder1, |this: &Self, x: ExprInst| { #[derive(Debug, Clone)] pub struct Remainder0 { a: Numeric, x: ExprInst } atomic_redirect!(Remainder0, x); -atomic_impl!(Remainder0, |Self{ a, x }: &Self| { +atomic_impl!(Remainder0, |Self{ a, x }: &Self, _| { let b: Numeric = x.clone().try_into()?; Ok((*a % b).into()) }); \ 
No newline at end of file diff --git a/src/external/num/operators/subtract.rs b/src/external/num/operators/subtract.rs index f3aab97..9aef1dd 100644 --- a/src/external/num/operators/subtract.rs +++ b/src/external/num/operators/subtract.rs @@ -34,7 +34,7 @@ externfn_impl!(Subtract1, |this: &Self, x: ExprInst| { #[derive(Debug, Clone)] pub struct Subtract0 { a: Numeric, x: ExprInst } atomic_redirect!(Subtract0, x); -atomic_impl!(Subtract0, |Self{ a, x }: &Self| { +atomic_impl!(Subtract0, |Self{ a, x }: &Self, _| { let b: Numeric = x.clone().try_into()?; Ok((*a - b).into()) }); \ No newline at end of file diff --git a/src/external/str/char_at.rs b/src/external/str/char_at.rs index e64edd4..950392b 100644 --- a/src/external/str/char_at.rs +++ b/src/external/str/char_at.rs @@ -33,7 +33,7 @@ externfn_impl!(CharAt1, |this: &Self, x: ExprInst| { #[derive(Debug, Clone)] pub struct CharAt0 { s: String, x: ExprInst } atomic_redirect!(CharAt0, x); -atomic_impl!(CharAt0, |Self{ s, x }: &Self| { +atomic_impl!(CharAt0, |Self{ s, x }: &Self, _| { with_uint(x, |i| if let Some(c) = s.chars().nth(i as usize) { Ok(Clause::P(Primitive::Literal(Literal::Char(c)))) } else { diff --git a/src/external/str/concatenate.rs b/src/external/str/concatenate.rs index 0df75e8..09bad5a 100644 --- a/src/external/str/concatenate.rs +++ b/src/external/str/concatenate.rs @@ -32,7 +32,7 @@ externfn_impl!(Concatenate1, |this: &Self, c: ExprInst| { #[derive(Debug, Clone)] pub struct Concatenate0 { a: String, c: ExprInst } atomic_redirect!(Concatenate0, c); -atomic_impl!(Concatenate0, |Self{ a, c }: &Self| { +atomic_impl!(Concatenate0, |Self{ a, c }: &Self, _| { with_str(c, |b| Ok(Clause::P(Primitive::Literal( Literal::Str(a.to_owned() + b) )))) diff --git a/src/foreign_macros/atomic_impl.rs b/src/foreign_macros/atomic_impl.rs index 9669dcd..3d0e266 100644 --- a/src/foreign_macros/atomic_impl.rs +++ b/src/foreign_macros/atomic_impl.rs @@ -39,7 +39,7 @@ use std::fmt::Debug; #[macro_export] macro_rules! 
atomic_impl { ($typ:ident) => { - atomic_impl!{$typ, |this: &Self| { + atomic_impl!{$typ, |this: &Self, _: $crate::interpreter::Context| { use $crate::foreign::ExternFn; Ok(this.clone().to_xfn_cls()) }} @@ -64,7 +64,7 @@ macro_rules! atomic_impl { >::from((self, state)); // branch off or wrap up let clause = if inert { - match ($next_phase)(&next_self) { + match ($next_phase)(&next_self, ctx) { Ok(r) => r, Err(e) => return Err( $crate::interpreter::RuntimeError::Extern(e) diff --git a/src/interpreter/apply.rs b/src/interpreter/apply.rs index f1d43a9..bc6998b 100644 --- a/src/interpreter/apply.rs +++ b/src/interpreter/apply.rs @@ -82,7 +82,10 @@ pub fn apply( (new_xpr.clause.clone(), (ctx.gas.map(|x| x - 1), false)) } else {(body.expr().clause.clone(), (ctx.gas, false))}), Clause::Constant(name) => { - let symval = ctx.symbols.get(name).expect("missing symbol for function").clone(); + let symval = if let Some(sym) = ctx.symbols.get(name) {sym.clone()} + else { panic!("missing symbol for function {}", + ctx.interner.extern_vec(*name).join("::") + )}; Ok((Clause::Apply { f: symval, x, }, (ctx.gas, false))) } Clause::P(Primitive::Atom(atom)) => { // take a step in expanding atom diff --git a/src/interpreter/context.rs b/src/interpreter/context.rs index 55fe66f..d8dc414 100644 --- a/src/interpreter/context.rs +++ b/src/interpreter/context.rs @@ -1,11 +1,12 @@ use hashbrown::HashMap; use crate::representations::interpreted::ExprInst; -use crate::interner::Token; +use crate::interner::{Token, Interner}; #[derive(Clone)] pub struct Context<'a> { pub symbols: &'a HashMap>>, ExprInst>, + pub interner: &'a Interner, pub gas: Option, } diff --git a/src/parse/sourcefile.rs b/src/parse/sourcefile.rs index 010998a..e2357cd 100644 --- a/src/parse/sourcefile.rs +++ b/src/parse/sourcefile.rs @@ -66,8 +66,8 @@ fn namespace_parser<'a>( .ignore_then(filter_map_lex(enum_filter!(Lexeme::Name))) .then( any().repeated().delimited_by( - Lexeme::LP('{').parser(), - Lexeme::RP('{').parser() + 
Lexeme::LP('(').parser(), + Lexeme::RP('(').parser() ).try_map(move |body, _| { split_lines(&body) .map(|l| line.parse(l)) @@ -120,6 +120,7 @@ pub fn line_parser<'a>(ctx: impl Context + 'a) pub fn split_lines(data: &[Entry]) -> impl Iterator { let mut source = data.iter().enumerate(); let mut last_slice = 0; + let mut finished = false; iter::from_fn(move || { let mut paren_count = 0; while let Some((i, Entry{ lexeme, .. })) = source.next() { @@ -134,6 +135,11 @@ pub fn split_lines(data: &[Entry]) -> impl Iterator { _ => (), } } + // Include last line even without trailing newline + if !finished { + finished = true; + return Some(&data[last_slice..]) + } None }).filter(|s| s.len() > 0) } diff --git a/src/pipeline/import_resolution/apply_aliases.rs b/src/pipeline/import_resolution/apply_aliases.rs index b9df142..fc38c92 100644 --- a/src/pipeline/import_resolution/apply_aliases.rs +++ b/src/pipeline/import_resolution/apply_aliases.rs @@ -6,6 +6,20 @@ use crate::{utils::Substack, interner::{Token, Interner}, pipeline::{ProjectModu use super::{alias_map::AliasMap, decls::InjectedAsFn}; +fn resolve( + token: Token>>, + alias_map: &AliasMap, + i: &Interner, +) -> Option>> { + if let Some(alias) = alias_map.resolve(token) { + Some(i.r(alias).clone()) + } else if let Some((foot, body)) = i.r(token).split_last() { + let mut new_beginning = resolve(i.i(body), alias_map, i)?; + new_beginning.push(*foot); + Some(new_beginning) + } else {None} +} + fn process_expr( expr: &Expr, alias_map: &AliasMap, @@ -14,9 +28,15 @@ fn process_expr( ) -> Expr { expr.map_names(&|n| { injected_as(&i.r(n)[..]).or_else(|| { - alias_map.resolve(n).map(|n| { - injected_as(&i.r(n)[..]).unwrap_or(n) - }) + let next_v = resolve(n, alias_map, i)?; + // println!("Resolved alias {} to {}", + // i.extern_vec(n).join("::"), + // i.extern_all(&next_v).join("::") + // ); + Some( + injected_as(&next_v) + .unwrap_or_else(|| i.i(&next_v)) + ) }) }).unwrap_or_else(|| expr.clone()) } diff --git 
a/src/pipeline/import_resolution/resolve_imports.rs b/src/pipeline/import_resolution/resolve_imports.rs index bf50ac9..b8daf40 100644 --- a/src/pipeline/import_resolution/resolve_imports.rs +++ b/src/pipeline/import_resolution/resolve_imports.rs @@ -1,5 +1,7 @@ use std::rc::Rc; +use itertools::Itertools; + use crate::interner::Interner; use crate::pipeline::error::ProjectError; use crate::pipeline::project_tree::ProjectTree; @@ -23,6 +25,14 @@ pub fn resolve_imports( &project, &mut map, i, injected_as )?; + println!("Aliases: {{{:?}}}", + map.targets.iter() + .map(|(kt, vt)| format!("{} => {}", + i.extern_vec(*kt).join("::"), + i.extern_vec(*vt).join("::") + )) + .join(", ") + ); let new_mod = apply_aliases(project.0.as_ref(), &map, i, injected_as); Ok(ProjectTree(Rc::new(new_mod))) } \ No newline at end of file diff --git a/src/pipeline/parse_layer.rs b/src/pipeline/parse_layer.rs index 53a89bc..d50d431 100644 --- a/src/pipeline/parse_layer.rs +++ b/src/pipeline/parse_layer.rs @@ -39,7 +39,7 @@ pub fn parse_layer<'a>( )) }; let source = source_loader::load_source( - targets, i, loader, &|path| injected_as(path).is_some() + targets, prelude, i, loader, &|path| injected_as(path).is_some() )?; let tree = project_tree::build_tree(source, i, prelude, &injected_names)?; let sum = ProjectTree(Rc::new( diff --git a/src/pipeline/project_tree/build_tree.rs b/src/pipeline/project_tree/build_tree.rs index 6e18a9a..34d97fd 100644 --- a/src/pipeline/project_tree/build_tree.rs +++ b/src/pipeline/project_tree/build_tree.rs @@ -1,6 +1,7 @@ use std::rc::Rc; use hashbrown::HashMap; +use itertools::Itertools; use crate::pipeline::error::ProjectError; use crate::interner::{Token, Interner}; @@ -138,6 +139,11 @@ fn source_to_module( _ => None, }) .collect::>(); + // println!( + // "Constructing file-module {} with members ({})", + // i.extern_all(&path_v[..]).join("::"), + // exports.keys().map(|t| i.r(*t)).join(", ") + // ); Rc::new(Module { imports, items, @@ -174,10 +180,15 @@ fn 
files_to_module( (namespace, ModEntry{ exported: true, member }) }) .collect::>(); - let exports = items.keys() + let exports: HashMap<_, _> = items.keys() .copied() .map(|name| (name, i.i(&pushed(&path_v, name)))) .collect(); + // println!( + // "Constructing module {} with items ({})", + // i.extern_all(&path_v[..]).join("::"), + // exports.keys().map(|t| i.r(*t)).join(", ") + // ); Rc::new(Module{ items, imports: vec![], diff --git a/src/pipeline/project_tree/collect_ops/exported_ops.rs b/src/pipeline/project_tree/collect_ops/exported_ops.rs index f2ffb3a..0d408b4 100644 --- a/src/pipeline/project_tree/collect_ops/exported_ops.rs +++ b/src/pipeline/project_tree/collect_ops/exported_ops.rs @@ -1,6 +1,8 @@ +use std::println; use std::rc::Rc; use hashbrown::HashSet; +use itertools::Itertools; use crate::representations::tree::WalkErrorKind; use crate::pipeline::source_loader::LoadedSourceTable; @@ -30,7 +32,13 @@ pub fn collect_exported_ops( i: &Interner, injected: &impl InjectedOperatorsFn ) -> OpsResult { - if let Some(i) = injected(path) {return Ok(i)} + if let Some(ops) = injected(path) { + if path == i.i(&[i.i("prelude")][..]) { + println!("%%% Prelude exported ops %%%"); + println!("{}", ops.iter().map(|t| i.r(*t)).join(", ")); + } + return Ok(ops) + } let is_file = |n: &[Token]| loaded.contains_key(&i.i(n)); let path_s = &i.r(path)[..]; let name_split = split_name(path_s, &is_file); @@ -59,11 +67,15 @@ pub fn collect_exported_ops( .collect() }.rc(), })?; - Ok(Rc::new(module.items.iter() + let out: HashSet<_> = module.items.iter() .filter(|(_, v)| v.exported) .map(|(k, _)| *k) - .collect() - )) + .collect(); + if path == i.i(&[i.i("prelude")][..]) { + println!("%%% Prelude exported ops %%%"); + println!("{}", out.iter().map(|t| i.r(*t)).join(", ")); + } + Ok(Rc::new(out)) } pub fn mk_cache<'a>( @@ -71,5 +83,7 @@ pub fn mk_cache<'a>( i: &'a Interner, injected: &'a impl InjectedOperatorsFn, ) -> ExportedOpsCache<'a> { - Cache::new(|path, _this| 
collect_exported_ops(path, loaded, i, injected)) + Cache::new(|path, _this| { + collect_exported_ops(path, loaded, i, injected) + }) } \ No newline at end of file diff --git a/src/pipeline/project_tree/collect_ops/ops_for.rs b/src/pipeline/project_tree/collect_ops/ops_for.rs index 3af906c..2285781 100644 --- a/src/pipeline/project_tree/collect_ops/ops_for.rs +++ b/src/pipeline/project_tree/collect_ops/ops_for.rs @@ -1,6 +1,7 @@ use std::rc::Rc; use hashbrown::HashSet; +use itertools::Itertools; use crate::parse::is_op; use crate::pipeline::error::ProjectError; @@ -34,9 +35,11 @@ pub fn collect_ops_for( ) -> OpsResult { let tree = &loaded[&i.i(file)].preparsed.0; let mut ret = HashSet::new(); + println!("collecting ops for {}", i.extern_all(file).join("::")); tree_all_ops(tree.as_ref(), &mut ret); tree.visit_all_imports(&mut |modpath, module, import| { if let Some(n) = import.name { ret.insert(n); } else { + println!("\tglob import from {}", i.extern_vec(import.path).join("::")); let path = import_abs_path( &file, modpath, module, &i.r(import.path)[..], i ).expect("This error should have been caught during loading"); @@ -45,5 +48,9 @@ pub fn collect_ops_for( Ok::<_, Rc>(()) })?; ret.drain_filter(|t| !is_op(i.r(*t))); + if file == &[i.i("map")][..] 
{ + println!(" %%% ops in map %%% "); + println!("{}", ret.iter().map(|t| i.r(*t)).join(", ")) + } Ok(Rc::new(ret)) } \ No newline at end of file diff --git a/src/pipeline/project_tree/const_tree.rs b/src/pipeline/project_tree/const_tree.rs index 63d9b6e..25fff07 100644 --- a/src/pipeline/project_tree/const_tree.rs +++ b/src/pipeline/project_tree/const_tree.rs @@ -5,7 +5,7 @@ use hashbrown::HashMap; use crate::representations::tree::{ModEntry, ModMember, Module}; use crate::representations::Primitive; use crate::representations::location::Location; -use crate::foreign::ExternFn; +use crate::foreign::{ExternFn, Atomic, Atom}; use crate::interner::{Token, Interner}; use crate::ast::{Expr, Clause}; use crate::utils::{Substack, pushed}; @@ -17,12 +17,18 @@ pub enum ConstTree { Tree(HashMap, ConstTree>) } impl ConstTree { - pub fn xfn(xfn: impl ExternFn + 'static) -> Self { + pub fn primitive(primitive: Primitive) -> Self { Self::Const(Expr{ location: Location::Unknown, - value: Clause::P(Primitive::ExternFn(Box::new(xfn))) + value: Clause::P(primitive) }) } + pub fn xfn(xfn: impl ExternFn + 'static) -> Self { + Self::primitive(Primitive::ExternFn(Box::new(xfn))) + } + pub fn atom(atom: impl Atomic + 'static) -> Self { + Self::primitive(Primitive::Atom(Atom(Box::new(atom)))) + } pub fn tree( arr: impl IntoIterator, Self)> ) -> Self { diff --git a/src/pipeline/source_loader/load_source.rs b/src/pipeline/source_loader/load_source.rs index 88957db..1b10976 100644 --- a/src/pipeline/source_loader/load_source.rs +++ b/src/pipeline/source_loader/load_source.rs @@ -7,6 +7,7 @@ use crate::pipeline::split_name::split_name; use crate::interner::{Token, Interner}; use crate::pipeline::file_loader::{Loaded, load_text, IOResult}; +use crate::representations::sourcefile::FileEntry; use super::loaded_source::{LoadedSourceTable, LoadedSource}; use super::preparse::preparse; @@ -15,6 +16,7 @@ use super::preparse::preparse; fn load_abs_path_rec( abs_path: Token>>, table: &mut 
LoadedSourceTable, + prelude: &[FileEntry], i: &Interner, get_source: &impl Fn(Token>>) -> IOResult, is_injected: &impl Fn(&[Token]) -> bool @@ -39,7 +41,7 @@ fn load_abs_path_rec( .chain(iter::once(i.i(item))) .collect::>(); load_abs_path_rec( - i.i(&abs_subpath), table, i, get_source, is_injected + i.i(&abs_subpath), table, prelude, i, get_source, is_injected )? } return Ok(()); @@ -48,7 +50,7 @@ fn load_abs_path_rec( let text = load_text(i.i(filename), &get_source, i)?; let preparsed = preparse( filename.iter().map(|t| i.r(*t)).cloned().collect(), - text.as_str(), i + text.as_str(), prelude, i )?; table.insert(abs_path, LoadedSource{ text, preparsed: preparsed.clone() }); // recurse on all imported modules @@ -58,7 +60,9 @@ fn load_abs_path_rec( module, &import.nonglob_path(i), i )?; // recurse on imported module - load_abs_path_rec(i.i(&abs_pathv), table, i, get_source, is_injected) + load_abs_path_rec( + i.i(&abs_pathv), table, prelude, i, get_source, is_injected + ) }) } @@ -66,6 +70,7 @@ fn load_abs_path_rec( /// imports that aren't injected. pub fn load_source( targets: &[Token>>], + prelude: &[FileEntry], i: &Interner, get_source: &impl Fn(Token>>) -> IOResult, is_injected: &impl Fn(&[Token]) -> bool, @@ -75,6 +80,7 @@ pub fn load_source( load_abs_path_rec( *target, &mut table, + prelude, i, get_source, is_injected )? 
} diff --git a/src/pipeline/source_loader/preparse.rs b/src/pipeline/source_loader/preparse.rs index 22e40d8..904b66e 100644 --- a/src/pipeline/source_loader/preparse.rs +++ b/src/pipeline/source_loader/preparse.rs @@ -38,23 +38,27 @@ fn add_export( } /// Convert source lines into a module -fn to_module(src: &[FileEntry], i: &Interner) -> Rc> -{ - let imports = imports(src.iter()).cloned().collect::>(); - let mut items = src.iter().filter_map(|ent| match ent { +fn to_module( + src: &[FileEntry], + prelude: &[FileEntry], + i: &Interner +) -> Rc> { + let all_src = || src.iter().chain(prelude.iter()); + let imports = imports(all_src()).cloned().collect::>(); + let mut items = all_src().filter_map(|ent| match ent { FileEntry::Internal(Member::Namespace(name, data)) => { - let member = ModMember::Sub(to_module(data, i)); + let member = ModMember::Sub(to_module(data, prelude, i)); let entry = ModEntry{ exported: false, member }; Some((*name, entry)) } FileEntry::Exported(Member::Namespace(name, data)) => { - let member = ModMember::Sub(to_module(data, i)); + let member = ModMember::Sub(to_module(data, prelude, i)); let entry = ModEntry{ exported: true, member }; Some((*name, entry)) } _ => None }).collect::>(); - for file_entry in src { match file_entry { + for file_entry in all_src() { match file_entry { FileEntry::Comment(_) | FileEntry::Import(_) | FileEntry::Internal(Member::Namespace(..)) | FileEntry::Exported(Member::Namespace(..)) => (), @@ -83,8 +87,12 @@ fn to_module(src: &[FileEntry], i: &Interner) -> Rc> /// Preparse the module. 
At this stage, only the imports and /// names defined by the module can be parsed -pub fn preparse(file: Vec, source: &str, i: &Interner) --> Result> { +pub fn preparse( + file: Vec, + source: &str, + prelude: &[FileEntry], + i: &Interner, +) -> Result> { // Parse with no operators let ctx = ParsingContext::<&str>::new(&[], i, Rc::new(file.clone())); let entries = parse::parse(source, ctx) @@ -98,5 +106,5 @@ pub fn preparse(file: Vec, source: &str, i: &Interner) namespace: ns.into_iter().map(|t| i.r(t)).cloned().collect(), file: Rc::new(file.clone()) }.rc())?; - Ok(Preparsed(to_module(&normalized, i))) + Ok(Preparsed(to_module(&normalized, prelude, i))) } \ No newline at end of file diff --git a/src/rule/matcher_second/any_match.rs b/src/rule/matcher_second/any_match.rs index 904b96b..b1b4dba 100644 --- a/src/rule/matcher_second/any_match.rs +++ b/src/rule/matcher_second/any_match.rs @@ -8,6 +8,7 @@ pub fn any_match<'a>(matcher: &AnyMatcher, seq: &'a [Expr]) match matcher { AnyMatcher::Scalar(scalv) => scalv_match(scalv, seq), AnyMatcher::Vec{ left, mid, right } => { + if seq.len() < left.len() + right.len() {return None}; let left_split = left.len(); let right_split = seq.len() - right.len(); let mut state = scalv_match(left, &seq[..left_split])?; diff --git a/src/run_dir.rs b/src/run_dir.rs index 1f290ae..1ac48b3 100644 --- a/src/run_dir.rs +++ b/src/run_dir.rs @@ -28,24 +28,26 @@ export ...$a / ...$b:1 =1000=> (divide (...$a) (...$b)) export ...$a == ...$b =1002=> (equals (...$a) (...$b)) export ...$a ++ ...$b =1003=> (concatenate (...$a) (...$b)) -export do { ...$statement ; ...$rest:1 } =10_001=> ( +export do { ...$statement ; ...$rest:1 } =0x2p543=> ( statement (...$statement) do { ...$rest } ) -export do { ...$return } =10_000=> (...$return) +export do { ...$return } =0x1p543=> (...$return) -export statement (let $name = ...$value) ...$next =10_000=> ( +export statement (let $name = ...$value) ...$next =0x1p1000=> ( (\$name. 
...$next) (...$value) ) -export statement (cps $name = ...$operation) ...$next =10_001=> ( +export statement (cps $name = ...$operation) ...$next =0x2p1000=> ( (...$operation) \$name. ...$next ) -export statement (cps ...$operation) ...$next =10_000=> ( +export statement (cps ...$operation) ...$next =0x1p1000=> ( (...$operation) (...$next) ) -export if ...$cond then ...$true else ...$false:1 =5_000=> ( +export if ...$cond then ...$true else ...$false:1 =0x1p320=> ( ifthenelse (...$cond) (...$true) (...$false) ) + +export ::(,) "#; fn prelude_path(i: &Interner) -> Token>> @@ -100,7 +102,7 @@ pub fn run_dir(dir: &Path) { rule.bundle(&i) ) }); - println!("Repo dump: {}", repo.bundle(&i)); + // println!("Repo dump: {}", repo.bundle(&i)); let mut exec_table = HashMap::new(); for (name, source) in consts.iter() { // let nval = entrypoint(&i); let name = &nval; let source = &consts[name]; @@ -125,6 +127,7 @@ pub fn run_dir(dir: &Path) { println!("macro execution complete"); let ctx = interpreter::Context { symbols: &exec_table, + interner: &i, gas: None }; let entrypoint = exec_table.get(&entrypoint(&i))