forked from Orchid/orchid
September-October commit
- manual parser
- stl refinements
- all language constructs are now Send
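A minimal sketch of what the "all language constructs are now Send" point means in practice (not code from this repository; `Atomic` here is a simplified stand-in for the real trait): once every boxed construct is Send, values held by the interpreter can cross thread boundaries.

    use std::fmt::Debug;

    // Simplified stand-in: the real trait carries more supertraits and methods.
    trait Atomic: Debug + Send + 'static {}

    #[derive(Debug)]
    struct Counter(u64);
    impl Atomic for Counter {}

    fn main() {
        // `dyn Atomic: Send` holds because Send is a supertrait, so the boxed
        // value may be moved to another thread.
        let atom: Box<dyn Atomic> = Box::new(Counter(0));
        std::thread::spawn(move || println!("{atom:?}")).join().unwrap();
    }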
Cargo.lock (generated, 173 changed lines)
@@ -2,17 +2,6 @@
 # It is not intended for manual editing.
 version = 3
 
-[[package]]
-name = "ahash"
-version = "0.7.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
-dependencies = [
-"getrandom",
-"once_cell",
-"version_check",
-]
-
 [[package]]
 name = "ahash"
 version = "0.8.3"
@@ -137,16 +126,6 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
-
-[[package]]
-name = "chumsky"
-version = "0.9.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "23170228b96236b5a7299057ac284a321457700bc8c41a4476052f0f4ba5349d"
-dependencies = [
-"hashbrown 0.12.3",
-"stacker",
-]
 
 [[package]]
 name = "clap"
 version = "4.3.4"
@@ -204,12 +183,6 @@ dependencies = [
 "crossbeam-utils",
 ]
 
-[[package]]
-name = "convert_case"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
-
 [[package]]
 name = "cpufeatures"
 version = "0.2.7"
@@ -238,19 +211,6 @@ dependencies = [
 "typenum",
 ]
 
-[[package]]
-name = "derive_more"
-version = "0.99.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
-dependencies = [
-"convert_case",
-"proc-macro2",
-"quote",
-"rustc_version",
-"syn 1.0.109",
-]
-
 [[package]]
 name = "digest"
 version = "0.10.7"
@@ -261,16 +221,6 @@ dependencies = [
 "crypto-common",
 ]
 
-[[package]]
-name = "duplicate"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "de78e66ac9061e030587b2a2e75cc88f22304913c907b11307bca737141230cb"
-dependencies = [
-"heck",
-"proc-macro-error",
-]
-
 [[package]]
 name = "dyn-clone"
 version = "1.0.11"
@@ -320,17 +270,6 @@ dependencies = [
 "version_check",
 ]
 
-[[package]]
-name = "getrandom"
-version = "0.2.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
-dependencies = [
-"cfg-if",
-"libc",
-"wasi",
-]
-
 [[package]]
 name = "globset"
 version = "0.4.10"
@@ -344,22 +283,13 @@ dependencies = [
 "regex",
 ]
 
-[[package]]
-name = "hashbrown"
-version = "0.12.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
-dependencies = [
-"ahash 0.7.6",
-]
-
 [[package]]
 name = "hashbrown"
 version = "0.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
 dependencies = [
-"ahash 0.8.3",
+"ahash",
 "allocator-api2",
 ]
 
@@ -451,36 +381,32 @@ dependencies = [
 
 [[package]]
 name = "once_cell"
-version = "1.17.1"
+version = "1.18.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
+checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
 
 [[package]]
 name = "orchidlang"
 version = "0.2.2"
 dependencies = [
-"chumsky",
 "clap",
-"derive_more",
-"duplicate",
 "dyn-clone",
-"hashbrown 0.14.0",
+"hashbrown",
 "itertools",
 "ordered-float",
 "paste",
 "polling",
 "rust-embed",
 "take_mut",
-"thiserror",
 "trait-set",
 "unicode-segmentation",
 ]
 
 [[package]]
 name = "ordered-float"
-version = "3.7.0"
+version = "4.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2fc2dbde8f8a79f2102cc474ceb0ad68e3b80b85289ea62389b60e66777e4213"
+checksum = "e3a540f3e3b3d7929c884e46d093d344e4e5bdeed54d08bf007df50c93cc85d5"
 dependencies = [
 "num-traits",
 ]
@@ -511,30 +437,6 @@ dependencies = [
 "windows-sys",
 ]
 
-[[package]]
-name = "proc-macro-error"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
-dependencies = [
-"proc-macro-error-attr",
-"proc-macro2",
-"quote",
-"syn 1.0.109",
-"version_check",
-]
-
-[[package]]
-name = "proc-macro-error-attr"
-version = "1.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
-dependencies = [
-"proc-macro2",
-"quote",
-"version_check",
-]
-
 [[package]]
 name = "proc-macro2"
 version = "1.0.56"
@@ -544,15 +446,6 @@ dependencies = [
 "unicode-ident",
 ]
 
-[[package]]
-name = "psm"
-version = "0.1.21"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5787f7cda34e3033a72192c018bc5883100330f362ef279a8cbccfce8bb4e874"
-dependencies = [
-"cc",
-]
-
 [[package]]
 name = "quote"
 version = "1.0.26"
@@ -614,15 +507,6 @@ dependencies = [
 "walkdir",
 ]
 
-[[package]]
-name = "rustc_version"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
-dependencies = [
-"semver",
-]
-
 [[package]]
 name = "rustix"
 version = "0.37.19"
@@ -659,12 +543,6 @@ dependencies = [
 "winapi-util",
 ]
 
-[[package]]
-name = "semver"
-version = "1.0.18"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918"
-
 [[package]]
 name = "serde"
 version = "1.0.160"
@@ -682,19 +560,6 @@ dependencies = [
 "digest",
 ]
 
-[[package]]
-name = "stacker"
-version = "0.1.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c886bd4480155fd3ef527d45e9ac8dd7118a898a46530b7b94c3e21866259fce"
-dependencies = [
-"cc",
-"cfg-if",
-"libc",
-"psm",
-"winapi",
-]
-
 [[package]]
 name = "strsim"
 version = "0.10.0"
@@ -729,26 +594,6 @@ version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60"
-
-[[package]]
-name = "thiserror"
-version = "1.0.40"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac"
-dependencies = [
-"thiserror-impl",
-]
-
-[[package]]
-name = "thiserror-impl"
-version = "1.0.40"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
-dependencies = [
-"proc-macro2",
-"quote",
-"syn 2.0.13",
-]
-
 [[package]]
 name = "tracing"
 version = "0.1.37"
@@ -817,12 +662,6 @@ dependencies = [
 "winapi-util",
 ]
 
-[[package]]
-name = "wasi"
-version = "0.11.0+wasi-snapshot-preview1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
-
 [[package]]
 name = "winapi"
 version = "0.3.9"
--- next file ---
@@ -22,18 +22,14 @@ doc = false
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-thiserror = "1.0"
-chumsky = "0.9"
 hashbrown = "0.14"
-ordered-float = "3.7"
+ordered-float = "4.1"
 itertools = "0.11"
 dyn-clone = "1.0"
 clap = { version = "4.3", features = ["derive"] }
 trait-set = "0.3"
 paste = "1.0"
 rust-embed = { version = "8.0", features = ["include-exclude"] }
-duplicate = "1.0.0"
 take_mut = "0.2.2"
 unicode-segmentation = "1.10.1"
 polling = "3.0.0"
-derive_more = "0.99.17"
--- next file ---
@@ -7,9 +7,7 @@ An experimental lazy, pure functional programming language designed to be embedd
 
 ## Usage
 
-TODO
+The standalone interpreter can be built as the binary target from this package. The language tutorial and standard library documentation is at [www.lbfalvy.com/orchid-reference](https://www.lbfalvy.com/orchid-reference/). Embedder guide and Rust API documentation are coming soon.
 
-I need to write a few articles explaining individual fragments of the language, and accurately document everything. Writing tutorials at this stage is not really worth it.
-
 ## Design
 
@@ -35,4 +33,4 @@ Orchids and mangrove trees form complex ecosystems; The flowers persuade the tre
 
 ## Contribution
 
 All contributions are welcome. For the time being, use the issue tracker to discuss ideas.
ROADMAP.md (11 changed lines)
@@ -2,12 +2,6 @@ This document is a wishlist, its items aren't ordered in any way other than inli
 
 # Language
 
-## Operator declarations
-A dedicated (exportable) line type for declaring operators. Still just names, only you can write them next to other things without whitespace
-
-- ops may not contain c-ident-safe characters
-- clusters of operator characters are broken up with a greedy algorithm
-
 ## Typeclasses
 Elixir-style protocols probably, only with n-ary dispatch which I saw in SICP-js
 
@@ -37,11 +31,6 @@ Error tokens with rules to lift them out. Kinda depends on preservation of locat
 ## Async
 Join allows to run code when a tuple of pending events all resolve on the event poller
 
-## New: FS
-Exposes tree operations to Orchid
-Uses existing IO to open and read files
-Uses the event bus to read directories in batches without blocking other Orchid code
-
 ## New: Network
 Event-driven I/O with single-fire events and resubscription to relay backpressure to the OS. Initially TCP
 
--- next file ---
@@ -1,4 +1,4 @@
-import std::(proc::*, to_float, to_string, panic, str::char_at)
+import std::(to_float, to_string, panic, string::char_at)
 
 export const main := do{
 cps print "left operand: ";
@@ -6,7 +6,6 @@ export const main := do{
 let a = to_float data;
 cps print "operator: ";
 cps op = readln;
-let op = char_at op 0;
 cps println ("you selected \"" ++ op ++ "\"");
 cps print "right operand: ";
 cps data = readln;
--- next file ---
@@ -1,18 +1,17 @@
 import system::(io, directfs, async)
-import std::proc::*
 import std::(to_string, to_uint, inspect)
 
-const folder_view := \path.\next. do{
+const folder_view := \path. \next. do{
 cps println $ "Contents of " ++ directfs::os_print path;
 cps entries = async::block_on $ directfs::read_dir path;
 cps list::enumerate entries
-|> list::map (pass \id. pass \name.\is_dir.
+|> list::map (pass \id. pass \name. \is_dir.
 println $ to_string id ++ ": " ++ directfs::os_print name ++ if is_dir then "/" else ""
 )
 |> list::chain;
 cps print "select an entry, or .. to move up: ";
 cps choice = readln;
-if (choice == "..\n") then do {
+if (choice == "..") then do {
 let parent_path = directfs::pop_path path
 |> option::unwrap
 |> tuple::pick 0 2;

--- next file ---
@@ -1,5 +1,7 @@
+import std::exit_status
+
 const main := (
 println "Hello, world!"
-"success"
+exit_status::success
 )
 -- main := "Hello, World!\n"

--- next file ---
@@ -1,4 +1,4 @@
-import std::(proc::*, to_string)
+import std::to_string
 
 export const main := do{
 let foo = list::new[1, 2, 3, 4, 5, 6];
@@ -6,7 +6,7 @@ export const main := do{
 let sum = bar
 |> list::skip 2
 |> list::take 3
-|> list::reduce (\a.\b. a + b)
+|> list::reduce (\a. \b. a + b)
 |> option::unwrap;
 cps println $ to_string sum;
 0

--- next file ---
@@ -1,4 +1,4 @@
-import std::(proc::*, to_string)
+import std::to_string
 
 export const main := do{
 let foo = map::new[
--- next file ---
@@ -3,13 +3,13 @@ mod cli;
 use std::fs::File;
 use std::io::BufReader;
 use std::path::PathBuf;
-use std::process;
+use std::process::ExitCode;
 
 use clap::Parser;
 use itertools::Itertools;
 use orchidlang::facade::{Environment, PreMacro};
 use orchidlang::systems::asynch::AsynchSystem;
-use orchidlang::systems::stl::StlConfig;
+use orchidlang::systems::stl::{ExitStatus, StlConfig};
 use orchidlang::systems::{directfs, io, scheduler};
 use orchidlang::{ast, interpreted, interpreter, Interner, Sym, VName};
 
@@ -80,7 +80,7 @@ fn print_for_debug(e: &ast::Expr<Sym>) {
 }
 
 /// A little utility to step through the resolution of a macro set
-pub fn macro_debug(premacro: PreMacro, sym: Sym) {
+pub fn macro_debug(premacro: PreMacro, sym: Sym) -> ExitCode {
 let (mut code, location) = (premacro.consts.get(&sym))
 .unwrap_or_else(|| {
 panic!(
@@ -111,7 +111,7 @@ pub fn macro_debug(premacro: PreMacro, sym: Sym) {
 },
 "p" | "print" => print_for_debug(&code),
 "d" | "dump" => print!("Rules: {}", premacro.repo),
-"q" | "quit" => return,
+"q" | "quit" => return ExitCode::SUCCESS,
 "h" | "help" => print!(
 "Available commands:
 \t<blank>, n, next\t\ttake a step
@@ -128,7 +128,7 @@ pub fn macro_debug(premacro: PreMacro, sym: Sym) {
 }
 }
 
-pub fn main() {
+pub fn main() -> ExitCode {
 let args = Args::parse();
 args.chk_proj().unwrap_or_else(|e| panic!("{e}"));
 let dir = PathBuf::try_from(args.dir).unwrap();
@@ -150,7 +150,7 @@ pub fn main() {
 let premacro = env.load_dir(&dir, &main).unwrap();
 if args.dump_repo {
 println!("Parsed rules: {}", premacro.repo);
-return;
+return ExitCode::SUCCESS;
 }
 if !args.macro_debug.is_empty() {
 let sym = i.i(&to_vname(&args.macro_debug, &i));
@@ -160,15 +160,15 @@ pub fn main() {
 proc.validate_refs().unwrap();
 let main = interpreted::Clause::Constant(i.i(&main)).wrap();
 let ret = proc.run(main, None).unwrap();
-let interpreter::Return { gas, state, inert } = ret;
+let interpreter::Return { state, inert, .. } = ret;
 drop(proc);
-if inert {
-println!("Settled at {}", state.expr().clause);
-if let Some(g) = gas {
-println!("Remaining gas: {g}")
-}
-} else if gas == Some(0) {
-eprintln!("Ran out of gas!");
-process::exit(-1);
+assert!(inert, "Gas is not used, only inert data should be yielded");
+match state.clone().downcast::<ExitStatus>() {
+Ok(ExitStatus::Success) => ExitCode::SUCCESS,
+Ok(ExitStatus::Failure) => ExitCode::FAILURE,
+Err(_) => {
+println!("{}", state.expr().clause);
+ExitCode::SUCCESS
+},
 }
 }
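A hedged, minimal illustration of the std::process::ExitCode pattern the hunk above switches to (not part of the diff; `Status` and `run` are made-up stand-ins for the crate's ExitStatus and interpreter run): main returns an ExitCode instead of calling process::exit.

    use std::process::ExitCode;

    // Hypothetical stand-in for the interpreter's final result.
    #[allow(dead_code)]
    enum Status { Success, Failure }

    fn run() -> Result<Status, String> {
        Ok(Status::Success) // pretend the program settled successfully
    }

    fn main() -> ExitCode {
        // Mapping the result to an exit code replaces scattered process::exit calls.
        match run() {
            Ok(Status::Success) => ExitCode::SUCCESS,
            Ok(Status::Failure) => ExitCode::FAILURE,
            Err(msg) => {
                eprintln!("{msg}");
                ExitCode::FAILURE
            },
        }
    }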
--- next file ---
@@ -19,12 +19,12 @@ impl AssertionError {
 location: Location,
 message: &'static str,
 ) -> Result<T, Rc<dyn ExternError>> {
-return Err(Self::ext(location, message));
+Err(Self::ext(location, message))
 }
 
 /// Construct and upcast to [ExternError]
 pub fn ext(location: Location, message: &'static str) -> Rc<dyn ExternError> {
-return Self { location, message }.into_extern();
+Self { location, message }.into_extern()
 }
 }
 

--- next file ---
@@ -1,4 +1,4 @@
-use std::rc::Rc;
+use std::sync::Arc;
 
 use itertools::Itertools;
 
@@ -10,9 +10,9 @@ use crate::VName;
 #[derive(Clone, Debug, PartialEq, Eq, Hash)]
 pub struct ImportAll {
 /// The file containing the offending import
-pub offender_file: Rc<VName>,
+pub offender_file: Arc<VName>,
 /// The module containing the offending import
-pub offender_mod: Rc<VName>,
+pub offender_mod: Arc<VName>,
 }
 impl ProjectError for ImportAll {
 fn description(&self) -> &str { "a top-level glob import was used" }
--- next file ---
@@ -8,6 +8,8 @@ mod project_error;
 mod too_many_supers;
 mod unexpected_directory;
 mod visibility_mismatch;
+mod assertion_error;
+mod runtime_error;
 
 pub use conflicting_roles::ConflictingRoles;
 pub use import_all::ImportAll;
@@ -18,3 +20,5 @@ pub use project_error::{ErrorPosition, ProjectError, ProjectResult};
 pub use too_many_supers::TooManySupers;
 pub use unexpected_directory::UnexpectedDirectory;
 pub use visibility_mismatch::VisibilityMismatch;
+pub use assertion_error::AssertionError;
+pub use runtime_error::RuntimeError;
--- next file ---
@@ -1,4 +1,4 @@
-use std::rc::Rc;
+use std::sync::Arc;
 
 use super::{ErrorPosition, ProjectError};
 use crate::representations::location::Location;
@@ -25,14 +25,14 @@ impl ProjectError for NotExported {
 Box::new(
 [
 ErrorPosition {
-location: Location::File(Rc::new(self.file.clone())),
+location: Location::File(Arc::new(self.file.clone())),
 message: Some(format!(
 "{} isn't exported",
 Interner::extern_all(&self.subpath).join("::")
 )),
 },
 ErrorPosition {
-location: Location::File(Rc::new(self.referrer_file.clone())),
+location: Location::File(Arc::new(self.referrer_file.clone())),
 message: Some(format!(
 "{} cannot see this symbol",
 Interner::extern_all(&self.referrer_subpath).join("::")
--- next file ---
@@ -1,71 +0,0 @@
-use std::rc::Rc;
-
-use super::ProjectError;
-use crate::representations::project::ProjectModule;
-#[allow(unused)] // For doc
-use crate::tree::Module;
-use crate::tree::WalkError;
-use crate::{Interner, Location, NameLike, Tok, VName};
-
-/// Error produced when an import refers to a nonexistent module
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
-pub struct NotFound {
-/// The module that imported the invalid path
-pub source: Option<VName>,
-/// The file not containing the expected path
-pub file: VName,
-/// The invalid import path
-pub subpath: VName,
-}
-impl NotFound {
-/// Produce this error from the parameters of [Module]`::walk_ref` and a
-/// [WalkError]
-///
-/// # Panics
-///
-/// - if `path` is shorter than the `pos` of the error
-/// - if a walk up to but not including `pos` fails
-///
-/// Basically, if `e` was not produced by the `walk*` methods called on
-/// `path`.
-#[must_use]
-pub fn from_walk_error(
-source: &[Tok<String>],
-prefix: &[Tok<String>],
-path: &[Tok<String>],
-orig: &ProjectModule<impl NameLike>,
-e: WalkError,
-) -> Self {
-let last_mod =
-orig.walk_ref(&path[..e.pos], false).expect("error occured on next step");
-let mut whole_path = prefix.iter().chain(path.iter()).cloned();
-if let Some(file) = &last_mod.extra.file {
-Self {
-source: Some(source.to_vec()),
-file: whole_path.by_ref().take(file.len()).collect(),
-subpath: whole_path.collect(),
-}
-} else {
-Self {
-source: Some(source.to_vec()),
-file: whole_path.collect(),
-subpath: Vec::new(),
-}
-}
-}
-}
-impl ProjectError for NotFound {
-fn description(&self) -> &str {
-"an import refers to a nonexistent module"
-}
-fn message(&self) -> String {
-format!(
-"module {} in {} was not found",
-Interner::extern_all(&self.subpath).join("::"),
-Interner::extern_all(&self.file).join("/"),
-)
-}
-fn one_position(&self) -> crate::Location {
-Location::File(Rc::new(Interner::extern_all(&self.file)))
-}
-}
--- next file ---
@@ -17,12 +17,12 @@ impl RuntimeError {
 message: String,
 operation: &'static str,
 ) -> Result<T, Rc<dyn ExternError>> {
-return Err(Self { message, operation }.into_extern());
+Err(Self { message, operation }.into_extern())
 }
 
 /// Construct and upcast to [ExternError]
 pub fn ext(message: String, operation: &'static str) -> Rc<dyn ExternError> {
-return Self { message, operation }.into_extern();
+Self { message, operation }.into_extern()
 }
 }
 
--- next file ---
@@ -1,4 +1,4 @@
-use std::rc::Rc;
+use std::sync::Arc;
 
 use super::ProjectError;
 use crate::{Interner, Location, VName};
@@ -16,7 +16,7 @@ impl ProjectError for UnexpectedDirectory {
 to a directory"
 }
 fn one_position(&self) -> crate::Location {
-Location::File(Rc::new(self.path.clone()))
+Location::File(Arc::new(self.path.clone()))
 }
 
 fn message(&self) -> String {
--- next file ---
@@ -1,4 +1,4 @@
-use std::rc::Rc;
+use std::sync::Arc;
 
 use super::project_error::ProjectError;
 use crate::representations::location::Location;
@@ -23,6 +23,6 @@ impl ProjectError for VisibilityMismatch {
 )
 }
 fn one_position(&self) -> Location {
-Location::File(Rc::new(self.file.clone()))
+Location::File(Arc::new(self.file.clone()))
 }
 }
--- next file ---
@@ -42,6 +42,8 @@ impl<'a> Environment<'a> {
 let system_tree = from_const_tree(sys.constants.clone(), &sys.vname(i));
 tree = ProjectTree(never::unwrap_always(tree.0.overlay(system_tree.0)));
 }
+let mut lexer_plugins = vec![];
+let mut line_parsers = vec![];
 let mut prelude = vec![];
 for sys in systems.iter() {
 if !sys.code.is_empty() {
@@ -50,9 +52,13 @@
 &|k| sys.load_file(k),
 &tree,
 &prelude,
+&lexer_plugins,
+&line_parsers,
 i,
 )?;
 }
+lexer_plugins.extend(sys.lexer_plugin.as_deref().iter());
+line_parsers.extend(sys.line_parser.as_deref().iter());
 prelude.extend_from_slice(&sys.prelude);
 }
 Ok(CompiledEnv { prelude, tree, systems })
@@ -67,11 +73,19 @@
 let i = self.i;
 let CompiledEnv { prelude, systems, tree } = self.compile()?;
 let file_cache = file_loader::mk_dir_cache(dir.to_path_buf());
+let lexer_plugins = (systems.iter())
+.filter_map(|s| s.lexer_plugin.as_deref())
+.collect::<Vec<_>>();
+let line_parsers = (systems.iter())
+.filter_map(|s| s.line_parser.as_deref())
+.collect::<Vec<_>>();
 let vname_tree = parse_layer(
 iter::once(target),
 &|path| file_cache.find(path),
 &tree,
 &prelude,
+&lexer_plugins,
+&line_parsers,
 i,
 )?;
 let tree = vname_to_sym_tree(vname_tree, i);
--- next file ---
@@ -1,5 +1,5 @@
 use std::iter;
-use std::rc::Rc;
+use std::sync::Arc;
 
 use hashbrown::HashMap;
 
@@ -50,7 +50,7 @@ impl<'a> PreMacro<'a> {
 .unwrap_or_else(|_| panic!("path sourced from symbol names"));
 (origin.extra.file.as_ref()).cloned()
 })
-.map(|p| Location::File(Rc::new(p)))
+.map(|p| Location::File(Arc::new(p)))
 .unwrap_or(Location::Unknown);
 (name, (expr, location))
 })
--- next file ---
@@ -2,6 +2,7 @@ use hashbrown::HashMap;
 
 use crate::error::{ErrorPosition, ProjectError};
 use crate::interpreter::HandlerTable;
+use crate::parse::{LexerPlugin, LineParser};
 use crate::pipeline::file_loader::{IOResult, Loaded};
 use crate::sourcefile::FileEntry;
 use crate::utils::boxed_iter::box_empty;
@@ -23,6 +24,13 @@ pub struct System<'a> {
 pub prelude: Vec<FileEntry>,
 /// Handlers for actions defined in this system
 pub handlers: HandlerTable<'a>,
+/// Custom lexer for the source code representation atomic data.
+/// These take priority over builtin lexers so the syntax they
+/// match should be unambiguous
+pub lexer_plugin: Option<Box<dyn LexerPlugin>>,
+/// Parser that processes custom line types into their representation in the
+/// module tree
+pub line_parser: Option<Box<dyn LineParser>>,
 }
 impl<'a> System<'a> {
 /// Intern the name of the system so that it can be used as an Orchid
--- next file ---
@@ -1,13 +1,16 @@
 use std::any::Any;
 use std::fmt::Debug;
+use std::rc::Rc;
 
 use dyn_clone::DynClone;
 
-use crate::interpreted::ExprInst;
+use super::ExternError;
+use crate::ddispatch::request;
+use crate::error::AssertionError;
+use crate::interpreted::{ExprInst, TryFromExprInst};
 use crate::interpreter::{Context, RuntimeError};
 use crate::representations::interpreted::Clause;
 use crate::utils::ddispatch::Responder;
-use crate::Primitive;
 
 /// Information returned by [Atomic::run]. This mirrors
 /// [crate::interpreter::Return] but with a clause instead of an Expr.
@@ -24,8 +27,18 @@ pub struct AtomicReturn {
 /// Returned by [Atomic::run]
 pub type AtomicResult = Result<AtomicReturn, RuntimeError>;
 
+/// Trait for things that are _definitely_ equal.
+pub trait StrictEq {
+/// must return true if the objects were produced via the exact same sequence
+/// of transformations, including any relevant context data. Must return false
+/// if the objects are of different type, or if their type is [PartialEq]
+/// and [PartialEq::eq] returns false.
+fn strict_eq(&self, other: &dyn Any) -> bool;
+}
+
 /// Functionality the interpreter needs to handle a value
-pub trait Atomic: Any + Debug + DynClone + Responder
+pub trait Atomic:
+Any + Debug + DynClone + StrictEq + Responder + Send
 where
 Self: 'static,
 {
@@ -54,7 +67,7 @@
 where
 Self: Sized,
 {
-Clause::P(Primitive::Atom(Atom(Box::new(self))))
+Clause::Atom(Atom(Box::new(self)))
 }
 
 /// Wrap the atom in a new expression instance to be placed in a tree
@@ -73,7 +86,7 @@
 /// inert at which point the [Atom] will validate and process the argument,
 /// returning a different [Atom] intended for processing by external code, a new
 /// [ExternFn] to capture an additional argument, or an Orchid expression
-/// to pass control back to the interpreter.btop
+/// to pass control back to the interpreter.
 pub struct Atom(pub Box<dyn Atomic>);
 impl Atom {
 /// Wrap an [Atomic] in a type-erased box
@@ -84,23 +97,26 @@ impl Atom {
 /// Get the contained data
 #[must_use]
 pub fn data(&self) -> &dyn Atomic { self.0.as_ref() as &dyn Atomic }
-/// Attempt to downcast contained data to a specific type
-pub fn try_cast<T: Atomic>(self) -> Result<T, Self> {
+/// Test the type of the contained data without downcasting
+#[must_use]
+pub fn is<T: Atomic>(&self) -> bool { self.data().as_any_ref().is::<T>() }
+/// Downcast contained data, panic if it isn't the specified type
+#[must_use]
+pub fn downcast<T: Atomic>(self) -> T {
+*self.0.as_any().downcast().expect("Type mismatch on Atom::cast")
+}
+/// Normalize the contained data
+pub fn run(self, ctx: Context) -> AtomicResult { self.0.run(ctx) }
+/// Request a delegate from the encapsulated data
+pub fn request<T: 'static>(&self) -> Option<T> { request(self.0.as_ref()) }
+/// Downcast the atom to a concrete atomic type, or return the original atom
+/// if it is not the specified type
+pub fn try_downcast<T: Atomic>(self) -> Result<T, Self> {
 match self.0.as_any_ref().is::<T>() {
 true => Ok(*self.0.as_any().downcast().expect("checked just above")),
 false => Err(self),
 }
 }
-/// Test the type of the contained data without downcasting
-#[must_use]
-pub fn is<T: 'static>(&self) -> bool { self.data().as_any_ref().is::<T>() }
-/// Downcast contained data, panic if it isn't the specified type
-#[must_use]
-pub fn cast<T: 'static>(self) -> T {
-*self.0.as_any().downcast().expect("Type mismatch on Atom::cast")
-}
-/// Normalize the contained data
-pub fn run(self, ctx: Context) -> AtomicResult { self.0.run(ctx) }
 }
 
 impl Clone for Atom {
@@ -109,6 +125,16 @@ impl Clone for Atom {
 
 impl Debug for Atom {
 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-write!(f, "##ATOM[{:?}]##", self.data())
+write!(f, "{:?}", self.data())
+}
+}
+
+impl TryFromExprInst for Atom {
+fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>> {
+let loc = exi.location();
+match exi.expr_val().clause {
+Clause::Atom(a) => Ok(a),
+_ => AssertionError::fail(loc, "atom"),
+}
 }
 }
--- next file ---
@@ -7,12 +7,12 @@ use trait_set::trait_set;
 use super::{Atomic, ExternFn, InertAtomic, XfnResult};
 use crate::interpreted::{Clause, ExprInst};
 use crate::interpreter::{Context, HandlerRes};
-use crate::utils::pure_push::pushed_ref;
+use crate::utils::pure_seq::pushed_ref;
 use crate::ConstTree;
 
 trait_set! {
 /// A "well behaved" type that can be used as payload in a CPS box
-pub trait CPSPayload = Clone + Debug + 'static;
+pub trait CPSPayload = Clone + Debug + Send + 'static;
 /// A function to handle a CPS box with a specific payload
 pub trait CPSHandler<T: CPSPayload> = FnMut(&T, &ExprInst) -> HandlerRes;
 }
--- next file ---
@@ -3,13 +3,12 @@ use std::fmt::{Debug, Display};
 use std::hash::Hash;
 use std::rc::Rc;
 
-use dyn_clone::DynClone;
+use dyn_clone::{DynClone, clone_box};
 
 use super::XfnResult;
 use crate::interpreted::ExprInst;
 use crate::interpreter::Context;
 use crate::representations::interpreted::Clause;
-use crate::Primitive;
 
 /// Errors produced by external code
 pub trait ExternError: Display {
@@ -34,7 +33,7 @@ impl Error for dyn ExternError {}
 /// Represents an externally defined function from the perspective of
 /// the executor. Since Orchid lacks basic numerical operations,
 /// these are also external functions.
-pub trait ExternFn: DynClone {
+pub trait ExternFn: DynClone + Send {
 /// Display name of the function
 #[must_use]
 fn name(&self) -> &str;
@@ -50,7 +49,7 @@ pub trait ExternFn: DynClone {
 where
 Self: Sized + 'static,
 {
-Clause::P(Primitive::ExternFn(Box::new(self)))
+Clause::ExternFn(ExFn(Box::new(self)))
 }
 }
 
@@ -68,3 +67,21 @@ impl Debug for dyn ExternFn {
 write!(f, "##EXTERN[{}]##", self.name())
 }
 }
+
+/// Represents a black box function that can be applied to a [Clause] to produce
+/// a new [Clause], typically an [Atom] representing external work, a new [ExFn]
+/// to take additional arguments, or an Orchid tree to return control to the
+/// interpreter
+#[derive(Debug)]
+pub struct ExFn(pub Box<dyn ExternFn + 'static>);
+impl ExFn {
+/// Combine the function with an argument to produce a new clause
+pub fn apply(self, arg: ExprInst, ctx: Context) -> XfnResult<Clause> {
+self.0.apply(arg, ctx)
+}
+}
+impl Clone for ExFn {
+fn clone(&self) -> Self {
+Self(clone_box(self.0.as_ref()))
+}
+}
--- next file ---
@@ -2,6 +2,7 @@ use std::fmt::Debug;
 use std::marker::PhantomData;
 use std::rc::Rc;
 
+use super::atom::StrictEq;
 use super::{
 Atomic, AtomicResult, AtomicReturn, ExternError, ExternFn, XfnResult,
 };
@@ -9,7 +10,7 @@ use crate::ddispatch::Responder;
 use crate::interpreted::{Clause, ExprInst, TryFromExprInst};
 use crate::interpreter::{run, Context, Return};
 use crate::systems::codegen::{opt, res};
-use crate::{Literal, OrcString};
+use crate::OrcString;
 
 /// A trait for things that are infallibly convertible to [Clause]. These types
 /// can be returned by callbacks passed to the [super::xfn_1ary] family of
@@ -17,6 +18,8 @@ use crate::{Literal, OrcString};
 pub trait ToClause: Clone {
 /// Convert the type to a [Clause].
 fn to_clause(self) -> Clause;
+/// Convert to an expression instance via [ToClause].
+fn to_exi(self) -> ExprInst { self.to_clause().wrap() }
 }
 
 impl<T: Atomic + Clone> ToClause for T {
@@ -28,14 +31,8 @@ impl ToClause for Clause {
 impl ToClause for ExprInst {
 fn to_clause(self) -> Clause { self.expr_val().clause }
 }
-impl ToClause for Literal {
-fn to_clause(self) -> Clause { self.into() }
-}
-impl ToClause for u64 {
-fn to_clause(self) -> Clause { Literal::Uint(self).into() }
-}
 impl ToClause for String {
-fn to_clause(self) -> Clause { OrcString::from(self).cls() }
+fn to_clause(self) -> Clause { OrcString::from(self).atom_cls() }
 }
 impl<T: ToClause> ToClause for Option<T> {
 fn to_clause(self) -> Clause { opt(self.map(|t| t.to_clause().wrap())) }
@@ -59,31 +56,28 @@ pub struct Param<T, U, F> {
 _t: PhantomData<T>,
 _u: PhantomData<U>,
 }
+unsafe impl<T, U, F: Send> Send for Param<T, U, F> {}
 impl<T, U, F> Param<T, U, F> {
 /// Wrap a new function in a parametric struct
 pub fn new(f: F) -> Self
 where
 F: FnOnce(T) -> Result<U, Rc<dyn ExternError>>,
 {
-Self { data: f, _t: PhantomData::default(), _u: PhantomData::default() }
+Self { data: f, _t: PhantomData, _u: PhantomData }
 }
 /// Take out the function
 pub fn get(self) -> F { self.data }
 }
 impl<T, U, F: Clone> Clone for Param<T, U, F> {
 fn clone(&self) -> Self {
-Self {
-data: self.data.clone(),
-_t: PhantomData::default(),
-_u: PhantomData::default(),
-}
+Self { data: self.data.clone(), _t: PhantomData, _u: PhantomData }
 }
 }
 
 impl<
 T: 'static + TryFromExprInst,
 U: 'static + ToClause,
-F: 'static + Clone + FnOnce(T) -> Result<U, Rc<dyn ExternError>>,
+F: 'static + Clone + Send + FnOnce(T) -> Result<U, Rc<dyn ExternError>>,
 > ToClause for Param<T, U, F>
 {
 fn to_clause(self) -> Clause { self.xfn_cls() }
@@ -93,6 +87,11 @@ struct FnMiddleStage<T, U, F> {
 argument: ExprInst,
 f: Param<T, U, F>,
 }
+impl<T, U, F> StrictEq for FnMiddleStage<T, U, F> {
+fn strict_eq(&self, _other: &dyn std::any::Any) -> bool {
+unimplemented!("This should never be able to appear in a pattern")
+}
+}
+
 impl<T, U, F: Clone> Clone for FnMiddleStage<T, U, F> {
 fn clone(&self) -> Self {
@@ -110,7 +109,7 @@ impl<T, U, F> Responder for FnMiddleStage<T, U, F> {}
 impl<
 T: 'static + TryFromExprInst,
 U: 'static + ToClause,
-F: 'static + Clone + FnOnce(T) -> Result<U, Rc<dyn ExternError>>,
+F: 'static + Clone + FnOnce(T) -> Result<U, Rc<dyn ExternError>> + Send,
 > Atomic for FnMiddleStage<T, U, F>
 {
 fn as_any(self: Box<Self>) -> Box<dyn std::any::Any> { self }
@@ -128,7 +127,7 @@ impl<
 impl<
 T: 'static + TryFromExprInst,
 U: 'static + ToClause,
-F: 'static + Clone + FnOnce(T) -> Result<U, Rc<dyn ExternError>>,
+F: 'static + Clone + Send + FnOnce(T) -> Result<U, Rc<dyn ExternError>>,
 > ExternFn for Param<T, U, F>
 {
 fn name(&self) -> &str { "anonymous Rust function" }
@@ -155,16 +154,16 @@ pub mod constructors {
 " Orchid function. See also Constraints summarized:\n\n"
 "- the callback must live as long as `'static`\n"
 "- All arguments must implement [TryFromExprInst]\n"
-"- all but the last argument must implement [Clone]\n"
+"- all but the last argument must implement [Clone] and [Send]\n"
 "- the return type must implement [ToClause].\n\n"
 ]
 #[doc = "Other arities: " $( "[xfn_" $alt "ary], " )+ ]
 pub fn [< xfn_ $number ary >] <
-$( $t : TryFromExprInst + Clone + 'static, )*
+$( $t : TryFromExprInst + Clone + Send + 'static, )*
 TLast: TryFromExprInst + 'static,
-TReturn: ToClause + 'static,
+TReturn: ToClause + Send + 'static,
 TFunction: FnOnce( $( $t , )* TLast )
--> Result<TReturn, Rc<dyn ExternError>> + Clone + 'static
+-> Result<TReturn, Rc<dyn ExternError>> + Clone + Send + 'static
 >(function: TFunction) -> impl ExternFn {
 xfn_variant!(@BODY_LOOP function
 ( $( ( $t [< $t:lower >] ) )* )
@@ -2,22 +2,25 @@ use std::any::Any;
use std::fmt::Debug;
use std::rc::Rc;

+ use ordered_float::NotNan;
+
+ use super::atom::StrictEq;
use super::{AtomicResult, AtomicReturn, ExternError};
+ use crate::error::AssertionError;
#[allow(unused)] // for doc
// use crate::define_fn;
use crate::foreign::Atomic;
use crate::interpreted::{Clause, Expr, ExprInst, TryFromExprInst};
use crate::interpreter::Context;
- use crate::systems::AssertionError;
+ use crate::systems::stl::Numeric;
use crate::utils::ddispatch::{Request, Responder};
- use crate::Primitive;

/// A proxy trait that implements [Atomic] for blobs of data in Rust code that
/// cannot be processed and always report inert. Since these are expected to be
/// parameters of functions defined with [define_fn] it also automatically
/// implements [TryFromExprInst] so that a conversion doesn't have to be
/// provided in argument lists.
- pub trait InertAtomic: Debug + Clone + 'static {
+ pub trait InertAtomic: Debug + Clone + Send + 'static {
/// Typename to be shown in the error when a conversion from [ExprInst] fails
#[must_use]
fn type_str() -> &'static str;
@@ -25,9 +28,29 @@ pub trait InertAtomic: Debug + Clone + 'static {
/// you need it, but behaves exactly as the default implementation.
#[allow(unused_mut, unused_variables)] // definition should show likely usage
fn respond(&self, mut request: Request) {}
+ /// Equality comparison used by the pattern matcher. Since the pattern matcher
+ /// only works with parsed code, you only need to implement this if your type
+ /// is directly parseable.
+ ///
+ /// If your type implements [PartialEq], this can simply be implemented as
+ /// ```ignore
+ /// fn strict_eq(&self, other: &Self) -> bool { self == other }
+ /// ```
+ fn strict_eq(&self, _: &Self) -> bool { false }
+ }
+ impl<T: InertAtomic> StrictEq for T {
+ fn strict_eq(&self, other: &dyn Any) -> bool {
+ other.downcast_ref().map_or(false, |other| self.strict_eq(other))
+ }
}
impl<T: InertAtomic> Responder for T {
- fn respond(&self, request: Request) { self.respond(request) }
+ fn respond(&self, mut request: Request) {
+ if request.can_serve::<T>() {
+ request.serve(self.clone())
+ } else {
+ self.respond(request)
+ }
+ }
}
impl<T: InertAtomic> Atomic for T {
fn as_any(self: Box<Self>) -> Box<dyn Any> { self }
@@ -42,7 +65,7 @@ impl<T: InertAtomic> TryFromExprInst for T {
fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>> {
let Expr { clause, location } = exi.expr_val();
match clause {
- Clause::P(Primitive::Atom(a)) => match a.0.as_any().downcast() {
+ Clause::Atom(a) => match a.0.as_any().downcast() {
Ok(t) => Ok(*t),
Err(_) => AssertionError::fail(location, Self::type_str()),
},
@@ -50,3 +73,24 @@ impl<T: InertAtomic> TryFromExprInst for T {
}
}
}
+
+ impl InertAtomic for bool {
+ fn type_str() -> &'static str { "bool" }
+ fn strict_eq(&self, other: &Self) -> bool { self == other }
+ }
+
+ impl InertAtomic for usize {
+ fn type_str() -> &'static str { "usize" }
+ fn strict_eq(&self, other: &Self) -> bool { self == other }
+ fn respond(&self, mut request: Request) {
+ request.serve(Numeric::Uint(*self))
+ }
+ }
+
+ impl InertAtomic for NotNan<f64> {
+ fn type_str() -> &'static str { "NotNan<f64>" }
+ fn strict_eq(&self, other: &Self) -> bool { self == other }
+ fn respond(&self, mut request: Request) {
+ request.serve(Numeric::Float(*self))
+ }
+ }
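As the `bool`/`usize`/`NotNan<f64>` impls above show, a downstream type only needs `Debug + Clone + Send` plus a `type_str` to become an atom; `Responder`, `Atomic`, `StrictEq` and `TryFromExprInst` all come from the blanket impls. A minimal sketch with a hypothetical user type (the `FileHandle` name and the `crate::foreign::InertAtomic` path are assumptions):

```rust
use crate::foreign::InertAtomic; // path assumed from `mod inert` in foreign/mod.rs

/// Hypothetical inert data carried through Orchid code unchanged.
#[derive(Debug, Clone, PartialEq)]
pub struct FileHandle(pub usize);

impl InertAtomic for FileHandle {
  fn type_str() -> &'static str { "FileHandle" }
  // Delegates to PartialEq, exactly as the strict_eq doc comment suggests.
  fn strict_eq(&self, other: &Self) -> bool { self == other }
}
```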
@@ -10,8 +10,8 @@ mod inert;

use std::rc::Rc;

- pub use atom::{Atom, Atomic, AtomicResult, AtomicReturn};
+ pub use atom::{Atom, Atomic, AtomicResult, AtomicReturn, StrictEq};
- pub use extern_fn::{ExternError, ExternFn};
+ pub use extern_fn::{ExternError, ExternFn, ExFn};
pub use fn_bridge::constructors::{
xfn_1ary, xfn_2ary, xfn_3ary, xfn_4ary, xfn_5ary, xfn_6ary, xfn_7ary,
xfn_8ary, xfn_9ary,
@@ -1,7 +1,6 @@
use std::borrow::Borrow;
- use std::cell::RefCell;
use std::hash::{BuildHasher, Hash};
- use std::rc::Rc;
+ use std::sync::{RwLock, Arc};

use hashbrown::HashMap;

@@ -11,32 +10,32 @@ use super::token::Tok;
/// Lasso but much simpler, in part because not much can be known about the
/// type.
pub struct TypedInterner<T: 'static + Eq + Hash + Clone> {
- tokens: RefCell<HashMap<Rc<T>, Tok<T>>>,
+ tokens: RwLock<HashMap<Arc<T>, Tok<T>>>,
}
impl<T: Eq + Hash + Clone> TypedInterner<T> {
/// Create a fresh interner instance
#[must_use]
- pub fn new() -> Rc<Self> {
+ pub fn new() -> Arc<Self> {
- Rc::new(Self { tokens: RefCell::new(HashMap::new()) })
+ Arc::new(Self { tokens: RwLock::new(HashMap::new()) })
}

/// Intern an object, returning a token
#[must_use]
pub fn i<Q: ?Sized + Eq + Hash + ToOwned<Owned = T>>(
- self: &Rc<Self>,
+ self: &Arc<Self>,
q: &Q,
) -> Tok<T>
where
T: Borrow<Q>,
{
- let mut tokens = self.tokens.borrow_mut();
+ let mut tokens = self.tokens.write().unwrap();
let hash = compute_hash(tokens.hasher(), q);
let raw_entry = tokens
.raw_entry_mut()
.from_hash(hash, |k| <T as Borrow<Q>>::borrow(k) == q);
let kv = raw_entry.or_insert_with(|| {
- let keyrc = Rc::new(q.to_owned());
+ let keyrc = Arc::new(q.to_owned());
- let token = Tok::<T>::new(keyrc.clone(), Rc::downgrade(self));
+ let token = Tok::<T>::new(keyrc.clone(), Arc::downgrade(self));
(keyrc, token)
});
kv.1.clone()
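The point of swapping `Rc`/`RefCell` for `Arc`/`RwLock` in the hunk above is that interners and tokens can now cross thread boundaries. A hedged sketch of that usage, assuming `TypedInterner` is reachable at `crate::interner` and that `Tok<String>` is `Send` (which the `Arc`/`Weak` swap is meant to guarantee):

```rust
use std::thread;

use crate::interner::TypedInterner; // path assumed

fn intern_from_two_threads() {
  // `new` returns Arc<TypedInterner<String>>, so it can simply be cloned into a thread.
  let interner = TypedInterner::<String>::new();
  let i2 = interner.clone();
  let from_thread = thread::spawn(move || i2.i("spawned"));
  let on_main = interner.i("main");
  // Both tokens point into the same shared, RwLock-guarded table.
  let _ = (on_main, from_thread.join().unwrap());
}
```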
@@ -2,7 +2,7 @@ use std::any::{Any, TypeId};
use std::borrow::Borrow;
use std::cell::{RefCell, RefMut};
use std::hash::Hash;
- use std::rc::Rc;
+ use std::sync::Arc;

use hashbrown::HashMap;

@@ -13,7 +13,7 @@ use super::token::Tok;
/// that implements [ToOwned]. Objects of the same type are stored together in a
/// [TypedInterner].
pub struct Interner {
- interners: RefCell<HashMap<TypeId, Rc<dyn Any>>>,
+ interners: RefCell<HashMap<TypeId, Arc<dyn Any + Send + Sync>>>,
}
impl Interner {
/// Create a new interner
@@ -24,7 +24,7 @@ impl Interner {
#[must_use]
pub fn i<Q: ?Sized + Eq + Hash + ToOwned>(&self, q: &Q) -> Tok<Q::Owned>
where
- Q::Owned: 'static + Eq + Hash + Clone + Borrow<Q>,
+ Q::Owned: 'static + Eq + Hash + Clone + Borrow<Q> + Send + Sync,
{
let mut interners = self.interners.borrow_mut();
let interner = get_interner(&mut interners);
@@ -44,9 +44,9 @@ impl Default for Interner {

/// Get or create an interner for a given type.
#[must_use]
- fn get_interner<T: 'static + Eq + Hash + Clone>(
+ fn get_interner<T: 'static + Eq + Hash + Clone + Send + Sync>(
- interners: &mut RefMut<HashMap<TypeId, Rc<dyn Any>>>,
+ interners: &mut RefMut<HashMap<TypeId, Arc<dyn Any + Send + Sync>>>,
- ) -> Rc<TypedInterner<T>> {
+ ) -> Arc<TypedInterner<T>> {
let boxed = interners
.raw_entry_mut()
.from_key(&TypeId::of::<T>())
@@ -3,7 +3,7 @@ use std::fmt::{Debug, Display};
use std::hash::Hash;
use std::num::NonZeroUsize;
use std::ops::Deref;
- use std::rc::{Rc, Weak};
+ use std::sync::{Arc, Weak};

use super::TypedInterner;

@@ -13,13 +13,13 @@ use super::TypedInterner;
/// currently not enforced.
#[derive(Clone)]
pub struct Tok<T: Eq + Hash + Clone + 'static> {
- data: Rc<T>,
+ data: Arc<T>,
interner: Weak<TypedInterner<T>>,
}
impl<T: Eq + Hash + Clone + 'static> Tok<T> {
/// Create a new token. Used exclusively by the interner
#[must_use]
- pub(crate) fn new(data: Rc<T>, interner: Weak<TypedInterner<T>>) -> Self {
+ pub(crate) fn new(data: Arc<T>, interner: Weak<TypedInterner<T>>) -> Self {
Self { data, interner }
}
/// Take the ID number out of a token
@@ -3,7 +3,7 @@ use super::error::RuntimeError;
use super::Return;
use crate::foreign::AtomicReturn;
use crate::representations::interpreted::{Clause, ExprInst};
- use crate::representations::{PathSet, Primitive};
+ use crate::representations::PathSet;
use crate::utils::never::{unwrap_always, Always};
use crate::utils::Side;

@@ -77,9 +77,8 @@ pub fn apply(
) -> Result<Return, RuntimeError> {
let (state, (gas, inert)) = f.try_update(|clause, loc| match clause {
// apply an ExternFn or an internal function
- Clause::P(Primitive::ExternFn(f)) => {
+ Clause::ExternFn(f) => {
- let clause =
- f.apply(x, ctx.clone()).map_err(|e| RuntimeError::Extern(e))?;
+ let clause = f.apply(x, ctx.clone()).map_err(RuntimeError::Extern)?;
Ok((clause, (ctx.gas.map(|g| g - 1), false)))
},
Clause::Lambda { args, body } => Ok(if let Some(args) = args {
@@ -97,7 +96,7 @@ pub fn apply(
} else {
Err(RuntimeError::MissingSymbol(name.clone(), loc))
},
- Clause::P(Primitive::Atom(atom)) => {
+ Clause::Atom(atom) => {
// take a step in expanding atom
let AtomicReturn { clause, gas, inert } = atom.run(ctx.clone())?;
Ok((Clause::Apply { f: clause.wrap(), x }, (gas, inert)))
@@ -8,7 +8,6 @@ use super::{run, Context, Return, RuntimeError};
use crate::foreign::{Atom, Atomic, ExternError};
use crate::interpreted::{Clause, Expr, ExprInst};
use crate::utils::take_with_output;
- use crate::Primitive;

trait_set! {
trait Handler = FnMut(Box<dyn Any>) -> HandlerRes;
@@ -71,9 +70,9 @@ pub fn run_handler(
loop {
let mut ret = run(expr, ctx.clone())?;
let quit = take_with_output(&mut ret.state, |exi| match exi.expr_val() {
- Expr { clause: Clause::P(Primitive::Atom(a)), .. } => {
+ Expr { clause: Clause::Atom(a), .. } => {
match handlers.dispatch(a.0) {
- Err(b) => (Clause::P(Primitive::Atom(Atom(b))).wrap(), Ok(true)),
+ Err(b) => (Clause::Atom(Atom(b)).wrap(), Ok(true)),
Ok(e) => match e {
Ok(expr) => (expr, Ok(false)),
Err(e) => (Clause::Bottom.wrap(), Err(e)),
@@ -3,7 +3,6 @@ use super::context::{Context, Return};
use super::error::RuntimeError;
use crate::foreign::AtomicReturn;
use crate::representations::interpreted::{Clause, ExprInst};
- use crate::representations::Primitive;

/// Normalize an expression using beta reduction with memoization
pub fn run(expr: ExprInst, mut ctx: Context) -> Result<Return, RuntimeError> {
@@ -19,7 +18,7 @@ pub fn run(expr: ExprInst, mut ctx: Context) -> Result<Return, RuntimeError> {
ctx.gas = res.gas;
cls = res.state.expr().clause.clone();
},
- Clause::P(Primitive::Atom(data)) => {
+ Clause::Atom(data) => {
let AtomicReturn { clause, gas, inert } = data.run(ctx.clone())?;
if inert {
return Ok((clause, (gas, true)));
@@ -29,8 +29,8 @@ pub use representations::project::{
collect_consts, collect_rules, vname_to_sym_tree, ProjectTree,
};
pub use representations::{
- ast, from_const_tree, interpreted, sourcefile, tree, ConstTree, Literal,
- Location, NameLike, OrcString, PathSet, Primitive, Sym, VName,
+ ast, from_const_tree, interpreted, sourcefile, tree, ConstTree, Location,
+ NameLike, OrcString, PathSet, Sym, VName,
};
pub use utils::substack::Substack;
pub use utils::{ddispatch, take_with_output, thread_pool, IdMap, Side};
@@ -1,16 +0,0 @@
- pub use chumsky::prelude::*;
- pub use chumsky::{self, Parser};
-
- use super::decls::SimpleParser;
-
- /// Parses Lua-style comments
- #[must_use]
- pub fn comment_parser() -> impl SimpleParser<char, String> {
- choice((
- just("--[").ignore_then(take_until(just("]--").ignored())),
- just("--").ignore_then(take_until(just("\n").rewind().ignored().or(end()))),
- ))
- .map(|(vc, ())| vc)
- .collect()
- .labelled("comment")
- }
@@ -1,19 +1,62 @@
- use std::rc::Rc;
+ use std::ops::Range;
+ use std::sync::Arc;

+ use super::stream::Stream;
+ use crate::error::ProjectResult;
+ use crate::foreign::Atom;
use crate::interner::Interner;
- use crate::{Tok, VName};
+ use crate::sourcefile::FileEntryKind;
+ use crate::{Location, VName};

/// Trait enclosing all context features
///
/// Hiding type parameters in associated types allows for simpler
/// parser definitions
- pub trait Context: Clone {
+ pub trait Context {
#[must_use]
- fn ops(&self) -> &[Tok<String>];
+ fn file(&self) -> Arc<VName>;
- #[must_use]
- fn file(&self) -> Rc<VName>;
#[must_use]
fn interner(&self) -> &Interner;
+ #[must_use]
+ fn source(&self) -> Arc<String>;
+ fn lexers(&self) -> &[&dyn LexerPlugin];
+ fn line_parsers(&self) -> &[&dyn LineParser];
+ #[must_use]
+ fn pos(&self, tail: &str) -> usize { self.source().len() - tail.len() }
+ #[must_use]
+ fn location(&self, len: usize, tail: &str) -> Location {
+ match self.pos(tail).checked_sub(len) {
+ Some(start) => self.range_loc(start..self.pos(tail)),
+ None => {
+ let tl = tail.len();
+ panic!("len={len} greater than tail.len()={tl}; tail={tail:?}")
+ },
+ }
+ }
+ #[must_use]
+ fn range_loc(&self, range: Range<usize>) -> Location {
+ Location::Range { file: self.file(), range, source: self.source() }
+ }
+ }
+
+ pub type LexerPluginOut<'a> = Option<ProjectResult<(Atom, &'a str)>>;
+ pub type LineParserOut = Option<ProjectResult<Vec<FileEntryKind>>>;
+ pub trait LexerPlugin:
+ for<'a> Fn(&'a str, &dyn Context) -> LexerPluginOut<'a> + Sync + Send
+ {
+ }
+ impl<F> LexerPlugin for F where
+ F: for<'a> Fn(&'a str, &dyn Context) -> LexerPluginOut<'a> + Sync + Send
+ {
+ }
+
+ pub trait LineParser:
+ Fn(Stream<'_>, &dyn Context) -> LineParserOut + Sync + Send
+ {
+ }
+ impl<F> LineParser for F where
+ F: Fn(Stream<'_>, &dyn Context) -> LineParserOut + Sync + Send
+ {
+ }
}

/// Struct implementing context
@@ -21,29 +64,55 @@ pub trait Context: Clone {
/// Hiding type parameters in associated types allows for simpler
/// parser definitions
pub struct ParsingContext<'a> {
- pub ops: &'a [Tok<String>],
pub interner: &'a Interner,
- pub file: Rc<VName>,
+ pub file: Arc<VName>,
+ pub source: Arc<String>,
+ pub lexers: &'a [&'a dyn LexerPlugin],
+ pub line_parsers: &'a [&'a dyn LineParser],
}

impl<'a> ParsingContext<'a> {
pub fn new(
- ops: &'a [Tok<String>],
interner: &'a Interner,
- file: Rc<VName>,
+ file: Arc<VName>,
+ source: Arc<String>,
+ lexers: &'a [&'a dyn LexerPlugin],
+ line_parsers: &'a [&'a dyn LineParser],
) -> Self {
- Self { ops, interner, file }
+ Self { interner, file, source, lexers, line_parsers }
}
}

impl<'a> Clone for ParsingContext<'a> {
fn clone(&self) -> Self {
- Self { ops: self.ops, interner: self.interner, file: self.file.clone() }
+ Self {
+ interner: self.interner,
+ file: self.file.clone(),
+ source: self.source.clone(),
+ lexers: self.lexers,
+ line_parsers: self.line_parsers,
+ }
}
}

impl Context for ParsingContext<'_> {
fn interner(&self) -> &Interner { self.interner }
- fn file(&self) -> Rc<VName> { self.file.clone() }
+ fn file(&self) -> Arc<VName> { self.file.clone() }
- fn ops(&self) -> &[Tok<String>] { self.ops }
+ fn source(&self) -> Arc<String> { self.source.clone() }
+ fn lexers(&self) -> &[&dyn LexerPlugin] { self.lexers }
+ fn line_parsers(&self) -> &[&dyn LineParser] { self.line_parsers }
+ }
+
+ pub struct MockContext<'a>(pub &'a Interner);
+ impl<'a> Context for MockContext<'a> {
+ // these are doing something
+ fn interner(&self) -> &Interner { self.0 }
+ fn pos(&self, tail: &str) -> usize { usize::MAX / 2 - tail.len() }
+ // these are expendable
+ fn file(&self) -> Arc<VName> { Arc::new(Vec::new()) }
+ fn lexers(&self) -> &[&dyn LexerPlugin] { &[] }
+ fn line_parsers(&self) -> &[&dyn LineParser] { &[] }
+ fn location(&self, _: usize, _: &str) -> Location { Location::Unknown }
+ fn range_loc(&self, _: Range<usize>) -> Location { Location::Unknown }
+ fn source(&self) -> Arc<String> { Arc::new(String::new()) }
}
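Because `LexerPlugin` in the hunk above is blanket-implemented for any matching `Fn`, a plain function can act as a plugin for the manual lexer. A hedged sketch, with the `Atom::new` constructor and the `LexerPluginOut` re-export path marked as assumptions (the trait signature and the `(Atom, &str)` return shape come straight from the diff):

```rust
use crate::foreign::Atom;
use crate::parse::context::{Context, LexerPluginOut}; // re-export path assumed

/// Illustrative plugin: recognises the literal keyword "true" at the head of
/// the remaining source and hands back the unconsumed tail.
fn lex_true<'a>(data: &'a str, _ctx: &dyn Context) -> LexerPluginOut<'a> {
  let tail = data.strip_prefix("true")?; // None means "not my token"
  // `bool` is an InertAtomic in this commit, so it can be wrapped as an Atom;
  // `Atom::new` is assumed here, the real constructor may differ.
  Some(Ok((Atom::new(true), tail)))
}
```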
@@ -1,13 +0,0 @@
- use std::hash::Hash;
-
- use chumsky::prelude::Simple;
- use chumsky::{BoxedParser, Parser};
- use trait_set::trait_set;
-
- trait_set! {
- /// Wrapper around [Parser] with [Simple] error to avoid repeating the input
- pub trait SimpleParser<I: Eq + Hash + Clone, O> =
- Parser<I, O, Error = Simple<I>>;
- }
- /// Boxed version of [SimpleParser]
- pub type BoxedSimpleParser<'a, I, O> = BoxedParser<'a, I, O, Simple<I>>;
@@ -1,12 +1,10 @@
|
|||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use chumsky::prelude::Simple;
|
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
|
|
||||||
use super::{Entry, Lexeme};
|
use super::{Entry, Lexeme};
|
||||||
use crate::error::{ErrorPosition, ProjectError};
|
use crate::error::ProjectError;
|
||||||
use crate::utils::BoxedIter;
|
use crate::{Location, Tok};
|
||||||
use crate::{Location, Tok, VName};
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct LineNeedsPrefix {
|
pub struct LineNeedsPrefix {
|
||||||
@@ -14,10 +12,10 @@ pub struct LineNeedsPrefix {
|
|||||||
}
|
}
|
||||||
impl ProjectError for LineNeedsPrefix {
|
impl ProjectError for LineNeedsPrefix {
|
||||||
fn description(&self) -> &str { "This linetype requires a prefix" }
|
fn description(&self) -> &str { "This linetype requires a prefix" }
|
||||||
|
fn one_position(&self) -> Location { self.entry.location() }
|
||||||
fn message(&self) -> String {
|
fn message(&self) -> String {
|
||||||
format!("{} cannot appear at the beginning of a line", self.entry)
|
format!("{} cannot appear at the beginning of a line", self.entry)
|
||||||
}
|
}
|
||||||
fn one_position(&self) -> Location { self.entry.location() }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
@@ -27,14 +25,12 @@ pub struct UnexpectedEOL {
|
|||||||
}
|
}
|
||||||
impl ProjectError for UnexpectedEOL {
|
impl ProjectError for UnexpectedEOL {
|
||||||
fn description(&self) -> &str { "The line ended abruptly" }
|
fn description(&self) -> &str { "The line ended abruptly" }
|
||||||
|
fn one_position(&self) -> Location { self.entry.location() }
|
||||||
fn message(&self) -> String {
|
fn message(&self) -> String {
|
||||||
"The line ends unexpectedly here. In Orchid, all line breaks outside \
|
"The line ends unexpectedly here. In Orchid, all line breaks outside \
|
||||||
parentheses start a new declaration"
|
parentheses start a new declaration"
|
||||||
.to_string()
|
.to_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn one_position(&self) -> Location { self.entry.location() }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct ExpectedEOL {
|
pub struct ExpectedEOL {
|
||||||
@@ -58,10 +54,8 @@ impl ExpectedName {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl ProjectError for ExpectedName {
|
impl ProjectError for ExpectedName {
|
||||||
fn description(&self) -> &str {
|
fn description(&self) -> &str { "A name was expected" }
|
||||||
"A name was expected here, but something else was found"
|
fn one_position(&self) -> Location { self.entry.location() }
|
||||||
}
|
|
||||||
|
|
||||||
fn message(&self) -> String {
|
fn message(&self) -> String {
|
||||||
if self.entry.is_keyword() {
|
if self.entry.is_keyword() {
|
||||||
format!(
|
format!(
|
||||||
@@ -72,8 +66,6 @@ impl ProjectError for ExpectedName {
|
|||||||
format!("Expected a name, found {}", self.entry)
|
format!("Expected a name, found {}", self.entry)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn one_position(&self) -> Location { self.entry.location() }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive()]
|
#[derive()]
|
||||||
@@ -84,18 +76,15 @@ pub struct Expected {
|
|||||||
}
|
}
|
||||||
impl Expected {
|
impl Expected {
|
||||||
pub fn expect(l: Lexeme, e: &Entry) -> Result<(), Rc<dyn ProjectError>> {
|
pub fn expect(l: Lexeme, e: &Entry) -> Result<(), Rc<dyn ProjectError>> {
|
||||||
if e.lexeme != l {
|
if e.lexeme.strict_eq(&l) {
|
||||||
return Err(
|
return Ok(());
|
||||||
Self { expected: vec![l], or_name: false, found: e.clone() }.rc(),
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
Ok(())
|
Err(Self { expected: vec![l], or_name: false, found: e.clone() }.rc())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl ProjectError for Expected {
|
impl ProjectError for Expected {
|
||||||
fn description(&self) -> &str {
|
fn description(&self) -> &str { "A concrete token was expected" }
|
||||||
"A concrete token was expected but something else was found"
|
fn one_position(&self) -> Location { self.found.location() }
|
||||||
}
|
|
||||||
fn message(&self) -> String {
|
fn message(&self) -> String {
|
||||||
let list = match &self.expected[..] {
|
let list = match &self.expected[..] {
|
||||||
&[] => return "Unsatisfiable expectation".to_string(),
|
&[] => return "Unsatisfiable expectation".to_string(),
|
||||||
@@ -108,21 +97,15 @@ impl ProjectError for Expected {
|
|||||||
let or_name = if self.or_name { " or a name" } else { "" };
|
let or_name = if self.or_name { " or a name" } else { "" };
|
||||||
format!("Expected {list}{or_name} but found {}", self.found)
|
format!("Expected {list}{or_name} but found {}", self.found)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn one_position(&self) -> Location { self.found.location() }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct ReservedToken {
|
pub struct ReservedToken {
|
||||||
pub entry: Entry,
|
pub entry: Entry,
|
||||||
}
|
}
|
||||||
impl ProjectError for ReservedToken {
|
impl ProjectError for ReservedToken {
|
||||||
fn description(&self) -> &str {
|
fn description(&self) -> &str { "Syntax reserved for future use" }
|
||||||
"A token reserved for future use was found in the code"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn message(&self) -> String { format!("{} is a reserved token", self.entry) }
|
|
||||||
|
|
||||||
fn one_position(&self) -> Location { self.entry.location() }
|
fn one_position(&self) -> Location { self.entry.location() }
|
||||||
|
fn message(&self) -> String { format!("{} is a reserved token", self.entry) }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct BadTokenInRegion {
|
pub struct BadTokenInRegion {
|
||||||
@@ -130,15 +113,11 @@ pub struct BadTokenInRegion {
|
|||||||
pub region: &'static str,
|
pub region: &'static str,
|
||||||
}
|
}
|
||||||
impl ProjectError for BadTokenInRegion {
|
impl ProjectError for BadTokenInRegion {
|
||||||
fn description(&self) -> &str {
|
fn description(&self) -> &str { "An unexpected token was found" }
|
||||||
"A token was found in a region where it should not appear"
|
fn one_position(&self) -> Location { self.entry.location() }
|
||||||
}
|
|
||||||
|
|
||||||
fn message(&self) -> String {
|
fn message(&self) -> String {
|
||||||
format!("{} cannot appear in {}", self.entry, self.region)
|
format!("{} cannot appear in {}", self.entry, self.region)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn one_position(&self) -> Location { self.entry.location() }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct NotFound {
|
pub struct NotFound {
|
||||||
@@ -146,70 +125,90 @@ pub struct NotFound {
|
|||||||
pub location: Location,
|
pub location: Location,
|
||||||
}
|
}
|
||||||
impl ProjectError for NotFound {
|
impl ProjectError for NotFound {
|
||||||
fn description(&self) -> &str {
|
fn description(&self) -> &str { "A specific lexeme was expected" }
|
||||||
"A specific lexeme was expected but not found in the given range"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn message(&self) -> String { format!("{} was expected", self.expected) }
|
|
||||||
|
|
||||||
fn one_position(&self) -> Location { self.location.clone() }
|
fn one_position(&self) -> Location { self.location.clone() }
|
||||||
|
fn message(&self) -> String { format!("{} was expected", self.expected) }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct LeadingNS {
|
pub struct LeadingNS(pub Location);
|
||||||
pub location: Location,
|
|
||||||
}
|
|
||||||
impl ProjectError for LeadingNS {
|
impl ProjectError for LeadingNS {
|
||||||
fn description(&self) -> &str { ":: can only follow a name token" }
|
fn description(&self) -> &str { ":: can only follow a name token" }
|
||||||
fn one_position(&self) -> Location { self.location.clone() }
|
fn one_position(&self) -> Location { self.0.clone() }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct MisalignedParen {
|
pub struct MisalignedParen(pub Entry);
|
||||||
pub entry: Entry,
|
|
||||||
}
|
|
||||||
impl ProjectError for MisalignedParen {
|
impl ProjectError for MisalignedParen {
|
||||||
fn description(&self) -> &str {
|
fn description(&self) -> &str { "(), [] and {} must always pair up" }
|
||||||
"Parentheses (), [] and {} must always pair up"
|
fn one_position(&self) -> Location { self.0.location() }
|
||||||
}
|
fn message(&self) -> String { format!("This {} has no pair", self.0) }
|
||||||
fn message(&self) -> String { format!("This {} has no pair", self.entry) }
|
|
||||||
fn one_position(&self) -> Location { self.entry.location() }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct NamespacedExport {
|
pub struct NamespacedExport(pub Location);
|
||||||
pub location: Location,
|
|
||||||
}
|
|
||||||
impl ProjectError for NamespacedExport {
|
impl ProjectError for NamespacedExport {
|
||||||
fn description(&self) -> &str {
|
fn description(&self) -> &str { "Only local names may be exported" }
|
||||||
"Exports can only refer to unnamespaced names in the local namespace"
|
fn one_position(&self) -> Location { self.0.clone() }
|
||||||
}
|
|
||||||
fn one_position(&self) -> Location { self.location.clone() }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct GlobExport {
|
pub struct GlobExport(pub Location);
|
||||||
pub location: Location,
|
|
||||||
}
|
|
||||||
impl ProjectError for GlobExport {
|
impl ProjectError for GlobExport {
|
||||||
fn description(&self) -> &str {
|
fn description(&self) -> &str { "Globstars are not allowed in exports" }
|
||||||
"Exports can only refer to concrete names, globstars are not allowed"
|
fn one_position(&self) -> Location { self.0.clone() }
|
||||||
}
|
|
||||||
fn one_position(&self) -> Location { self.location.clone() }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct LexError {
|
pub struct NoStringEnd(pub Location);
|
||||||
pub errors: Vec<Simple<char>>,
|
impl ProjectError for NoStringEnd {
|
||||||
pub source: Rc<String>,
|
fn description(&self) -> &str { "A string literal was not closed with `\"`" }
|
||||||
pub file: VName,
|
fn one_position(&self) -> Location { self.0.clone() }
|
||||||
}
|
}
|
||||||
impl ProjectError for LexError {
|
|
||||||
fn description(&self) -> &str { "An error occured during tokenization" }
|
pub struct NoCommentEnd(pub Location);
|
||||||
fn positions(&self) -> BoxedIter<ErrorPosition> {
|
impl ProjectError for NoCommentEnd {
|
||||||
let file = self.file.clone();
|
fn description(&self) -> &str { "a comment was not closed with `]--`" }
|
||||||
Box::new(self.errors.iter().map(move |s| ErrorPosition {
|
fn one_position(&self) -> Location { self.0.clone() }
|
||||||
location: Location::Range {
|
}
|
||||||
file: Rc::new(file.clone()),
|
|
||||||
range: s.span(),
|
pub struct FloatPlacehPrio(pub Location);
|
||||||
source: self.source.clone(),
|
impl ProjectError for FloatPlacehPrio {
|
||||||
},
|
fn description(&self) -> &str {
|
||||||
message: Some(format!("{}", s)),
|
"a placeholder priority has a decimal point or a negative exponent"
|
||||||
}))
|
|
||||||
}
|
}
|
||||||
|
fn one_position(&self) -> Location { self.0.clone() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct NaNLiteral(pub Location);
|
||||||
|
impl ProjectError for NaNLiteral {
|
||||||
|
fn description(&self) -> &str { "float literal decoded to NaN" }
|
||||||
|
fn one_position(&self) -> Location { self.0.clone() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct LiteralOverflow(pub Location);
|
||||||
|
impl ProjectError for LiteralOverflow {
|
||||||
|
fn description(&self) -> &str { "number literal described number greater than usize::MAX" }
|
||||||
|
fn one_position(&self) -> Location { self.0.clone() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct ExpectedDigit(pub Location);
|
||||||
|
impl ProjectError for ExpectedDigit {
|
||||||
|
fn description(&self) -> &str { "expected a digit" }
|
||||||
|
fn one_position(&self) -> Location { self.0.clone() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct NotHex(pub Location);
|
||||||
|
impl ProjectError for NotHex {
|
||||||
|
fn description(&self) -> &str { "Expected a hex digit" }
|
||||||
|
fn one_position(&self) -> Location { self.0.clone() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct BadCodePoint(pub Location);
|
||||||
|
impl ProjectError for BadCodePoint {
|
||||||
|
fn description(&self) -> &str {
|
||||||
|
"\\uXXXX escape sequence does not describe valid code point"
|
||||||
|
}
|
||||||
|
fn one_position(&self) -> Location { self.0.clone() }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct BadEscapeSequence(pub Location);
|
||||||
|
impl ProjectError for BadEscapeSequence {
|
||||||
|
fn description(&self) -> &str { "Unrecognized escape sequence" }
|
||||||
|
fn one_position(&self) -> Location { self.0.clone() }
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,31 +1,17 @@
- use std::rc::Rc;
-
- use chumsky::Parser;
-
use super::context::Context;
- use super::errors::LexError;
+ use super::lexer::lex;
- use super::lexer;
use super::sourcefile::parse_module_body;
use super::stream::Stream;
use crate::error::{ParseErrorWithTokens, ProjectError, ProjectResult};
use crate::representations::sourcefile::FileEntry;

- pub fn parse2(data: &str, ctx: impl Context) -> ProjectResult<Vec<FileEntry>> {
+ pub fn parse2(ctx: impl Context) -> ProjectResult<Vec<FileEntry>> {
- let source = Rc::new(data.to_string());
- let lexie = lexer(ctx.clone(), source.clone());
- let tokens = (lexie.parse(data)).map_err(|errors| {
- LexError {
- errors,
- file: ctx.file().as_ref().clone(),
- source: source.clone(),
- }
- .rc()
- })?;
+ let tokens = lex(vec![], ctx.source().as_str(), &ctx).expect("debug");
if tokens.is_empty() {
Ok(Vec::new())
} else {
- parse_module_body(Stream::from_slice(&tokens), ctx).map_err(|error| {
+ parse_module_body(Stream::from_slice(&tokens), &ctx).map_err(|error| {
- ParseErrorWithTokens { error, full_source: data.to_string(), tokens }.rc()
+ ParseErrorWithTokens { error, full_source: ctx.source().to_string(), tokens }.rc()
})
}
}
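The facade change above means `parse2` no longer receives the source text as an argument; it pulls the source, file name and plugin lists out of the `Context`. A hedged sketch of the new call shape, assuming an `Interner::new` constructor and passing empty plugin slices (both assumptions; the `ParsingContext::new` argument order is taken from the context.rs hunk):

```rust
use std::sync::Arc;

use crate::error::ProjectResult;
use crate::interner::Interner;
use crate::parse::{parse2, ParsingContext};
use crate::representations::sourcefile::FileEntry;

fn parse_source(src: &str) -> ProjectResult<Vec<FileEntry>> {
  let interner = Interner::new(); // assumed constructor; Default also exists per the diff
  let ctx = ParsingContext::new(
    &interner,
    Arc::new(Vec::new()),      // file name (VName); left empty like MockContext does
    Arc::new(src.to_string()), // the full source, now carried by the context
    &[],                       // no custom lexer plugins
    &[],                       // no custom line parsers
  );
  parse2(ctx)
}
```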
@@ -1,24 +1,26 @@
|
|||||||
use std::fmt::{self, Display};
|
use std::fmt::Display;
|
||||||
use std::ops::Range;
|
use std::ops::Range;
|
||||||
use std::rc::Rc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use chumsky::prelude::*;
|
|
||||||
use chumsky::text::keyword;
|
|
||||||
use chumsky::Parser;
|
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use ordered_float::NotNan;
|
use ordered_float::NotNan;
|
||||||
|
|
||||||
|
use super::LexerPlugin;
|
||||||
use super::context::Context;
|
use super::context::Context;
|
||||||
use super::decls::SimpleParser;
|
use super::errors::{FloatPlacehPrio, NoCommentEnd};
|
||||||
use super::number::print_nat16;
|
use super::numeric::{parse_num, print_nat16, numstart};
|
||||||
use super::{comment, name, number, placeholder, string};
|
|
||||||
use crate::ast::{PHClass, Placeholder};
|
use crate::ast::{PHClass, Placeholder};
|
||||||
|
use crate::error::{ProjectResult, ProjectError};
|
||||||
|
use crate::foreign::Atom;
|
||||||
use crate::interner::Tok;
|
use crate::interner::Tok;
|
||||||
use crate::parse::operators::operators_parser;
|
use crate::parse::numeric::{numchar, lex_numeric};
|
||||||
use crate::representations::Literal;
|
use crate::parse::string::lex_string;
|
||||||
use crate::{Interner, Location, VName};
|
use crate::systems::stl::Numeric;
|
||||||
|
use crate::utils::pure_seq::next;
|
||||||
|
use crate::utils::unwrap_or;
|
||||||
|
use crate::{Location, VName};
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct Entry {
|
pub struct Entry {
|
||||||
pub lexeme: Lexeme,
|
pub lexeme: Lexeme,
|
||||||
pub location: Location,
|
pub location: Location,
|
||||||
@@ -32,14 +34,15 @@ impl Entry {
|
|||||||
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn is_keyword(&self) -> bool {
|
pub fn is_keyword(&self) -> bool {
|
||||||
matches!(
|
false
|
||||||
self.lexeme,
|
// matches!(
|
||||||
Lexeme::Const
|
// self.lexeme,
|
||||||
| Lexeme::Export
|
// Lexeme::Const
|
||||||
| Lexeme::Import
|
// | Lexeme::Export
|
||||||
| Lexeme::Macro
|
// | Lexeme::Import
|
||||||
| Lexeme::Module
|
// | Lexeme::Macro
|
||||||
)
|
// | Lexeme::Module
|
||||||
|
// )
|
||||||
}
|
}
|
||||||
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
@@ -51,9 +54,13 @@ impl Entry {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn file(&self) -> Rc<VName> {
|
pub fn file(&self) -> Arc<VName> {
|
||||||
self.location.file().expect("An Entry can only have a range location")
|
self.location.file().expect("An Entry can only have a range location")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn new(location: Location, lexeme: Lexeme) -> Self {
|
||||||
|
Self { lexeme, location }
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Display for Entry {
|
impl Display for Entry {
|
||||||
@@ -66,9 +73,9 @@ impl AsRef<Location> for Entry {
|
|||||||
fn as_ref(&self) -> &Location { &self.location }
|
fn as_ref(&self) -> &Location { &self.location }
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
#[derive(Clone, Debug)]
|
||||||
pub enum Lexeme {
|
pub enum Lexeme {
|
||||||
Literal(Literal),
|
Atom(Atom),
|
||||||
Name(Tok<String>),
|
Name(Tok<String>),
|
||||||
Arrow(NotNan<f64>),
|
Arrow(NotNan<f64>),
|
||||||
/// Walrus operator (formerly shorthand macro)
|
/// Walrus operator (formerly shorthand macro)
|
||||||
@@ -86,20 +93,19 @@ pub enum Lexeme {
|
|||||||
At,
|
At,
|
||||||
// Dot,
|
// Dot,
|
||||||
Type, // type operator
|
Type, // type operator
|
||||||
Comment(Rc<String>),
|
Comment(Arc<String>),
|
||||||
Export,
|
// Export,
|
||||||
Import,
|
// Import,
|
||||||
Module,
|
// Module,
|
||||||
Macro,
|
// Macro,
|
||||||
Const,
|
// Const,
|
||||||
Operators(Rc<VName>),
|
|
||||||
Placeh(Placeholder),
|
Placeh(Placeholder),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Display for Lexeme {
|
impl Display for Lexeme {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
Self::Literal(l) => write!(f, "{:?}", l),
|
Self::Atom(a) => write!(f, "{a:?}"),
|
||||||
Self::Name(token) => write!(f, "{}", **token),
|
Self::Name(token) => write!(f, "{}", **token),
|
||||||
Self::Walrus => write!(f, ":="),
|
Self::Walrus => write!(f, ":="),
|
||||||
Self::Arrow(prio) => write!(f, "={}=>", print_nat16(*prio)),
|
Self::Arrow(prio) => write!(f, "={}=>", print_nat16(*prio)),
|
||||||
@@ -116,14 +122,11 @@ impl Display for Lexeme {
|
|||||||
Self::At => write!(f, "@"),
|
Self::At => write!(f, "@"),
|
||||||
Self::Type => write!(f, ":"),
|
Self::Type => write!(f, ":"),
|
||||||
Self::Comment(text) => write!(f, "--[{}]--", text),
|
Self::Comment(text) => write!(f, "--[{}]--", text),
|
||||||
Self::Export => write!(f, "export"),
|
// Self::Export => write!(f, "export"),
|
||||||
Self::Import => write!(f, "import"),
|
// Self::Import => write!(f, "import"),
|
||||||
Self::Module => write!(f, "module"),
|
// Self::Module => write!(f, "module"),
|
||||||
Self::Const => write!(f, "const"),
|
// Self::Const => write!(f, "const"),
|
||||||
Self::Macro => write!(f, "macro"),
|
// Self::Macro => write!(f, "macro"),
|
||||||
Self::Operators(ops) => {
|
|
||||||
write!(f, "operators[{}]", Interner::extern_all(ops).join(" "))
|
|
||||||
},
|
|
||||||
Self::Placeh(Placeholder { name, class }) => match *class {
|
Self::Placeh(Placeholder { name, class }) => match *class {
|
||||||
PHClass::Scalar => write!(f, "${}", **name),
|
PHClass::Scalar => write!(f, "${}", **name),
|
||||||
PHClass::Vec { nonzero, prio } => {
|
PHClass::Vec { nonzero, prio } => {
|
||||||
@@ -147,97 +150,192 @@ impl Lexeme {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[must_use]
|
pub fn strict_eq(&self, other: &Self) -> bool {
|
||||||
pub fn parser<E: chumsky::Error<Entry>>(
|
match (self, other) {
|
||||||
self,
|
(Self::Arrow(f1), Self::Arrow(f2)) => f1 == f2,
|
||||||
) -> impl Parser<Entry, Entry, Error = E> + Clone {
|
(Self::At, Self::At) | (Self::BR, Self::BR) => true,
|
||||||
filter(move |ent: &Entry| ent.lexeme == self)
|
(Self::BS, Self::BS) /*| (Self::Const, Self::Const)*/ => true,
|
||||||
|
// (Self::Export, Self::Export) | (Self::Import, Self::Import) => true,
|
||||||
|
// (Self::Macro, Self::Macro) | (Self::Module, Self::Module) => true,
|
||||||
|
(Self::NS, Self::NS) | (Self::Type, Self::Type) => true,
|
||||||
|
(Self::Walrus, Self::Walrus) => true,
|
||||||
|
(Self::Atom(a1), Self::Atom(a2)) => a1.0.strict_eq(&a2.0),
|
||||||
|
(Self::Comment(c1), Self::Comment(c2)) => c1 == c2,
|
||||||
|
(Self::LP(p1), Self::LP(p2)) | (Self::RP(p1), Self::RP(p2)) => p1 == p2,
|
||||||
|
(Self::Name(n1), Self::Name(n2)) => n1 == n2,
|
||||||
|
(Self::Placeh(ph1), Self::Placeh(ph2)) => ph1 == ph2,
|
||||||
|
(_, _) => false,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
#[allow(unused)]
|
||||||
pub struct LexedText(pub Vec<Entry>);
|
pub fn format(lexed: &[Entry]) -> String { lexed.iter().join(" ") }
|
||||||
|
|
||||||
impl Display for LexedText {
|
pub fn namechar(c: char) -> bool { c.is_alphanumeric() | (c == '_') }
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
pub fn namestart(c: char) -> bool { c.is_alphabetic() | (c == '_') }
|
||||||
write!(f, "{}", self.0.iter().join(" "))
|
pub fn opchar(c: char) -> bool {
|
||||||
|
!namestart(c) && !numstart(c) && !c.is_whitespace() && !"()[]{},".contains(c)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn split_filter(
|
||||||
|
s: &str,
|
||||||
|
mut pred: impl FnMut(char) -> bool,
|
||||||
|
) -> (&str, &str) {
|
||||||
|
s.find(|c| !pred(c)).map_or((s, ""), |i| s.split_at(i))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn lit_table() -> impl IntoIterator<Item = (&'static str, Lexeme)> {
|
||||||
|
[
|
||||||
|
("\\", Lexeme::BS),
|
||||||
|
("@", Lexeme::At),
|
||||||
|
("(", Lexeme::LP('(')),
|
||||||
|
("[", Lexeme::LP('[')),
|
||||||
|
("{", Lexeme::LP('{')),
|
||||||
|
(")", Lexeme::RP('(')),
|
||||||
|
("]", Lexeme::RP('[')),
|
||||||
|
("}", Lexeme::RP('{')),
|
||||||
|
("\n", Lexeme::BR),
|
||||||
|
(":=", Lexeme::Walrus),
|
||||||
|
("::", Lexeme::NS),
|
||||||
|
(":", Lexeme::Type),
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
static BUILTIN_ATOMS: &[&dyn LexerPlugin] = &[&lex_string, &lex_numeric];
|
||||||
|
|
||||||
|
pub fn lex(
|
||||||
|
mut tokens: Vec<Entry>,
|
||||||
|
mut data: &str,
|
||||||
|
ctx: &impl Context,
|
||||||
|
) -> ProjectResult<Vec<Entry>> {
|
||||||
|
let mut prev_len = data.len() + 1;
|
||||||
|
'tail:loop {
|
||||||
|
if prev_len == data.len() {
|
||||||
|
panic!("got stuck at {data:?}, parsed {:?}", tokens.last().unwrap());
|
||||||
|
}
|
||||||
|
prev_len = data.len();
|
||||||
|
data = data.trim_start_matches(|c: char| c.is_whitespace() && c != '\n');
|
||||||
|
let (head, _) = match next(data.chars()) {
|
||||||
|
Some((h, t)) => (h, t.as_str()),
|
||||||
|
None => return Ok(tokens),
|
||||||
|
};
|
||||||
|
for lexer in ctx.lexers().iter().chain(BUILTIN_ATOMS.iter()) {
|
||||||
|
if let Some(res) = lexer(data, ctx) {
|
||||||
|
let (atom, tail) = res?;
|
||||||
|
if tail.len() == data.len() {
|
||||||
|
panic!("lexer plugin consumed 0 characters")
|
||||||
|
}
|
||||||
|
let loc = ctx.location(data.len() - tail.len(), tail);
|
||||||
|
tokens.push(Entry::new(loc, Lexeme::Atom(atom)));
|
||||||
|
data = tail;
|
||||||
|
continue 'tail;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (prefix, lexeme) in lit_table() {
|
||||||
|
if let Some(tail) = data.strip_prefix(prefix) {
|
||||||
|
tokens.push(Entry::new(ctx.location(prefix.len(), tail), lexeme.clone()));
|
||||||
|
data = tail;
|
||||||
|
continue 'tail;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(tail) = data.strip_prefix(',') {
|
||||||
|
let lexeme = Lexeme::Name(ctx.interner().i(","));
|
||||||
|
tokens.push(Entry::new(ctx.location(1, tail), lexeme));
|
||||||
|
data = tail;
|
||||||
|
continue 'tail;
|
||||||
|
}
|
||||||
|
if let Some(tail) = data.strip_prefix("--[") {
|
||||||
|
let (note, tail) = (tail.split_once("]--"))
|
||||||
|
.ok_or_else(|| NoCommentEnd(ctx.location(tail.len(), "")).rc())?;
|
||||||
|
let lexeme = Lexeme::Comment(Arc::new(note.to_string()));
|
||||||
|
let location = ctx.location(note.len() + 3, tail);
|
||||||
|
tokens.push(Entry::new(location, lexeme));
|
||||||
|
data = tail;
|
||||||
|
continue 'tail;
|
||||||
|
}
|
||||||
|
if let Some(tail) = data.strip_prefix("--") {
|
||||||
|
let (note, tail) = split_filter(tail, |c| c != '\n');
|
||||||
|
let lexeme = Lexeme::Comment(Arc::new(note.to_string()));
|
||||||
|
let location = ctx.location(note.len(), tail);
|
||||||
|
tokens.push(Entry::new(location, lexeme));
|
||||||
|
data = tail;
|
||||||
|
continue 'tail;
|
||||||
|
}
|
||||||
|
if let Some(tail) = data.strip_prefix('=') {
|
||||||
|
if tail.chars().next().map_or(false, numstart) {
|
||||||
|
let (num, post_num) = split_filter(tail, numchar);
|
||||||
|
if let Some(tail) = post_num.strip_prefix("=>") {
|
||||||
|
let lexeme = Lexeme::Arrow(parse_num(num).map_err(|e| e.into_proj(num.len(), post_num, ctx))?.as_float());
|
||||||
|
let location = ctx.location(num.len() + 3, tail);
|
||||||
|
tokens.push(Entry::new(location, lexeme));
|
||||||
|
data = tail;
|
||||||
|
continue 'tail;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// todo: parse placeholders, don't forget vectorials!
|
||||||
|
if let Some(tail) = data.strip_prefix('$') {
|
||||||
|
let (name, tail) = split_filter(tail, namechar);
|
||||||
|
if !name.is_empty() {
|
||||||
|
let name = ctx.interner().i(name);
|
||||||
|
let location = ctx.location(name.len() + 1, tail);
|
||||||
|
let lexeme = Lexeme::Placeh(Placeholder { name, class: PHClass::Scalar });
|
||||||
|
tokens.push(Entry::new(location, lexeme));
|
||||||
|
data = tail;
|
||||||
|
continue 'tail;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if let Some(vec) = data.strip_prefix("..") {
|
||||||
|
let (nonzero, tail) =
|
||||||
|
vec.strip_prefix('.').map_or((false, vec), |t| (true, t));
|
||||||
|
if let Some(tail) = tail.strip_prefix('$') {
|
||||||
|
let (name, tail) = split_filter(tail, namechar);
|
||||||
|
if !name.is_empty() {
|
||||||
|
let (prio, priolen, tail) = tail
|
||||||
|
.strip_prefix(':')
|
||||||
|
.map(|tail| split_filter(tail, numchar))
|
||||||
|
.filter(|(num, _)| !num.is_empty())
|
||||||
|
.map(|(num_str, tail)| {
|
||||||
|
parse_num(num_str)
|
||||||
|
.map_err(|e| e.into_proj(num_str.len(), tail, ctx))
|
||||||
|
.and_then(|num| {
|
||||||
|
Ok(unwrap_or!(num => Numeric::Uint; {
|
||||||
|
return Err(FloatPlacehPrio(ctx.location(num_str.len(), tail)).rc())
|
||||||
|
}))
|
||||||
|
})
|
||||||
|
.map(|p| (p, num_str.len() + 1, tail))
|
||||||
|
})
|
||||||
|
.unwrap_or(Ok((0, 0, tail)))?;
|
||||||
|
let byte_len = if nonzero { 4 } else { 3 } + priolen + name.len();
|
||||||
|
let name = ctx.interner().i(name);
|
||||||
|
let class = PHClass::Vec { nonzero, prio };
|
||||||
|
let lexeme = Lexeme::Placeh(Placeholder { name, class });
|
||||||
|
tokens.push(Entry::new(ctx.location(byte_len, tail), lexeme));
|
||||||
|
data = tail;
|
||||||
|
continue 'tail;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if namestart(head) {
|
||||||
|
let (name, tail) = split_filter(data, namechar);
|
||||||
|
if !name.is_empty() {
|
||||||
|
let lexeme = Lexeme::Name(ctx.interner().i(name));
|
||||||
|
tokens.push(Entry::new(ctx.location(name.len(), tail), lexeme));
|
||||||
|
data = tail;
|
||||||
|
continue 'tail;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if opchar(head) {
|
||||||
|
let (name, tail) = split_filter(data, opchar);
|
||||||
|
if !name.is_empty() {
|
||||||
|
let lexeme = Lexeme::Name(ctx.interner().i(name));
|
||||||
|
tokens.push(Entry::new(ctx.location(name.len(), tail), lexeme));
|
||||||
|
data = tail;
|
||||||
|
continue 'tail;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
unreachable!(r#"opchar is pretty much defined as "not namechar" "#)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
fn paren_parser(lp: char, rp: char) -> impl SimpleParser<char, Lexeme> {
|
|
||||||
just(lp).to(Lexeme::LP(lp)).or(just(rp).to(Lexeme::RP(lp)))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
pub fn literal_parser<'a>(
|
|
||||||
ctx: impl Context + 'a,
|
|
||||||
) -> impl SimpleParser<char, Literal> + 'a {
|
|
||||||
choice((
|
|
||||||
// all ints are valid floats so it takes precedence
|
|
||||||
number::int_parser().map(Literal::Uint),
|
|
||||||
number::float_parser().map(Literal::Num),
|
|
||||||
string::str_parser()
|
|
||||||
.map(move |s| Literal::Str(ctx.interner().i(&s).into())),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub static BASE_OPS: &[&str] = &[",", ".", "..", "...", "*"];
|
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
pub fn lexer<'a>(
|
|
||||||
ctx: impl Context + 'a,
|
|
||||||
source: Rc<String>,
|
|
||||||
) -> impl SimpleParser<char, Vec<Entry>> + 'a {
|
|
||||||
let all_ops = ctx
|
|
||||||
.ops()
|
|
||||||
.iter()
|
|
||||||
.map(|op| op.as_ref())
|
|
||||||
.chain(BASE_OPS.iter().cloned())
|
|
||||||
.map(str::to_string)
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
choice((
|
|
||||||
keyword("export").to(Lexeme::Export),
|
|
||||||
keyword("module").to(Lexeme::Module),
|
|
||||||
keyword("import").to(Lexeme::Import),
|
|
||||||
keyword("macro").to(Lexeme::Macro),
|
|
||||||
keyword("const").to(Lexeme::Const),
|
|
||||||
operators_parser({
|
|
||||||
let ctx = ctx.clone();
|
|
||||||
move |s| ctx.interner().i(&s)
|
|
||||||
})
|
|
||||||
.map(|v| Lexeme::Operators(Rc::new(v))),
|
|
||||||
paren_parser('(', ')'),
|
|
||||||
paren_parser('[', ']'),
|
|
||||||
paren_parser('{', '}'),
|
|
||||||
just(":=").to(Lexeme::Walrus),
|
|
||||||
just("=")
|
|
||||||
.ignore_then(number::float_parser())
|
|
||||||
.then_ignore(just("=>"))
|
|
||||||
.map(Lexeme::rule),
|
|
||||||
comment::comment_parser().map(|s| Lexeme::Comment(Rc::new(s))),
|
|
||||||
placeholder::placeholder_parser(ctx.clone()).map(Lexeme::Placeh),
|
|
||||||
just("::").to(Lexeme::NS),
|
|
||||||
just('\\').to(Lexeme::BS),
|
|
||||||
just('@').to(Lexeme::At),
|
|
||||||
just(':').to(Lexeme::Type),
|
|
||||||
just('\n').to(Lexeme::BR),
|
|
||||||
// just('.').to(Lexeme::Dot),
|
|
||||||
literal_parser(ctx.clone()).map(Lexeme::Literal),
|
|
||||||
name::name_parser(&all_ops).map({
|
|
||||||
let ctx = ctx.clone();
|
|
||||||
move |n| Lexeme::Name(ctx.interner().i(&n))
|
|
||||||
}),
|
|
||||||
))
|
|
||||||
.map_with_span(move |lexeme, range| Entry {
|
|
||||||
lexeme,
|
|
||||||
location: Location::Range {
|
|
||||||
range,
|
|
||||||
file: ctx.file(),
|
|
||||||
source: source.clone(),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
.padded_by(one_of(" \t").repeated())
|
|
||||||
.repeated()
|
|
||||||
.then_ignore(end())
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,19 +1,18 @@
- mod comment;
mod context;
- mod decls;
mod errors;
mod facade;
mod lexer;
mod multiname;
- mod name;
+ mod numeric;
- mod number;
- mod operators;
- mod placeholder;
mod sourcefile;
mod stream;
mod string;

- pub use context::ParsingContext;
+ pub use context::{ParsingContext, Context, LexerPlugin, LineParser};
pub use facade::parse2;
- pub use lexer::{lexer, Entry, Lexeme};
+ pub use lexer::{namechar, namestart, opchar, split_filter, Entry, Lexeme};
- pub use number::{float_parser, int_parser, print_nat16};
+ pub use numeric::{
+ lex_numeric, numchar, numstart, parse_num, print_nat16, NumError,
+ NumErrorKind,
+ };
+ pub use string::{lex_string, parse_string, StringError, StringErrorKind};
@@ -41,12 +41,12 @@ impl Subresult {
}
}

- fn parse_multiname_branch(
+ fn parse_multiname_branch<'a>(
- cursor: Stream<'_>,
+ cursor: Stream<'a>,
- ctx: impl Context,
+ ctx: &impl Context,
- ) -> ProjectResult<(BoxedIter<Subresult>, Stream<'_>)> {
+ ) -> ProjectResult<(BoxedIter<'a, Subresult>, Stream<'a>)> {
let comma = ctx.interner().i(",");
- let (subnames, cursor) = parse_multiname_rec(cursor, ctx.clone())?;
+ let (subnames, cursor) = parse_multiname_rec(cursor, ctx)?;
let (delim, cursor) = cursor.trim().pop()?;
match &delim.lexeme {
Lexeme::Name(n) if n == &comma => {
@@ -65,10 +65,10 @@ fn parse_multiname_branch(
}
}

- fn parse_multiname_rec(
+ fn parse_multiname_rec<'a>(
- curosr: Stream<'_>,
+ curosr: Stream<'a>,
- ctx: impl Context,
+ ctx: &impl Context,
- ) -> ProjectResult<(BoxedIter<Subresult>, Stream<'_>)> {
+ ) -> ProjectResult<(BoxedIter<'a, Subresult>, Stream<'a>)> {
let star = ctx.interner().i("*");
let comma = ctx.interner().i(",");
let (head, mut cursor) = curosr.trim().pop()?;
@@ -103,7 +103,7 @@ fn parse_multiname_rec(
Ok((box_once(Subresult::new_glob(head.location())), cursor)),
Lexeme::Name(n) if ![comma, star].contains(n) => {
let cursor = cursor.trim();
- if cursor.get(0).ok().map(|e| &e.lexeme) == Some(&Lexeme::NS) {
+ if cursor.get(0).map_or(false, |e| e.lexeme.strict_eq(&Lexeme::NS)) {
let cursor = cursor.step()?;
let (out, cursor) = parse_multiname_rec(cursor, ctx)?;
let out = Box::new(out.map(|sr| sr.push_front(n.clone())));
@@ -123,10 +123,10 @@ fn parse_multiname_rec(
}
}

- pub fn parse_multiname(
+ pub fn parse_multiname<'a>(
- cursor: Stream<'_>,
+ cursor: Stream<'a>,
- ctx: impl Context,
+ ctx: &impl Context,
- ) -> ProjectResult<(Vec<Import>, Stream<'_>)> {
+ ) -> ProjectResult<(Vec<Import>, Stream<'a>)> {
let (output, cont) = parse_multiname_rec(cursor, ctx)?;
Ok((output.map(|sr| sr.finalize()).collect(), cont))
}
@@ -1,71 +0,0 @@
-use chumsky::prelude::*;
-use chumsky::{self, Parser};
-
-use super::decls::{BoxedSimpleParser, SimpleParser};
-
-/// Matches any one of the passed operators, preferring longer ones
-fn op_parser<'a>(
-  ops: &[impl AsRef<str> + Clone],
-) -> BoxedSimpleParser<'a, char, String> {
-  let mut sorted_ops: Vec<String> =
-    ops.iter().map(|t| t.as_ref().to_string()).collect();
-  sorted_ops.sort_by_key(|op| -(op.len() as i64));
-  sorted_ops
-    .into_iter()
-    .map(|op| just(op).boxed())
-    .reduce(|a, b| a.or(b).boxed())
-    .unwrap_or_else(|| {
-      empty().map(|()| panic!("Empty isn't meant to match")).boxed()
-    })
-    .labelled("operator")
-    .boxed()
-}
-
-/// Characters that cannot be parsed as part of an operator
-///
-/// The initial operator list overrides this.
-pub static NOT_NAME_CHAR: &[char] = &[
-  ':', // used for namespacing and type annotations
-  '\\', '@', // parametric expression starters
-  '"', // parsed as primitive and therefore would never match
-  '(', ')', '[', ']', '{', '}', // must be strictly balanced
-  '.', // Argument-body separator in parametrics
-  ',', // Import separator
-];
-
-/// Matches anything that's allowed as an operator
-///
-/// FIXME: `@name` without a dot should be parsed correctly for overrides.
-/// Could be an operator but then parametrics should take precedence,
-/// which might break stuff. investigate.
-///
-/// TODO: `.` could possibly be parsed as an operator in some contexts.
-/// This operator is very common in maths so it's worth a try.
-/// Investigate.
-#[must_use]
-pub fn anyop_parser<'a>() -> impl SimpleParser<char, String> + 'a {
-  filter(move |c| {
-    !NOT_NAME_CHAR.contains(c)
-      && !c.is_whitespace()
-      && !c.is_alphanumeric()
-      && c != &'_'
-  })
-  .repeated()
-  .at_least(1)
-  .collect()
-  .labelled("anyop")
-}
-
-/// Parse an operator or name. Failing both, parse everything up to
-/// the next whitespace or blacklisted character as a new operator.
-#[must_use]
-pub fn name_parser<'a>(
-  ops: &[impl AsRef<str> + Clone],
-) -> impl SimpleParser<char, String> + 'a {
-  choice((
-    op_parser(ops), // First try to parse a known operator
-    text::ident().labelled("plain text"), // Failing that, parse plain text
-    anyop_parser(), // Finally parse everything until tne next forbidden char
-  ))
-  .labelled("name")
-}
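Note (added for context): `op_parser` above implements longest-match by sorting the known operators by descending length before trying each alternative, so `=>` is preferred over `=`. A minimal standalone sketch of that rule — plain Rust, not part of the repository and independent of chumsky:

fn longest_op<'a>(input: &'a str, ops: &[&str]) -> Option<&'a str> {
  let mut sorted: Vec<&str> = ops.to_vec();
  // Longer operators first, so "=>" wins over "=" at the start of "=>x".
  sorted.sort_by_key(|op| std::cmp::Reverse(op.len()));
  let hit = sorted.into_iter().find(|op| input.starts_with(op))?;
  Some(&input[..hit.len()])
}

fn main() {
  assert_eq!(longest_op("=>x", &["=", "=>"]), Some("=>"));
  assert_eq!(longest_op("=x", &["=", "=>"]), Some("="));
  assert_eq!(longest_op("x", &["=", "=>"]), None);
}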
@@ -1,140 +0,0 @@
-use chumsky::prelude::*;
-use chumsky::{self, Parser};
-use ordered_float::NotNan;
-
-use super::decls::SimpleParser;
-
-fn assert_not_digit(base: u32, c: char) {
-  if base > (10 + (c as u32 - 'a' as u32)) {
-    panic!("The character '{}' is a digit in base ({})", c, base)
-  }
-}
-
-/// Parse an arbitrarily grouped sequence of digits starting with an underscore.
-///
-/// TODO: this should use separated_by and parse the leading group too
-#[must_use]
-fn separated_digits_parser(base: u32) -> impl SimpleParser<char, String> {
-  just('_')
-    .ignore_then(text::digits(base))
-    .repeated()
-    .map(|sv| sv.iter().flat_map(|s| s.chars()).collect())
-}
-
-/// parse a grouped uint
-///
-/// Not to be confused with [int_parser] which does a lot more
-#[must_use]
-fn uint_parser(base: u32) -> impl SimpleParser<char, u64> {
-  text::int(base).then(separated_digits_parser(base)).map(
-    move |(s1, s2): (String, String)| {
-      u64::from_str_radix(&(s1 + &s2), base).unwrap()
-    },
-  )
-}
-
-/// parse exponent notation, or return 0 as the default exponent.
-/// The exponent is always in decimal.
-#[must_use]
-fn pow_parser() -> impl SimpleParser<char, i32> {
-  choice((
-    just('p').ignore_then(text::int(10)).map(|s: String| s.parse().unwrap()),
-    just("p-")
-      .ignore_then(text::int(10))
-      .map(|s: String| -s.parse::<i32>().unwrap()),
-  ))
-  .or_else(|_| Ok(0))
-}
-
-/// returns a mapper that converts a mantissa and an exponent into an uint
-///
-/// TODO it panics if it finds a negative exponent
-fn nat2u(base: u64) -> impl Fn((u64, i32)) -> u64 {
-  move |(val, exp)| {
-    if exp == 0 {
-      val
-    } else {
-      val * base.checked_pow(exp.try_into().unwrap()).unwrap()
-    }
-  }
-}
-
-/// returns a mapper that converts a mantissa and an exponent into a float
-fn nat2f(base: u64) -> impl Fn((NotNan<f64>, i32)) -> NotNan<f64> {
-  move |(val, exp)| {
-    if exp == 0 {
-      val
-    } else {
-      val * (base as f64).powf(exp.try_into().unwrap())
-    }
-  }
-}
-
-/// parse an uint from exponential notation (panics if 'p' is a digit in base)
-#[must_use]
-fn pow_uint_parser(base: u32) -> impl SimpleParser<char, u64> {
-  assert_not_digit(base, 'p');
-  uint_parser(base).then(pow_parser()).map(nat2u(base.into()))
-}
-
-/// parse an uint from a base determined by its prefix or lack thereof
-///
-/// Not to be confused with [uint_parser] which is a component of it.
-#[must_use]
-pub fn int_parser() -> impl SimpleParser<char, u64> {
-  choice((
-    just("0b").ignore_then(pow_uint_parser(2)),
-    just("0x").ignore_then(pow_uint_parser(16)),
-    just('0').ignore_then(pow_uint_parser(8)),
-    pow_uint_parser(10), // Dec has no prefix
-  ))
-}
-
-/// parse a float from dot notation
-#[must_use]
-fn dotted_parser(base: u32) -> impl SimpleParser<char, NotNan<f64>> {
-  uint_parser(base)
-    .then(
-      just('.')
-        .ignore_then(text::digits(base).then(separated_digits_parser(base)))
-        .map(move |(frac1, frac2)| {
-          let frac = frac1 + &frac2;
-          let frac_num = u64::from_str_radix(&frac, base).unwrap() as f64;
-          let dexp = base.pow(frac.len().try_into().unwrap());
-          frac_num / dexp as f64
-        })
-        .or_not()
-        .map(|o| o.unwrap_or_default()),
-    )
-    .try_map(|(wh, f), s| {
-      NotNan::new(wh as f64 + f)
-        .map_err(|_| Simple::custom(s, "Float literal evaluates to NaN"))
-    })
-}
-
-/// parse a float from dotted and optionally also exponential notation
-#[must_use]
-fn pow_float_parser(base: u32) -> impl SimpleParser<char, NotNan<f64>> {
-  assert_not_digit(base, 'p');
-  dotted_parser(base).then(pow_parser()).map(nat2f(base.into()))
-}
-
-/// parse a float with dotted and optionally exponential notation from a base
-/// determined by its prefix
-#[must_use]
-pub fn float_parser() -> impl SimpleParser<char, NotNan<f64>> {
-  choice((
-    just("0b").ignore_then(pow_float_parser(2)),
-    just("0x").ignore_then(pow_float_parser(16)),
-    just('0').ignore_then(pow_float_parser(8)),
-    pow_float_parser(10),
-  ))
-  .labelled("float")
-}
-
-#[must_use]
-pub fn print_nat16(num: NotNan<f64>) -> String {
-  let exp = num.log(16.0).floor();
-  let man = num / 16_f64.powf(exp);
-  format!("{man}p{exp:.0}")
-}
148  src/parse/numeric.rs  Normal file
@@ -0,0 +1,148 @@
+use std::num::IntErrorKind;
+use std::ops::Range;
+use std::rc::Rc;
+
+use ordered_float::NotNan;
+
+use super::context::Context;
+use super::errors::NaNLiteral;
+use super::lexer::split_filter;
+use crate::error::{ProjectError, ProjectResult};
+use crate::foreign::Atom;
+use crate::systems::stl::Numeric;
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum NumErrorKind {
+  NaN,
+  Overflow,
+  InvalidDigit,
+}
+impl NumErrorKind {
+  fn from_int(kind: &IntErrorKind) -> Self {
+    match kind {
+      IntErrorKind::InvalidDigit => Self::InvalidDigit,
+      IntErrorKind::NegOverflow | IntErrorKind::PosOverflow => Self::Overflow,
+      _ => panic!("Impossible error condition"),
+    }
+  }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct NumError {
+  pub range: Range<usize>,
+  pub kind: NumErrorKind,
+}
+
+impl NumError {
+  pub fn into_proj(
+    self,
+    len: usize,
+    tail: &str,
+    ctx: &(impl Context + ?Sized),
+  ) -> Rc<dyn ProjectError> {
+    let start = ctx.source().len() - tail.len() - len + self.range.start;
+    let location = ctx.range_loc(start..start + self.range.len());
+    match self.kind {
+      NumErrorKind::NaN => NaNLiteral(location).rc(),
+      _ => panic!(),
+      // NumErrorKind::Int(iek) => IntError(location, iek).rc(),
+    }
+  }
+}
+
+pub fn parse_num(string: &str) -> Result<Numeric, NumError> {
+  let overflow_err =
+    NumError { range: 0..string.len(), kind: NumErrorKind::Overflow };
+  let (radix, noprefix, pos) =
+    (string.strip_prefix("0x").map(|s| (16u8, s, 2)))
+      .or_else(|| string.strip_prefix("0b").map(|s| (2u8, s, 2)))
+      .or_else(|| string.strip_prefix("0o").map(|s| (8u8, s, 2)))
+      .unwrap_or((10u8, string, 0));
+  // identity
+  let (base, exponent) = match noprefix.split_once('p') {
+    Some((b, e)) => {
+      let (s, d, len) = e.strip_prefix('-').map_or((1, e, 0), |ue| (-1, ue, 1));
+      (b, s * int_parse(d, radix, pos + b.len() + 1 + len)? as i32)
+    },
+    None => (noprefix, 0),
+  };
+  match base.split_once('.') {
+    None => {
+      let base_usize = int_parse(base, radix, pos)?;
+      if let Ok(pos_exp) = u32::try_from(exponent) {
+        if let Some(radical) = usize::from(radix).checked_pow(pos_exp) {
+          let number = base_usize.checked_mul(radical).ok_or(overflow_err)?;
+          return Ok(Numeric::Uint(number));
+        }
+      }
+      let f = (base_usize as f64) * (radix as f64).powi(exponent);
+      let err = NumError { range: 0..string.len(), kind: NumErrorKind::NaN };
+      Ok(Numeric::Float(NotNan::new(f).map_err(|_| err)?))
+    },
+    Some((whole, part)) => {
+      let whole_n = int_parse(whole, radix, pos)? as f64;
+      let part_n = int_parse(part, radix, pos + whole.len() + 1)? as f64;
+      let real_val = whole_n + (part_n / radix.pow(part.len() as u32) as f64);
+      let f = real_val * (radix as f64).powi(exponent);
+      Ok(Numeric::Float(NotNan::new(f).expect("None of the inputs are NaN")))
+    },
+  }
+}
+
+fn int_parse(s: &str, radix: u8, start: usize) -> Result<usize, NumError> {
+  let s = s.chars().filter(|c| *c != '_').collect::<String>();
+  let range = start..(start + s.len());
+  usize::from_str_radix(&s, radix as u32)
+    .map_err(|e| NumError { range, kind: NumErrorKind::from_int(e.kind()) })
+}
+
+pub fn numchar(c: char) -> bool { c.is_alphanumeric() | "._-".contains(c) }
+pub fn numstart(c: char) -> bool { c.is_ascii_digit() }
+
+pub fn lex_numeric<'a>(
+  data: &'a str,
+  ctx: &dyn Context,
+) -> Option<ProjectResult<(Atom, &'a str)>> {
+  data.chars().next().filter(|c| numstart(*c)).map(|_| {
+    let (num_str, tail) = split_filter(data, numchar);
+    match parse_num(num_str) {
+      Ok(Numeric::Float(f)) => Ok((Atom::new(f), tail)),
+      Ok(Numeric::Uint(i)) => Ok((Atom::new(i), tail)),
+      Err(e) => Err(e.into_proj(num_str.len(), tail, ctx)),
+    }
+  })
+}
+
+#[cfg(test)]
+mod test {
+  use crate::parse::numeric::parse_num;
+  use crate::systems::stl::Numeric;
+
+  #[test]
+  fn just_ints() {
+    let test = |s, n| assert_eq!(parse_num(s), Ok(Numeric::Uint(n)));
+    test("12345", 12345);
+    test("0xcafebabe", 0xcafebabe);
+    test("0o751", 0o751);
+    test("0b111000111", 0b111000111);
+  }
+
+  #[test]
+  fn decimals() {
+    let test = |s, n| assert_eq!(parse_num(s).map(|n| n.as_f64()), Ok(n));
+    test("3.1417", 3.1417);
+    test("3.1417", 3_f64 + 1417_f64 / 10000_f64);
+    test("0xf.cafe", 0xf as f64 + 0xcafe as f64 / 0x10000 as f64);
+    test("34p3", 34000f64);
+    test("0x2p3", (0x2 * 0x1000) as f64);
+    test("1.5p3", 1500f64);
+    test("0x2.5p3", (0x25 * 0x100) as f64);
+  }
+}
+
+#[must_use]
+pub fn print_nat16(num: NotNan<f64>) -> String {
+  let exp = num.log(16.0).floor();
+  let man = num / 16_f64.powf(exp);
+  format!("{man}p{exp:.0}")
+}
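Note (added for context): a small usage sketch of the new `parse_num` above. The values follow its semantics — an optional 0x/0b/0o radix prefix, `_` digit grouping, and a `p` exponent applied in the literal's own radix — and the paths assume crate-internal use via the re-exports shown in the parse module diff.

use crate::parse::parse_num;
use crate::systems::stl::Numeric;

#[test]
fn numeric_literal_shapes() {
  // `_` only groups digits, it does not change the value
  assert_eq!(parse_num("1_000_000"), Ok(Numeric::Uint(1_000_000)));
  // the `p` exponent is applied in the literal's own radix: 0x2 * 16^3
  assert_eq!(parse_num("0x2p3"), Ok(Numeric::Uint(0x2 * 0x1000)));
  // a dot always produces a float: 1.5 * 10^3
  assert_eq!(parse_num("1.5p3").map(|n| n.as_f64()), Ok(1500f64));
}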
@@ -1,32 +0,0 @@
-use chumsky::prelude::*;
-
-use super::decls::SimpleParser;
-
-#[must_use]
-pub fn operators_parser<T>(
-  f: impl Fn(String) -> T,
-) -> impl SimpleParser<char, Vec<T>> {
-  filter(|c: &char| c != &']' && !c.is_whitespace())
-    .repeated()
-    .at_least(1)
-    .collect()
-    .map(f)
-    .separated_by(text::whitespace())
-    .allow_leading()
-    .allow_trailing()
-    .at_least(1)
-    .delimited_by(just("operators["), just(']'))
-}
-
-#[cfg(test)]
-mod test {
-  use chumsky::Parser;
-
-  use super::operators_parser;
-
-  #[test]
-  fn operators_scratchpad() {
-    let parsely = operators_parser(|s| s);
-    println!("{:?}", parsely.parse("operators[$ |> =>]"))
-  }
-}
@@ -1,31 +0,0 @@
-use chumsky::prelude::*;
-use chumsky::Parser;
-
-use super::context::Context;
-use super::decls::SimpleParser;
-use super::number::int_parser;
-use crate::ast::{PHClass, Placeholder};
-
-#[must_use]
-pub fn placeholder_parser(
-  ctx: impl Context,
-) -> impl SimpleParser<char, Placeholder> {
-  choice((
-    just("...").to(Some(true)),
-    just("..").to(Some(false)),
-    empty().to(None),
-  ))
-  .then(just("$").ignore_then(text::ident()))
-  .then(just(":").ignore_then(int_parser()).or_not())
-  .try_map(move |((vec_nonzero, name), vec_prio), span| {
-    let name = ctx.interner().i(&name);
-    if let Some(nonzero) = vec_nonzero {
-      let prio = vec_prio.unwrap_or_default();
-      Ok(Placeholder { name, class: PHClass::Vec { nonzero, prio } })
-    } else if vec_prio.is_some() {
-      Err(Simple::custom(span, "Scalar placeholders have no priority"))
-    } else {
-      Ok(Placeholder { name, class: PHClass::Scalar })
-    }
-  })
-}
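Note (added for context): the placeholder forms this removed parser accepted, read off its try_map branch above; listed as illustrative comments only, the spellings of the names are arbitrary.

// "$x"         -> Placeholder { name: "x",    class: PHClass::Scalar }
// "..$body"    -> Placeholder { name: "body", class: PHClass::Vec { nonzero: false, prio: 0 } }
// "...$tail:2" -> Placeholder { name: "tail", class: PHClass::Vec { nonzero: true,  prio: 2 } }
// "$x:1"       -> error: "Scalar placeholders have no priority"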
@@ -18,7 +18,6 @@ use crate::representations::location::Location;
 use crate::representations::sourcefile::{FileEntry, MemberKind, ModuleBlock};
 use crate::representations::VName;
 use crate::sourcefile::{FileEntryKind, Import, Member};
-use crate::Primitive;
 
 pub fn split_lines(module: Stream<'_>) -> impl Iterator<Item = Stream<'_>> {
   let mut source = module.data.iter().enumerate();
@@ -52,36 +51,44 @@ pub fn split_lines(module: Stream<'_>) -> impl Iterator<Item = Stream<'_>> {
 
 pub fn parse_module_body(
   cursor: Stream<'_>,
-  ctx: impl Context,
+  ctx: &impl Context,
 ) -> ProjectResult<Vec<FileEntry>> {
   split_lines(cursor)
     .map(Stream::trim)
     .filter(|l| !l.data.is_empty())
     .map(|l| {
-      Ok(FileEntry {
-        locations: vec![l.location()],
-        kind: parse_line(l, ctx.clone())?,
+      parse_line(l, ctx).map(move |kinds| {
+        kinds
+          .into_iter()
+          .map(move |kind| FileEntry { locations: vec![l.location()], kind })
       })
     })
+    .flatten_ok()
     .collect()
 }
 
 pub fn parse_line(
   cursor: Stream<'_>,
-  ctx: impl Context,
-) -> ProjectResult<FileEntryKind> {
-  match cursor.get(0)?.lexeme {
+  ctx: &impl Context,
+) -> ProjectResult<Vec<FileEntryKind>> {
+  for line_parser in ctx.line_parsers() {
+    if let Some(result) = line_parser(cursor, ctx) {
+      return result;
+    }
+  }
+  match &cursor.get(0)?.lexeme {
     Lexeme::BR | Lexeme::Comment(_) => parse_line(cursor.step()?, ctx),
-    Lexeme::Export => parse_export_line(cursor.step()?, ctx),
-    Lexeme::Const | Lexeme::Macro | Lexeme::Module | Lexeme::Operators(_) =>
-      Ok(FileEntryKind::Member(Member {
+    Lexeme::Name(n) if **n == "export" =>
+      parse_export_line(cursor.step()?, ctx).map(|k| vec![k]),
+    Lexeme::Name(n) if ["const", "macro", "module"].contains(&n.as_str()) =>
+      Ok(vec![FileEntryKind::Member(Member {
        kind: parse_member(cursor, ctx)?,
        exported: false,
-      })),
-    Lexeme::Import => {
+      })]),
+    Lexeme::Name(n) if **n == "import" => {
      let (imports, cont) = parse_multiname(cursor.step()?, ctx)?;
      cont.expect_empty()?;
-      Ok(FileEntryKind::Import(imports))
+      Ok(vec![FileEntryKind::Import(imports)])
    },
    _ => {
      let err = BadTokenInRegion {
@@ -95,23 +102,23 @@ pub fn parse_line(
 
 pub fn parse_export_line(
   cursor: Stream<'_>,
-  ctx: impl Context,
+  ctx: &impl Context,
 ) -> ProjectResult<FileEntryKind> {
   let cursor = cursor.trim();
-  match cursor.get(0)?.lexeme {
+  match &cursor.get(0)?.lexeme {
    Lexeme::NS => {
      let (names, cont) = parse_multiname(cursor.step()?, ctx)?;
      cont.expect_empty()?;
      let names = (names.into_iter())
        .map(|Import { name, path, location }| match (name, &path[..]) {
          (Some(n), []) => Ok((n, location)),
-          (None, _) => Err(GlobExport { location }.rc()),
-          _ => Err(NamespacedExport { location }.rc()),
+          (None, _) => Err(GlobExport(location).rc()),
+          _ => Err(NamespacedExport(location).rc()),
        })
        .collect::<Result<Vec<_>, _>>()?;
      Ok(FileEntryKind::Export(names))
    },
-    Lexeme::Const | Lexeme::Macro | Lexeme::Module | Lexeme::Operators(_) =>
+    Lexeme::Name(n) if ["const", "macro", "module"].contains(&n.as_str()) =>
      Ok(FileEntryKind::Member(Member {
        kind: parse_member(cursor, ctx)?,
        exported: true,
@@ -128,26 +135,22 @@ pub fn parse_export_line(
 
 fn parse_member(
   cursor: Stream<'_>,
-  ctx: impl Context,
+  ctx: &impl Context,
 ) -> ProjectResult<MemberKind> {
   let (typemark, cursor) = cursor.trim().pop()?;
   match &typemark.lexeme {
-    Lexeme::Const => {
+    Lexeme::Name(n) if **n == "const" => {
      let constant = parse_const(cursor, ctx)?;
      Ok(MemberKind::Constant(constant))
    },
-    Lexeme::Macro => {
+    Lexeme::Name(n) if **n == "macro" => {
      let rule = parse_rule(cursor, ctx)?;
      Ok(MemberKind::Rule(rule))
    },
-    Lexeme::Module => {
+    Lexeme::Name(n) if **n == "module" => {
      let module = parse_module(cursor, ctx)?;
      Ok(MemberKind::Module(module))
    },
-    Lexeme::Operators(ops) => {
-      cursor.trim().expect_empty()?;
-      Ok(MemberKind::Operators(ops[..].to_vec()))
-    },
    _ => {
      let err =
        BadTokenInRegion { entry: typemark.clone(), region: "member type" };
@@ -158,20 +161,20 @@ fn parse_member(
 
 fn parse_rule(
   cursor: Stream<'_>,
-  ctx: impl Context,
+  ctx: &impl Context,
 ) -> ProjectResult<Rule<VName>> {
   let (pattern, prio, template) = cursor.find_map("arrow", |a| match a {
    Lexeme::Arrow(p) => Some(*p),
    _ => None,
   })?;
-  let (pattern, _) = parse_exprv(pattern, None, ctx.clone())?;
+  let (pattern, _) = parse_exprv(pattern, None, ctx)?;
   let (template, _) = parse_exprv(template, None, ctx)?;
   Ok(Rule { pattern, prio, template })
 }
 
 fn parse_const(
   cursor: Stream<'_>,
-  ctx: impl Context,
+  ctx: &impl Context,
 ) -> ProjectResult<Constant> {
   let (name_ent, cursor) = cursor.trim().pop()?;
   let name = ExpectedName::expect(name_ent)?;
@@ -183,7 +186,7 @@ fn parse_const(
 
 fn parse_module(
   cursor: Stream<'_>,
-  ctx: impl Context,
+  ctx: &impl Context,
 ) -> ProjectResult<ModuleBlock> {
   let (name_ent, cursor) = cursor.trim().pop()?;
   let name = ExpectedName::expect(name_ent)?;
@@ -195,11 +198,11 @@ fn parse_module(
   Ok(ModuleBlock { name, body })
 }
 
-fn parse_exprv(
-  mut cursor: Stream<'_>,
+fn parse_exprv<'a>(
+  mut cursor: Stream<'a>,
   paren: Option<char>,
-  ctx: impl Context,
-) -> ProjectResult<(Vec<Expr<VName>>, Stream<'_>)> {
+  ctx: &impl Context,
+) -> ProjectResult<(Vec<Expr<VName>>, Stream<'a>)> {
   let mut output = Vec::new();
   cursor = cursor.trim();
   while let Ok(current) = cursor.get(0) {
@@ -207,11 +210,9 @@ fn parse_exprv(
      Lexeme::BR | Lexeme::Comment(_) => unreachable!("Fillers skipped"),
      Lexeme::At | Lexeme::Type =>
        return Err(ReservedToken { entry: current.clone() }.rc()),
-      Lexeme::Literal(l) => {
-        output.push(Expr {
-          value: Clause::P(Primitive::Literal(l.clone())),
-          location: current.location(),
-        });
+      Lexeme::Atom(a) => {
+        let value = Clause::Atom(a.clone());
+        output.push(Expr { value, location: current.location() });
        cursor = cursor.step()?;
      },
      Lexeme::Placeh(ph) => {
@@ -223,25 +224,23 @@ fn parse_exprv(
      },
      Lexeme::Name(n) => {
        let location = cursor.location();
-        let mut fullname = vec![n.clone()];
-        while cursor.get(1).ok().map(|e| &e.lexeme) == Some(&Lexeme::NS) {
+        let mut fullname: VName = vec![n.clone()];
+        while cursor.get(1).map_or(false, |e| e.lexeme.strict_eq(&Lexeme::NS)) {
          fullname.push(ExpectedName::expect(cursor.get(2)?)?);
          cursor = cursor.step()?.step()?;
        }
        output.push(Expr { value: Clause::Name(fullname), location });
        cursor = cursor.step()?;
      },
-      Lexeme::NS =>
-        return Err(LeadingNS { location: current.location() }.rc()),
+      Lexeme::NS => return Err(LeadingNS(current.location()).rc()),
      Lexeme::RP(c) =>
        return if Some(*c) == paren {
          Ok((output, cursor.step()?))
        } else {
-          Err(MisalignedParen { entry: cursor.get(0)?.clone() }.rc())
+          Err(MisalignedParen(cursor.get(0)?.clone()).rc())
        },
      Lexeme::LP(c) => {
-        let (result, leftover) =
-          parse_exprv(cursor.step()?, Some(*c), ctx.clone())?;
+        let (result, leftover) = parse_exprv(cursor.step()?, Some(*c), ctx)?;
        output.push(Expr {
          value: Clause::S(*c, Rc::new(result)),
          location: cursor.get(0)?.location().to(leftover.fallback.location()),
@@ -250,9 +249,9 @@ fn parse_exprv(
      },
      Lexeme::BS => {
        let dot = ctx.interner().i(".");
-        let (arg, body) =
-          cursor.step()?.find("A '.'", |l| l == &Lexeme::Name(dot.clone()))?;
-        let (arg, _) = parse_exprv(arg, None, ctx.clone())?;
+        let (arg, body) = (cursor.step())?
+          .find("A '.'", |l| l.strict_eq(&Lexeme::Name(dot.clone())))?;
+        let (arg, _) = parse_exprv(arg, None, ctx)?;
        let (body, leftover) = parse_exprv(body, paren, ctx)?;
        output.push(Expr {
          location: cursor.location(),
@@ -278,7 +277,7 @@ fn vec_to_single(
   v: Vec<Expr<VName>>,
 ) -> ProjectResult<Expr<VName>> {
   match v.len() {
-    0 => return Err(UnexpectedEOL { entry: fallback.clone() }.rc()),
+    0 => Err(UnexpectedEOL { entry: fallback.clone() }.rc()),
    1 => Ok(v.into_iter().exactly_one().unwrap()),
    _ => Ok(Expr {
      location: expr_slice_location(&v),
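Note (added for context): parse_line now consults the registered line parsers before its built-in keyword handling, and the first plugin that claims the line wins. A simplified, self-contained sketch of that dispatch pattern — the type names here are hypothetical stand-ins, not the crate's actual LineParser trait:

type Parsed = Vec<String>;
type Plugin = dyn Fn(&str) -> Option<Result<Parsed, String>>;

fn parse_line_with_plugins(line: &str, plugins: &[&Plugin]) -> Result<Parsed, String> {
  for plugin in plugins.iter().copied() {
    if let Some(result) = plugin(line) {
      return result; // the first matching plugin short-circuits
    }
  }
  // otherwise fall back to the built-in line kinds (import, export, const, macro, module)
  Err(format!("no parser claimed the line: {line}"))
}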
@@ -1,50 +1,117 @@
-use chumsky::prelude::*;
-use chumsky::{self, Parser};
+use itertools::Itertools;
 
-use super::decls::SimpleParser;
+use super::context::Context;
+use super::errors::{BadCodePoint, BadEscapeSequence, NoStringEnd, NotHex};
+use crate::error::{ProjectError, ProjectResult};
+use crate::foreign::Atom;
+use crate::OrcString;
 
-/// Parses a text character that is not the specified delimiter
-#[must_use]
-fn text_parser(delim: char) -> impl SimpleParser<char, char> {
-  // Copied directly from Chumsky's JSON example.
-  let escape = just('\\').ignore_then(
-    just('\\')
-      .or(just('/'))
-      .or(just('"'))
-      .or(just('b').to('\x08'))
-      .or(just('f').to('\x0C'))
-      .or(just('n').to('\n'))
-      .or(just('r').to('\r'))
-      .or(just('t').to('\t'))
-      .or(
-        just('u').ignore_then(
-          filter(|c: &char| c.is_ascii_hexdigit())
-            .repeated()
-            .exactly(4)
-            .collect::<String>()
-            .validate(|digits, span, emit| {
-              char::from_u32(u32::from_str_radix(&digits, 16).unwrap())
-                .unwrap_or_else(|| {
-                  emit(Simple::custom(span, "invalid unicode character"));
-                  '\u{FFFD}' // unicode replacement character
-                })
-            }),
-        ),
-      ),
-  );
-  filter(move |&c| c != '\\' && c != delim).or(escape)
+pub enum StringErrorKind {
+  NotHex,
+  BadCodePoint,
+  BadEscSeq,
 }
 
-/// Parse a string between double quotes
-#[must_use]
-pub fn str_parser() -> impl SimpleParser<char, String> {
-  just('"')
-    .ignore_then(
-      text_parser('"').map(Some)
-        .or(just("\\\n").then(just(' ').or(just('\t')).repeated()).map(|_| None)) // Newlines preceded by backslashes are ignored along with all following indentation.
-        .repeated(),
-    )
-    .then_ignore(just('"'))
-    .flatten()
-    .collect()
+pub struct StringError {
+  pos: usize,
+  kind: StringErrorKind,
+}
+pub fn parse_string(str: &str) -> Result<String, StringError> {
+  let mut target = String::new();
+  let mut iter = str.char_indices();
+  while let Some((_, c)) = iter.next() {
+    if c != '\\' {
+      target.push(c);
+      continue;
+    }
+    let (mut pos, code) = iter.next().expect("lexer would have continued");
+    let next = match code {
+      c @ ('\\' | '/' | '"') => c,
+      'b' => '\x08',
+      'f' => '\x0f',
+      'n' => '\n',
+      'r' => '\r',
+      't' => '\t',
+      '\n' => 'skipws: loop {
+        match iter.next() {
+          None => return Ok(target),
+          Some((_, c)) =>
+            if !c.is_whitespace() {
+              break 'skipws c;
+            },
+        }
+      },
+      'u' => {
+        let acc = ((0..4).rev())
+          .map(|radical| {
+            let (j, c) = (iter.next())
+              .ok_or(StringError { pos, kind: StringErrorKind::NotHex })?;
+            pos = j;
+            let b =
+              u32::from_str_radix(&String::from(c), 16).map_err(|_| {
+                StringError { pos, kind: StringErrorKind::NotHex }
+              })?;
+            Ok(16u32.pow(radical) + b)
+          })
+          .fold_ok(0, u32::wrapping_add)?;
+        char::from_u32(acc)
+          .ok_or(StringError { pos, kind: StringErrorKind::BadCodePoint })?
+      },
+      _ => return Err(StringError { pos, kind: StringErrorKind::BadEscSeq }),
+    };
+    target.push(next);
+  }
+  Ok(target)
+}
+
+pub fn lex_string<'a>(
+  data: &'a str,
+  ctx: &dyn Context,
+) -> Option<ProjectResult<(Atom, &'a str)>> {
+  data.strip_prefix('"').map(|data| {
+    let mut leftover = data;
+    return loop {
+      let (inside, outside) = (leftover.split_once('"'))
+        .ok_or_else(|| NoStringEnd(ctx.location(data.len(), "")).rc())?;
+      let backslashes = inside.chars().rev().take_while(|c| *c == '\\').count();
+      if backslashes % 2 == 0 {
+        // cut form tail to recoup what string_content doesn't have
+        let (string_data, tail) = data.split_at(data.len() - outside.len() - 1);
+        let tail = &tail[1..]; // push the tail past the end quote
+        let string = parse_string(string_data).map_err(|e| {
+          let start = ctx.pos(data) + e.pos;
+          let location = ctx.range_loc(start..start + 1);
+          match e.kind {
+            StringErrorKind::NotHex => NotHex(location).rc(),
+            StringErrorKind::BadCodePoint => BadCodePoint(location).rc(),
+            StringErrorKind::BadEscSeq => BadEscapeSequence(location).rc(),
+          }
+        })?;
+        let tok = ctx.interner().i(&string);
+        break Ok((Atom::new(OrcString::from(tok)), tail));
+      } else {
+        leftover = outside;
+      }
+    };
+  })
+}
+// TODO: rewrite the tree building pipeline step to load files
+
+#[cfg(test)]
+mod test {
+  use super::lex_string;
+  use crate::parse::context::MockContext;
+  use crate::{Interner, OrcString};
+
+  #[test]
+  fn plain_string() {
+    let source = r#""hello world!" - says the programmer"#;
+    let i = Interner::new();
+    let (data, tail) = lex_string(source, &MockContext(&i))
+      .expect("the snippet starts with a quote")
+      .expect("it contains a valid string");
+    assert_eq!(data.try_downcast::<OrcString>().unwrap().as_str(), "hello world!");
+    assert_eq!(tail, " - says the programmer");
+  }
 }
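Note (added for context): a quick usage sketch of the new parse_string above, limited to the simple escapes whose behaviour follows directly from the match on the escape code; paths assume crate-internal use, as in the string tests.

use crate::parse::parse_string;

#[test]
fn simple_escapes() {
  // \t and \n map to control characters, \\ and \" to the literal character
  assert_eq!(parse_string(r"tab\tnewline\n").ok().as_deref(), Some("tab\tnewline\n"));
  assert_eq!(parse_string(r#"say \"hi\""#).ok().as_deref(), Some("say \"hi\""));
  // a backslash before a newline swallows the newline and any indentation after it
  assert_eq!(parse_string("one \\\n    two").ok().as_deref(), Some("one two"));
}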
@@ -8,7 +8,7 @@ use crate::representations::project::{
   ItemKind, ProjectExt, ProjectItem, ProjectMod,
 };
 use crate::tree::{ModEntry, ModMember, Module};
-use crate::utils::pure_push::pushed;
+use crate::utils::pure_seq::pushed;
 use crate::{Interner, ProjectTree, Tok, VName};
 
 #[must_use]
@@ -74,7 +74,6 @@ fn resolve_aliases_rec(
     ModMember::Sub(module) =>
       ModMember::Sub(resolve_aliases_rec(root, module, updated, false)),
     ModMember::Item(item) => ModMember::Item(ProjectItem {
-      is_op: item.is_op,
       kind: match &item.kind {
         ItemKind::Const(value) => ItemKind::Const(process_expr(value)),
         other => other.clone(),
@@ -1,6 +1,6 @@
 //! Source loader callback definition and builtin implementations
 use std::path::{Path, PathBuf};
-use std::rc::Rc;
+use std::sync::Arc;
 use std::{fs, io};
 
 use hashbrown::{HashMap, HashSet};
@@ -25,7 +25,7 @@ impl ProjectError for FileLoadingError {
     "Neither a file nor a directory could be read from the requested path"
   }
   fn one_position(&self) -> crate::Location {
-    Location::File(Rc::new(self.path.clone()))
+    Location::File(Arc::new(self.path.clone()))
   }
   fn message(&self) -> String {
     format!("File: {}\nDirectory: {}", self.file, self.dir)
@@ -37,10 +37,10 @@ impl ProjectError for FileLoadingError {
 #[derive(Clone, PartialEq, Eq, Hash)]
 pub enum Loaded {
   /// Conceptually equivalent to a sourcefile
-  Code(Rc<String>),
+  Code(Arc<String>),
   /// Conceptually equivalent to the list of *.orc files in a folder, without
   /// the extension
-  Collection(Rc<Vec<String>>),
+  Collection(Arc<Vec<String>>),
 }
 impl Loaded {
   /// Is the loaded item source code (not a collection)?
@@ -56,7 +56,7 @@ pub fn load_file(root: &Path, path: &[Tok<String>]) -> IOResult {
   let full_path = path.iter().fold(root.to_owned(), |p, t| p.join(t.as_str()));
   let file_path = full_path.with_extension("orc");
   let file_error = match fs::read_to_string(file_path) {
-    Ok(string) => return Ok(Loaded::Code(Rc::new(string))),
+    Ok(string) => return Ok(Loaded::Code(Arc::new(string))),
     Err(err) => err,
   };
   let dir = match fs::read_dir(&full_path) {
@@ -83,7 +83,7 @@ pub fn load_file(root: &Path, path: &[Tok<String>]) -> IOResult {
       })
     })
     .collect();
-  Ok(Loaded::Collection(Rc::new(names)))
+  Ok(Loaded::Collection(Arc::new(names)))
 }
 
 /// Generates a cached file loader for a directory
@@ -102,7 +102,7 @@ pub fn load_embed<T: 'static + RustEmbed>(path: &str, ext: &str) -> IOResult {
   if let Some(file) = T::get(&file_path) {
     let s =
       String::from_utf8(file.data.to_vec()).expect("Embed must be valid UTF-8");
-    Ok(Loaded::Code(Rc::new(s)))
+    Ok(Loaded::Code(Arc::new(s)))
   } else {
     let entries = T::iter()
       .map(|c| c.to_string())
@@ -121,7 +121,7 @@ pub fn load_embed<T: 'static + RustEmbed>(path: &str, ext: &str) -> IOResult {
       })
     })
     .collect::<Vec<String>>();
-    Ok(Loaded::Collection(Rc::new(entries)))
+    Ok(Loaded::Collection(Arc::new(entries)))
   }
 }
 
@@ -165,9 +165,9 @@ pub fn embed_to_map<T: 'static + RustEmbed>(
     }
   }
   (files.into_iter())
-    .map(|(k, s)| (k, Loaded::Code(Rc::new(s))))
+    .map(|(k, s)| (k, Loaded::Code(Arc::new(s))))
     .chain((dirs.into_iter()).map(|(k, entv)| {
-      (k, Loaded::Collection(Rc::new(entv.into_iter().collect())))
+      (k, Loaded::Collection(Arc::new(entv.into_iter().collect())))
     }))
     .collect()
 }
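Note (added for context): the Rc-to-Arc swap in this file serves the commit's "all language constructs are now Send" goal — Rc<T> is !Send, so a Loaded::Code(Rc<String>) could not cross thread boundaries. A minimal compile-time check, illustrative only:

use std::sync::Arc;

fn assert_send<T: Send>(_: T) {}

fn demo() {
  assert_send(Arc::new(String::from("source text"))); // compiles: Arc<String> is Send
  // assert_send(std::rc::Rc::new(String::new()));    // would not compile: Rc<T> is !Send
}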
@@ -5,5 +5,6 @@ mod import_abs_path;
 mod parse_layer;
 mod project_tree;
 mod source_loader;
+// mod tree_loader;
 
 pub use parse_layer::parse_layer;
@@ -3,6 +3,7 @@ use super::file_loader::IOResult;
 use super::{project_tree, source_loader};
 use crate::error::ProjectResult;
 use crate::interner::{Interner, Tok};
+use crate::parse::{LexerPlugin, LineParser};
 use crate::representations::sourcefile::FileEntry;
 use crate::representations::VName;
 use crate::utils::never;
@@ -21,10 +22,14 @@ pub fn parse_layer<'a>(
   loader: &impl Fn(&[Tok<String>]) -> IOResult,
   environment: &'a ProjectTree<VName>,
   prelude: &[FileEntry],
+  lexer_plugins: &[&dyn LexerPlugin],
+  line_parsers: &[&dyn LineParser],
   i: &Interner,
 ) -> ProjectResult<ProjectTree<VName>> {
+  let sl_ctx =
+    source_loader::Context { prelude, i, lexer_plugins, line_parsers };
   let (preparsed, source) =
-    source_loader::load_source(targets, prelude, i, loader, &|path| {
+    source_loader::load_source(targets, sl_ctx, loader, &|path| {
       environment.0.walk_ref(&[], path, false).is_ok()
     })?;
   let tree =
@@ -13,7 +13,7 @@ use crate::sourcefile::{
 };
 use crate::tree::{ModEntry, ModMember, Module};
 use crate::utils::get_or::get_or_default;
-use crate::utils::pure_push::pushed_ref;
+use crate::utils::pure_seq::pushed_ref;
 use crate::{Tok, VName};
 
 #[must_use = "A submodule may not be integrated into the tree"]
@@ -28,7 +28,7 @@ pub struct TreeReport {
 pub fn build_tree(
   path: &VName,
   source: Vec<FileEntry>,
-  Module { entries, extra }: PreMod,
+  Module { entries, .. }: PreMod,
   imports: ImpMod,
   prelude: &[FileEntry],
 ) -> ProjectResult<TreeReport> {
@@ -56,20 +56,11 @@ pub fn build_tree(
         MemberKind::Constant(Constant { name, value }) => {
           consts.insert(name, value /* .prefix(path, &|_| false) */);
         },
-        MemberKind::Operators(_) => (),
         MemberKind::Rule(rule) => rule_fragments.push(rule),
       },
     }
   }
-  let mod_details = extra.details().expect("Directories handled elsewhere");
-  let rules = (mod_details.patterns.iter())
-    .zip(rule_fragments.into_iter())
-    .map(|(p, Rule { prio, template: t, .. })| {
-      // let p = p.iter().map(|e| e.prefix(path, &|_| false)).collect();
-      // let t = t.into_iter().map(|e| e.prefix(path, &|_| false)).collect();
-      Rule { pattern: p.clone(), prio, template: t }
-    })
-    .collect();
+  let rules = rule_fragments;
   let (pre_subs, pre_items) = (entries.into_iter())
     .partition_map::<HashMap<_, _>, HashMap<_, _>, _, _, _>(
       |(k, ModEntry { exported, member })| match member {
@@ -98,7 +89,7 @@ pub fn build_tree(
         Ok((k, ModEntry { exported, member }))
       })
       .chain((pre_items.into_iter()).map(
-        |(k, (exported, PreItem { has_value, is_op, location }))| {
+        |(k, (exported, PreItem { has_value, location }))| {
          let item = match imports_from.get(&k) {
            Some(_) if has_value => {
              // Local value cannot be assigned to imported key
@@ -112,12 +103,10 @@ pub fn build_tree(
            },
            None => {
              let k = consts.remove(&k).map_or(ItemKind::None, ItemKind::Const);
-              ProjectItem { is_op, kind: k }
-            },
-            Some(report) => ProjectItem {
-              is_op: is_op | report.is_op,
-              kind: ItemKind::Alias(report.source.clone()),
-            },
+              ProjectItem { kind: k }
+            },
+            Some(report) =>
+              ProjectItem { kind: ItemKind::Alias(report.source.clone()) },
          };
          Ok((k, ModEntry { exported, member: ModMember::Item(item) }))
        },
@@ -129,7 +118,6 @@ pub fn build_tree(
      exported: false,
      member: ModMember::Item(ProjectItem {
        kind: ItemKind::Alias(from.source.clone()),
-        is_op: from.is_op,
      }),
    })
  });
@@ -11,7 +11,7 @@ use crate::representations::project::ImpReport;
 use crate::sourcefile::{absolute_path, Import};
 use crate::tree::{ErrKind, ModEntry, ModMember, Module, WalkError};
 use crate::utils::boxed_iter::{box_chain, box_once};
-use crate::utils::pure_push::pushed_ref;
+use crate::utils::pure_seq::pushed_ref;
 use crate::utils::{unwrap_or, BoxedIter};
 use crate::{Interner, ProjectTree, Tok, VName};
 
@@ -65,17 +65,13 @@ pub fn assert_visible_overlay<'a>(
   })
 }
 
-pub fn process_donor_module<'a, TItem: Clone>(
-  module: &'a Module<TItem, impl Clone>,
+pub fn process_donor_module<TItem: Clone>(
+  module: &Module<TItem, impl Clone>,
   abs_path: Rc<VName>,
-  is_op: impl Fn(&TItem) -> bool + 'a,
-) -> impl Iterator<Item = (Tok<String>, VName, bool)> + 'a {
-  (module.entries.iter()).filter(|(_, ent)| ent.exported).map(
-    move |(n, ent)| {
-      let is_op = ent.item().map_or(false, &is_op);
-      (n.clone(), pushed_ref(abs_path.as_ref(), n.clone()), is_op)
-    },
-  )
+) -> impl Iterator<Item = (Tok<String>, VName)> + '_ {
+  (module.entries.iter())
+    .filter(|(_, ent)| ent.exported)
+    .map(move |(n, _)| (n.clone(), pushed_ref(abs_path.as_ref(), n.clone())))
 }
 
 pub fn import_tree(
@@ -99,14 +95,7 @@ pub fn import_tree(
       // println!("Old root: {:#?}", &prev_root.0);
       panic!("{}", e.at(location))
     })?;
-    let is_op = (root.0.walk1_ref(&[], &abs_path, false))
-      .map(|(ent, _)| ent.item().map_or(false, |i| i.is_op))
-      .or_else(|e| if e.kind == ErrKind::Missing {
-        (prev_root.0.walk1_ref(&[], &abs_path, false))
-          .map(|(ent, _)| ent.item().map_or(false, |i| i.is_op))
-      } else {Err(e)})
-      .map_err(|e| e.at(location))?;
-    box_once((name.clone(), abs_path, is_op))
+    box_once((name.clone(), abs_path))
   } else {
     let rc_path = Rc::new(abs_path);
     // wildcard imports are validated
@@ -116,8 +105,7 @@ pub fn import_tree(
     let new_imports = match (root.0).walk_ref(&[], &rc_path, false) {
       Err(e) if e.kind == ErrKind::Missing => Err(e),
       Err(e) => return Err(e.at(location)),
-      Ok(module)
-        => Ok(process_donor_module(module, rc_path.clone(), |i| i.is_op))
+      Ok(module) => Ok(process_donor_module(module, rc_path.clone()))
     };
     let old_m = match (prev_root.0).walk_ref(&[], &rc_path, false) {
       Err(e) if e.kind != ErrKind::Missing => return Err(e.at(location)),
@@ -134,7 +122,7 @@ pub fn import_tree(
       },
       Ok(old_m) => old_m,
     };
-    let it1 = process_donor_module(old_m, rc_path.clone(), |i| i.is_op);
+    let it1 = process_donor_module(old_m, rc_path.clone());
     match new_imports {
       Err(_) => Box::new(it1),
       Ok(it2) => box_chain!(it1, it2)
@@ -144,10 +132,10 @@ pub fn import_tree(
   // leaf sets flattened to leaves
   .flatten_ok()
   // translated to entries
-  .map_ok(|(name, source, is_op)| {
+  .map_ok(|(name, source)| {
     (name, ModEntry {
       exported: false, // this is irrelevant but needed
-      member: ModMember::Item(ImpReport { source, is_op }),
+      member: ModMember::Item(ImpReport { source }),
     })
   })
   .chain(
@@ -1,5 +1,3 @@
-use std::rc::Rc;
-
 use hashbrown::HashMap;
 use itertools::Itertools;
 
@@ -7,15 +5,14 @@ use super::build_tree::{build_tree, TreeReport};
 use super::import_tree::{import_tree, ImpMod};
 use crate::error::ProjectResult;
 use crate::pipeline::source_loader::{
-  LoadedSourceTable, PreExtra, PreItem, PreMod, Preparsed,
+  LoadedSourceTable, PreExtra, PreMod, Preparsed,
 };
-use crate::representations::project::{ImpReport, ProjectExt, ProjectMod};
+use crate::representations::project::{ProjectExt, ProjectMod};
 use crate::sourcefile::FileEntry;
 use crate::tree::{ModEntry, ModMember, Module};
-use crate::utils::never::{always, unwrap_always};
-use crate::utils::pure_push::pushed_ref;
+use crate::utils::pure_seq::pushed_ref;
 use crate::utils::unwrap_or;
-use crate::{parse, Interner, ProjectTree, Tok, VName};
+use crate::{Interner, ProjectTree, Tok, VName};
 
 pub fn rebuild_file(
   path: Vec<Tok<String>>,
@@ -23,35 +20,12 @@ pub fn rebuild_file(
   imports: ImpMod,
   source: &LoadedSourceTable,
   prelude: &[FileEntry],
-  i: &Interner,
 ) -> ProjectResult<ProjectMod<VName>> {
   let file = match &pre.extra {
     PreExtra::Dir => panic!("Dir should not hand this node off"),
     PreExtra::Submod(_) => panic!("should not have received this"),
     PreExtra::File(f) => f,
   };
-  let mut ops = Vec::new();
-  unwrap_always(imports.search_all((), &mut |_, module, ()| {
-    ops.extend(
-      (module.entries.iter())
-        .filter(|(_, ent)| {
-          matches!(ent.member, ModMember::Item(ImpReport { is_op: true, .. }))
-        })
-        .map(|(name, _)| name.clone()),
-    );
-    always(())
-  }));
-  unwrap_always(pre.search_all((), &mut |_, module, ()| {
-    ops.extend(
-      (module.entries.iter())
-        .filter(|(_, ent)| {
-          matches!(ent.member, ModMember::Item(PreItem { is_op: true, .. }))
-        })
-        .map(|(name, _)| name.clone()),
-    );
-    always(())
-  }));
-  let ctx = parse::ParsingContext::new(&ops, i, Rc::new(path.clone()));
   let src = source.get(&file.name).unwrap_or_else(|| {
     panic!(
       "{} should have been preparsed already. Preparsed files are {}",
@@ -62,13 +36,11 @@ pub fn rebuild_file(
       .join(", ")
     )
   });
-  let entries = parse::parse2(&src.text, ctx)?;
-  let TreeReport { entries: items, rules, imports_from } =
+  let entries = src.entries.clone();
+  let TreeReport { entries, rules, imports_from } =
     build_tree(&path, entries, pre, imports, prelude)?;
-  Ok(Module {
-    entries: items,
-    extra: ProjectExt { file: Some(path.clone()), path, imports_from, rules },
-  })
+  let file = Some(path.clone());
+  Ok(Module { entries, extra: ProjectExt { file, path, imports_from, rules } })
 }
 
 pub fn rebuild_dir(
@@ -77,15 +49,14 @@ pub fn rebuild_dir(
   mut imports: ImpMod,
   source: &LoadedSourceTable,
   prelude: &[FileEntry],
-  i: &Interner,
 ) -> ProjectResult<ProjectMod<VName>> {
   match pre.extra {
     PreExtra::Dir => (),
     PreExtra::File(_) =>
-      return rebuild_file(path, pre, imports, source, prelude, i),
+      return rebuild_file(path, pre, imports, source, prelude),
     PreExtra::Submod(_) => panic!("Dirs contain dirs and files"),
   }
-  let items = (pre.entries.into_iter())
+  let entries = (pre.entries.into_iter())
     .map(|(name, entry)| {
       match imports.entries.remove(&name).map(|e| e.member) {
         Some(ModMember::Sub(impmod)) => (name, entry, impmod),
@@ -97,12 +68,8 @@ pub fn rebuild_dir(
       let pre = unwrap_or!(member => ModMember::Sub;
         panic!("Dirs can only contain submodules")
       );
-      Ok((name, ModEntry {
-        exported,
-        member: ModMember::Sub(rebuild_dir(
-          path, pre, impmod, source, prelude, i,
-        )?),
-      }))
+      let module = rebuild_dir(path, pre, impmod, source, prelude)?;
+      Ok((name, ModEntry { exported, member: ModMember::Sub(module) }))
     })
     .collect::<Result<HashMap<_, _>, _>>()?;
   Ok(Module {
@@ -112,7 +79,7 @@ pub fn rebuild_dir(
       rules: Vec::new(),
       file: None,
     },
-    entries: items,
+    entries,
   })
 }
 
@@ -126,6 +93,6 @@ pub fn rebuild_tree(
 ) -> ProjectResult<ProjectTree<VName>> {
   let imports =
     import_tree(Vec::new(), &preparsed.0, &preparsed, prev_root, i)?;
-  rebuild_dir(Vec::new(), preparsed.0, imports, source, prelude, i)
|
rebuild_dir(Vec::new(), preparsed.0, imports, source, prelude)
|
||||||
.map(ProjectTree)
|
.map(ProjectTree)
|
||||||
}
|
}
|
||||||
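The pipeline now parses each source exactly once, while it is being loaded, so `rebuild_file` above simply clones the stored entries (`src.entries.clone()`) instead of gathering operator lists and re-running the parser; the `Interner` parameter disappears along with that step. A standalone sketch of the parse-once flow, with a hypothetical `parse`/`Entry` pair standing in for the crate's parser and `FileEntry`:

use std::collections::HashMap;

// Stand-in for a parsed line; the real table stores Vec<FileEntry> per module path.
#[derive(Clone, Debug)]
struct Entry(String);

// Hypothetical one-shot parser, invoked only while sources are loaded.
fn parse(text: &str) -> Vec<Entry> {
  text.lines().map(|l| Entry(l.to_string())).collect()
}

fn main() {
  // Load phase: parse every file once and keep the entries.
  let mut loaded: HashMap<&str, Vec<Entry>> = HashMap::new();
  loaded.insert("main.orc", parse("import std\nconst main := 1"));

  // Rebuild phase: no reparsing, just clone the already-parsed entries.
  let entries = loaded.get("main.orc").expect("preparsed").clone();
  println!("{} entries reused without reparsing", entries.len());
}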
|
|||||||
@@ -1,3 +1,5 @@
|
|||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
use hashbrown::HashMap;
|
use hashbrown::HashMap;
|
||||||
|
|
||||||
use super::loaded_source::{LoadedSource, LoadedSourceTable};
|
use super::loaded_source::{LoadedSource, LoadedSourceTable};
|
||||||
@@ -7,24 +9,32 @@ use crate::error::{
|
|||||||
NoTargets, ProjectError, ProjectResult, UnexpectedDirectory,
|
NoTargets, ProjectError, ProjectResult, UnexpectedDirectory,
|
||||||
};
|
};
|
||||||
use crate::interner::{Interner, Tok};
|
use crate::interner::{Interner, Tok};
|
||||||
|
use crate::parse::{self, LexerPlugin, LineParser, ParsingContext};
|
||||||
use crate::pipeline::file_loader::{IOResult, Loaded};
|
use crate::pipeline::file_loader::{IOResult, Loaded};
|
||||||
use crate::pipeline::import_abs_path::import_abs_path;
|
use crate::pipeline::import_abs_path::import_abs_path;
|
||||||
use crate::representations::sourcefile::FileEntry;
|
use crate::representations::sourcefile::FileEntry;
|
||||||
use crate::tree::Module;
|
use crate::tree::Module;
|
||||||
use crate::utils::pure_push::pushed_ref;
|
use crate::utils::pure_seq::pushed_ref;
|
||||||
use crate::utils::{split_max_prefix, unwrap_or};
|
use crate::utils::{split_max_prefix, unwrap_or};
|
||||||
use crate::Location;
|
use crate::Location;
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub struct Context<'a> {
|
||||||
|
pub prelude: &'a [FileEntry],
|
||||||
|
pub i: &'a Interner,
|
||||||
|
pub lexer_plugins: &'a [&'a dyn LexerPlugin],
|
||||||
|
pub line_parsers: &'a [&'a dyn LineParser],
|
||||||
|
}
|
||||||
|
|
||||||
/// Load the source at the given path or all within if it's a collection,
|
/// Load the source at the given path or all within if it's a collection,
|
||||||
/// and all sources imported from these.
|
/// and all sources imported from these.
|
||||||
fn load_abs_path_rec(
|
fn load_abs_path_rec(
|
||||||
abs_path: &[Tok<String>],
|
abs_path: &[Tok<String>],
|
||||||
mut all: Preparsed,
|
mut all: Preparsed,
|
||||||
source: &mut LoadedSourceTable,
|
source: &mut LoadedSourceTable,
|
||||||
prelude: &[FileEntry],
|
|
||||||
i: &Interner,
|
|
||||||
get_source: &impl Fn(&[Tok<String>]) -> IOResult,
|
get_source: &impl Fn(&[Tok<String>]) -> IOResult,
|
||||||
is_injected_module: &impl Fn(&[Tok<String>]) -> bool,
|
is_injected_module: &impl Fn(&[Tok<String>]) -> bool,
|
||||||
|
ctx @ Context { i, lexer_plugins, line_parsers, prelude }: Context,
|
||||||
) -> ProjectResult<Preparsed> {
|
) -> ProjectResult<Preparsed> {
|
||||||
// # Termination
|
// # Termination
|
||||||
//
|
//
|
||||||
@@ -47,8 +57,15 @@ fn load_abs_path_rec(
|
|||||||
let text = unwrap_or!(get_source(filename)? => Loaded::Code; {
|
let text = unwrap_or!(get_source(filename)? => Loaded::Code; {
|
||||||
return Err(UnexpectedDirectory { path: filename.to_vec() }.rc())
|
return Err(UnexpectedDirectory { path: filename.to_vec() }.rc())
|
||||||
});
|
});
|
||||||
source.insert(filename.to_vec(), LoadedSource { text: text.clone() });
|
let entries = parse::parse2(ParsingContext::new(
|
||||||
let preparsed = preparse(filename.to_vec(), text.as_str(), prelude, i)?;
|
i,
|
||||||
|
Arc::new(filename.to_vec()),
|
||||||
|
text,
|
||||||
|
lexer_plugins,
|
||||||
|
line_parsers,
|
||||||
|
))?;
|
||||||
|
let preparsed = preparse(filename.to_vec(), entries.clone(), prelude)?;
|
||||||
|
source.insert(filename.to_vec(), LoadedSource { entries });
|
||||||
// recurse on all imported modules
|
// recurse on all imported modules
|
||||||
// will be taken and returned by the closure. None iff an error is thrown
|
// will be taken and returned by the closure. None iff an error is thrown
|
||||||
all = preparsed.0.search_all(all, &mut |modpath,
|
all = preparsed.0.search_all(all, &mut |modpath,
|
||||||
@@ -73,10 +90,9 @@ fn load_abs_path_rec(
|
|||||||
&abs_pathv,
|
&abs_pathv,
|
||||||
all,
|
all,
|
||||||
source,
|
source,
|
||||||
prelude,
|
|
||||||
i,
|
|
||||||
get_source,
|
get_source,
|
||||||
is_injected_module,
|
is_injected_module,
|
||||||
|
ctx,
|
||||||
)?;
|
)?;
|
||||||
}
|
}
|
||||||
Ok(all)
|
Ok(all)
|
||||||
@@ -105,10 +121,9 @@ fn load_abs_path_rec(
|
|||||||
&abs_subpath,
|
&abs_subpath,
|
||||||
all,
|
all,
|
||||||
source,
|
source,
|
||||||
prelude,
|
|
||||||
i,
|
|
||||||
get_source,
|
get_source,
|
||||||
is_injected_module,
|
is_injected_module,
|
||||||
|
ctx,
|
||||||
)?;
|
)?;
|
||||||
}
|
}
|
||||||
Ok(all)
|
Ok(all)
|
||||||
@@ -123,8 +138,7 @@ fn load_abs_path_rec(
|
|||||||
/// injected data (the ProjectTree doesn't make a distinction between the two)
|
/// injected data (the ProjectTree doesn't make a distinction between the two)
|
||||||
pub fn load_source<'a>(
|
pub fn load_source<'a>(
|
||||||
targets: impl Iterator<Item = &'a [Tok<String>]>,
|
targets: impl Iterator<Item = &'a [Tok<String>]>,
|
||||||
prelude: &[FileEntry],
|
ctx: Context,
|
||||||
i: &Interner,
|
|
||||||
get_source: &impl Fn(&[Tok<String>]) -> IOResult,
|
get_source: &impl Fn(&[Tok<String>]) -> IOResult,
|
||||||
is_injected_module: &impl Fn(&[Tok<String>]) -> bool,
|
is_injected_module: &impl Fn(&[Tok<String>]) -> bool,
|
||||||
) -> ProjectResult<(Preparsed, LoadedSourceTable)> {
|
) -> ProjectResult<(Preparsed, LoadedSourceTable)> {
|
||||||
@@ -138,10 +152,9 @@ pub fn load_source<'a>(
|
|||||||
target,
|
target,
|
||||||
all,
|
all,
|
||||||
&mut table,
|
&mut table,
|
||||||
prelude,
|
|
||||||
i,
|
|
||||||
get_source,
|
get_source,
|
||||||
is_injected_module,
|
is_injected_module,
|
||||||
|
ctx,
|
||||||
)?;
|
)?;
|
||||||
}
|
}
|
||||||
if any_target { Ok((all, table)) } else { Err(NoTargets.rc()) }
|
if any_target { Ok((all, table)) } else { Err(NoTargets.rc()) }
|
||||||
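Instead of threading `prelude` and `i` (and now the lexer and line parser plugins) through every call, the loader bundles them into the `Copy` struct `Context` introduced above and passes the whole bundle by value. A minimal standalone sketch of the pattern; the field names mirror the diff, but the surrounding types are stand-ins rather than the crate's own:

// Stand-ins for the crate types the real Context borrows.
struct Interner;
struct FileEntry;
trait LexerPlugin {}
trait LineParser {}

// Only shared references inside, so the bundle stays Copy and can be
// forwarded by value into each recursive call.
#[derive(Clone, Copy)]
struct Context<'a> {
  prelude: &'a [FileEntry],
  i: &'a Interner,
  lexer_plugins: &'a [&'a dyn LexerPlugin],
  line_parsers: &'a [&'a dyn LineParser],
}

fn load_rec(depth: usize, ctx: Context<'_>) {
  // ctx is Copy: reusing it here does not invalidate the caller's copy.
  if depth > 0 {
    load_rec(depth - 1, ctx);
  }
}

fn main() {
  let i = Interner;
  let prelude: Vec<FileEntry> = Vec::new();
  let ctx = Context { prelude: &prelude, i: &i, lexer_plugins: &[], line_parsers: &[] };
  load_rec(3, ctx);
}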
@@ -1,11 +1,11 @@
 use std::collections::HashMap;
-use std::rc::Rc;

 use crate::representations::VName;
+use crate::sourcefile::FileEntry;

 #[derive(Debug)]
 pub struct LoadedSource {
-  pub text: Rc<String>,
+  pub entries: Vec<FileEntry>,
 }

 pub type LoadedSourceTable = HashMap<VName, LoadedSource>;
@@ -20,6 +20,6 @@ mod loaded_source;
 mod preparse;
 mod types;

-pub use load_source::load_source;
+pub use load_source::{load_source, Context};
 pub use loaded_source::{LoadedSource, LoadedSourceTable};
 pub use types::{PreExtra, PreFileExt, PreItem, PreMod, PreSubExt, Preparsed};
@@ -1,26 +1,21 @@
|
|||||||
use std::rc::Rc;
|
|
||||||
|
|
||||||
use hashbrown::HashMap;
|
use hashbrown::HashMap;
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
|
|
||||||
use super::types::{PreFileExt, PreItem, PreSubExt};
|
use super::types::{PreFileExt, PreItem, PreSubExt};
|
||||||
use super::{PreExtra, Preparsed};
|
use super::{PreExtra, Preparsed};
|
||||||
use crate::ast::{Clause, Constant, Expr};
|
use crate::ast::{Clause, Constant};
|
||||||
use crate::error::{
|
use crate::error::{
|
||||||
ConflictingRoles, ProjectError, ProjectResult, VisibilityMismatch,
|
ConflictingRoles, ProjectError, ProjectResult, VisibilityMismatch,
|
||||||
};
|
};
|
||||||
use crate::interner::Interner;
|
|
||||||
use crate::parse::{self, ParsingContext};
|
|
||||||
use crate::representations::sourcefile::{FileEntry, MemberKind};
|
use crate::representations::sourcefile::{FileEntry, MemberKind};
|
||||||
use crate::representations::tree::{ModEntry, ModMember, Module};
|
use crate::representations::tree::{ModEntry, ModMember, Module};
|
||||||
use crate::sourcefile::{FileEntryKind, Import, Member, ModuleBlock};
|
use crate::sourcefile::{FileEntryKind, Import, Member, ModuleBlock};
|
||||||
use crate::utils::get_or::{get_or_default, get_or_make};
|
use crate::utils::get_or::{get_or_default, get_or_make};
|
||||||
use crate::utils::pure_push::pushed;
|
use crate::utils::pure_seq::pushed;
|
||||||
use crate::{Location, Tok, VName};
|
use crate::{Location, Tok, VName};
|
||||||
|
|
||||||
struct FileReport {
|
struct FileReport {
|
||||||
entries: HashMap<Tok<String>, ModEntry<PreItem, PreExtra>>,
|
entries: HashMap<Tok<String>, ModEntry<PreItem, PreExtra>>,
|
||||||
patterns: Vec<Vec<Expr<VName>>>,
|
|
||||||
imports: Vec<Import>,
|
imports: Vec<Import>,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -32,12 +27,11 @@ fn to_module(
|
|||||||
prelude: &[FileEntry],
|
prelude: &[FileEntry],
|
||||||
) -> ProjectResult<FileReport> {
|
) -> ProjectResult<FileReport> {
|
||||||
let mut imports = Vec::new();
|
let mut imports = Vec::new();
|
||||||
let mut patterns = Vec::new();
|
|
||||||
let mut items = HashMap::<Tok<String>, (bool, PreItem)>::new();
|
let mut items = HashMap::<Tok<String>, (bool, PreItem)>::new();
|
||||||
let mut to_export = HashMap::<Tok<String>, Vec<Location>>::new();
|
let mut to_export = HashMap::<Tok<String>, Vec<Location>>::new();
|
||||||
let mut submods =
|
let mut submods =
|
||||||
HashMap::<Tok<String>, (bool, Vec<Location>, Vec<FileEntry>)>::new();
|
HashMap::<Tok<String>, (bool, Vec<Location>, Vec<FileEntry>)>::new();
|
||||||
let entries = prelude.iter().cloned().chain(src.into_iter());
|
let entries = prelude.iter().cloned().chain(src);
|
||||||
for FileEntry { kind, locations } in entries {
|
for FileEntry { kind, locations } in entries {
|
||||||
match kind {
|
match kind {
|
||||||
FileEntryKind::Import(imp) => imports.extend(imp.into_iter()),
|
FileEntryKind::Import(imp) => imports.extend(imp.into_iter()),
|
||||||
@@ -72,17 +66,7 @@ fn to_module(
|
|||||||
submods.insert(name.clone(), (exported, locations, body.clone()));
|
submods.insert(name.clone(), (exported, locations, body.clone()));
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
MemberKind::Operators(ops) =>
|
MemberKind::Rule(r) =>
|
||||||
for op in ops {
|
|
||||||
let (prev_exported, it) = get_or_default(&mut items, &op);
|
|
||||||
if let Some(loc) = locations.get(0) {
|
|
||||||
it.location = it.location.clone().or(loc.clone())
|
|
||||||
}
|
|
||||||
*prev_exported |= exported;
|
|
||||||
it.is_op = true;
|
|
||||||
},
|
|
||||||
MemberKind::Rule(r) => {
|
|
||||||
patterns.push(r.pattern.clone());
|
|
||||||
if exported {
|
if exported {
|
||||||
for ex in r.pattern {
|
for ex in r.pattern {
|
||||||
ex.search_all(&mut |ex| {
|
ex.search_all(&mut |ex| {
|
||||||
@@ -95,8 +79,7 @@ fn to_module(
|
|||||||
None::<()>
|
None::<()>
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
},
|
||||||
},
|
|
||||||
},
|
},
|
||||||
_ => (),
|
_ => (),
|
||||||
}
|
}
|
||||||
@@ -111,11 +94,11 @@ fn to_module(
|
|||||||
.try_insert(subname.clone(), ModEntry {
|
.try_insert(subname.clone(), ModEntry {
|
||||||
member: ModMember::Sub({
|
member: ModMember::Sub({
|
||||||
name.push(subname);
|
name.push(subname);
|
||||||
let FileReport { imports, entries: items, patterns } =
|
let FileReport { imports, entries: items } =
|
||||||
to_module(file, name.clone(), body, prelude)?;
|
to_module(file, name.clone(), body, prelude)?;
|
||||||
Module {
|
Module {
|
||||||
entries: items,
|
entries: items,
|
||||||
extra: PreExtra::Submod(PreSubExt { imports, patterns }),
|
extra: PreExtra::Submod(PreSubExt { imports }),
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
exported,
|
exported,
|
||||||
@@ -125,7 +108,6 @@ fn to_module(
|
|||||||
for (item, locations) in to_export {
|
for (item, locations) in to_export {
|
||||||
get_or_make(&mut entries, &item, || ModEntry {
|
get_or_make(&mut entries, &item, || ModEntry {
|
||||||
member: ModMember::Item(PreItem {
|
member: ModMember::Item(PreItem {
|
||||||
is_op: false,
|
|
||||||
has_value: false,
|
has_value: false,
|
||||||
location: locations[0].clone(),
|
location: locations[0].clone(),
|
||||||
}),
|
}),
|
||||||
@@ -133,26 +115,22 @@ fn to_module(
|
|||||||
})
|
})
|
||||||
.exported = true
|
.exported = true
|
||||||
}
|
}
|
||||||
Ok(FileReport { entries, imports, patterns })
|
Ok(FileReport { entries, imports })
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Preparse the module. At this stage, only the imports and
|
/// Preparse the module. At this stage, only the imports and
|
||||||
/// names defined by the module can be parsed
|
/// names defined by the module can be parsed
|
||||||
pub fn preparse(
|
pub fn preparse(
|
||||||
file: VName,
|
file: VName,
|
||||||
source: &str,
|
entries: Vec<FileEntry>,
|
||||||
prelude: &[FileEntry],
|
prelude: &[FileEntry],
|
||||||
i: &Interner,
|
|
||||||
) -> ProjectResult<Preparsed> {
|
) -> ProjectResult<Preparsed> {
|
||||||
// Parse with no operators
|
let FileReport { entries, imports } =
|
||||||
let ctx = ParsingContext::new(&[], i, Rc::new(file.clone()));
|
|
||||||
let entries = parse::parse2(source, ctx)?;
|
|
||||||
let FileReport { entries, imports, patterns } =
|
|
||||||
to_module(&file, file.clone(), entries, prelude)?;
|
to_module(&file, file.clone(), entries, prelude)?;
|
||||||
let mut module = Module {
|
let mut module = Module {
|
||||||
entries,
|
entries,
|
||||||
extra: PreExtra::File(PreFileExt {
|
extra: PreExtra::File(PreFileExt {
|
||||||
details: PreSubExt { patterns, imports },
|
details: PreSubExt { imports },
|
||||||
name: file.clone(),
|
name: file.clone(),
|
||||||
}),
|
}),
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
use std::fmt::Display;
|
use std::fmt::Display;
|
||||||
use std::ops::Add;
|
use std::ops::Add;
|
||||||
|
|
||||||
use crate::ast::Expr;
|
|
||||||
use crate::error::ProjectResult;
|
use crate::error::ProjectResult;
|
||||||
use crate::sourcefile::Import;
|
use crate::sourcefile::Import;
|
||||||
use crate::tree::Module;
|
use crate::tree::Module;
|
||||||
@@ -9,34 +8,27 @@ use crate::{Interner, Location, VName};
|
|||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct PreItem {
|
pub struct PreItem {
|
||||||
pub is_op: bool,
|
|
||||||
pub has_value: bool,
|
pub has_value: bool,
|
||||||
pub location: Location,
|
pub location: Location,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Display for PreItem {
|
impl Display for PreItem {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
let Self { has_value, is_op, location } = self;
|
let Self { has_value, location } = self;
|
||||||
let description = match (is_op, has_value) {
|
let description = if *has_value { "value" } else { "keyword" };
|
||||||
(true, true) => "operator with value",
|
|
||||||
(true, false) => "operator",
|
|
||||||
(false, true) => "value",
|
|
||||||
(false, false) => "keyword",
|
|
||||||
};
|
|
||||||
write!(f, "{description} {location}")
|
write!(f, "{description} {location}")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Default for PreItem {
|
impl Default for PreItem {
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
PreItem { is_op: false, has_value: false, location: Location::Unknown }
|
PreItem { has_value: false, location: Location::Unknown }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct PreSubExt {
|
pub struct PreSubExt {
|
||||||
pub imports: Vec<Import>,
|
pub imports: Vec<Import>,
|
||||||
pub patterns: Vec<Vec<Expr<VName>>>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
|
|||||||
@@ -15,13 +15,13 @@ use ordered_float::NotNan;
|
|||||||
use super::interpreted;
|
use super::interpreted;
|
||||||
use super::location::Location;
|
use super::location::Location;
|
||||||
use super::namelike::{NameLike, VName};
|
use super::namelike::{NameLike, VName};
|
||||||
use super::primitive::Primitive;
|
use crate::foreign::{Atom, ExFn};
|
||||||
use crate::interner::Tok;
|
use crate::interner::Tok;
|
||||||
use crate::parse::print_nat16;
|
use crate::parse::print_nat16;
|
||||||
use crate::utils::rc_tools::map_rc;
|
use crate::utils::rc_tools::map_rc;
|
||||||
|
|
||||||
/// A [Clause] with associated metadata
|
/// A [Clause] with associated metadata
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct Expr<N: NameLike> {
|
pub struct Expr<N: NameLike> {
|
||||||
/// The actual value
|
/// The actual value
|
||||||
pub value: Clause<N>,
|
pub value: Clause<N>,
|
||||||
@@ -100,7 +100,7 @@ pub enum PHClass {
|
|||||||
/// If true, must match at least one clause
|
/// If true, must match at least one clause
|
||||||
nonzero: bool,
|
nonzero: bool,
|
||||||
/// Greediness in the allocation of tokens
|
/// Greediness in the allocation of tokens
|
||||||
prio: u64,
|
prio: usize,
|
||||||
},
|
},
|
||||||
/// Matches exactly one token, lambda or parenthesized group
|
/// Matches exactly one token, lambda or parenthesized group
|
||||||
Scalar,
|
Scalar,
|
||||||
@@ -129,10 +129,12 @@ impl Display for Placeholder {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// An S-expression as read from a source file
|
/// An S-expression as read from a source file
|
||||||
#[derive(Debug, PartialEq, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub enum Clause<N: NameLike> {
|
pub enum Clause<N: NameLike> {
|
||||||
/// A primitive
|
/// An opaque function, eg. an effectful function employing CPS
|
||||||
P(Primitive),
|
ExternFn(ExFn),
|
||||||
|
/// An opaque non-callable value, eg. a file handle
|
||||||
|
Atom(Atom),
|
||||||
/// A c-style name or an operator, eg. `+`, `i`, `foo::bar`
|
/// A c-style name or an operator, eg. `+`, `i`, `foo::bar`
|
||||||
Name(N),
|
Name(N),
|
||||||
/// A parenthesized expression
|
/// A parenthesized expression
|
||||||
@@ -214,7 +216,7 @@ impl<N: NameLike> Clause<N> {
|
|||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn map_names(&self, pred: &impl Fn(&N) -> Option<N>) -> Option<Self> {
|
pub fn map_names(&self, pred: &impl Fn(&N) -> Option<N>) -> Option<Self> {
|
||||||
match self {
|
match self {
|
||||||
Clause::P(_) | Clause::Placeh(_) => None,
|
Clause::Atom(_) | Clause::ExternFn(_) | Clause::Placeh(_) => None,
|
||||||
Clause::Name(name) => pred(name).map(Clause::Name),
|
Clause::Name(name) => pred(name).map(Clause::Name),
|
||||||
Clause::S(c, body) => {
|
Clause::S(c, body) => {
|
||||||
let mut any_some = false;
|
let mut any_some = false;
|
||||||
@@ -262,7 +264,8 @@ impl<N: NameLike> Clause<N> {
|
|||||||
match self {
|
match self {
|
||||||
Self::Name(n) => Clause::Name(pred(n)),
|
Self::Name(n) => Clause::Name(pred(n)),
|
||||||
Self::Placeh(p) => Clause::Placeh(p),
|
Self::Placeh(p) => Clause::Placeh(p),
|
||||||
Self::P(p) => Clause::P(p),
|
Self::Atom(a) => Clause::Atom(a),
|
||||||
|
Self::ExternFn(f) => Clause::ExternFn(f),
|
||||||
Self::Lambda(n, b) => Clause::Lambda(
|
Self::Lambda(n, b) => Clause::Lambda(
|
||||||
map_rc(n, |n| n.into_iter().map(|e| e.transform_names(pred)).collect()),
|
map_rc(n, |n| n.into_iter().map(|e| e.transform_names(pred)).collect()),
|
||||||
map_rc(b, |b| b.into_iter().map(|e| e.transform_names(pred)).collect()),
|
map_rc(b, |b| b.into_iter().map(|e| e.transform_names(pred)).collect()),
|
||||||
@@ -282,7 +285,8 @@ impl<N: NameLike> Clause<N> {
|
|||||||
match self {
|
match self {
|
||||||
Clause::Lambda(arg, body) =>
|
Clause::Lambda(arg, body) =>
|
||||||
arg.iter().chain(body.iter()).find_map(|expr| expr.search_all(f)),
|
arg.iter().chain(body.iter()).find_map(|expr| expr.search_all(f)),
|
||||||
Clause::Name(_) | Clause::P(_) | Clause::Placeh(_) => None,
|
Clause::Name(_) | Clause::Atom(_) => None,
|
||||||
|
Clause::ExternFn(_) | Clause::Placeh(_) => None,
|
||||||
Clause::S(_, body) => body.iter().find_map(|expr| expr.search_all(f)),
|
Clause::S(_, body) => body.iter().find_map(|expr| expr.search_all(f)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -295,7 +299,8 @@ impl<N: NameLike> Clause<N> {
|
|||||||
match self {
|
match self {
|
||||||
Clause::Lambda(arg, body) =>
|
Clause::Lambda(arg, body) =>
|
||||||
search_all_slcs(arg, f).or_else(|| search_all_slcs(body, f)),
|
search_all_slcs(arg, f).or_else(|| search_all_slcs(body, f)),
|
||||||
Clause::Name(_) | Clause::P(_) | Clause::Placeh(_) => None,
|
Clause::Name(_) | Clause::Atom(_) => None,
|
||||||
|
Clause::ExternFn(_) | Clause::Placeh(_) => None,
|
||||||
Clause::S(_, body) => search_all_slcs(body, f),
|
Clause::S(_, body) => search_all_slcs(body, f),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -325,7 +330,8 @@ impl Clause<VName> {
|
|||||||
impl<N: NameLike> Display for Clause<N> {
|
impl<N: NameLike> Display for Clause<N> {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
Self::P(p) => write!(f, "{:?}", p),
|
Self::ExternFn(fun) => write!(f, "{fun:?}"),
|
||||||
|
Self::Atom(a) => write!(f, "{a:?}"),
|
||||||
Self::Name(name) => write!(f, "{}", name.to_strv().join("::")),
|
Self::Name(name) => write!(f, "{}", name.to_strv().join("::")),
|
||||||
Self::S(del, items) => {
|
Self::S(del, items) => {
|
||||||
let body = items.iter().join(" ");
|
let body = items.iter().join(" ");
|
||||||
@@ -348,7 +354,7 @@ impl<N: NameLike> Display for Clause<N> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// A substitution rule as read from the source
|
/// A substitution rule as read from the source
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct Rule<N: NameLike> {
|
pub struct Rule<N: NameLike> {
|
||||||
/// Tree fragment in the source code that activates this rule
|
/// Tree fragment in the source code that activates this rule
|
||||||
pub pattern: Vec<Expr<N>>,
|
pub pattern: Vec<Expr<N>>,
|
||||||
@@ -407,7 +413,7 @@ impl<N: NameLike> Display for Rule<N> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// A named constant
|
/// A named constant
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct Constant {
|
pub struct Constant {
|
||||||
/// Used to reference the constant
|
/// Used to reference the constant
|
||||||
pub name: Tok<String>,
|
pub name: Tok<String>,
|
||||||
|
|||||||
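Since `Primitive` is gone from the AST, anything that used to hide behind `Clause::P(_)` must now be matched as `Clause::Atom(_)` or `Clause::ExternFn(_)`, as the rewritten `map_names` and `search_all` arms show. A self-contained sketch of the new shape of such a match, using stand-in payload types rather than the crate's `ExFn` and `Atom`:

// Stand-ins for the crate's opaque payloads.
#[derive(Debug, Clone)]
struct ExFn(&'static str);
#[derive(Debug, Clone)]
struct Atom(&'static str);

#[derive(Debug, Clone)]
enum Clause {
  ExternFn(ExFn), // opaque callable, e.g. an effectful CPS function
  Atom(Atom),     // opaque non-callable value, e.g. a file handle
  Name(String),
  S(Vec<Clause>),
}

// The old `Clause::P(_) => None` arm becomes two explicit opaque arms.
fn first_name(clause: &Clause) -> Option<&str> {
  match clause {
    Clause::Atom(_) | Clause::ExternFn(_) => None,
    Clause::Name(n) => Some(n),
    Clause::S(body) => body.iter().find_map(first_name),
  }
}

fn main() {
  let cls = Clause::S(vec![Clause::Atom(Atom("handle")), Clause::Name("foo".to_string())]);
  assert_eq!(first_name(&cls), Some("foo"));
}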
@@ -107,7 +107,8 @@ fn expr_rec<'a>(
|
|||||||
Ok(postmacro::Expr { value: expr.value, location: location.clone() })
|
Ok(postmacro::Expr { value: expr.value, location: location.clone() })
|
||||||
} else {
|
} else {
|
||||||
let value = match value {
|
let value = match value {
|
||||||
ast::Clause::P(p) => postmacro::Clause::P(p.clone()),
|
ast::Clause::Atom(a) => postmacro::Clause::Atom(a.clone()),
|
||||||
|
ast::Clause::ExternFn(fun) => postmacro::Clause::ExternFn(fun.clone()),
|
||||||
ast::Clause::Lambda(arg, b) => {
|
ast::Clause::Lambda(arg, b) => {
|
||||||
let name = match &arg[..] {
|
let name = match &arg[..] {
|
||||||
[ast::Expr { value: ast::Clause::Name(name), .. }] => name,
|
[ast::Expr { value: ast::Clause::Name(name), .. }] => name,
|
||||||
|
|||||||
@@ -4,12 +4,12 @@ use hashbrown::HashMap;
|
|||||||
|
|
||||||
use super::project::{ItemKind, ProjectItem};
|
use super::project::{ItemKind, ProjectItem};
|
||||||
use crate::ast::{Clause, Expr};
|
use crate::ast::{Clause, Expr};
|
||||||
use crate::foreign::{Atom, Atomic, ExternFn};
|
use crate::foreign::{Atom, Atomic, ExFn, ExternFn};
|
||||||
use crate::interner::Tok;
|
use crate::interner::Tok;
|
||||||
use crate::representations::location::Location;
|
use crate::representations::location::Location;
|
||||||
use crate::representations::project::{ProjectExt, ProjectMod, ProjectTree};
|
use crate::representations::project::{ProjectExt, ProjectMod, ProjectTree};
|
||||||
use crate::representations::tree::{ModEntry, ModMember, Module};
|
use crate::representations::tree::{ModEntry, ModMember, Module};
|
||||||
use crate::representations::{Primitive, VName};
|
use crate::representations::VName;
|
||||||
use crate::utils::substack::Substack;
|
use crate::utils::substack::Substack;
|
||||||
|
|
||||||
/// A lightweight module tree that can be built declaratively by hand to
|
/// A lightweight module tree that can be built declaratively by hand to
|
||||||
@@ -25,21 +25,18 @@ pub enum ConstTree {
|
|||||||
impl ConstTree {
|
impl ConstTree {
|
||||||
/// Describe a [Primitive]
|
/// Describe a [Primitive]
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn primitive(primitive: Primitive) -> Self {
|
pub fn clause(value: Clause<VName>) -> Self {
|
||||||
Self::Const(Expr {
|
Self::Const(Expr { location: Location::Unknown, value })
|
||||||
location: Location::Unknown,
|
|
||||||
value: Clause::P(primitive),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
/// Describe an [ExternFn]
|
/// Describe an [ExternFn]
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn xfn(xfn: impl ExternFn + 'static) -> Self {
|
pub fn xfn(xfn: impl ExternFn + 'static) -> Self {
|
||||||
Self::primitive(Primitive::ExternFn(Box::new(xfn)))
|
Self::clause(Clause::ExternFn(ExFn(Box::new(xfn))))
|
||||||
}
|
}
|
||||||
/// Describe an [Atomic]
|
/// Describe an [Atomic]
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn atom(atom: impl Atomic + 'static) -> Self {
|
pub fn atom(atom: impl Atomic + 'static) -> Self {
|
||||||
Self::primitive(Primitive::Atom(Atom(Box::new(atom))))
|
Self::clause(Clause::Atom(Atom(Box::new(atom))))
|
||||||
}
|
}
|
||||||
/// Describe a module
|
/// Describe a module
|
||||||
#[must_use]
|
#[must_use]
|
||||||
@@ -85,7 +82,7 @@ impl Add for ConstTree {
|
|||||||
product.insert(key, i1);
|
product.insert(key, i1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
product.extend(t2.into_iter());
|
product.extend(t2);
|
||||||
Self::Tree(product)
|
Self::Tree(product)
|
||||||
} else {
|
} else {
|
||||||
panic!("cannot combine tree and value fields")
|
panic!("cannot combine tree and value fields")
|
||||||
@@ -104,10 +101,8 @@ fn from_const_tree_rec(
|
|||||||
items.insert(name.clone(), ModEntry {
|
items.insert(name.clone(), ModEntry {
|
||||||
exported: true,
|
exported: true,
|
||||||
member: match item {
|
member: match item {
|
||||||
ConstTree::Const(c) => ModMember::Item(ProjectItem {
|
ConstTree::Const(c) =>
|
||||||
kind: ItemKind::Const(c),
|
ModMember::Item(ProjectItem { kind: ItemKind::Const(c) }),
|
||||||
is_op: false,
|
|
||||||
}),
|
|
||||||
ConstTree::Tree(t) =>
|
ConstTree::Tree(t) =>
|
||||||
ModMember::Sub(from_const_tree_rec(path.push(name), t, file)),
|
ModMember::Sub(from_const_tree_rec(path.push(name), t, file)),
|
||||||
},
|
},
|
||||||
|
|||||||
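With `Primitive` removed, `ConstTree::primitive` becomes `ConstTree::clause`, and `xfn`/`atom` are now thin wrappers that build the corresponding `Clause` variant directly. A rough standalone sketch of that layering; every type below is a stand-in for the crate's `Clause`, `ExFn` and `Atom`:

// Stand-ins for the crate's AST and foreign-value types.
#[derive(Debug)]
struct ExFn(String);
#[derive(Debug)]
struct Atom(String);
#[derive(Debug)]
enum Clause { ExternFn(ExFn), Atom(Atom) }

#[derive(Debug)]
enum ConstTree {
  // The Tree(HashMap<..>) variant for submodules is omitted in this sketch.
  Const(Clause),
}

impl ConstTree {
  // The single Clause-based constructor the other helpers defer to.
  fn clause(value: Clause) -> Self { Self::Const(value) }
  // Convenience wrappers mirroring ConstTree::xfn and ConstTree::atom.
  fn xfn(name: &str) -> Self { Self::clause(Clause::ExternFn(ExFn(name.to_string()))) }
  fn atom(name: &str) -> Self { Self::clause(Clause::Atom(Atom(name.to_string()))) }
}

fn main() {
  println!("{:?}", ConstTree::xfn("print"));
  println!("{:?}", ConstTree::atom("stdin"));
}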
@@ -2,20 +2,18 @@
|
|||||||
//!
|
//!
|
||||||
//! This code may be generated to minimize the number of states external
|
//! This code may be generated to minimize the number of states external
|
||||||
//! functions have to define
|
//! functions have to define
|
||||||
use std::cell::RefCell;
|
|
||||||
use std::fmt::{Debug, Display};
|
use std::fmt::{Debug, Display};
|
||||||
use std::ops::{Deref, DerefMut};
|
use std::ops::{Deref, DerefMut};
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
use std::sync::{Arc, TryLockError, Mutex};
|
||||||
|
|
||||||
#[allow(unused)] // for doc
|
#[allow(unused)] // for doc
|
||||||
use super::ast;
|
use super::ast;
|
||||||
use super::location::Location;
|
use super::location::Location;
|
||||||
use super::path_set::PathSet;
|
use super::path_set::PathSet;
|
||||||
use super::primitive::Primitive;
|
|
||||||
use super::Literal;
|
|
||||||
#[allow(unused)] // for doc
|
#[allow(unused)] // for doc
|
||||||
use crate::foreign::Atomic;
|
use crate::foreign::Atomic;
|
||||||
use crate::foreign::ExternError;
|
use crate::foreign::{Atom, ExFn, ExternError};
|
||||||
use crate::utils::ddispatch::request;
|
use crate::utils::ddispatch::request;
|
||||||
use crate::utils::take_with_output;
|
use crate::utils::take_with_output;
|
||||||
use crate::Sym;
|
use crate::Sym;
|
||||||
@@ -40,10 +38,11 @@ impl Debug for Expr {
|
|||||||
|
|
||||||
impl Display for Expr {
|
impl Display for Expr {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
match &self.location {
|
write!(f, "{}", self.clause)
|
||||||
Location::Unknown => write!(f, "{}", self.clause),
|
// match &self.location {
|
||||||
loc => write!(f, "{}:({})", loc, self.clause),
|
// Location::Unknown => write!(f, "{}", self.clause),
|
||||||
}
|
// loc => write!(f, "{}:({})", loc, self.clause),
|
||||||
|
// }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -64,20 +63,20 @@ impl TryFromExprInst for ExprInst {
|
|||||||
/// A wrapper around expressions to handle their multiple occurrences in
|
/// A wrapper around expressions to handle their multiple occurrences in
|
||||||
/// the tree together
|
/// the tree together
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct ExprInst(pub Rc<RefCell<Expr>>);
|
pub struct ExprInst(pub Arc<Mutex<Expr>>);
|
||||||
impl ExprInst {
|
impl ExprInst {
|
||||||
/// Wrap an [Expr] in a shared container so that normalization steps are
|
/// Wrap an [Expr] in a shared container so that normalization steps are
|
||||||
/// applied to all references
|
/// applied to all references
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn new(expr: Expr) -> Self { Self(Rc::new(RefCell::new(expr))) }
|
pub fn new(expr: Expr) -> Self { Self(Arc::new(Mutex::new(expr))) }
|
||||||
|
|
||||||
/// Take the [Expr] out of this container if it's the last reference to it, or
|
/// Take the [Expr] out of this container if it's the last reference to it, or
|
||||||
/// clone it out.
|
/// clone it out.
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn expr_val(self) -> Expr {
|
pub fn expr_val(self) -> Expr {
|
||||||
Rc::try_unwrap(self.0)
|
Arc::try_unwrap(self.0)
|
||||||
.map(|c| c.into_inner())
|
.map(|c| c.into_inner().unwrap())
|
||||||
.unwrap_or_else(|rc| rc.as_ref().borrow().deref().clone())
|
.unwrap_or_else(|arc| arc.lock().unwrap().clone())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Read-only access to the shared expression instance
|
/// Read-only access to the shared expression instance
|
||||||
@@ -87,7 +86,7 @@ impl ExprInst {
|
|||||||
/// if the expression is already borrowed in read-write mode
|
/// if the expression is already borrowed in read-write mode
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn expr(&self) -> impl Deref<Target = Expr> + '_ {
|
pub fn expr(&self) -> impl Deref<Target = Expr> + '_ {
|
||||||
self.0.as_ref().borrow()
|
self.0.lock().unwrap()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Read-Write access to the shared expression instance
|
/// Read-Write access to the shared expression instance
|
||||||
@@ -97,7 +96,7 @@ impl ExprInst {
|
|||||||
/// if the expression is already borrowed
|
/// if the expression is already borrowed
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn expr_mut(&self) -> impl DerefMut<Target = Expr> + '_ {
|
pub fn expr_mut(&self) -> impl DerefMut<Target = Expr> + '_ {
|
||||||
self.0.as_ref().borrow_mut()
|
self.0.lock().unwrap()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Call a normalization function on the expression. The expr is
|
/// Call a normalization function on the expression. The expr is
|
||||||
@@ -137,26 +136,6 @@ impl ExprInst {
|
|||||||
predicate(&self.expr().clause)
|
predicate(&self.expr().clause)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Call the predicate on the value inside this expression if it is a
|
|
||||||
/// primitive
|
|
||||||
pub fn get_literal(self) -> Result<(Literal, Location), Self> {
|
|
||||||
Rc::try_unwrap(self.0).map_or_else(
|
|
||||||
|rc| {
|
|
||||||
if let Expr { clause: Clause::P(Primitive::Literal(li)), location } =
|
|
||||||
rc.as_ref().borrow().deref()
|
|
||||||
{
|
|
||||||
return Ok((li.clone(), location.clone()));
|
|
||||||
}
|
|
||||||
Err(Self(rc))
|
|
||||||
},
|
|
||||||
|cell| match cell.into_inner() {
|
|
||||||
Expr { clause: Clause::P(Primitive::Literal(li)), location } =>
|
|
||||||
Ok((li, location)),
|
|
||||||
expr => Err(Self::new(expr)),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Visit all expressions in the tree. The search can be exited early by
|
/// Visit all expressions in the tree. The search can be exited early by
|
||||||
/// returning [Some]
|
/// returning [Some]
|
||||||
///
|
///
|
||||||
@@ -174,7 +153,8 @@ impl ExprInst {
|
|||||||
Clause::Lambda { body, .. } => body.search_all(predicate),
|
Clause::Lambda { body, .. } => body.search_all(predicate),
|
||||||
Clause::Constant(_)
|
Clause::Constant(_)
|
||||||
| Clause::LambdaArg
|
| Clause::LambdaArg
|
||||||
| Clause::P(_)
|
| Clause::Atom(_)
|
||||||
|
| Clause::ExternFn(_)
|
||||||
| Clause::Bottom => None,
|
| Clause::Bottom => None,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -195,7 +175,7 @@ impl ExprInst {
|
|||||||
#[must_use = "your request might not have succeeded"]
|
#[must_use = "your request might not have succeeded"]
|
||||||
pub fn request<T: 'static>(&self) -> Option<T> {
|
pub fn request<T: 'static>(&self) -> Option<T> {
|
||||||
match &self.expr().clause {
|
match &self.expr().clause {
|
||||||
Clause::P(Primitive::Atom(a)) => request(&*a.0),
|
Clause::Atom(a) => request(&*a.0),
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -203,18 +183,20 @@ impl ExprInst {
|
|||||||
|
|
||||||
impl Debug for ExprInst {
|
impl Debug for ExprInst {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
match self.0.try_borrow() {
|
match self.0.try_lock() {
|
||||||
Ok(expr) => write!(f, "{expr:?}"),
|
Ok(expr) => write!(f, "{expr:?}"),
|
||||||
Err(_) => write!(f, "<borrowed>"),
|
Err(TryLockError::Poisoned(_)) => write!(f, "<poisoned>"),
|
||||||
|
Err(TryLockError::WouldBlock) => write!(f, "<locked>"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Display for ExprInst {
|
impl Display for ExprInst {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
match self.0.try_borrow() {
|
match self.0.try_lock() {
|
||||||
Ok(expr) => write!(f, "{expr}"),
|
Ok(expr) => write!(f, "{expr}"),
|
||||||
Err(_) => write!(f, "<borrowed>"),
|
Err(TryLockError::Poisoned(_)) => write!(f, "<poisoned>"),
|
||||||
|
Err(TryLockError::WouldBlock) => write!(f, "<locked>"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -224,8 +206,10 @@ impl Display for ExprInst {
|
|||||||
pub enum Clause {
|
pub enum Clause {
|
||||||
/// An expression that causes an error
|
/// An expression that causes an error
|
||||||
Bottom,
|
Bottom,
|
||||||
/// An unintrospectable unit
|
/// An opaque function, eg. an effectful function employing CPS
|
||||||
P(Primitive),
|
ExternFn(ExFn),
|
||||||
|
/// An opaque non-callable value, eg. a file handle
|
||||||
|
Atom(Atom),
|
||||||
/// A function application
|
/// A function application
|
||||||
Apply {
|
Apply {
|
||||||
/// Function to be applied
|
/// Function to be applied
|
||||||
@@ -252,7 +236,7 @@ impl Clause {
|
|||||||
/// copied or moved clauses as it does not have debug information and
|
/// copied or moved clauses as it does not have debug information and
|
||||||
/// does not share a normalization cache list with them.
|
/// does not share a normalization cache list with them.
|
||||||
pub fn wrap(self) -> ExprInst {
|
pub fn wrap(self) -> ExprInst {
|
||||||
ExprInst(Rc::new(RefCell::new(Expr {
|
ExprInst(Arc::new(Mutex::new(Expr {
|
||||||
location: Location::Unknown,
|
location: Location::Unknown,
|
||||||
clause: self,
|
clause: self,
|
||||||
})))
|
})))
|
||||||
@@ -284,7 +268,8 @@ impl Clause {
|
|||||||
impl Display for Clause {
|
impl Display for Clause {
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
Clause::P(p) => write!(f, "{p:?}"),
|
Clause::ExternFn(fun) => write!(f, "{fun:?}"),
|
||||||
|
Clause::Atom(a) => write!(f, "{a:?}"),
|
||||||
Clause::Bottom => write!(f, "bottom"),
|
Clause::Bottom => write!(f, "bottom"),
|
||||||
Clause::LambdaArg => write!(f, "arg"),
|
Clause::LambdaArg => write!(f, "arg"),
|
||||||
Clause::Apply { f: fun, x } => write!(f, "({fun} {x})"),
|
Clause::Apply { f: fun, x } => write!(f, "({fun} {x})"),
|
||||||
@@ -296,11 +281,3 @@ impl Display for Clause {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: Into<Literal>> From<T> for Clause {
|
|
||||||
fn from(value: T) -> Self { Self::P(Primitive::Literal(value.into())) }
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Into<Clause>> From<T> for ExprInst {
|
|
||||||
fn from(value: T) -> Self { value.into().wrap() }
|
|
||||||
}
|
|
||||||
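Replacing `Rc<RefCell<Expr>>` with `Arc<Mutex<Expr>>` is what makes `ExprInst` `Send`, so interpreter expressions can be shared between threads. A minimal standalone illustration of the difference, with a plain `String` standing in for `Expr`:

use std::sync::{Arc, Mutex};
use std::thread;

// Compile-time check that a handle type may move between threads.
fn assert_send<T: Send>(_: &T) {}

fn main() {
  // Arc<Mutex<_>> is Send + Sync, so a clone of the handle can enter a thread.
  let shared = Arc::new(Mutex::new(String::from("expr")));
  assert_send(&shared);

  let handle = Arc::clone(&shared);
  let worker = thread::spawn(move || {
    // lock() replaces RefCell's borrow_mut(); it blocks rather than panicking.
    handle.lock().unwrap().push_str(" (normalized)");
  });
  worker.join().unwrap();

  println!("{}", shared.lock().unwrap());
  // The old Rc<RefCell<String>> handle would fail assert_send here:
  // Rc is neither Send nor Sync, so it cannot be moved into the thread.
}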
@@ -1,37 +0,0 @@
-use std::fmt::Debug;
-
-use ordered_float::NotNan;
-
-use super::OrcString;
-
-/// Exact values read from the AST which have a shared meaning recognized by all
-/// external functions
-#[derive(Clone, PartialEq, Eq, Hash)]
-pub enum Literal {
-  /// Any floating point number except `NaN`
-  Num(NotNan<f64>),
-  /// An unsigned integer; a size, index or pointer
-  Uint(u64),
-  /// A utf-8 character sequence
-  Str(OrcString),
-}
-
-impl Debug for Literal {
-  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-    match self {
-      Self::Num(arg0) => write!(f, "{:?}", arg0),
-      Self::Uint(arg0) => write!(f, "{:?}", arg0),
-      Self::Str(arg0) => write!(f, "{:?}", arg0),
-    }
-  }
-}
-
-impl From<NotNan<f64>> for Literal {
-  fn from(value: NotNan<f64>) -> Self { Self::Num(value) }
-}
-impl From<u64> for Literal {
-  fn from(value: u64) -> Self { Self::Uint(value) }
-}
-impl From<String> for Literal {
-  fn from(value: String) -> Self { Self::Str(value.into()) }
-}
@@ -1,6 +1,6 @@
|
|||||||
use std::fmt::{Debug, Display};
|
use std::fmt::{Debug, Display};
|
||||||
use std::ops::Range;
|
use std::ops::Range;
|
||||||
use std::rc::Rc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
|
|
||||||
@@ -13,15 +13,15 @@ pub enum Location {
|
|||||||
/// Location information lost or code generated on the fly
|
/// Location information lost or code generated on the fly
|
||||||
Unknown,
|
Unknown,
|
||||||
/// Only the file is known
|
/// Only the file is known
|
||||||
File(Rc<VName>),
|
File(Arc<VName>),
|
||||||
/// Character slice of the code
|
/// Character slice of the code
|
||||||
Range {
|
Range {
|
||||||
/// Argument to the file loading callback that produced this code
|
/// Argument to the file loading callback that produced this code
|
||||||
file: Rc<VName>,
|
file: Arc<VName>,
|
||||||
/// Index of the unicode code points associated with the code
|
/// Index of the unicode code points associated with the code
|
||||||
range: Range<usize>,
|
range: Range<usize>,
|
||||||
/// The full source code as received by the parser
|
/// The full source code as received by the parser
|
||||||
source: Rc<String>,
|
source: Arc<String>,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -38,7 +38,7 @@ impl Location {
|
|||||||
|
|
||||||
/// File, if known
|
/// File, if known
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn file(&self) -> Option<Rc<VName>> {
|
pub fn file(&self) -> Option<Arc<VName>> {
|
||||||
if let Self::File(file) | Self::Range { file, .. } = self {
|
if let Self::File(file) | Self::Range { file, .. } = self {
|
||||||
Some(file.clone())
|
Some(file.clone())
|
||||||
} else {
|
} else {
|
||||||
@@ -48,7 +48,7 @@ impl Location {
|
|||||||
|
|
||||||
/// Associated source code, if known
|
/// Associated source code, if known
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn source(&self) -> Option<Rc<String>> {
|
pub fn source(&self) -> Option<Arc<String>> {
|
||||||
if let Self::Range { source, .. } = self {
|
if let Self::Range { source, .. } = self {
|
||||||
Some(source.clone())
|
Some(source.clone())
|
||||||
} else {
|
} else {
|
||||||
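`Location` gets the same `Rc` to `Arc` treatment: the module path and the full source text are shared, so cloning a `Location` or returning one from `file()`/`source()` only bumps a reference count, and the value can travel with the now-`Send` expressions. A small standalone sketch of that sharing, using a simplified `Location` rather than the crate's enum:

use std::ops::Range;
use std::sync::Arc;

// Simplified stand-in: the real Location also has Unknown and File variants.
#[derive(Clone, Debug)]
struct Location {
  file: Arc<Vec<String>>, // module path of the source file
  source: Arc<String>,    // full source text, shared by every Location into it
  range: Range<usize>,
}

fn main() {
  let source = Arc::new(String::from("const pi := 3"));
  let file = Arc::new(vec!["examples".to_string(), "pi".to_string()]);

  // Two locations into the same file share one copy of the text.
  let a = Location { file: Arc::clone(&file), source: Arc::clone(&source), range: 0..5 };
  let b = Location { file, source, range: 6..8 };

  assert!(Arc::ptr_eq(&a.source, &b.source));
  println!("{:?} and {:?} index into {} shared bytes", a.range, b.range, a.source.len());
}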
@@ -3,22 +3,18 @@ pub mod ast_to_interpreted;
 pub mod ast_to_postmacro;
 mod const_tree;
 pub mod interpreted;
-pub mod literal;
 pub mod location;
 mod namelike;
 pub mod path_set;
 pub mod postmacro;
 pub mod postmacro_to_interpreted;
-pub mod primitive;
 pub mod project;
 pub mod sourcefile;
 mod string;
 pub mod tree;

 pub use const_tree::{from_const_tree, ConstTree};
-pub use literal::Literal;
 pub use location::Location;
 pub use namelike::{NameLike, Sym, VName};
 pub use path_set::PathSet;
-pub use primitive::Primitive;
 pub use string::OrcString;
@@ -1,8 +1,8 @@
|
|||||||
use std::fmt::Debug;
|
use std::fmt::Debug;
|
||||||
use std::ops::Add;
|
use std::ops::Add;
|
||||||
use std::rc::Rc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use crate::utils::rc_tools::rc_to_owned;
|
use crate::utils::rc_tools::arc_to_owned;
|
||||||
use crate::utils::Side;
|
use crate::utils::Side;
|
||||||
|
|
||||||
/// A branching path selecting some placeholders (but at least one) in a Lambda
|
/// A branching path selecting some placeholders (but at least one) in a Lambda
|
||||||
@@ -10,9 +10,9 @@ use crate::utils::Side;
|
|||||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||||
pub struct PathSet {
|
pub struct PathSet {
|
||||||
/// The definite steps
|
/// The definite steps
|
||||||
pub steps: Rc<Vec<Side>>,
|
pub steps: Arc<Vec<Side>>,
|
||||||
/// if Some, it splits. If None, it ends.
|
/// if Some, it splits. If None, it ends.
|
||||||
pub next: Option<(Rc<PathSet>, Rc<PathSet>)>,
|
pub next: Option<(Arc<PathSet>, Arc<PathSet>)>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PathSet {
|
impl PathSet {
|
||||||
@@ -22,20 +22,20 @@ impl PathSet {
|
|||||||
left: Self,
|
left: Self,
|
||||||
right: Self,
|
right: Self,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
let steps = Rc::new(steps.into_iter().collect());
|
let steps = Arc::new(steps.into_iter().collect());
|
||||||
Self { steps, next: Some((Rc::new(left), Rc::new(right))) }
|
Self { steps, next: Some((Arc::new(left), Arc::new(right))) }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a path set for one target
|
/// Create a path set for one target
|
||||||
pub fn end(steps: impl IntoIterator<Item = Side>) -> Self {
|
pub fn end(steps: impl IntoIterator<Item = Side>) -> Self {
|
||||||
Self { steps: Rc::new(steps.into_iter().collect()), next: None }
|
Self { steps: Arc::new(steps.into_iter().collect()), next: None }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a path set that points to a slot that is a direct
|
/// Create a path set that points to a slot that is a direct
|
||||||
/// child of the given lambda with no applications. In essence, this means
|
/// child of the given lambda with no applications. In essence, this means
|
||||||
/// that this argument will be picked as the value of the expression after an
|
/// that this argument will be picked as the value of the expression after an
|
||||||
/// arbitrary amount of subsequent discarded parameters.
|
/// arbitrary amount of subsequent discarded parameters.
|
||||||
pub fn pick() -> Self { Self { steps: Rc::new(vec![]), next: None } }
|
pub fn pick() -> Self { Self { steps: Arc::new(vec![]), next: None } }
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Debug for PathSet {
|
impl Debug for PathSet {
|
||||||
@@ -57,7 +57,10 @@ impl Debug for PathSet {
|
|||||||
impl Add for PathSet {
|
impl Add for PathSet {
|
||||||
type Output = Self;
|
type Output = Self;
|
||||||
fn add(self, rhs: Self) -> Self::Output {
|
fn add(self, rhs: Self) -> Self::Output {
|
||||||
Self { steps: Rc::new(vec![]), next: Some((Rc::new(self), Rc::new(rhs))) }
|
Self {
|
||||||
|
steps: Arc::new(vec![]),
|
||||||
|
next: Some((Arc::new(self), Arc::new(rhs))),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -65,9 +68,9 @@ impl Add<Side> for PathSet {
|
|||||||
type Output = Self;
|
type Output = Self;
|
||||||
fn add(self, rhs: Side) -> Self::Output {
|
fn add(self, rhs: Side) -> Self::Output {
|
||||||
let PathSet { steps, next } = self;
|
let PathSet { steps, next } = self;
|
||||||
let mut new_steps = rc_to_owned(steps);
|
let mut new_steps = arc_to_owned(steps);
|
||||||
new_steps.insert(0, rhs);
|
new_steps.insert(0, rhs);
|
||||||
Self { steps: Rc::new(new_steps), next }
|
Self { steps: Arc::new(new_steps), next }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -78,9 +81,9 @@ mod tests {
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_combine() -> Result<(), &'static str> {
|
fn test_combine() -> Result<(), &'static str> {
|
||||||
let ps1 =
|
let ps1 =
|
||||||
PathSet { next: None, steps: Rc::new(vec![Side::Left, Side::Left]) };
|
PathSet { next: None, steps: Arc::new(vec![Side::Left, Side::Left]) };
|
||||||
let ps2 =
|
let ps2 =
|
||||||
PathSet { next: None, steps: Rc::new(vec![Side::Left, Side::Right]) };
|
PathSet { next: None, steps: Arc::new(vec![Side::Left, Side::Right]) };
|
||||||
let sum = ps1.clone() + ps2.clone();
|
let sum = ps1.clone() + ps2.clone();
|
||||||
assert_eq!(sum.steps.as_ref(), &[]);
|
assert_eq!(sum.steps.as_ref(), &[]);
|
||||||
let nexts = sum.next.ok_or("nexts not set")?;
|
let nexts = sum.next.ok_or("nexts not set")?;
|
||||||
@@ -92,16 +95,16 @@ mod tests {
|
|||||||
fn extend_scaffold() -> PathSet {
|
fn extend_scaffold() -> PathSet {
|
||||||
PathSet {
|
PathSet {
|
||||||
next: Some((
|
next: Some((
|
||||||
Rc::new(PathSet {
|
Arc::new(PathSet {
|
||||||
next: None,
|
next: None,
|
||||||
steps: Rc::new(vec![Side::Left, Side::Left]),
|
steps: Arc::new(vec![Side::Left, Side::Left]),
|
||||||
}),
|
}),
|
||||||
Rc::new(PathSet {
|
Arc::new(PathSet {
|
||||||
next: None,
|
next: None,
|
||||||
steps: Rc::new(vec![Side::Left, Side::Right]),
|
steps: Arc::new(vec![Side::Left, Side::Right]),
|
||||||
}),
|
}),
|
||||||
)),
|
)),
|
||||||
steps: Rc::new(vec![Side::Left, Side::Right, Side::Left]),
|
steps: Arc::new(vec![Side::Left, Side::Right, Side::Left]),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
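The behaviour of `Add` is unchanged: `ps1 + ps2` still produces a node with empty `steps` whose two branches are the operands, only now held behind `Arc` so that the combined set is cheap to clone and `Send`. A standalone sketch of that combination rule, with a trimmed-down `PathSet` rather than the crate's:

use std::ops::Add;
use std::sync::Arc;

#[derive(Clone, Copy, Debug, PartialEq)]
enum Side { Left, Right }

#[derive(Clone, Debug, PartialEq)]
struct PathSet {
  steps: Arc<Vec<Side>>,                      // the definite steps
  next: Option<(Arc<PathSet>, Arc<PathSet>)>, // Some = split, None = end
}

impl Add for PathSet {
  type Output = Self;
  // Combining two sets: no shared prefix, both operands become branches.
  fn add(self, rhs: Self) -> Self {
    Self { steps: Arc::new(vec![]), next: Some((Arc::new(self), Arc::new(rhs))) }
  }
}

fn main() {
  let ps1 = PathSet { steps: Arc::new(vec![Side::Left, Side::Left]), next: None };
  let ps2 = PathSet { steps: Arc::new(vec![Side::Left, Side::Right]), next: None };
  let sum = ps1.clone() + ps2.clone();
  assert_eq!(sum.steps.as_ref(), &Vec::<Side>::new());
  let (left, right) = sum.next.expect("combined set must branch");
  assert_eq!(left.as_ref(), &ps1);
  assert_eq!(right.as_ref(), &ps2);
}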
@@ -2,7 +2,7 @@ use std::fmt::{Debug, Write};
|
|||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
use super::location::Location;
|
use super::location::Location;
|
||||||
use super::primitive::Primitive;
|
use crate::foreign::{ExFn, Atom};
|
||||||
use crate::utils::string_from_charset;
|
use crate::utils::string_from_charset;
|
||||||
use crate::Sym;
|
use crate::Sym;
|
||||||
|
|
||||||
@@ -42,7 +42,10 @@ pub enum Clause {
|
|||||||
Lambda(Rc<Expr>),
|
Lambda(Rc<Expr>),
|
||||||
Constant(Sym),
|
Constant(Sym),
|
||||||
LambdaArg(usize),
|
LambdaArg(usize),
|
||||||
P(Primitive),
|
/// An opaque function, eg. an effectful function employing CPS
|
||||||
|
ExternFn(ExFn),
|
||||||
|
/// An opaque non-callable value, eg. a file handle
|
||||||
|
Atom(Atom),
|
||||||
}
|
}
|
||||||
|
|
||||||
const ARGNAME_CHARSET: &str = "abcdefghijklmnopqrstuvwxyz";
|
const ARGNAME_CHARSET: &str = "abcdefghijklmnopqrstuvwxyz";
|
||||||
@@ -75,7 +78,8 @@ impl Clause {
|
|||||||
Wrap(wl, wr): Wrap,
|
Wrap(wl, wr): Wrap,
|
||||||
) -> std::fmt::Result {
|
) -> std::fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
Self::P(p) => write!(f, "{p:?}"),
|
Self::Atom(a) => write!(f, "{a:?}"),
|
||||||
|
Self::ExternFn(fun) => write!(f, "{fun:?}"),
|
||||||
Self::Lambda(body) => parametric_fmt(f, depth, "\\", body, wr),
|
Self::Lambda(body) => parametric_fmt(f, depth, "\\", body, wr),
|
||||||
Self::LambdaArg(skip) => {
|
Self::LambdaArg(skip) => {
|
||||||
let lambda_depth = (depth - skip - 1).try_into().unwrap();
|
let lambda_depth = (depth - skip - 1).try_into().unwrap();
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
use std::cell::RefCell;
|
use std::sync::{Arc, Mutex};
|
||||||
use std::rc::Rc;
|
|
||||||
|
|
||||||
use super::path_set::PathSet;
|
use super::path_set::PathSet;
|
||||||
use super::{interpreted, postmacro};
|
use super::{interpreted, postmacro};
|
||||||
@@ -18,12 +17,12 @@ fn collect_paths_cls_rec(
|
|||||||
depth: usize,
|
depth: usize,
|
||||||
) -> Option<PathSet> {
|
) -> Option<PathSet> {
|
||||||
match cls {
|
match cls {
|
||||||
postmacro::Clause::P(_) | postmacro::Clause::Constant(_) => None,
|
postmacro::Clause::Atom(_) | postmacro::Clause::ExternFn(_) => None,
|
||||||
|
postmacro::Clause::Constant(_) => None,
|
||||||
postmacro::Clause::LambdaArg(h) =>
|
postmacro::Clause::LambdaArg(h) =>
|
||||||
if *h != depth {
|
match *h != depth {
|
||||||
None
|
true => None,
|
||||||
} else {
|
false => Some(PathSet::pick())
|
||||||
Some(PathSet { next: None, steps: Rc::new(vec![]) })
|
|
||||||
},
|
},
|
||||||
postmacro::Clause::Lambda(b) => collect_paths_expr_rec(b, depth + 1),
|
postmacro::Clause::Lambda(b) => collect_paths_expr_rec(b, depth + 1),
|
||||||
postmacro::Clause::Apply(f, x) => {
|
postmacro::Clause::Apply(f, x) => {
|
||||||
@@ -43,7 +42,8 @@ pub fn clause(cls: &postmacro::Clause) -> interpreted::Clause {
|
|||||||
match cls {
|
match cls {
|
||||||
postmacro::Clause::Constant(name) =>
|
postmacro::Clause::Constant(name) =>
|
||||||
interpreted::Clause::Constant(name.clone()),
|
interpreted::Clause::Constant(name.clone()),
|
||||||
postmacro::Clause::P(p) => interpreted::Clause::P(p.clone()),
|
postmacro::Clause::Atom(a) => interpreted::Clause::Atom(a.clone()),
|
||||||
|
postmacro::Clause::ExternFn(fun) => interpreted::Clause::ExternFn(fun.clone()),
|
||||||
postmacro::Clause::Apply(f, x) =>
|
postmacro::Clause::Apply(f, x) =>
|
||||||
interpreted::Clause::Apply { f: expr(f.as_ref()), x: expr(x.as_ref()) },
|
interpreted::Clause::Apply { f: expr(f.as_ref()), x: expr(x.as_ref()) },
|
||||||
postmacro::Clause::Lambda(body) => interpreted::Clause::Lambda {
|
postmacro::Clause::Lambda(body) => interpreted::Clause::Lambda {
|
||||||
@@ -55,7 +55,7 @@ pub fn clause(cls: &postmacro::Clause) -> interpreted::Clause {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn expr(expr: &postmacro::Expr) -> interpreted::ExprInst {
|
pub fn expr(expr: &postmacro::Expr) -> interpreted::ExprInst {
|
||||||
interpreted::ExprInst(Rc::new(RefCell::new(interpreted::Expr {
|
interpreted::ExprInst(Arc::new(Mutex::new(interpreted::Expr {
|
||||||
location: expr.location.clone(),
|
location: expr.location.clone(),
|
||||||
clause: clause(&expr.value),
|
clause: clause(&expr.value),
|
||||||
})))
|
})))
|
||||||
|
|||||||
@@ -1,45 +0,0 @@
|
|||||||
use std::fmt::Debug;
|
|
||||||
|
|
||||||
use super::Literal;
|
|
||||||
use crate::foreign::{Atom, ExternFn};
|
|
||||||
|
|
||||||
/// A value the interpreter can't inspect
|
|
||||||
pub enum Primitive {
|
|
||||||
/// A literal value, eg. `1`, `"hello"`
|
|
||||||
Literal(Literal),
|
|
||||||
/// An opaque function, eg. an effectful function employing CPS
|
|
||||||
ExternFn(Box<dyn ExternFn>),
|
|
||||||
/// An opaque non-callable value, eg. a file handle
|
|
||||||
Atom(Atom),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq for Primitive {
|
|
||||||
fn eq(&self, other: &Self) -> bool {
|
|
||||||
if let (Self::Literal(l1), Self::Literal(l2)) = (self, other) {
|
|
||||||
l1 == l2
|
|
||||||
} else {
|
|
||||||
false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Clone for Primitive {
|
|
||||||
fn clone(&self) -> Self {
|
|
||||||
match self {
|
|
||||||
Primitive::Literal(l) => Primitive::Literal(l.clone()),
|
|
||||||
Primitive::Atom(a) => Primitive::Atom(a.clone()),
|
|
||||||
Primitive::ExternFn(ef) =>
|
|
||||||
Primitive::ExternFn(dyn_clone::clone_box(ef.as_ref())),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Debug for Primitive {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
match self {
|
|
||||||
Self::Atom(a) => write!(f, "{a:?}"),
|
|
||||||
Self::ExternFn(ef) => write!(f, "{ef:?}"),
|
|
||||||
Self::Literal(l) => write!(f, "{l:?}"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -31,26 +31,15 @@ impl<N: NameLike> Default for ItemKind<N> {
 #[derive(Debug, Clone, Default)]
 pub struct ProjectItem<N: NameLike> {
   pub kind: ItemKind<N>,
-  pub is_op: bool,
 }
 
 impl<N: NameLike> Display for ProjectItem<N> {
   fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
     match &self.kind {
-      ItemKind::None => match self.is_op {
-        true => write!(f, "operator"),
-        false => write!(f, "keyword"),
-      },
-      ItemKind::Const(c) => match self.is_op {
-        true => write!(f, "operator with value {c}"),
-        false => write!(f, "constant {c}"),
-      },
+      ItemKind::None => write!(f, "keyword"),
+      ItemKind::Const(c) => write!(f, "constant {c}"),
       ItemKind::Alias(alias) => {
-        let origin = Interner::extern_all(alias).join("::");
-        match self.is_op {
-          true => write!(f, "operator alias to {origin}"),
-          false => write!(f, "alias to {origin}"),
-        }
+        write!(f, "alias to {}", Interner::extern_all(alias).join("::"))
       },
     }
   }
@@ -61,9 +50,6 @@ impl<N: NameLike> Display for ProjectItem<N> {
 pub struct ImpReport<N: NameLike> {
   /// Absolute path of the module the symbol is imported from
   pub source: N,
-  /// Whether this symbol should be treated as an operator for the purpose of
-  /// parsing
-  pub is_op: bool,
 }
 
 /// Additional data about a loaded module beyond the list of constants and
@@ -94,8 +80,8 @@ impl<N: NameLike> Add for ProjectExt<N> {
         Interner::extern_all(&self.path).join("::")
       )
     }
-    self.imports_from.extend(imports_from.into_iter());
-    self.rules.extend(rules.into_iter());
+    self.imports_from.extend(imports_from);
+    self.rules.extend(rules);
     if file.is_some() {
       self.file = file
     }
@@ -190,7 +176,6 @@ fn vname_to_sym_tree_rec(
     ModMember::Sub(module) =>
       ModMember::Sub(vname_to_sym_tree_rec(module, i)),
     ModMember::Item(ex) => ModMember::Item(ProjectItem {
-      is_op: ex.is_op,
       kind: match ex.kind {
         ItemKind::None => ItemKind::None,
         ItemKind::Alias(n) => ItemKind::Alias(n),
@@ -204,7 +189,7 @@ fn vname_to_sym_tree_rec(
     extra: ProjectExt {
       path: tree.extra.path,
       imports_from: (tree.extra.imports_from.into_iter())
-        .map(|(k, v)| (k, ImpReport { is_op: v.is_op, source: i.i(&v.source) }))
+        .map(|(k, v)| (k, ImpReport { source: i.i(&v.source) }))
         .collect(),
       rules: (tree.extra.rules.into_iter())
         .map(|Rule { pattern, prio, template }| Rule {
@@ -8,7 +8,7 @@ use super::namelike::VName;
 use crate::ast::{Constant, Rule};
 use crate::error::{ProjectError, ProjectResult, TooManySupers};
 use crate::interner::{Interner, Tok};
-use crate::utils::pure_push::pushed;
+use crate::utils::pure_seq::pushed;
 use crate::utils::{unwrap_or, BoxedIter};
 use crate::Location;
 
@@ -78,16 +78,11 @@ pub enum MemberKind {
   Constant(Constant),
   /// A prefixed set of other entries
   Module(ModuleBlock),
-  /// Operator declarations
-  Operators(Vec<Tok<String>>),
 }
 
 impl Display for MemberKind {
   fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
     match self {
-      Self::Operators(opv) => {
-        write!(f, "operators[{}]", opv.iter().map(|t| &**t).join(" "))
-      },
       Self::Constant(c) => c.fmt(f),
       Self::Module(m) => m.fmt(f),
      Self::Rule(r) => r.fmt(f),
@@ -1,10 +1,10 @@
 use std::fmt::Debug;
 use std::hash::Hash;
 use std::ops::Deref;
-use std::rc::Rc;
+use std::sync::Arc;
 
-use crate::interpreted::{Clause, ExprInst};
-use crate::{Literal, Primitive, Tok};
+use crate::foreign::InertAtomic;
+use crate::{Interner, Tok};
 
 /// An Orchid string which may or may not be interned
 #[derive(Clone, Eq)]
@@ -12,7 +12,7 @@ pub enum OrcString {
   /// An interned string. Equality-conpared by reference.
   Interned(Tok<String>),
   /// An uninterned bare string. Equality-compared by character
-  Runtime(Rc<String>),
+  Runtime(Arc<String>),
 }
 
 impl Debug for OrcString {
@@ -25,23 +25,21 @@ impl Debug for OrcString {
 }
 
 impl OrcString {
+  /// Intern the contained string
+  pub fn intern(&mut self, i: &Interner) {
+    if let Self::Runtime(t) = self {
+      *self = Self::Interned(i.i(t.as_str()))
+    }
+  }
   /// Clone out the plain Rust [String]
   #[must_use]
   pub fn get_string(self) -> String {
     match self {
       Self::Interned(s) => s.as_str().to_owned(),
       Self::Runtime(rc) =>
-        Rc::try_unwrap(rc).unwrap_or_else(|rc| (*rc).clone()),
+        Arc::try_unwrap(rc).unwrap_or_else(|rc| (*rc).clone()),
     }
   }
 
-  /// Wrap in a [Clause] for returning from extern functions
-  pub fn cls(self) -> Clause {
-    Clause::P(Primitive::Literal(Literal::Str(self)))
-  }
-
-  /// Wrap in an [ExprInst] for embedding in runtime-generated code
-  pub fn exi(self) -> ExprInst { self.cls().wrap() }
 }
 
 impl Deref for OrcString {
@@ -62,7 +60,7 @@ impl Hash for OrcString {
 }
 
 impl From<String> for OrcString {
-  fn from(value: String) -> Self { Self::Runtime(Rc::new(value)) }
+  fn from(value: String) -> Self { Self::Runtime(Arc::new(value)) }
 }
 
 impl From<Tok<String>> for OrcString {
@@ -77,3 +75,8 @@ impl PartialEq for OrcString {
     }
   }
 }
+
+impl InertAtomic for OrcString {
+  fn type_str() -> &'static str { "OrcString" }
+  fn strict_eq(&self, other: &Self) -> bool { self == other }
+}
@@ -174,7 +174,7 @@ impl<TItem: Clone, TExt: Clone> Module<TItem, TExt> {
         (_, right) => new_items.insert(key, right),
       };
     }
-    new_items.extend(self.entries.into_iter());
+    new_items.extend(self.entries);
     Ok(Module { entries: new_items, extra: (self.extra + extra)? })
   }
 }
@@ -8,7 +8,7 @@ use crate::rule::vec_attrs::vec_attrs;
 use crate::utils::Side;
 
 pub type MaxVecSplit<'a> =
-  (&'a [RuleExpr], (Tok<String>, u64, bool), &'a [RuleExpr]);
+  (&'a [RuleExpr], (Tok<String>, usize, bool), &'a [RuleExpr]);
 
 /// Derive the details of the central vectorial and the two sides from a
 /// slice of Expr's
@@ -107,7 +107,8 @@ fn mk_vec(pattern: &[RuleExpr]) -> VecMatcher {
 #[must_use]
 fn mk_scalar(pattern: &RuleExpr) -> ScalMatcher {
   match &pattern.value {
-    Clause::P(p) => ScalMatcher::P(p.clone()),
+    Clause::Atom(a) => ScalMatcher::Atom(a.clone()),
+    Clause::ExternFn(_) => panic!("Cannot match on ExternFn"),
     Clause::Name(n) => ScalMatcher::Name(n.clone()),
     Clause::Placeh(Placeholder { name, class }) => {
       debug_assert!(
@@ -10,7 +10,8 @@ pub fn scal_match<'a>(
   expr: &'a RuleExpr,
 ) -> Option<State<'a>> {
   match (matcher, &expr.value) {
-    (ScalMatcher::P(p1), Clause::P(p2)) if p1 == p2 => Some(State::new()),
+    (ScalMatcher::Atom(a1), Clause::Atom(a2)) if a1.0.strict_eq(&a2.0) =>
+      Some(State::new()),
     (ScalMatcher::Name(n1), Clause::Name(n2)) if n1 == n2 => Some(State::new()),
     (ScalMatcher::Placeh(key), _) =>
       Some(State::from([(key.clone(), StateEntry::Scalar(expr))])),
@@ -5,15 +5,15 @@ use itertools::Itertools;
 
 use super::any_match::any_match;
 use super::build::mk_any;
+use crate::foreign::Atom;
 use crate::interner::Tok;
-use crate::representations::Primitive;
 use crate::rule::matcher::{Matcher, RuleExpr};
 use crate::rule::state::State;
 use crate::utils::Side;
 use crate::{Sym, VName};
 
 pub enum ScalMatcher {
-  P(Primitive),
+  Atom(Atom),
   Name(Sym),
   S(char, Box<AnyMatcher>),
   Lambda(Box<AnyMatcher>, Box<AnyMatcher>),
@@ -68,7 +68,7 @@ impl Matcher for AnyMatcher {
 impl Display for ScalMatcher {
   fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
     match self {
-      Self::P(p) => write!(f, "{:?}", p),
+      Self::Atom(a) => write!(f, "{a:?}"),
       Self::Placeh(n) => write!(f, "${n}"),
       Self::Name(n) => write!(f, "{}", n.extern_vec().join("::")),
       Self::S(c, body) => {
@@ -55,7 +55,7 @@ pub fn vec_match<'a>(
   // Valid combinations of locations for the separators
   let mut pos_pairs = lposv
     .into_iter()
-    .cartesian_product(rposv.into_iter())
+    .cartesian_product(rposv)
     .filter(|((lpos, _), (rpos, _))| lpos + left_sep.len() <= *rpos)
     .map(|((lpos, mut lstate), (rpos, rstate))| {
       lstate.extend(rstate);
@@ -30,11 +30,11 @@ fn pad(mut rule: Rule<Sym>, i: &Interner) -> Rule<Sym> {
   let prefix_v = if prefix_explicit { empty } else { prefix };
   let suffix_v = if suffix_explicit { empty } else { suffix };
   rule.pattern = (prefix_v.iter().cloned())
-    .chain(rule.pattern.into_iter())
+    .chain(rule.pattern)
     .chain(suffix_v.iter().cloned())
     .collect();
   rule.template = (prefix_v.iter().cloned())
-    .chain(rule.template.into_iter())
+    .chain(rule.template)
     .chain(suffix_v.iter().cloned())
     .collect();
   rule
@@ -60,7 +60,8 @@ fn check_rec_expr(
   in_template: bool,
 ) -> Result<(), RuleError> {
   match &expr.value {
-    Clause::Name(_) | Clause::P(_) => Ok(()),
+    Clause::Name(_) | Clause::Atom(_) => Ok(()),
+    Clause::ExternFn(_) => Err(RuleError::ExternFn),
     Clause::Placeh(Placeholder { name, class }) => {
       let typ = (*class).into();
       // in a template, the type must be known and identical
@@ -5,6 +5,8 @@ use hashbrown::HashSet;
 
 use crate::ast::{self, search_all_slcs, PHClass, Placeholder, Rule};
 use crate::error::{ErrorPosition, ProjectError};
+#[allow(unused)] // for doc
+use crate::foreign::ExternFn;
 use crate::interner::Tok;
 use crate::utils::BoxedIter;
 use crate::{Location, Sym};
@@ -20,16 +22,20 @@ pub enum RuleError {
   Multiple(Tok<String>),
   /// Two vectorial placeholders are next to each other
   VecNeighbors(Tok<String>, Tok<String>),
+  /// Found an [ExternFn] in the pattern. This is a really unlikely mistake
+  /// caused only by rogue systems.
+  ExternFn,
 }
 impl RuleError {
   /// Convert into a unified error trait object shared by all Orchid errors
   #[must_use]
   pub fn to_project_error(self, rule: &Rule<Sym>) -> Rc<dyn ProjectError> {
     match self {
-      RuleError::Missing(name) => Missing::new(rule, name).rc(),
-      RuleError::Multiple(name) => Multiple::new(rule, name).rc(),
-      RuleError::ArityMismatch(name) => ArityMismatch::new(rule, name).rc(),
-      RuleError::VecNeighbors(n1, n2) => VecNeighbors::new(rule, n1, n2).rc(),
+      Self::Missing(name) => Missing::new(rule, name).rc(),
+      Self::Multiple(name) => Multiple::new(rule, name).rc(),
+      Self::ArityMismatch(name) => ArityMismatch::new(rule, name).rc(),
+      Self::VecNeighbors(n1, n2) => VecNeighbors::new(rule, n1, n2).rc(),
+      Self::ExternFn => ExternFnInPattern(rule.clone()).rc(),
     }
   }
 }
@@ -37,6 +43,7 @@ impl RuleError {
 impl Display for RuleError {
   fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
     match self {
+      Self::ExternFn => write!(f, "Found an ExternFn in the pattern"),
       Self::Missing(key) => write!(f, "Key {key} not in match pattern"),
       Self::ArityMismatch(key) => {
         write!(f, "Key {key} used inconsistently with and without ellipsis")
@@ -44,10 +51,8 @@ impl Display for RuleError {
       Self::Multiple(key) => {
         write!(f, "Key {key} appears multiple times in match pattern")
       },
-      Self::VecNeighbors(left, right) => write!(
-        f,
-        "Keys {left} and {right} are two vectorials right next to each other"
-      ),
+      Self::VecNeighbors(left, right) =>
+        write!(f, "vectorials {left} and {right} are next to each other"),
     }
   }
 }
@@ -232,3 +237,15 @@ impl ProjectError for VecNeighbors {
     )
   }
 }
+
+/// Not referencing by location because it's most likely unknown
+#[derive(Debug)]
+pub struct ExternFnInPattern(ast::Rule<Sym>);
+impl ProjectError for ExternFnInPattern {
+  fn description(&self) -> &str {
+    "Found an ExternFn in a pattern. Unlikely error caused by a system"
+  }
+  fn message(&self) -> String {
+    format!("Found ExternFn in pattern {}", self.0)
+  }
+}
@@ -7,7 +7,7 @@ use crate::ast::{Clause, Expr, PHClass, Placeholder};
 use crate::interner::Tok;
 use crate::utils::unwrap_or;
 
-#[derive(Clone, Copy, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug)]
 pub enum StateEntry<'a> {
   Vec(&'a [RuleExpr]),
   Scalar(&'a RuleExpr),
@@ -27,7 +27,8 @@ pub fn apply_exprv(template: &[RuleExpr], state: &State) -> Vec<RuleExpr> {
 pub fn apply_expr(template: &RuleExpr, state: &State) -> Vec<RuleExpr> {
   let Expr { location, value } = template;
   match value {
-    Clause::P(_) | Clause::Name(_) => vec![template.clone()],
+    Clause::Atom(_) | Clause::Name(_) | Clause::ExternFn(_) =>
+      vec![template.clone()],
     Clause::S(c, body) => vec![Expr {
       location: location.clone(),
       value: Clause::S(*c, Rc::new(apply_exprv(body.as_slice(), state))),
@@ -35,7 +35,8 @@ pub fn clause<F: FnMut(Rc<Vec<RuleExpr>>) -> Option<Rc<Vec<RuleExpr>>>>(
   pred: &mut F,
 ) -> Option<Clause<Sym>> {
   match c {
-    Clause::P(_) | Clause::Placeh { .. } | Clause::Name { .. } => None,
+    Clause::Atom(_) | Clause::Placeh { .. } | Clause::Name { .. } => None,
+    Clause::ExternFn(_) => None,
     Clause::Lambda(arg, body) =>
       if let Some(arg) = exprv(arg.clone(), pred) {
         Some(Clause::Lambda(arg, body.clone()))
@@ -5,7 +5,7 @@ use crate::interner::Tok;
 /// Returns the name, priority and nonzero of the expression if it is
 /// a vectorial placeholder
 #[must_use]
-pub fn vec_attrs(expr: &RuleExpr) -> Option<(Tok<String>, u64, bool)> {
+pub fn vec_attrs(expr: &RuleExpr) -> Option<(Tok<String>, usize, bool)> {
   match expr.value.clone() {
     Clause::Placeh(Placeholder {
       class: PHClass::Vec { prio, nonzero },
@@ -1,6 +1,6 @@
 import std::panic
 
-export const block_on := \action.\cont. (
+export const block_on := \action. \cont. (
   action cont
   (\e.panic "unwrapped asynch call")
   \c.yield
@@ -4,6 +4,7 @@ use std::collections::VecDeque;
 use std::fmt::{Debug, Display};
 use std::rc::Rc;
 use std::sync::mpsc::Sender;
+use std::sync::{Arc, Mutex};
 use std::time::Duration;
 
 use hashbrown::HashMap;
@@ -17,23 +18,30 @@ use crate::interpreted::{Clause, ExprInst};
 use crate::interpreter::HandlerTable;
 use crate::pipeline::file_loader::embed_to_map;
 use crate::systems::codegen::call;
-use crate::systems::stl::Boolean;
 use crate::utils::poller::{PollEvent, Poller};
 use crate::utils::unwrap_or;
 use crate::{ConstTree, Interner};
 
 #[derive(Debug, Clone)]
 struct Timer {
-  recurring: Boolean,
+  recurring: bool,
   delay: NotNan<f64>,
 }
 
-pub fn set_timer(recurring: Boolean, delay: NotNan<f64>) -> XfnResult<Clause> {
+pub fn set_timer(recurring: bool, delay: NotNan<f64>) -> XfnResult<Clause> {
   Ok(init_cps(2, Timer { recurring, delay }))
 }
 
 #[derive(Clone)]
-struct CancelTimer(Rc<dyn Fn()>);
+struct CancelTimer(Arc<Mutex<dyn Fn() + Send>>);
+impl CancelTimer {
+  pub fn new(f: impl Fn() + Send + 'static) -> Self {
+    Self(Arc::new(Mutex::new(f)))
+  }
+  pub fn cancel(&self) {
+    self.0.lock().unwrap()()
+  }
+}
 impl Debug for CancelTimer {
   fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
     write!(f, "opaque cancel operation")
@@ -134,17 +142,16 @@ impl<'a> IntoSystem<'a> for AsynchSystem<'a> {
       let mut polly = polly.borrow_mut();
       let (timeout, action, cont) = t.unpack2();
       let duration = Duration::from_secs_f64(*timeout.delay);
-      let cancel_timer = if timeout.recurring.0 {
-        CancelTimer(Rc::new(polly.set_interval(duration, action)))
-      } else {
-        CancelTimer(Rc::new(polly.set_timeout(duration, action)))
+      let cancel_timer = match timeout.recurring {
+        true => CancelTimer::new(polly.set_interval(duration, action)),
+        false => CancelTimer::new(polly.set_timeout(duration, action)),
       };
       Ok(call(cont, [init_cps(1, cancel_timer).wrap()]).wrap())
     }
   });
   handler_table.register(move |t: Box<CPSBox<CancelTimer>>| {
     let (command, cont) = t.unpack1();
-    command.0.as_ref()();
+    command.cancel();
     Ok(cont)
   });
   handler_table.register({
@@ -165,7 +172,7 @@ impl<'a> IntoSystem<'a> for AsynchSystem<'a> {
         PollEvent::Event(ev) => {
           let handler = (handlers.get_mut(&ev.as_ref().type_id()))
             .unwrap_or_else(|| {
-              panic!("Unhandled messgae type: {:?}", ev.type_id())
+              panic!("Unhandled messgae type: {:?}", (*ev).type_id())
             });
           let events = handler(ev);
           // we got new microtasks
@@ -181,6 +188,8 @@ impl<'a> IntoSystem<'a> for AsynchSystem<'a> {
     });
     System {
       name: vec!["system".to_string(), "asynch".to_string()],
+      lexer_plugin: None,
+      line_parser: None,
       constants: ConstTree::namespace(
         [i.i("system"), i.i("async")],
         ConstTree::tree([
@@ -1,67 +0,0 @@
-//! Utility functions that operate on literals. Because of the parallel locked
-//! nature of [ExprInst], returning a reference to [Literal] is not possible.
-use std::rc::Rc;
-
-use ordered_float::NotNan;
-
-use super::assertion_error::AssertionError;
-use crate::foreign::{Atom, ExternError};
-use crate::interpreted::{Clause, Expr, TryFromExprInst};
-use crate::representations::interpreted::ExprInst;
-use crate::representations::{Literal, OrcString};
-use crate::{Location, Primitive};
-
-/// [ExprInst::get_literal] except the error is mapped to an [ExternError]
-pub fn get_literal(
-  exi: ExprInst,
-) -> Result<(Literal, Location), Rc<dyn ExternError>> {
-  (exi.get_literal()).map_err(|exi| {
-    eprintln!("failed to get literal from {:?}", exi.expr().clause);
-    AssertionError::ext(exi.location(), "literal")
-  })
-}
-
-// ######## Automatically ########
-
-impl TryFromExprInst for Literal {
-  fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>> {
-    get_literal(exi).map(|(l, _)| l)
-  }
-}
-
-impl TryFromExprInst for OrcString {
-  fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>> {
-    match get_literal(exi)? {
-      (Literal::Str(s), _) => Ok(s),
-      (_, location) => AssertionError::fail(location, "string"),
-    }
-  }
-}
-
-impl TryFromExprInst for u64 {
-  fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>> {
-    match get_literal(exi)? {
-      (Literal::Uint(u), _) => Ok(u),
-      (_, location) => AssertionError::fail(location, "uint"),
-    }
-  }
-}
-
-impl TryFromExprInst for NotNan<f64> {
-  fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>> {
-    match get_literal(exi)? {
-      (Literal::Num(n), _) => Ok(n),
-      (_, location) => AssertionError::fail(location, "float"),
-    }
-  }
-}
-
-impl TryFromExprInst for Atom {
-  fn from_exi(exi: ExprInst) -> Result<Self, Rc<dyn ExternError>> {
-    let Expr { clause, location } = exi.expr_val();
-    match clause {
-      Clause::P(Primitive::Atom(a)) => Ok(a),
-      _ => AssertionError::fail(location, "atom"),
-    }
-  }
-}
@@ -39,11 +39,11 @@ pub fn tuple(data: impl IntoIterator<Item = ExprInst>) -> Clause {
 
 #[cfg(test)]
 mod test {
-  use crate::systems::codegen::tuple;
+  use crate::{systems::codegen::tuple, foreign::Atomic};
 
   #[test]
   fn tuple_printer() {
-    println!("Binary tuple: {}", tuple([0.into(), 1.into()]))
+    println!("Binary tuple: {}", tuple([0usize.atom_exi(), 1usize.atom_exi()]))
   }
 }
 
@@ -8,24 +8,27 @@ use itertools::Itertools;
 
 use super::osstring::os_string_lib;
 use crate::ddispatch::Responder;
+use crate::error::RuntimeError;
 use crate::facade::{IntoSystem, System};
 use crate::foreign::cps_box::{init_cps, CPSBox};
 use crate::foreign::{
-  xfn_1ary, xfn_2ary, Atomic, AtomicReturn, InertAtomic, XfnResult,
+  xfn_1ary, xfn_2ary, Atomic, AtomicReturn, InertAtomic, StrictEq, XfnResult,
 };
 use crate::interpreted::{Clause, ExprInst};
 use crate::interpreter::HandlerTable;
 use crate::systems::codegen::{call, list, opt, tuple};
 use crate::systems::io::{wrap_io_error, Source};
 use crate::systems::scheduler::{SeqScheduler, SharedHandle};
-use crate::systems::stl::Boolean;
-use crate::systems::RuntimeError;
 use crate::utils::unwrap_or;
 use crate::ConstTree;
 
 #[derive(Debug, Clone)]
 pub struct CurrentDir;
 impl Responder for CurrentDir {}
+impl StrictEq for CurrentDir {
+  // never appears in macros
+  fn strict_eq(&self, _: &dyn std::any::Any) -> bool { false }
+}
 impl Atomic for CurrentDir {
   fn as_any(self: Box<Self>) -> Box<dyn std::any::Any> { self }
   fn as_any_ref(&self) -> &dyn std::any::Any { self }
@@ -95,7 +98,7 @@ fn read_dir(sched: &SeqScheduler, cmd: CPSBox<ReadDirCmd>) -> ExprInst {
     Err(e) => vec![call(fail, [wrap_io_error(e)]).wrap()],
     Ok(os_namev) => {
       let converted = (os_namev.into_iter())
-        .map(|(n, d)| Ok(tuple([n.atom_exi(), Boolean(d).atom_exi()]).wrap()))
+        .map(|(n, d)| Ok(tuple([n.atom_exi(), d.atom_exi()]).wrap()))
        .collect::<Result<Vec<_>, Clause>>();
      match converted {
        Err(e) => vec![call(fail, [e.wrap()]).wrap()],
@@ -115,7 +118,7 @@ pub fn write_file(sched: &SeqScheduler, cmd: CPSBox<WriteFile>) -> ExprInst {
     |file, _| match file {
       Err(e) => vec![call(fail, [wrap_io_error(e)]).wrap()],
       Ok(f) => {
-        let handle = SharedHandle::wrap(Box::new(f) as Box<dyn Write>);
+        let handle = SharedHandle::wrap(Box::new(f) as Box<dyn Write + Send>);
         vec![call(succ, [handle.atom_exi()]).wrap()]
       },
     },
@@ -180,6 +183,8 @@ impl IntoSystem<'static> for DirectFS {
       name: ["system", "directfs"].into_iter().map_into().collect(),
       code: HashMap::new(),
       prelude: Vec::new(),
+      lexer_plugin: None,
+      line_parser: None,
       constants: ConstTree::namespace(
         [i.i("system"), i.i("directfs")],
         ConstTree::tree([
@@ -1,13 +1,13 @@
 use super::flow::IOCmdHandlePack;
 use super::instances::{BRead, ReadCmd, SRead, Sink, Source, WriteCmd};
+use crate::error::RuntimeError;
 use crate::foreign::cps_box::init_cps;
-use crate::foreign::{xfn_1ary, xfn_2ary, Atom, Atomic, XfnResult};
+use crate::foreign::{xfn_1ary, xfn_2ary, Atomic, XfnResult, Atom};
 use crate::interpreted::Clause;
 use crate::representations::OrcString;
 use crate::systems::scheduler::SharedHandle;
 use crate::systems::stl::Binary;
-use crate::systems::RuntimeError;
-use crate::{ast, ConstTree, Interner, Primitive};
+use crate::{ConstTree, Interner, ast};
 
 type WriteHandle = SharedHandle<Sink>;
 type ReadHandle = SharedHandle<Source>;
@@ -21,11 +21,11 @@ pub fn read_line(handle: ReadHandle) -> XfnResult<Clause> {
 pub fn read_bin(handle: ReadHandle) -> XfnResult<Clause> {
   Ok(init_cps(3, IOCmdHandlePack { handle, cmd: ReadCmd::RBytes(BRead::All) }))
 }
-pub fn read_bytes(handle: ReadHandle, n: u64) -> XfnResult<Clause> {
-  let cmd = ReadCmd::RBytes(BRead::N(n.try_into().unwrap()));
+pub fn read_bytes(handle: ReadHandle, n: usize) -> XfnResult<Clause> {
+  let cmd = ReadCmd::RBytes(BRead::N(n));
   Ok(init_cps(3, IOCmdHandlePack { cmd, handle }))
 }
-pub fn read_until(handle: ReadHandle, pattern: u64) -> XfnResult<Clause> {
+pub fn read_until(handle: ReadHandle, pattern: usize) -> XfnResult<Clause> {
   let delim = pattern.try_into().map_err(|_| {
     let msg = "greater than 255".to_string();
     RuntimeError::ext(msg, "converting number to byte")
@@ -63,8 +63,7 @@ pub fn io_bindings<'a>(
     std_streams
       .into_iter()
       .map(|(n, at)| {
-        let expr = ast::Clause::P(Primitive::Atom(Atom(at))).into_expr();
-        (i.i(n), ConstTree::Const(expr))
+        (i.i(n), ConstTree::clause(ast::Clause::Atom(Atom(at))))
       })
       .collect(),
   ),
@@ -7,7 +7,7 @@ use crate::interpreted::ExprInst;
 use crate::systems::codegen::call;
 use crate::systems::scheduler::{Canceller, SharedHandle};
 use crate::systems::stl::Binary;
-use crate::Literal;
+use crate::OrcString;
 
 /// Any type that we can read controlled amounts of data from
 pub type Source = BufReader<Box<dyn Read + Send>>;
@@ -63,10 +63,14 @@ impl IOCmd for ReadCmd {
       Self::RStr(sread) => {
         let mut buf = String::new();
         let sresult = match &sread {
-          SRead::All => stream.read_to_string(&mut buf),
-          SRead::Line => stream.read_line(&mut buf),
+          SRead::All => stream.read_to_string(&mut buf).map(|_| ()),
+          SRead::Line => stream.read_line(&mut buf).map(|_| {
+            if buf.ends_with('\n') {
+              buf.pop();
+            }
+          }),
         };
-        ReadResult::RStr(sread, sresult.map(|_| buf))
+        ReadResult::RStr(sread, sresult.map(|()| buf))
       },
     }
   }
@@ -88,14 +92,14 @@ impl ReadResult {
         vec![call(succ, [arg]).wrap()]
       },
       ReadResult::RStr(_, Ok(text)) => {
-        vec![call(succ, [Literal::Str(text.into()).into()]).wrap()]
+        vec![call(succ, [OrcString::from(text).atom_exi()]).wrap()]
       },
     }
   }
 }
 
 /// Function to convert [io::Error] to Orchid data
-pub fn wrap_io_error(_e: io::Error) -> ExprInst { Literal::Uint(0u64).into() }
+pub fn wrap_io_error(_e: io::Error) -> ExprInst { 0usize.atom_exi() }
 
 /// Writing command (string or binary)
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -2,7 +2,7 @@ import std::panic
 import system::io
 import system::async::yield
 
-export const print := \text.\ok. (
+export const print := \text. \ok. (
   io::write_str io::stdout text
   (io::flush io::stdout
     ok
@@ -13,7 +13,7 @@ export const print := \text.\ok. (
   \_. yield
 )
 
-export const println := \line.\ok. (
+export const println := \line. \ok. (
   print (line ++ "\n") ok
 )
 
@@ -113,6 +113,8 @@ impl<'a, ST: IntoIterator<Item = (&'a str, Stream)>> IntoSystem<'static>
         name: None,
       }]),
     }],
+    lexer_plugin: None,
+    line_parser: None,
   }
 }
 }
@@ -1,13 +1,7 @@
 //! Constants exposed to usercode by the interpreter
-mod assertion_error;
 pub mod asynch;
-pub mod cast_exprinst;
 pub mod codegen;
 pub mod directfs;
 pub mod io;
-mod runtime_error;
 pub mod scheduler;
 pub mod stl;
-
-pub use assertion_error::AssertionError;
-pub use runtime_error::RuntimeError;
@@ -7,14 +7,17 @@ use crate::interpreted::ExprInst;
 pub type SyncResult<T> = (T, Box<dyn Any + Send>);
 pub type SyncOperation<T> =
   Box<dyn FnOnce(T, Canceller) -> SyncResult<T> + Send>;
-pub type SyncOpResultHandler<T> =
-  Box<dyn FnOnce(T, Box<dyn Any + Send>, Canceller) -> (T, Vec<ExprInst>)>;
+pub type SyncOpResultHandler<T> = Box<
+  dyn FnOnce(T, Box<dyn Any + Send>, Canceller) -> (T, Vec<ExprInst>)
+    + Send,
+>;
 
 struct SyncQueueItem<T> {
   cancelled: Canceller,
   operation: SyncOperation<T>,
   handler: SyncOpResultHandler<T>,
-  early_cancel: Box<dyn FnOnce(T) -> (T, Vec<ExprInst>)>,
+  early_cancel: Box<dyn FnOnce(T) -> (T, Vec<ExprInst>) + Send>,
 }
 
 pub enum NextItemReportKind<T> {
@@ -36,11 +39,14 @@ pub struct NextItemReport<T> {
 pub struct BusyState<T> {
   handler: SyncOpResultHandler<T>,
   queue: VecDeque<SyncQueueItem<T>>,
-  seal: Option<Box<dyn FnOnce(T) -> Vec<ExprInst>>>,
+  seal: Option<Box<dyn FnOnce(T) -> Vec<ExprInst> + Send>>,
 }
 impl<T> BusyState<T> {
   pub fn new<U: 'static + Send>(
-    handler: impl FnOnce(T, U, Canceller) -> (T, Vec<ExprInst>) + 'static,
+    handler: impl FnOnce(T, U, Canceller) -> (T, Vec<ExprInst>)
+      + Send
+      + 'static,
   ) -> Self {
     BusyState {
       handler: Box::new(|t, payload, cancel| {
@@ -59,8 +65,8 @@ impl<T> BusyState<T> {
   pub fn enqueue<U: 'static + Send>(
     &mut self,
     operation: impl FnOnce(T, Canceller) -> (T, U) + Send + 'static,
-    handler: impl FnOnce(T, U, Canceller) -> (T, Vec<ExprInst>) + 'static,
-    early_cancel: impl FnOnce(T) -> (T, Vec<ExprInst>) + 'static,
+    handler: impl FnOnce(T, U, Canceller) -> (T, Vec<ExprInst>) + Send + 'static,
+    early_cancel: impl FnOnce(T) -> (T, Vec<ExprInst>) + Send + 'static,
   ) -> Option<Canceller> {
     if self.seal.is_some() {
       return None;
@@ -81,7 +87,7 @@ impl<T> BusyState<T> {
     Some(cancelled)
   }
 
-  pub fn seal(&mut self, recipient: impl FnOnce(T) -> Vec<ExprInst> + 'static) {
+  pub fn seal(&mut self, recipient: impl FnOnce(T) -> Vec<ExprInst> + Send + 'static) {
     assert!(self.seal.is_none(), "Already sealed");
     self.seal = Some(Box::new(recipient))
   }
@@ -101,7 +107,7 @@ impl<T> BusyState<T> {
     if candidate.cancelled.is_cancelled() {
       let ret = (candidate.early_cancel)(instance);
       instance = ret.0;
-      events.extend(ret.1.into_iter());
+      events.extend(ret.1);
     } else {
       break candidate;
     }
@@ -2,6 +2,7 @@ use std::any::{type_name, Any};
 use std::cell::RefCell;
 use std::fmt::Debug;
 use std::rc::Rc;
+use std::sync::{Arc, Mutex};
 
 use hashbrown::HashMap;
 use itertools::Itertools;
@@ -9,14 +10,13 @@ use trait_set::trait_set;
 
 use super::busy::{BusyState, NextItemReportKind};
 use super::Canceller;
+use crate::error::AssertionError;
 use crate::facade::{IntoSystem, System};
 use crate::foreign::cps_box::{init_cps, CPSBox};
 use crate::foreign::{xfn_1ary, InertAtomic, XfnResult};
 use crate::interpreted::{Clause, ExprInst};
 use crate::interpreter::HandlerTable;
 use crate::systems::asynch::{AsynchSystem, MessagePort};
-use crate::systems::stl::Boolean;
-use crate::systems::AssertionError;
 use crate::utils::ddispatch::Request;
 use crate::utils::thread_pool::ThreadPool;
 use crate::utils::{take_with_output, unwrap_or, IdMap};
@@ -47,17 +47,17 @@ pub enum SharedState {
 
 /// A shared handle for a resource of type `T` that can be used with a
 /// [SeqScheduler] to execute mutating operations one by one in worker threads.
-pub struct SharedHandle<T>(Rc<RefCell<SharedResource<T>>>);
+pub struct SharedHandle<T>(Arc<Mutex<SharedResource<T>>>);
 
 impl<T> SharedHandle<T> {
   /// Wrap a value to be accessible to a [SeqScheduler].
   pub fn wrap(t: T) -> Self {
-    Self(Rc::new(RefCell::new(SharedResource::Free(t))))
+    Self(Arc::new(Mutex::new(SharedResource::Free(t))))
   }
 
   /// Check the state of the handle
   pub fn state(&self) -> SharedState {
-    match &*self.0.as_ref().borrow() {
+    match &*self.0.lock().unwrap() {
       SharedResource::Busy(b) if b.is_sealed() => SharedState::Sealed,
       SharedResource::Busy(_) => SharedState::Busy,
       SharedResource::Free(_) => SharedState::Free,
@@ -70,7 +70,7 @@ impl<T> SharedHandle<T> {
   /// sense as eg. an optimization. You can return the value after processing
   /// via [SyncHandle::untake].
   pub fn take(&self) -> Option<T> {
-    take_with_output(&mut *self.0.as_ref().borrow_mut(), |state| match state {
+    take_with_output(&mut *self.0.lock().unwrap(), |state| match state {
       SharedResource::Free(t) => (SharedResource::Taken, Some(t)),
       _ => (state, None),
     })
@@ -80,10 +80,13 @@ impl<T> SharedHandle<T> {
   /// is to return values synchronously after they have been removed with
   /// [SyncHandle::untake].
   pub fn untake(&self, value: T) -> Result<(), T> {
-    take_with_output(&mut *self.0.as_ref().borrow_mut(), |state| match state {
-      SharedResource::Taken => (SharedResource::Free(value), Ok(())),
-      _ => (state, Err(value)),
-    })
+    take_with_output(
+      &mut *self.0.lock().unwrap(),
+      |state| match state {
+        SharedResource::Taken => (SharedResource::Free(value), Ok(())),
+        _ => (state, Err(value)),
+      },
+    )
   }
 }
 impl<T> Clone for SharedHandle<T> {
@@ -97,12 +100,12 @@ impl<T> Debug for SharedHandle<T> {
       .finish()
   }
 }
-impl<T: 'static> InertAtomic for SharedHandle<T> {
+impl<T: Send + 'static> InertAtomic for SharedHandle<T> {
   fn type_str() -> &'static str { "a SharedHandle" }
   fn respond(&self, mut request: Request) {
     request.serve_with(|| {
       let this = self.clone();
-      TakeCmd(Rc::new(move |sch| {
+      TakeCmd(Arc::new(move |sch| {
         let _ = sch.seal(this.clone(), |_| Vec::new());
       }))
     })
@@ -110,7 +113,7 @@ impl<T: 'static> InertAtomic for SharedHandle<T> {
 }
 
 #[derive(Clone)]
-pub struct TakeCmd(pub Rc<dyn Fn(SeqScheduler)>);
+pub struct TakeCmd(pub Arc<dyn Fn(SeqScheduler) + Send + Sync>);
 impl Debug for TakeCmd {
   fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
     write!(f, "A command to drop a shared resource")
@@ -134,8 +137,8 @@ pub fn take_and_drop(x: ExprInst) -> XfnResult<Clause> {
   }
 }
 
-pub fn is_taken_error(x: ExprInst) -> XfnResult<Boolean> {
-  Ok(Boolean(x.downcast::<SealedOrTaken>().is_ok()))
+pub fn is_taken_error(x: ExprInst) -> XfnResult<bool> {
+  Ok(x.downcast::<SealedOrTaken>().is_ok())
 }
 
 trait_set! {
@@ -195,11 +198,11 @@ impl SeqScheduler {
   pub fn schedule<T: Send + 'static, U: Send + 'static>(
     &self,
     handle: SharedHandle<T>,
-    operation: impl FnOnce(T, Canceller) -> (T, U) + Send + 'static,
-    handler: impl FnOnce(T, U, Canceller) -> (T, Vec<ExprInst>) + 'static,
-    early_cancel: impl FnOnce(T) -> (T, Vec<ExprInst>) + 'static,
+    operation: impl FnOnce(T, Canceller) -> (T, U) + Sync + Send + 'static,
+    handler: impl FnOnce(T, U, Canceller) -> (T, Vec<ExprInst>) + Sync + Send + 'static,
+    early_cancel: impl FnOnce(T) -> (T, Vec<ExprInst>) + Sync + Send + 'static,
   ) -> Result<Canceller, SealedOrTaken> {
-    take_with_output(&mut *handle.0.as_ref().borrow_mut(), {
+    take_with_output(&mut *handle.0.lock().unwrap(), {
       let handle = handle.clone();
       |state| {
         match state {
@@ -246,10 +249,10 @@ impl SeqScheduler {
   pub fn seal<T>(
     &self,
     handle: SharedHandle<T>,
-    seal: impl FnOnce(T) -> Vec<ExprInst> + 'static,
+    seal: impl FnOnce(T) -> Vec<ExprInst> + Sync + Send + 'static,
   ) -> Result<Vec<ExprInst>, SealedOrTaken> {
     take_with_output(
-      &mut *handle.0.as_ref().borrow_mut(),
+      &mut *handle.0.lock().unwrap(),
       |state| match state {
         SharedResource::Busy(mut b) if !b.is_sealed() => {
           b.seal(seal);
@@ -281,7 +284,7 @@ impl SeqScheduler {
         let (t, u): (T, U) =
           *data.downcast().expect("This is associated by ID");
         let handle2 = handle.clone();
-        take_with_output(&mut *handle.0.as_ref().borrow_mut(), |state| {
+        take_with_output(&mut *handle.0.lock().unwrap(), |state| {
          let busy = unwrap_or! { state => SharedResource::Busy;
            panic!("Handle with outstanding invocation must be busy")
          };
@@ -329,6 +332,8 @@ impl IntoSystem<'static> for SeqScheduler {
       prelude: Vec::new(),
       code: HashMap::new(),
       handlers,
+      lexer_plugin: None,
+      line_parser: None,
       constants: ConstTree::namespace(
         [i.i("system"), i.i("scheduler")],
         ConstTree::tree([
@@ -3,15 +3,16 @@ use std::sync::Arc;
 
 use itertools::Itertools;
 
-use super::Boolean;
+use crate::error::RuntimeError;
 use crate::foreign::{
   xfn_1ary, xfn_2ary, xfn_3ary, xfn_4ary, Atomic, InertAtomic, XfnResult,
 };
 use crate::interpreted::Clause;
 use crate::systems::codegen::{opt, tuple};
-use crate::systems::RuntimeError;
 use crate::utils::{iter_find, unwrap_or};
-use crate::{ConstTree, Interner, Literal};
+use crate::{ConstTree, Interner};
 
+const INT_BYTES: usize = usize::BITS as usize / 8;
+
 /// A block of binary data
 #[derive(Clone, Hash, PartialEq, Eq)]
@@ -43,93 +44,86 @@ pub fn concatenate(a: Binary, b: Binary) -> XfnResult<Binary> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Extract a subsection of the binary data
|
/// Extract a subsection of the binary data
|
||||||
pub fn slice(s: Binary, i: u64, len: u64) -> XfnResult<Binary> {
|
pub fn slice(s: Binary, i: usize, len: usize) -> XfnResult<Binary> {
|
||||||
if i + len < s.0.len() as u64 {
|
if i + len < s.0.len() {
|
||||||
RuntimeError::fail(
|
RuntimeError::fail(
|
||||||
"Byte index out of bounds".to_string(),
|
"Byte index out of bounds".to_string(),
|
||||||
"indexing binary",
|
"indexing binary",
|
||||||
)?
|
)?
|
||||||
}
|
}
|
||||||
let data = s.0[i as usize..i as usize + len as usize].to_vec();
|
Ok(Binary(Arc::new(s.0[i..i + len].to_vec())))
|
||||||
Ok(Binary(Arc::new(data)))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Return the index where the first argument first contains the second, if any
|
/// Return the index where the first argument first contains the second, if any
|
||||||
pub fn find(haystack: Binary, needle: Binary) -> XfnResult<Clause> {
|
pub fn find(haystack: Binary, needle: Binary) -> XfnResult<Clause> {
|
||||||
let found = iter_find(haystack.0.iter(), needle.0.iter());
|
let found = iter_find(haystack.0.iter(), needle.0.iter());
|
||||||
Ok(opt(found.map(|x| Literal::Uint(x as u64).into())))
|
Ok(opt(found.map(usize::atom_exi)))
|
||||||
}
|
}
|
||||||
 
 /// Split binary data block into two smaller blocks
-pub fn split(bin: Binary, i: u64) -> XfnResult<Clause> {
-  if bin.0.len() < i as usize {
+pub fn split(bin: Binary, i: usize) -> XfnResult<Clause> {
+  if bin.0.len() < i {
     RuntimeError::fail(
       "Byte index out of bounds".to_string(),
       "splitting binary",
     )?
   }
-  let (asl, bsl) = bin.0.split_at(i as usize);
-  Ok(tuple([
-    Binary(Arc::new(asl.to_vec())).atom_cls().into(),
-    Binary(Arc::new(bsl.to_vec())).atom_cls().into(),
-  ]))
+  let (asl, bsl) = bin.0.split_at(i);
+  Ok(tuple([asl, bsl].map(|s| Binary(Arc::new(s.to_vec())).atom_exi())))
 }
 
 /// Read a number from a binary blob
 pub fn get_num(
   buf: Binary,
-  loc: u64,
-  size: u64,
-  is_le: Boolean,
-) -> XfnResult<Literal> {
-  if buf.0.len() < (loc + size) as usize {
+  loc: usize,
+  size: usize,
+  is_le: bool,
+) -> XfnResult<usize> {
+  if buf.0.len() < (loc + size) {
     RuntimeError::fail(
       "section out of range".to_string(),
       "reading number from binary data",
     )?
   }
-  if 8 < size {
+  if INT_BYTES < size {
     RuntimeError::fail(
-      "more than 8 bytes provided".to_string(),
+      "more than std::bin::int_bytes bytes provided".to_string(),
       "reading number from binary data",
     )?
   }
-  let mut data = [0u8; 8];
-  let section = &buf.0[loc as usize..(loc + size) as usize];
-  let num = if is_le.0 {
-    data[0..size as usize].copy_from_slice(section);
-    u64::from_le_bytes(data)
+  let mut data = [0u8; INT_BYTES];
+  let section = &buf.0[loc..(loc + size)];
+  let num = if is_le {
+    data[0..size].copy_from_slice(section);
+    usize::from_le_bytes(data)
   } else {
-    data[8 - size as usize..].copy_from_slice(section);
-    u64::from_be_bytes(data)
+    data[INT_BYTES - size..].copy_from_slice(section);
+    usize::from_be_bytes(data)
   };
-  Ok(Literal::Uint(num))
+  Ok(num)
 }
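get_num widens a size-byte section to a full native integer by copying it into a zeroed INT_BYTES buffer: at the low end for little-endian input, at the high end for big-endian input. A standalone sketch of the same trick, using only the standard library:

// Decode a short byte section as an unsigned integer by padding it with zeros
// into a full-width buffer; the pad goes after the data for little-endian
// input and before it for big-endian input.
fn read_uint(section: &[u8], little_endian: bool) -> usize {
  const INT_BYTES: usize = usize::BITS as usize / 8;
  assert!(section.len() <= INT_BYTES);
  let mut buf = [0u8; INT_BYTES];
  if little_endian {
    buf[..section.len()].copy_from_slice(section);
    usize::from_le_bytes(buf)
  } else {
    buf[INT_BYTES - section.len()..].copy_from_slice(section);
    usize::from_be_bytes(buf)
  }
}

fn main() {
  // 0x0102 stored in two bytes, in both byte orders.
  assert_eq!(read_uint(&[0x02, 0x01], true), 0x0102);
  assert_eq!(read_uint(&[0x01, 0x02], false), 0x0102);
}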
 
 /// Convert a number into a blob
-pub fn from_num(size: u64, is_le: Boolean, data: u64) -> XfnResult<Binary> {
-  if size > 8 {
+pub fn from_num(size: usize, is_le: bool, data: usize) -> XfnResult<Binary> {
+  if INT_BYTES < size {
     RuntimeError::fail(
-      "more than 8 bytes requested".to_string(),
+      "more than std::bin::int_bytes bytes requested".to_string(),
       "converting number to binary",
     )?
   }
-  let bytes = if is_le.0 {
-    data.to_le_bytes()[0..size as usize].to_vec()
-  } else {
-    data.to_be_bytes()[8 - size as usize..].to_vec()
+  let bytes = match is_le {
+    true => data.to_le_bytes()[0..size].to_vec(),
+    false => data.to_be_bytes()[8 - size..].to_vec(),
   };
   Ok(Binary(Arc::new(bytes)))
 }
 
 /// Detect the number of bytes in the blob
-pub fn size(b: Binary) -> XfnResult<Literal> {
-  Ok(Literal::Uint(b.0.len() as u64))
-}
+pub fn size(b: Binary) -> XfnResult<usize> { Ok(b.0.len()) }
 
 pub fn bin(i: &Interner) -> ConstTree {
   ConstTree::tree([(
-    i.i("bin"),
+    i.i("binary"),
     ConstTree::tree([
       (i.i("concat"), ConstTree::xfn(xfn_2ary(concatenate))),
       (i.i("slice"), ConstTree::xfn(xfn_3ary(slice))),
@@ -138,6 +132,7 @@ pub fn bin(i: &Interner) -> ConstTree {
       (i.i("get_num"), ConstTree::xfn(xfn_4ary(get_num))),
      (i.i("from_num"), ConstTree::xfn(xfn_3ary(from_num))),
       (i.i("size"), ConstTree::xfn(xfn_1ary(size))),
+      (i.i("int_bytes"), ConstTree::atom(INT_BYTES)),
     ]),
   )])
 }
@@ -1,8 +1,10 @@
-export operators[ != == ]
+export ::(!=, ==)
 
 export const not := \bool. if bool then false else true
 macro ...$a != ...$b =0x3p36=> (not (...$a == ...$b))
 macro ...$a == ...$b =0x3p36=> (equals (...$a) (...$b))
+export macro ...$a and ...$b =0x4p36=> (ifthenelse (...$a) (...$b) false)
+export macro ...$a or ...$b =0x4p36=> (ifthenelse (...$a) true (...$b))
 export macro if ...$cond then ...$true else ...$false:1 =0x1p84=> (
   ifthenelse (...$cond) (...$true) (...$false)
 )
@@ -1,26 +1,17 @@
-use crate::foreign::{xfn_1ary, xfn_2ary, InertAtomic, XfnResult};
+use crate::foreign::{xfn_1ary, xfn_2ary, XfnResult, Atom};
 use crate::interner::Interner;
 use crate::representations::interpreted::Clause;
-use crate::systems::AssertionError;
-use crate::{ConstTree, Literal, Location};
+use crate::error::AssertionError;
+use crate::{ConstTree, Location, OrcString};
 
-/// Booleans exposed to Orchid
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct Boolean(pub bool);
-impl InertAtomic for Boolean {
-  fn type_str() -> &'static str { "a boolean" }
-}
-
-impl From<bool> for Boolean {
-  fn from(value: bool) -> Self { Self(value) }
-}
+use super::Numeric;
 
 /// Takes a boolean and two branches, runs the first if the bool is true, the
 /// second if it's false.
 // Even though it's a ternary function, IfThenElse is implemented as an unary
 // foreign function, as the rest of the logic can be defined in Orchid.
-pub fn if_then_else(b: Boolean) -> XfnResult<Clause> {
-  Ok(match b.0 {
+pub fn if_then_else(b: bool) -> XfnResult<Clause> {
+  Ok(match b {
     true => Clause::pick(Clause::constfn(Clause::LambdaArg)),
     false => Clause::constfn(Clause::pick(Clause::LambdaArg)),
   })
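if_then_else encodes the boolean as a selector: for true it appears to build something like λt. λf. t, for false λt. λf. f, so the Orchid-level if macro only needs to apply that selector to the two branches. A rough Rust analogue with closures, not the interpreter's actual Clause machinery:

// Church-style booleans as Rust closures: the boolean value itself performs
// the selection, so "if" needs no primitive branching beyond the selector.
fn church_bool(b: bool) -> Box<dyn Fn(i32, i32) -> i32> {
  match b {
    true => Box::new(|t, _f| t),  // roughly λt. λf. t — keep the first branch
    false => Box::new(|_t, f| f), // roughly λt. λf. f — keep the second branch
  }
}

fn main() {
  let selector = church_bool(2 + 2 == 4);
  // Applying the selector to both branches picks the "then" branch here.
  assert_eq!(selector(1, 0), 1);
}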
@@ -29,16 +20,25 @@ pub fn if_then_else(b: Boolean) -> XfnResult<Clause> {
 /// Compares the inner values if
 ///
 /// - both are string,
+/// - both are bool,
 /// - both are either uint or num
-pub fn equals(a: Literal, b: Literal) -> XfnResult<Boolean> {
-  Ok(Boolean::from(match (a, b) {
-    (Literal::Str(s1), Literal::Str(s2)) => s1 == s2,
-    (Literal::Num(n1), Literal::Num(n2)) => n1 == n2,
-    (Literal::Uint(i1), Literal::Uint(i2)) => i1 == i2,
-    (Literal::Num(n1), Literal::Uint(u1)) => *n1 == (u1 as f64),
-    (Literal::Uint(u1), Literal::Num(n1)) => *n1 == (u1 as f64),
-    (..) => AssertionError::fail(Location::Unknown, "the expected type")?,
-  }))
+pub fn equals(a: Atom, b: Atom) -> XfnResult<bool> {
+  let (a, b) = match (a.try_downcast::<OrcString>(), b.try_downcast::<OrcString>()) {
+    (Ok(a), Ok(b)) => return Ok(a == b),
+    (Err(a), Err(b)) => (a, b),
+    _ => return Ok(false),
+  };
+  match (a.request::<Numeric>(), b.request::<Numeric>()) {
+    (Some(a), Some(b)) => return Ok(a.as_float() == b.as_float()),
+    (None, None) => (),
+    _ => return Ok(false),
+  };
+  match (a.try_downcast::<bool>(), b.try_downcast::<bool>()) {
+    (Ok(a), Ok(b)) => return Ok(a == b),
+    (Err(_), Err(_)) => (),
+    _ => return Ok(false),
+  };
+  AssertionError::fail(Location::Unknown, "the expected type")
 }
 
 pub fn bool(i: &Interner) -> ConstTree {
@@ -47,8 +47,8 @@ pub fn bool(i: &Interner) -> ConstTree {
     ConstTree::tree([
       (i.i("ifthenelse"), ConstTree::xfn(xfn_1ary(if_then_else))),
       (i.i("equals"), ConstTree::xfn(xfn_2ary(equals))),
-      (i.i("true"), ConstTree::atom(Boolean(true))),
-      (i.i("false"), ConstTree::atom(Boolean(false))),
+      (i.i("true"), ConstTree::atom(true)),
+      (i.i("false"), ConstTree::atom(false)),
     ]),
   )])
 }
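The rewritten equals probes concrete types in order: strings, then numbers, then booleans, returning as soon as one side matches and the other does not. A self-contained sketch of the same chain over std::any::Any; the Atom API (try_downcast, request) is only approximated here:

use std::any::Any;

// Compare two type-erased values by probing a fixed list of concrete types,
// mirroring the string -> number -> bool chain in equals(). Returns None when
// neither value is of a supported type; the real function distinguishes the
// mixed-type and unsupported cases.
fn dyn_equals(a: &dyn Any, b: &dyn Any) -> Option<bool> {
  if let (Some(a), Some(b)) = (a.downcast_ref::<String>(), b.downcast_ref::<String>()) {
    return Some(a == b);
  }
  if let (Some(a), Some(b)) = (a.downcast_ref::<f64>(), b.downcast_ref::<f64>()) {
    return Some(a == b);
  }
  if let (Some(a), Some(b)) = (a.downcast_ref::<bool>(), b.downcast_ref::<bool>()) {
    return Some(a == b);
  }
  None
}

fn main() {
  assert_eq!(dyn_equals(&1.5f64, &1.5f64), Some(true));
  assert_eq!(dyn_equals(&true, &false), Some(false));
  assert_eq!(dyn_equals(&"x".to_string(), &1.0f64), None);
}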
Some files were not shown because too many files have changed in this diff.